Android MediaCodecVideoDecoder: Manage lifetime of texture frames

This CL should be the last one in a series to finally unblock camera texture capture.

The SurfaceTexture.updateTexImage() calls are moved from the video renderers into MediaCodecVideoDecoder, and the destructor of the texture frames will signal MediaCodecVideoDecoder that the frame has returned. This CL also removes the SurfaceTexture from the native handle and only exposes the texture matrix instead, because only the video source should access the SurfaceTexture.

BUG=webrtc:4993
R=glaznev@webrtc.org, perkj@webrtc.org

Review URL: https://codereview.webrtc.org/1378033003 .

Cr-Commit-Position: refs/heads/master@{#10203}
This commit is contained in:
Magnus Jedvert
2015-10-07 22:57:06 +02:00
parent 87962a9787
commit 91b348c702
8 changed files with 163 additions and 166 deletions

View File

@@ -389,25 +389,10 @@ public class SurfaceViewRenderer extends SurfaceView
}
final long startTimeNs = System.nanoTime();
final float[] samplingMatrix;
if (frame.yuvFrame) {
// The convention in WebRTC is that the first element in a ByteBuffer corresponds to the
// top-left corner of the image, but in glTexImage2D() the first element corresponds to the
// bottom-left corner. We correct this discrepancy by setting a vertical flip as sampling
// matrix.
samplingMatrix = RendererCommon.verticalFlipMatrix();
} else {
// TODO(magjed): Move updateTexImage() to the video source instead.
SurfaceTexture surfaceTexture = (SurfaceTexture) frame.textureObject;
surfaceTexture.updateTexImage();
samplingMatrix = new float[16];
surfaceTexture.getTransformMatrix(samplingMatrix);
}
final float[] texMatrix;
synchronized (layoutLock) {
final float[] rotatedSamplingMatrix =
RendererCommon.rotateTextureMatrix(samplingMatrix, frame.rotationDegree);
RendererCommon.rotateTextureMatrix(frame.samplingMatrix, frame.rotationDegree);
final float[] layoutMatrix = RendererCommon.getLayoutMatrix(
mirror, frameAspectRatio(), (float) layoutWidth / layoutHeight);
texMatrix = RendererCommon.multiplyMatrices(rotatedSamplingMatrix, layoutMatrix);

View File

@@ -244,29 +244,15 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
}
if (isNewFrame) {
rotatedSamplingMatrix = RendererCommon.rotateTextureMatrix(
pendingFrame.samplingMatrix, pendingFrame.rotationDegree);
if (pendingFrame.yuvFrame) {
rendererType = RendererType.RENDERER_YUV;
drawer.uploadYuvData(yuvTextures, pendingFrame.width, pendingFrame.height,
pendingFrame.yuvStrides, pendingFrame.yuvPlanes);
// The convention in WebRTC is that the first element in a ByteBuffer corresponds to the
// top-left corner of the image, but in glTexImage2D() the first element corresponds to
// the bottom-left corner. We correct this discrepancy by setting a vertical flip as
// sampling matrix.
final float[] samplingMatrix = RendererCommon.verticalFlipMatrix();
rotatedSamplingMatrix =
RendererCommon.rotateTextureMatrix(samplingMatrix, pendingFrame.rotationDegree);
} else {
rendererType = RendererType.RENDERER_TEXTURE;
// External texture rendering. Update texture image to latest and make a deep copy of
// the external texture.
// TODO(magjed): Move updateTexImage() to the video source instead.
final SurfaceTexture surfaceTexture = (SurfaceTexture) pendingFrame.textureObject;
surfaceTexture.updateTexImage();
final float[] samplingMatrix = new float[16];
surfaceTexture.getTransformMatrix(samplingMatrix);
rotatedSamplingMatrix =
RendererCommon.rotateTextureMatrix(samplingMatrix, pendingFrame.rotationDegree);
// External texture rendering. Make a deep copy of the external texture.
// Reallocate offscreen texture if necessary.
textureCopy.setSize(pendingFrame.rotatedWidth(), pendingFrame.rotatedHeight());