MediaCodecVideoEncoder: set timestamp on the encoder surface when drawing a texture.

BUG=webrtc:4993

Review URL: https://codereview.webrtc.org/1523843006

Cr-Commit-Position: refs/heads/master@{#11078}
Author: perkj
Date: 2015-12-18 00:34:37 -08:00
Committed by: Commit bot
Parent: fc96bd1f8b
Commit: 48477c1c6a

7 changed files with 45 additions and 21 deletions
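In short, this change threads each frame's presentation timestamp through to the MediaCodec encoder surface: MediaCodecVideoEncoder now works against EglBase14 directly, and a new swapBuffers(long timeStampNs) overload stamps the surface with EGLExt.eglPresentationTimeANDROID before swapping. A minimal standalone sketch of the pattern, separate from the actual EglBase14 code below (the method name and the display/surface handles here are hypothetical):

    import android.opengl.EGL14;
    import android.opengl.EGLDisplay;
    import android.opengl.EGLExt;
    import android.opengl.EGLSurface;
    import java.util.concurrent.TimeUnit;

    // Hypothetical sketch; requires API 18+ for EGLExt and the
    // EGL_ANDROID_presentation_time extension. |display| and |encoderSurface|
    // are assumed to be already initialized, with |encoderSurface| wrapping
    // the Surface obtained from MediaCodec.createInputSurface().
    static void drawFrameAndSwap(
        EGLDisplay display, EGLSurface encoderSurface, long presentationTimestampUs) {
      // ... GLES drawing of the frame goes here ...
      EGLExt.eglPresentationTimeANDROID(
          display, encoderSurface, TimeUnit.MICROSECONDS.toNanos(presentationTimestampUs));
      EGL14.eglSwapBuffers(display, encoderSurface);
    }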


@@ -61,7 +61,7 @@ public final class MediaCodecVideoEncoderTest extends ActivityTestCase {
       Log.i(TAG, "hardware does not support VP8 encoding, skipping testEncoderUsingTextures");
       return;
     }
-    EglBase eglBase = EglBase.create();
+    EglBase14 eglBase = new EglBase14(null, EglBase.CONFIG_PLAIN);
     MediaCodecVideoEncoder encoder = new MediaCodecVideoEncoder();
     assertTrue(encoder.initEncode(
         MediaCodecVideoEncoder.VideoCodecType.VIDEO_CODEC_VP8, 640, 480, 300, 30,
@@ -81,7 +81,7 @@ public final class MediaCodecVideoEncoderTest extends ActivityTestCase {
         MediaCodecVideoEncoder.VideoCodecType.VIDEO_CODEC_VP8, 640, 480, 300, 30,
         null));
     encoder.release();
-    EglBase eglBase = EglBase.create();
+    EglBase14 eglBase = new EglBase14(null, EglBase.CONFIG_PLAIN);
     assertTrue(encoder.initEncode(
         MediaCodecVideoEncoder.VideoCodecType.VIDEO_CODEC_VP8, 640, 480, 300, 30,
         eglBase.getEglBaseContext()));
@@ -143,7 +143,7 @@ public final class MediaCodecVideoEncoderTest extends ActivityTestCase {
     final int height = 480;
     final long presentationTs = 2;
-    final EglBase eglOesBase = EglBase.create(null, EglBase.CONFIG_PIXEL_BUFFER);
+    final EglBase14 eglOesBase = new EglBase14(null, EglBase.CONFIG_PIXEL_BUFFER);
     eglOesBase.createDummyPbufferSurface();
     eglOesBase.makeCurrent();
     int oesTextureId = GlUtil.generateTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES);
@@ -171,6 +171,7 @@ public final class MediaCodecVideoEncoderTest extends ActivityTestCase {
     }
     assertTrue(info.index != -1);
     assertTrue(info.buffer.capacity() > 0);
+    assertEquals(presentationTs, info.presentationTimestampUs);
     encoder.releaseOutputBuffer(info.index);
     encoder.release();


@@ -33,6 +33,7 @@ import android.opengl.EGL14;
 import android.opengl.EGLConfig;
 import android.opengl.EGLContext;
 import android.opengl.EGLDisplay;
+import android.opengl.EGLExt;
 import android.opengl.EGLSurface;
 import android.view.Surface;
@@ -42,20 +43,22 @@ import org.webrtc.Logging;
  * Holds EGL state and utility methods for handling an EGL14 EGLContext, an EGLDisplay,
  * and an EGLSurface.
  */
-@TargetApi(17)
+@TargetApi(18)
 final class EglBase14 extends EglBase {
   private static final String TAG = "EglBase14";
-  private static final int EGL14_SDK_VERSION = android.os.Build.VERSION_CODES.JELLY_BEAN_MR1;
+  private static final int EGLExt_SDK_VERSION = android.os.Build.VERSION_CODES.JELLY_BEAN_MR2;
   private static final int CURRENT_SDK_VERSION = android.os.Build.VERSION.SDK_INT;
   private EGLContext eglContext;
   private EGLConfig eglConfig;
   private EGLDisplay eglDisplay;
   private EGLSurface eglSurface = EGL14.EGL_NO_SURFACE;

+  // EGL 1.4 is supported from API 17. But EGLExt that is used for setting presentation
+  // time stamp on a surface is supported from 18 so we require 18.
   public static boolean isEGL14Supported() {
     Logging.d(TAG, "SDK version: " + CURRENT_SDK_VERSION
-        + ". isEGL14Supported: " + (CURRENT_SDK_VERSION >= EGL14_SDK_VERSION));
-    return (CURRENT_SDK_VERSION >= EGL14_SDK_VERSION);
+        + ". isEGL14Supported: " + (CURRENT_SDK_VERSION >= EGLExt_SDK_VERSION));
+    return (CURRENT_SDK_VERSION >= EGLExt_SDK_VERSION);
   }

   public static class Context extends EglBase.Context {
@@ -201,6 +204,16 @@ final class EglBase14 extends EglBase {
     EGL14.eglSwapBuffers(eglDisplay, eglSurface);
   }

+  public void swapBuffers(long timeStampNs) {
+    checkIsNotReleased();
+    if (eglSurface == EGL14.EGL_NO_SURFACE) {
+      throw new RuntimeException("No EGLSurface - can't swap buffers");
+    }
+    // See https://android.googlesource.com/platform/frameworks/native/+/tools_r22.2/opengl/specs/EGL_ANDROID_presentation_time.txt
+    EGLExt.eglPresentationTimeANDROID(eglDisplay, eglSurface, timeStampNs);
+    EGL14.eglSwapBuffers(eglDisplay, eglSurface);
+  }
+
   // Return an EGLDisplay, or die trying.
   private static EGLDisplay getEglDisplay() {
     EGLDisplay eglDisplay = EGL14.eglGetDisplay(EGL14.EGL_DEFAULT_DISPLAY);
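Callers pass nanoseconds to the new overload; MediaCodecVideoEncoder (the last file in this change) converts its microsecond timestamps first:

    // From the texture-encode path shown below; |presentationTimestampUs| is in microseconds.
    eglBase.swapBuffers(TimeUnit.MICROSECONDS.toNanos(presentationTimestampUs));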


@@ -38,6 +38,7 @@ import javax.microedition.khronos.opengles.GL10;
 import android.annotation.SuppressLint;
 import android.graphics.Point;
 import android.graphics.Rect;
+import android.opengl.EGL14;
 import android.opengl.GLES20;
 import android.opengl.GLSurfaceView;
@@ -58,7 +59,7 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
   private static Runnable eglContextReady = null;
   private static final String TAG = "VideoRendererGui";
   private GLSurfaceView surface;
-  private static EGLContext eglContext = null;
+  private static EglBase.Context eglContext = null;
   // Indicates if SurfaceView.Renderer.onSurfaceCreated was called.
   // If true then for every newly created yuv image renderer createTexture()
   // should be called. The variable is accessed on multiple threads and
@@ -418,7 +419,7 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
   }

   public static synchronized EglBase.Context getEglBaseContext() {
-    return new EglBase10.Context(eglContext);
+    return eglContext;
   }

   /** Releases GLSurfaceView video renderer. */
@@ -607,7 +608,12 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
     Logging.d(TAG, "VideoRendererGui.onSurfaceCreated");
     // Store render EGL context.
     synchronized (VideoRendererGui.class) {
-      eglContext = ((EGL10) EGLContext.getEGL()).eglGetCurrentContext();
+      if (EglBase14.isEGL14Supported()) {
+        eglContext = new EglBase14.Context(EGL14.eglGetCurrentContext());
+      } else {
+        eglContext = new EglBase10.Context(((EGL10) EGLContext.getEGL()).eglGetCurrentContext());
+      }
       Logging.d(TAG, "VideoRendererGui EGL Context: " + eglContext);
     }
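Renderer clients are unaffected by this runtime switch: they still fetch the shared context through the same accessor, and the concrete wrapper type is decided by isEGL14Supported() (a usage sketch, assuming the renderer's surface has been created):

    // Returns an EglBase14.Context on API 18+, an EglBase10.Context otherwise.
    EglBase.Context sharedContext = VideoRendererGui.getEglBaseContext();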


@@ -278,7 +278,7 @@ MediaCodecVideoEncoder::MediaCodecVideoEncoder(
       *j_media_codec_video_encoder_class_,
       "initEncode",
       "(Lorg/webrtc/MediaCodecVideoEncoder$VideoCodecType;"
-      "IIIILorg/webrtc/EglBase$Context;)Z");
+      "IIIILorg/webrtc/EglBase14$Context;)Z");
   j_get_input_buffers_method_ = GetMethodID(
       jni,
       *j_media_codec_video_encoder_class_,
@@ -1122,7 +1122,7 @@ void MediaCodecVideoEncoderFactory::SetEGLContext(
     egl_context_ = NULL;
   } else {
     jclass j_egl_context_class =
-        FindClass(jni, "org/webrtc/EglBase$Context");
+        FindClass(jni, "org/webrtc/EglBase14$Context");
     if (!jni->IsInstanceOf(egl_context_, j_egl_context_class)) {
       ALOGE << "Wrong EGL Context.";
       jni->DeleteGlobalRef(egl_context_);


@@ -72,7 +72,6 @@ ClassReferenceHolder::ClassReferenceHolder(JNIEnv* jni) {
   LoadClass(jni, "org/webrtc/IceCandidate");
 #if defined(ANDROID) && !defined(WEBRTC_CHROMIUM_BUILD)
   LoadClass(jni, "android/graphics/SurfaceTexture");
-  LoadClass(jni, "javax/microedition/khronos/egl/EGLContext");
   LoadClass(jni, "org/webrtc/CameraEnumerator");
   LoadClass(jni, "org/webrtc/Camera2Enumerator");
   LoadClass(jni, "org/webrtc/CameraEnumerationAndroid");
@@ -80,6 +79,7 @@ ClassReferenceHolder::ClassReferenceHolder(JNIEnv* jni) {
   LoadClass(jni, "org/webrtc/VideoCapturerAndroid$NativeObserver");
   LoadClass(jni, "org/webrtc/EglBase");
   LoadClass(jni, "org/webrtc/EglBase$Context");
+  LoadClass(jni, "org/webrtc/EglBase14$Context");
   LoadClass(jni, "org/webrtc/NetworkMonitor");
   LoadClass(jni, "org/webrtc/MediaCodecVideoEncoder");
   LoadClass(jni, "org/webrtc/MediaCodecVideoEncoder$OutputBufferInfo");


@@ -1366,10 +1366,14 @@ JOW(void, PeerConnectionFactory_nativeSetVideoHwAccelerationOptions)(
   OwnedFactoryAndThreads* owned_factory =
       reinterpret_cast<OwnedFactoryAndThreads*>(native_factory);
+  jclass j_eglbase14_context_class =
+      FindClass(jni, "org/webrtc/EglBase14$Context");
   MediaCodecVideoEncoderFactory* encoder_factory =
       static_cast<MediaCodecVideoEncoderFactory*>
           (owned_factory->encoder_factory());
-  if (encoder_factory) {
+  if (encoder_factory &&
+      jni->IsInstanceOf(local_egl_context, j_eglbase14_context_class)) {
     LOG(LS_INFO) << "Set EGL context for HW encoding.";
     encoder_factory->SetEGLContext(jni, local_egl_context);
   }
@@ -1377,7 +1381,8 @@ JOW(void, PeerConnectionFactory_nativeSetVideoHwAccelerationOptions)(
   MediaCodecVideoDecoderFactory* decoder_factory =
       static_cast<MediaCodecVideoDecoderFactory*>
           (owned_factory->decoder_factory());
-  if (decoder_factory) {
+  if (decoder_factory &&
+      jni->IsInstanceOf(remote_egl_context, j_eglbase14_context_class)) {
     LOG(LS_INFO) << "Set EGL context for HW decoding.";
     decoder_factory->SetEGLContext(jni, remote_egl_context);
   }
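The new IsInstanceOf checks keep an EglBase10.Context (the pre-API-18 fallback stored by VideoRendererGui) from reaching the MediaCodec factories, which after this change only accept org/webrtc/EglBase14$Context; on such devices the factories are simply left without an EGL context instead of failing later.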


@@ -44,6 +44,7 @@ import java.nio.ByteBuffer;
 import java.util.Arrays;
 import java.util.List;
 import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.TimeUnit;

 // Java-side of peerconnection_jni.cc:MediaCodecVideoEncoder.
 // This class is an implementation detail of the Java PeerConnection API.
@@ -75,7 +76,7 @@ public class MediaCodecVideoEncoder {
   private Thread mediaCodecThread;
   private MediaCodec mediaCodec;
   private ByteBuffer[] outputBuffers;
-  private EglBase eglBase;
+  private EglBase14 eglBase;
   private int width;
   private int height;
   private Surface inputSurface;
@@ -270,7 +271,7 @@ public class MediaCodecVideoEncoder {
   }

   boolean initEncode(VideoCodecType type, int width, int height, int kbps, int fps,
-      EglBase.Context sharedContext) {
+      EglBase14.Context sharedContext) {
     final boolean useSurface = sharedContext != null;
     Logging.d(TAG, "Java initEncode: " + type + " : " + width + " x " + height +
         ". @ " + kbps + " kbps. Fps: " + fps + ". Encode from texture : " + useSurface);
@@ -325,7 +326,7 @@ public class MediaCodecVideoEncoder {
         format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
     if (useSurface) {
-      eglBase = EglBase.create(sharedContext, EglBase.CONFIG_RECORDABLE);
+      eglBase = new EglBase14(sharedContext, EglBase.CONFIG_RECORDABLE);
       // Create an input surface and keep a reference since we must release the surface when done.
       inputSurface = mediaCodec.createInputSurface();
       eglBase.createSurface(inputSurface);
@@ -388,9 +389,7 @@ public class MediaCodecVideoEncoder {
       // but it's a workaround for bug webrtc:5147.
       GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
       drawer.drawOes(oesTextureId, transformationMatrix, 0, 0, width, height);
-      // TODO(perkj): Do we have to call EGLExt.eglPresentationTimeANDROID ?
-      // If not, remove |presentationTimestampUs|.
-      eglBase.swapBuffers();
+      eglBase.swapBuffers(TimeUnit.MICROSECONDS.toNanos(presentationTimestampUs));
       return true;
     }
     catch (RuntimeException e) {