Allow printing of Java stack traces in the Android camera, renderer and media codec.

R=wzh@webrtc.org

Review URL: https://codereview.webrtc.org/1396873002 .

Cr-Commit-Position: refs/heads/master@{#10227}
This commit is contained in:
Alex Glaznev
2015-10-08 12:59:21 -07:00
parent 1c0bb386b6
commit f0159a742f
5 changed files with 84 additions and 21 deletions

View File

@ -35,7 +35,6 @@ import android.opengl.EGLContext;
import android.os.Handler; import android.os.Handler;
import android.os.HandlerThread; import android.os.HandlerThread;
import android.os.SystemClock; import android.os.SystemClock;
import android.text.StaticLayout;
import android.view.Surface; import android.view.Surface;
import android.view.WindowManager; import android.view.WindowManager;
@ -217,6 +216,18 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallba
return capturer; return capturer;
} }
// Logs the camera thread's current stack trace via Logging, for diagnosing
// stalls or deadlocks on the camera thread. No-op if the camera thread is
// not running or has no frames.
public void printStackTrace() {
  // Snapshot the field once: another thread may null out cameraThread
  // between the null check and getStackTrace(), causing an NPE.
  Thread thread = cameraThread;
  if (thread != null) {
    StackTraceElement[] cameraStackTraces = thread.getStackTrace();
    if (cameraStackTraces.length > 0) {
      // Fixed log-message typo: "stacks trace" -> "stack trace".
      Logging.d(TAG, "VideoCapturerAndroid stack trace:");
      for (StackTraceElement stackTrace : cameraStackTraces) {
        Logging.d(TAG, stackTrace.toString());
      }
    }
  }
}
// Switch camera to the next valid camera id. This can only be called while // Switch camera to the next valid camera id. This can only be called while
// the camera is running. // the camera is running.
public void switchCamera(final CameraSwitchHandler handler) { public void switchCamera(final CameraSwitchHandler handler) {

View File

@ -36,12 +36,10 @@ import javax.microedition.khronos.opengles.GL10;
import android.annotation.SuppressLint; import android.annotation.SuppressLint;
import android.graphics.Point; import android.graphics.Point;
import android.graphics.Rect; import android.graphics.Rect;
import android.graphics.SurfaceTexture;
import android.opengl.EGL14; import android.opengl.EGL14;
import android.opengl.EGLContext; import android.opengl.EGLContext;
import android.opengl.GLES20; import android.opengl.GLES20;
import android.opengl.GLSurfaceView; import android.opengl.GLSurfaceView;
import android.opengl.Matrix;
import org.webrtc.Logging; import org.webrtc.Logging;
import org.webrtc.VideoRenderer.I420Frame; import org.webrtc.VideoRenderer.I420Frame;
@ -77,6 +75,9 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
// Current SDK version. // Current SDK version.
private static final int CURRENT_SDK_VERSION = private static final int CURRENT_SDK_VERSION =
android.os.Build.VERSION.SDK_INT; android.os.Build.VERSION.SDK_INT;
// Render and draw threads.
private static Thread renderFrameThread;
private static Thread drawThread;
private VideoRendererGui(GLSurfaceView surface) { private VideoRendererGui(GLSurfaceView surface) {
this.surface = surface; this.surface = surface;
@ -372,6 +373,9 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
VideoRenderer.renderFrameDone(frame); VideoRenderer.renderFrameDone(frame);
return; return;
} }
if (renderFrameThread == null) {
renderFrameThread = Thread.currentThread();
}
if (!seenFrame && rendererEvents != null) { if (!seenFrame && rendererEvents != null) {
Logging.d(TAG, "ID: " + id + ". Reporting first rendered frame."); Logging.d(TAG, "ID: " + id + ". Reporting first rendered frame.");
rendererEvents.onFirstFrameRendered(); rendererEvents.onFirstFrameRendered();
@ -394,6 +398,7 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
// Skip rendering of this frame if previous frame was not rendered yet. // Skip rendering of this frame if previous frame was not rendered yet.
framesDropped++; framesDropped++;
VideoRenderer.renderFrameDone(frame); VideoRenderer.renderFrameDone(frame);
seenFrame = true;
return; return;
} }
pendingFrame = frame; pendingFrame = frame;
@ -430,6 +435,8 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
} }
instance.yuvImageRenderers.clear(); instance.yuvImageRenderers.clear();
} }
renderFrameThread = null;
drawThread = null;
instance.surface = null; instance.surface = null;
eglContext = null; eglContext = null;
eglContextReady = null; eglContextReady = null;
@ -565,6 +572,26 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
} }
} }
// Logs the stack trace of |thread| via Logging, labeled with |threadName|.
// No-op when |thread| is null or currently has no stack frames (e.g. it has
// not started or has already terminated).
private static void printStackTrace(Thread thread, String threadName) {
  if (thread != null) {
    StackTraceElement[] stackTraces = thread.getStackTrace();
    if (stackTraces.length > 0) {
      // Fixed log-message typo: "stacks trace" -> "stack trace".
      Logging.d(TAG, threadName + " stack trace:");
      for (StackTraceElement stackTrace : stackTraces) {
        Logging.d(TAG, stackTrace.toString());
      }
    }
  }
}
// Logs the stack traces of the renderer's frame-delivery thread and GL draw
// thread. Does nothing when the renderer singleton has not been created.
public static synchronized void printStackTraces() {
  if (instance != null) {
    printStackTrace(renderFrameThread, "Render frame thread");
    printStackTrace(drawThread, "Draw thread");
  }
}
@SuppressLint("NewApi") @SuppressLint("NewApi")
@Override @Override
public void onSurfaceCreated(GL10 unused, EGLConfig config) { public void onSurfaceCreated(GL10 unused, EGLConfig config) {
@ -613,6 +640,9 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
@Override @Override
public void onDrawFrame(GL10 unused) { public void onDrawFrame(GL10 unused) {
if (drawThread == null) {
drawThread = Thread.currentThread();
}
GLES20.glViewport(0, 0, screenWidth, screenHeight); GLES20.glViewport(0, 0, screenWidth, screenHeight);
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT); GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
synchronized (yuvImageRenderers) { synchronized (yuvImageRenderers) {

View File

@ -32,9 +32,6 @@ import android.media.MediaCodecInfo;
import android.media.MediaCodecInfo.CodecCapabilities; import android.media.MediaCodecInfo.CodecCapabilities;
import android.media.MediaCodecList; import android.media.MediaCodecList;
import android.media.MediaFormat; import android.media.MediaFormat;
import android.opengl.EGLContext;
import android.opengl.GLES11Ext;
import android.opengl.GLES20;
import android.os.Build; import android.os.Build;
import android.view.Surface; import android.view.Surface;
@ -65,7 +62,7 @@ public class MediaCodecVideoDecoder {
} }
private static final int DEQUEUE_INPUT_TIMEOUT = 500000; // 500 ms timeout. private static final int DEQUEUE_INPUT_TIMEOUT = 500000; // 500 ms timeout.
private Thread mediaCodecThread; private static Thread mediaCodecThread;
private MediaCodec mediaCodec; private MediaCodec mediaCodec;
private ByteBuffer[] inputBuffers; private ByteBuffer[] inputBuffers;
private ByteBuffer[] outputBuffers; private ByteBuffer[] outputBuffers;
@ -173,6 +170,18 @@ public class MediaCodecVideoDecoder {
return findDecoder(H264_MIME_TYPE, supportedH264HwCodecPrefixes) != null; return findDecoder(H264_MIME_TYPE, supportedH264HwCodecPrefixes) != null;
} }
// Logs the decoder's MediaCodec thread stack trace via Logging, for
// diagnosing hangs inside MediaCodec calls. No-op if no codec thread is
// active.
public static void printStackTrace() {
  // Snapshot the static field: this is intended to be called from other
  // threads, so mediaCodecThread may be cleared concurrently by release().
  Thread thread = mediaCodecThread;
  if (thread != null) {
    StackTraceElement[] mediaCodecStackTraces = thread.getStackTrace();
    if (mediaCodecStackTraces.length > 0) {
      // Fixed log-message typo: "stacks trace" -> "stack trace".
      Logging.d(TAG, "MediaCodecVideoDecoder stack trace:");
      for (StackTraceElement stackTrace : mediaCodecStackTraces) {
        Logging.d(TAG, stackTrace.toString());
      }
    }
  }
}
private void checkOnMediaCodecThread() throws IllegalStateException { private void checkOnMediaCodecThread() throws IllegalStateException {
if (mediaCodecThread.getId() != Thread.currentThread().getId()) { if (mediaCodecThread.getId() != Thread.currentThread().getId()) {
throw new IllegalStateException( throw new IllegalStateException(

View File

@ -25,7 +25,6 @@
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/ */
package org.webrtc; package org.webrtc;
import android.media.MediaCodec; import android.media.MediaCodec;
@ -62,7 +61,7 @@ public class MediaCodecVideoEncoder {
} }
private static final int DEQUEUE_TIMEOUT = 0; // Non-blocking, no wait. private static final int DEQUEUE_TIMEOUT = 0; // Non-blocking, no wait.
private Thread mediaCodecThread; private static Thread mediaCodecThread;
private MediaCodec mediaCodec; private MediaCodec mediaCodec;
private ByteBuffer[] outputBuffers; private ByteBuffer[] outputBuffers;
private static final String VP8_MIME_TYPE = "video/x-vnd.on2.vp8"; private static final String VP8_MIME_TYPE = "video/x-vnd.on2.vp8";
@ -197,6 +196,18 @@ public class MediaCodecVideoEncoder {
} }
} }
// Logs the encoder's MediaCodec thread stack trace via Logging, for
// diagnosing hangs inside MediaCodec calls. No-op if no codec thread is
// active.
public static void printStackTrace() {
  // Snapshot the static field: this is intended to be called from other
  // threads, so mediaCodecThread may be cleared concurrently by release().
  Thread thread = mediaCodecThread;
  if (thread != null) {
    StackTraceElement[] mediaCodecStackTraces = thread.getStackTrace();
    if (mediaCodecStackTraces.length > 0) {
      // Fixed log-message typo: "stacks trace" -> "stack trace".
      Logging.d(TAG, "MediaCodecVideoEncoder stack trace:");
      for (StackTraceElement stackTrace : mediaCodecStackTraces) {
        Logging.d(TAG, stackTrace.toString());
      }
    }
  }
}
static MediaCodec createByCodecName(String codecName) { static MediaCodec createByCodecName(String codecName) {
try { try {
// In the L-SDK this call can throw IOException so in order to work in // In the L-SDK this call can throw IOException so in order to work in

View File

@ -148,19 +148,21 @@ public class PeerConnectionFactory {
nativeThreadsCallbacks(nativeFactory); nativeThreadsCallbacks(nativeFactory);
} }
// Logs the stack trace of |thread| via Logging, labeled with |threadName|.
// No-op when |thread| is null or currently has no stack frames (e.g. it has
// not started or has already terminated).
private static void printStackTrace(Thread thread, String threadName) {
  if (thread != null) {
    StackTraceElement[] stackTraces = thread.getStackTrace();
    if (stackTraces.length > 0) {
      // Fixed log-message typo: "stacks trace" -> "stack trace".
      Logging.d(TAG, threadName + " stack trace:");
      for (StackTraceElement stackTrace : stackTraces) {
        Logging.d(TAG, stackTrace.toString());
      }
    }
  }
}
public static void printStackTraces() { public static void printStackTraces() {
if (workerThread != null) { printStackTrace(workerThread, "Worker thread");
Logging.d(TAG, "Worker thread stacks trace:"); printStackTrace(signalingThread, "Signaling thread");
for (StackTraceElement stackTrace : workerThread.getStackTrace()) {
Logging.d(TAG, stackTrace.toString());
}
}
if (signalingThread != null) {
Logging.d(TAG, "Signaling thread stacks trace:");
for (StackTraceElement stackTrace : signalingThread.getStackTrace()) {
Logging.d(TAG, stackTrace.toString());
}
}
} }
private static void onWorkerThreadReady() { private static void onWorkerThreadReady() {