Revert "Add option to print peer connection factory Java stack traces."

This reverts commit b68c5995d1ac84866da45a4ecbb180d8c704ad90.

Reason for reverting: It breaks some Android32 bots.

TBR=glaznev@google.com

Review URL: https://codereview.webrtc.org/1399473003 .

Cr-Commit-Position: refs/heads/master@{#10239}
This commit is contained in:
Alejandro Luebs
2015-10-09 15:46:09 -07:00
parent b68c5995d1
commit 69ddaefbb3
5 changed files with 22 additions and 89 deletions

View File

@@ -35,6 +35,7 @@ import android.opengl.EGLContext;
import android.os.Handler; import android.os.Handler;
import android.os.HandlerThread; import android.os.HandlerThread;
import android.os.SystemClock; import android.os.SystemClock;
import android.text.StaticLayout;
import android.view.Surface; import android.view.Surface;
import android.view.WindowManager; import android.view.WindowManager;
@@ -216,18 +217,6 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallba
return capturer; return capturer;
} }
public void printStackTrace() {
if (cameraThread != null) {
StackTraceElement[] cameraStackTraces = cameraThread.getStackTrace();
if (cameraStackTraces.length > 0) {
Logging.d(TAG, "VideoCapturerAndroid stacks trace:");
for (StackTraceElement stackTrace : cameraStackTraces) {
Logging.d(TAG, stackTrace.toString());
}
}
}
}
// Switch camera to the next valid camera id. This can only be called while // Switch camera to the next valid camera id. This can only be called while
// the camera is running. // the camera is running.
public void switchCamera(final CameraSwitchHandler handler) { public void switchCamera(final CameraSwitchHandler handler) {

View File

@@ -36,10 +36,12 @@ import javax.microedition.khronos.opengles.GL10;
import android.annotation.SuppressLint; import android.annotation.SuppressLint;
import android.graphics.Point; import android.graphics.Point;
import android.graphics.Rect; import android.graphics.Rect;
import android.graphics.SurfaceTexture;
import android.opengl.EGL14; import android.opengl.EGL14;
import android.opengl.EGLContext; import android.opengl.EGLContext;
import android.opengl.GLES20; import android.opengl.GLES20;
import android.opengl.GLSurfaceView; import android.opengl.GLSurfaceView;
import android.opengl.Matrix;
import org.webrtc.Logging; import org.webrtc.Logging;
import org.webrtc.VideoRenderer.I420Frame; import org.webrtc.VideoRenderer.I420Frame;
@@ -75,9 +77,6 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
// Current SDK version. // Current SDK version.
private static final int CURRENT_SDK_VERSION = private static final int CURRENT_SDK_VERSION =
android.os.Build.VERSION.SDK_INT; android.os.Build.VERSION.SDK_INT;
// Render and draw threads.
private static Thread renderFrameThread;
private static Thread drawThread;
private VideoRendererGui(GLSurfaceView surface) { private VideoRendererGui(GLSurfaceView surface) {
this.surface = surface; this.surface = surface;
@@ -373,9 +372,6 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
VideoRenderer.renderFrameDone(frame); VideoRenderer.renderFrameDone(frame);
return; return;
} }
if (renderFrameThread == null) {
renderFrameThread = Thread.currentThread();
}
if (!seenFrame && rendererEvents != null) { if (!seenFrame && rendererEvents != null) {
Logging.d(TAG, "ID: " + id + ". Reporting first rendered frame."); Logging.d(TAG, "ID: " + id + ". Reporting first rendered frame.");
rendererEvents.onFirstFrameRendered(); rendererEvents.onFirstFrameRendered();
@@ -398,7 +394,6 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
// Skip rendering of this frame if previous frame was not rendered yet. // Skip rendering of this frame if previous frame was not rendered yet.
framesDropped++; framesDropped++;
VideoRenderer.renderFrameDone(frame); VideoRenderer.renderFrameDone(frame);
seenFrame = true;
return; return;
} }
pendingFrame = frame; pendingFrame = frame;
@@ -435,8 +430,6 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
} }
instance.yuvImageRenderers.clear(); instance.yuvImageRenderers.clear();
} }
renderFrameThread = null;
drawThread = null;
instance.surface = null; instance.surface = null;
eglContext = null; eglContext = null;
eglContextReady = null; eglContextReady = null;
@@ -572,26 +565,6 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
} }
} }
private static void printStackTrace(Thread thread, String threadName) {
if (thread != null) {
StackTraceElement[] stackTraces = thread.getStackTrace();
if (stackTraces.length > 0) {
Logging.d(TAG, threadName + " stacks trace:");
for (StackTraceElement stackTrace : stackTraces) {
Logging.d(TAG, stackTrace.toString());
}
}
}
}
public static synchronized void printStackTraces() {
if (instance == null) {
return;
}
printStackTrace(renderFrameThread, "Render frame thread");
printStackTrace(drawThread, "Draw thread");
}
@SuppressLint("NewApi") @SuppressLint("NewApi")
@Override @Override
public void onSurfaceCreated(GL10 unused, EGLConfig config) { public void onSurfaceCreated(GL10 unused, EGLConfig config) {
@@ -640,9 +613,6 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
@Override @Override
public void onDrawFrame(GL10 unused) { public void onDrawFrame(GL10 unused) {
if (drawThread == null) {
drawThread = Thread.currentThread();
}
GLES20.glViewport(0, 0, screenWidth, screenHeight); GLES20.glViewport(0, 0, screenWidth, screenHeight);
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT); GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
synchronized (yuvImageRenderers) { synchronized (yuvImageRenderers) {

View File

@@ -32,6 +32,9 @@ import android.media.MediaCodecInfo;
import android.media.MediaCodecInfo.CodecCapabilities; import android.media.MediaCodecInfo.CodecCapabilities;
import android.media.MediaCodecList; import android.media.MediaCodecList;
import android.media.MediaFormat; import android.media.MediaFormat;
import android.opengl.EGLContext;
import android.opengl.GLES11Ext;
import android.opengl.GLES20;
import android.os.Build; import android.os.Build;
import android.view.Surface; import android.view.Surface;
@@ -62,7 +65,7 @@ public class MediaCodecVideoDecoder {
} }
private static final int DEQUEUE_INPUT_TIMEOUT = 500000; // 500 ms timeout. private static final int DEQUEUE_INPUT_TIMEOUT = 500000; // 500 ms timeout.
private static Thread mediaCodecThread; private Thread mediaCodecThread;
private MediaCodec mediaCodec; private MediaCodec mediaCodec;
private ByteBuffer[] inputBuffers; private ByteBuffer[] inputBuffers;
private ByteBuffer[] outputBuffers; private ByteBuffer[] outputBuffers;
@@ -172,18 +175,6 @@ public class MediaCodecVideoDecoder {
return findDecoder(H264_MIME_TYPE, supportedH264HwCodecPrefixes) != null; return findDecoder(H264_MIME_TYPE, supportedH264HwCodecPrefixes) != null;
} }
public static void printStackTrace() {
if (mediaCodecThread != null) {
StackTraceElement[] mediaCodecStackTraces = mediaCodecThread.getStackTrace();
if (mediaCodecStackTraces.length > 0) {
Logging.d(TAG, "MediaCodecVideoDecoder stacks trace:");
for (StackTraceElement stackTrace : mediaCodecStackTraces) {
Logging.d(TAG, stackTrace.toString());
}
}
}
}
private void checkOnMediaCodecThread() throws IllegalStateException { private void checkOnMediaCodecThread() throws IllegalStateException {
if (mediaCodecThread.getId() != Thread.currentThread().getId()) { if (mediaCodecThread.getId() != Thread.currentThread().getId()) {
throw new IllegalStateException( throw new IllegalStateException(
@@ -195,7 +186,7 @@ public class MediaCodecVideoDecoder {
// Pass null in |surfaceTextureHelper| to configure the codec for ByteBuffer output. // Pass null in |surfaceTextureHelper| to configure the codec for ByteBuffer output.
private boolean initDecode( private boolean initDecode(
VideoCodecType type, int width, int height, SurfaceTextureHelper surfaceTextureHelper) { VideoCodecType type, int width, int height, SurfaceTextureHelper surfaceTextureHelper) {
if (mediaCodec != null) { if (mediaCodecThread != null) {
throw new RuntimeException("Forgot to release()?"); throw new RuntimeException("Forgot to release()?");
} }
useSurface = (surfaceTextureHelper != null); useSurface = (surfaceTextureHelper != null);
@@ -237,7 +228,6 @@ public class MediaCodecVideoDecoder {
mediaCodec = mediaCodec =
MediaCodecVideoEncoder.createByCodecName(properties.codecName); MediaCodecVideoEncoder.createByCodecName(properties.codecName);
if (mediaCodec == null) { if (mediaCodec == null) {
Logging.e(TAG, "Can not create media decoder");
return false; return false;
} }
mediaCodec.configure(format, surface, null, 0); mediaCodec.configure(format, surface, null, 0);
@@ -270,7 +260,6 @@ public class MediaCodecVideoDecoder {
surface = null; surface = null;
textureListener.release(); textureListener.release();
} }
Logging.d(TAG, "Java releaseDecoder done");
} }
// Dequeue an input buffer and return its index, -1 if no input buffer is // Dequeue an input buffer and return its index, -1 if no input buffer is

View File

@@ -25,6 +25,7 @@
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/ */
package org.webrtc; package org.webrtc;
import android.media.MediaCodec; import android.media.MediaCodec;
@@ -61,7 +62,7 @@ public class MediaCodecVideoEncoder {
} }
private static final int DEQUEUE_TIMEOUT = 0; // Non-blocking, no wait. private static final int DEQUEUE_TIMEOUT = 0; // Non-blocking, no wait.
private static Thread mediaCodecThread; private Thread mediaCodecThread;
private MediaCodec mediaCodec; private MediaCodec mediaCodec;
private ByteBuffer[] outputBuffers; private ByteBuffer[] outputBuffers;
private static final String VP8_MIME_TYPE = "video/x-vnd.on2.vp8"; private static final String VP8_MIME_TYPE = "video/x-vnd.on2.vp8";
@@ -198,18 +199,6 @@ public class MediaCodecVideoEncoder {
} }
} }
public static void printStackTrace() {
if (mediaCodecThread != null) {
StackTraceElement[] mediaCodecStackTraces = mediaCodecThread.getStackTrace();
if (mediaCodecStackTraces.length > 0) {
Logging.d(TAG, "MediaCodecVideoEncoder stacks trace:");
for (StackTraceElement stackTrace : mediaCodecStackTraces) {
Logging.d(TAG, stackTrace.toString());
}
}
}
}
static MediaCodec createByCodecName(String codecName) { static MediaCodec createByCodecName(String codecName) {
try { try {
// In the L-SDK this call can throw IOException so in order to work in // In the L-SDK this call can throw IOException so in order to work in
@@ -226,7 +215,7 @@ public class MediaCodecVideoEncoder {
Logging.d(TAG, "Java initEncode: " + type + " : " + width + " x " + height + Logging.d(TAG, "Java initEncode: " + type + " : " + width + " x " + height +
". @ " + kbps + " kbps. Fps: " + fps + ". @ " + kbps + " kbps. Fps: " + fps +
". Color: 0x" + Integer.toHexString(colorFormat)); ". Color: 0x" + Integer.toHexString(colorFormat));
if (mediaCodec != null) { if (mediaCodecThread != null) {
throw new RuntimeException("Forgot to release()?"); throw new RuntimeException("Forgot to release()?");
} }
this.type = type; this.type = type;
@@ -256,7 +245,6 @@ public class MediaCodecVideoEncoder {
Logging.d(TAG, " Format: " + format); Logging.d(TAG, " Format: " + format);
mediaCodec = createByCodecName(properties.codecName); mediaCodec = createByCodecName(properties.codecName);
if (mediaCodec == null) { if (mediaCodec == null) {
Logging.e(TAG, "Can not create media encoder");
return null; return null;
} }
mediaCodec.configure( mediaCodec.configure(
@@ -310,7 +298,6 @@ public class MediaCodecVideoEncoder {
} }
mediaCodec = null; mediaCodec = null;
mediaCodecThread = null; mediaCodecThread = null;
Logging.d(TAG, "Java releaseEncoder done");
} }
private boolean setRates(int kbps, int frameRateIgnored) { private boolean setRates(int kbps, int frameRateIgnored) {

View File

@@ -148,21 +148,19 @@ public class PeerConnectionFactory {
nativeThreadsCallbacks(nativeFactory); nativeThreadsCallbacks(nativeFactory);
} }
private static void printStackTrace(Thread thread, String threadName) { public static void printStackTraces() {
if (thread != null) { if (workerThread != null) {
StackTraceElement[] stackTraces = thread.getStackTrace(); Logging.d(TAG, "Worker thread stacks trace:");
if (stackTraces.length > 0) { for (StackTraceElement stackTrace : workerThread.getStackTrace()) {
Logging.d(TAG, threadName + " stacks trace:"); Logging.d(TAG, stackTrace.toString());
for (StackTraceElement stackTrace : stackTraces) { }
Logging.d(TAG, stackTrace.toString()); }
} if (signalingThread != null) {
Logging.d(TAG, "Signaling thread stacks trace:");
for (StackTraceElement stackTrace : signalingThread.getStackTrace()) {
Logging.d(TAG, stackTrace.toString());
} }
} }
}
public static void printStackTraces() {
printStackTrace(workerThread, "Worker thread");
printStackTrace(signalingThread, "Signaling thread");
} }
private static void onWorkerThreadReady() { private static void onWorkerThreadReady() {