Add option to print peer connection factory Java stack traces.
Removing static declaration for media codec thread to allow running multiple HW codec instances.

R=wzh@webrtc.org

Review URL: https://codereview.webrtc.org/1393203005 .

Cr-Commit-Position: refs/heads/master@{#10258}
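For context, a minimal usage sketch of the stack-trace dumps this commit introduces: an application that suspects a video call is hung could call the new print methods from its own debugging or watchdog code. The wrapper method below and the idea of calling it from a timeout handler are assumptions for illustration only; the printStackTrace/printStackTraces calls themselves are the methods added in the diff that follows.

// Hypothetical app-side debugging hook (not part of this commit), assuming the
// app keeps a reference to its VideoCapturerAndroid instance.
private void dumpWebRtcJavaStacks(VideoCapturerAndroid capturer) {
  PeerConnectionFactory.printStackTraces();   // Worker and signaling threads.
  VideoRendererGui.printStackTraces();        // Render and draw threads.
  MediaCodecVideoEncoder.printStackTrace();   // HW encoder codec thread, if running.
  MediaCodecVideoDecoder.printStackTrace();   // HW decoder codec thread, if running.
  if (capturer != null) {
    capturer.printStackTrace();               // Camera thread of the capturer.
  }
}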
@@ -35,7 +35,6 @@ import android.opengl.EGLContext;
 import android.os.Handler;
 import android.os.HandlerThread;
 import android.os.SystemClock;
-import android.text.StaticLayout;
 import android.view.Surface;
 import android.view.WindowManager;
 
@@ -217,6 +216,18 @@ public class VideoCapturerAndroid extends VideoCapturer implements PreviewCallba
     return capturer;
   }
 
+  public void printStackTrace() {
+    if (cameraThread != null) {
+      StackTraceElement[] cameraStackTraces = cameraThread.getStackTrace();
+      if (cameraStackTraces.length > 0) {
+        Logging.d(TAG, "VideoCapturerAndroid stacks trace:");
+        for (StackTraceElement stackTrace : cameraStackTraces) {
+          Logging.d(TAG, stackTrace.toString());
+        }
+      }
+    }
+  }
+
   // Switch camera to the next valid camera id. This can only be called while
   // the camera is running.
   public void switchCamera(final CameraSwitchHandler handler) {
@@ -36,12 +36,10 @@ import javax.microedition.khronos.opengles.GL10;
 import android.annotation.SuppressLint;
 import android.graphics.Point;
 import android.graphics.Rect;
-import android.graphics.SurfaceTexture;
 import android.opengl.EGL14;
 import android.opengl.EGLContext;
 import android.opengl.GLES20;
 import android.opengl.GLSurfaceView;
-import android.opengl.Matrix;
 
 import org.webrtc.Logging;
 import org.webrtc.VideoRenderer.I420Frame;
@@ -77,6 +75,9 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
   // Current SDK version.
   private static final int CURRENT_SDK_VERSION =
       android.os.Build.VERSION.SDK_INT;
+  // Render and draw threads.
+  private static Thread renderFrameThread;
+  private static Thread drawThread;
 
   private VideoRendererGui(GLSurfaceView surface) {
     this.surface = surface;
@@ -372,6 +373,9 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
       VideoRenderer.renderFrameDone(frame);
       return;
     }
+    if (renderFrameThread == null) {
+      renderFrameThread = Thread.currentThread();
+    }
     if (!seenFrame && rendererEvents != null) {
       Logging.d(TAG, "ID: " + id + ". Reporting first rendered frame.");
       rendererEvents.onFirstFrameRendered();
@@ -394,6 +398,7 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
         // Skip rendering of this frame if previous frame was not rendered yet.
         framesDropped++;
         VideoRenderer.renderFrameDone(frame);
+        seenFrame = true;
         return;
       }
       pendingFrame = frame;
@@ -430,6 +435,8 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
       }
       instance.yuvImageRenderers.clear();
     }
+    renderFrameThread = null;
+    drawThread = null;
     instance.surface = null;
     eglContext = null;
     eglContextReady = null;
@@ -565,6 +572,26 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
     }
   }
 
+  private static void printStackTrace(Thread thread, String threadName) {
+    if (thread != null) {
+      StackTraceElement[] stackTraces = thread.getStackTrace();
+      if (stackTraces.length > 0) {
+        Logging.d(TAG, threadName + " stacks trace:");
+        for (StackTraceElement stackTrace : stackTraces) {
+          Logging.d(TAG, stackTrace.toString());
+        }
+      }
+    }
+  }
+
+  public static synchronized void printStackTraces() {
+    if (instance == null) {
+      return;
+    }
+    printStackTrace(renderFrameThread, "Render frame thread");
+    printStackTrace(drawThread, "Draw thread");
+  }
+
   @SuppressLint("NewApi")
   @Override
   public void onSurfaceCreated(GL10 unused, EGLConfig config) {
@@ -613,6 +640,9 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
 
   @Override
   public void onDrawFrame(GL10 unused) {
+    if (drawThread == null) {
+      drawThread = Thread.currentThread();
+    }
     GLES20.glViewport(0, 0, screenWidth, screenHeight);
     GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
     synchronized (yuvImageRenderers) {
@@ -32,9 +32,6 @@ import android.media.MediaCodecInfo;
 import android.media.MediaCodecInfo.CodecCapabilities;
 import android.media.MediaCodecList;
 import android.media.MediaFormat;
-import android.opengl.EGLContext;
-import android.opengl.GLES11Ext;
-import android.opengl.GLES20;
 import android.os.Build;
 import android.view.Surface;
 
@@ -65,6 +62,7 @@ public class MediaCodecVideoDecoder {
   }
 
   private static final int DEQUEUE_INPUT_TIMEOUT = 500000; // 500 ms timeout.
+  private static MediaCodecVideoDecoder instance;
   private Thread mediaCodecThread;
   private MediaCodec mediaCodec;
   private ByteBuffer[] inputBuffers;
@@ -100,7 +98,7 @@ public class MediaCodecVideoDecoder {
   private Surface surface = null;
 
   private MediaCodecVideoDecoder() {
-    mediaCodecThread = null;
+    instance = this;
   }
 
   // Helper struct for findVp8Decoder() below.
@@ -175,6 +173,18 @@ public class MediaCodecVideoDecoder {
     return findDecoder(H264_MIME_TYPE, supportedH264HwCodecPrefixes) != null;
   }
 
+  public static void printStackTrace() {
+    if (instance != null && instance.mediaCodecThread != null) {
+      StackTraceElement[] mediaCodecStackTraces = instance.mediaCodecThread.getStackTrace();
+      if (mediaCodecStackTraces.length > 0) {
+        Logging.d(TAG, "MediaCodecVideoDecoder stacks trace:");
+        for (StackTraceElement stackTrace : mediaCodecStackTraces) {
+          Logging.d(TAG, stackTrace.toString());
+        }
+      }
+    }
+  }
+
   private void checkOnMediaCodecThread() throws IllegalStateException {
     if (mediaCodecThread.getId() != Thread.currentThread().getId()) {
       throw new IllegalStateException(
@@ -228,6 +238,7 @@ public class MediaCodecVideoDecoder {
       mediaCodec =
           MediaCodecVideoEncoder.createByCodecName(properties.codecName);
       if (mediaCodec == null) {
+        Logging.e(TAG, "Can not create media decoder");
         return false;
       }
       mediaCodec.configure(format, surface, null, 0);
@@ -255,11 +266,13 @@ public class MediaCodecVideoDecoder {
     }
     mediaCodec = null;
     mediaCodecThread = null;
+    instance = null;
     if (useSurface) {
       surface.release();
       surface = null;
       textureListener.release();
     }
+    Logging.d(TAG, "Java releaseDecoder done");
   }
 
   // Dequeue an input buffer and return its index, -1 if no input buffer is
@@ -25,7 +25,6 @@
  * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
  */
 
-
 package org.webrtc;
 
 import android.media.MediaCodec;
@@ -62,6 +61,7 @@ public class MediaCodecVideoEncoder {
   }
 
   private static final int DEQUEUE_TIMEOUT = 0; // Non-blocking, no wait.
+  private static MediaCodecVideoEncoder instance = null;
   private Thread mediaCodecThread;
   private MediaCodec mediaCodec;
   private ByteBuffer[] outputBuffers;
@@ -103,7 +103,7 @@ public class MediaCodecVideoEncoder {
   private ByteBuffer configData = null;
 
   MediaCodecVideoEncoder() {
-    mediaCodecThread = null;
+    instance = this;
   }
 
   // Helper struct for findHwEncoder() below.
@@ -199,6 +199,18 @@ public class MediaCodecVideoEncoder {
     }
   }
 
+  public static void printStackTrace() {
+    if (instance != null && instance.mediaCodecThread != null) {
+      StackTraceElement[] mediaCodecStackTraces = instance.mediaCodecThread.getStackTrace();
+      if (mediaCodecStackTraces.length > 0) {
+        Logging.d(TAG, "MediaCodecVideoEncoder stacks trace:");
+        for (StackTraceElement stackTrace : mediaCodecStackTraces) {
+          Logging.d(TAG, stackTrace.toString());
+        }
+      }
+    }
+  }
+
   static MediaCodec createByCodecName(String codecName) {
     try {
       // In the L-SDK this call can throw IOException so in order to work in
@@ -245,6 +257,7 @@ public class MediaCodecVideoEncoder {
       mediaCodec = createByCodecName(properties.codecName);
       this.type = type;
       if (mediaCodec == null) {
+        Logging.e(TAG, "Can not create media encoder");
         return false;
       }
       mediaCodec.configure(
@@ -302,6 +315,8 @@ public class MediaCodecVideoEncoder {
     }
     mediaCodec = null;
     mediaCodecThread = null;
+    instance = null;
+    Logging.d(TAG, "Java releaseEncoder done");
   }
 
   private boolean setRates(int kbps, int frameRateIgnored) {
@@ -148,19 +148,21 @@ public class PeerConnectionFactory {
     nativeThreadsCallbacks(nativeFactory);
   }
 
+  private static void printStackTrace(Thread thread, String threadName) {
+    if (thread != null) {
+      StackTraceElement[] stackTraces = thread.getStackTrace();
+      if (stackTraces.length > 0) {
+        Logging.d(TAG, threadName + " stacks trace:");
+        for (StackTraceElement stackTrace : stackTraces) {
+          Logging.d(TAG, stackTrace.toString());
+        }
+      }
+    }
+  }
+
   public static void printStackTraces() {
-    if (workerThread != null) {
-      Logging.d(TAG, "Worker thread stacks trace:");
-      for (StackTraceElement stackTrace : workerThread.getStackTrace()) {
-        Logging.d(TAG, stackTrace.toString());
-      }
-    }
-    if (signalingThread != null) {
-      Logging.d(TAG, "Signaling thread stacks trace:");
-      for (StackTraceElement stackTrace : signalingThread.getStackTrace()) {
-        Logging.d(TAG, stackTrace.toString());
-      }
-    }
+    printStackTrace(workerThread, "Worker thread");
+    printStackTrace(signalingThread, "Signaling thread");
   }
 
   private static void onWorkerThreadReady() {