Patchset 1 is a pure revert of "Revert of "Android MediaCodecVideoDecoder: Manage lifetime of texture frames"" (https://codereview.webrtc.org/1378033003/). The following patchsets move the responsibility of calculating the decode time to Java.

TESTED=AppRTC loopback using H264 and VP8 on N5, N6, N7, S5

Review URL: https://codereview.webrtc.org/1422963003
Cr-Commit-Position: refs/heads/master@{#10597}
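The core Java-side mechanism is easiest to see in isolation: decode time is now measured where the codec runs, by queuing a start timestamp when an input buffer is submitted and consuming it when the matching output buffer is dequeued. A condensed sketch of that bookkeeping, using the decodeStartTimeMs queue and DecodedOutputBuffer struct this commit introduces (the two wrapper method names are hypothetical; in the real change the logic is inlined in MediaCodecVideoDecoder, shown in the diff below):

    // Sketch only: decode-time bookkeeping as introduced by this commit.
    private final Queue<Long> decodeStartTimeMs = new LinkedList<Long>();

    // Hypothetical wrapper around the input path; records when decoding starts.
    private void queueInput(int inputBufferIndex, int size, long timestampUs) {
      decodeStartTimeMs.add(SystemClock.elapsedRealtime());
      mediaCodec.queueInputBuffer(inputBufferIndex, 0, size, timestampUs, 0);
    }

    // Hypothetical wrapper around the output path; pairs the oldest start time
    // with the buffer MediaCodec just reported (assumes one start time was
    // recorded per queued input) and computes decodeTimeMs.
    private DecodedOutputBuffer wrapOutput(MediaCodec.BufferInfo info, int index) {
      final long endDecodeTimeMs = SystemClock.elapsedRealtime();
      final long decodeTimeMs = endDecodeTimeMs - decodeStartTimeMs.remove();
      return new DecodedOutputBuffer(index, info.offset, info.size,
          info.presentationTimeUs, decodeTimeMs, endDecodeTimeMs);
    }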
@@ -107,9 +107,8 @@ public class VideoCapturerAndroidTest extends ActivityTestCase {
     VideoCapturerAndroidTestFixtures.startCapturerAndRender(capturer);
   }
 
-  // TODO(perkj): Enable once VideoCapture to texture support has landed in C++.
   @SmallTest
-  public void DISABLED_testStartVideoCapturerUsingTextures() throws InterruptedException {
+  public void testStartVideoCapturerUsingTextures() throws InterruptedException {
     VideoCapturerAndroid capturer =
         VideoCapturerAndroid.create("", null, EGL10.EGL_NO_CONTEXT);
     VideoCapturerAndroidTestFixtures.startCapturerAndRender(capturer);
@@ -150,9 +149,8 @@ public class VideoCapturerAndroidTest extends ActivityTestCase {
     VideoCapturerAndroidTestFixtures.switchCamera(capturer);
   }
 
-  // TODO(perkj): Enable once VideoCapture to texture support has landed in C++.
   @SmallTest
-  public void DISABLED_testSwitchVideoCapturerUsingTextures() throws InterruptedException {
+  public void testSwitchVideoCapturerUsingTextures() throws InterruptedException {
     VideoCapturerAndroid capturer = VideoCapturerAndroid.create("", null, EGL10.EGL_NO_CONTEXT);
     VideoCapturerAndroidTestFixtures.switchCamera(capturer);
   }
@@ -194,9 +192,8 @@ public class VideoCapturerAndroidTest extends ActivityTestCase {
     VideoCapturerAndroidTestFixtures.stopRestartVideoSource(capturer);
   }
 
-  // TODO(perkj): Enable once VideoCapture to texture support has landed in C++.
   @SmallTest
-  public void DISABLED_testStopRestartVideoSourceUsingTextures() throws InterruptedException {
+  public void testStopRestartVideoSourceUsingTextures() throws InterruptedException {
     VideoCapturerAndroid capturer = VideoCapturerAndroid.create("", null, EGL10.EGL_NO_CONTEXT);
     VideoCapturerAndroidTestFixtures.stopRestartVideoSource(capturer);
   }
@@ -285,9 +282,8 @@ public class VideoCapturerAndroidTest extends ActivityTestCase {
     VideoCapturerAndroidTestFixtures.returnBufferLateEndToEnd(capturer);
   }
 
-  // TODO(perkj): Enable once VideoCapture to texture support has landed in C++.
   @MediumTest
-  public void DISABLED_testReturnBufferLateEndToEndUsingTextures() throws InterruptedException {
+  public void testReturnBufferLateEndToEndUsingTextures() throws InterruptedException {
     final VideoCapturerAndroid capturer =
         VideoCapturerAndroid.create("", null, EGL10.EGL_NO_CONTEXT);
     VideoCapturerAndroidTestFixtures.returnBufferLateEndToEnd(capturer);
@@ -30,9 +30,7 @@ package org.webrtc;
 import android.content.Context;
 import android.content.res.Resources.NotFoundException;
 import android.graphics.Point;
-import android.graphics.SurfaceTexture;
 import android.opengl.GLES20;
-import android.opengl.Matrix;
 import android.os.Handler;
 import android.os.HandlerThread;
 import android.util.AttributeSet;
@@ -457,25 +455,10 @@ public class SurfaceViewRenderer extends SurfaceView
     }
 
     final long startTimeNs = System.nanoTime();
-    final float[] samplingMatrix;
-    if (frame.yuvFrame) {
-      // The convention in WebRTC is that the first element in a ByteBuffer corresponds to the
-      // top-left corner of the image, but in glTexImage2D() the first element corresponds to the
-      // bottom-left corner. We correct this discrepancy by setting a vertical flip as sampling
-      // matrix.
-      samplingMatrix = RendererCommon.verticalFlipMatrix();
-    } else {
-      // TODO(magjed): Move updateTexImage() to the video source instead.
-      SurfaceTexture surfaceTexture = (SurfaceTexture) frame.textureObject;
-      surfaceTexture.updateTexImage();
-      samplingMatrix = new float[16];
-      surfaceTexture.getTransformMatrix(samplingMatrix);
-    }
-
     final float[] texMatrix;
     synchronized (layoutLock) {
       final float[] rotatedSamplingMatrix =
-          RendererCommon.rotateTextureMatrix(samplingMatrix, frame.rotationDegree);
+          RendererCommon.rotateTextureMatrix(frame.samplingMatrix, frame.rotationDegree);
       final float[] layoutMatrix = RendererCommon.getLayoutMatrix(
           mirror, frameAspectRatio(), (float) layoutWidth / layoutHeight);
       texMatrix = RendererCommon.multiplyMatrices(rotatedSamplingMatrix, layoutMatrix);
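With frame.samplingMatrix supplied by the frame source, the renderer no longer touches the SurfaceTexture itself; it only composes matrices. A minimal sketch of the resulting pipeline, restating the RendererCommon calls visible in the hunk above (variable names are illustrative):

    // The frame now carries its own sampling matrix: a vertical flip for YUV
    // frames (ByteBuffer rows are top-down, GL texture rows bottom-up) or the
    // SurfaceTexture transform for texture frames.
    final float[] rotatedSamplingMatrix = RendererCommon.rotateTextureMatrix(
        frame.samplingMatrix, frame.rotationDegree);  // Undo frame rotation.
    final float[] layoutMatrix = RendererCommon.getLayoutMatrix(
        mirror, frameAspectRatio(), (float) layoutWidth / layoutHeight);
    final float[] texMatrix =
        RendererCommon.multiplyMatrices(rotatedSamplingMatrix, layoutMatrix);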
@@ -38,7 +38,6 @@ import javax.microedition.khronos.opengles.GL10;
 import android.annotation.SuppressLint;
 import android.graphics.Point;
 import android.graphics.Rect;
-import android.graphics.SurfaceTexture;
 import android.opengl.GLES20;
 import android.opengl.GLSurfaceView;
 
@@ -241,29 +240,15 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
       }
 
       if (isNewFrame) {
+        rotatedSamplingMatrix = RendererCommon.rotateTextureMatrix(
+            pendingFrame.samplingMatrix, pendingFrame.rotationDegree);
         if (pendingFrame.yuvFrame) {
           rendererType = RendererType.RENDERER_YUV;
           drawer.uploadYuvData(yuvTextures, pendingFrame.width, pendingFrame.height,
               pendingFrame.yuvStrides, pendingFrame.yuvPlanes);
-          // The convention in WebRTC is that the first element in a ByteBuffer corresponds to the
-          // top-left corner of the image, but in glTexImage2D() the first element corresponds to
-          // the bottom-left corner. We correct this discrepancy by setting a vertical flip as
-          // sampling matrix.
-          final float[] samplingMatrix = RendererCommon.verticalFlipMatrix();
-          rotatedSamplingMatrix =
-              RendererCommon.rotateTextureMatrix(samplingMatrix, pendingFrame.rotationDegree);
         } else {
           rendererType = RendererType.RENDERER_TEXTURE;
-          // External texture rendering. Update texture image to latest and make a deep copy of
-          // the external texture.
-          // TODO(magjed): Move updateTexImage() to the video source instead.
-          final SurfaceTexture surfaceTexture = (SurfaceTexture) pendingFrame.textureObject;
-          surfaceTexture.updateTexImage();
-          final float[] samplingMatrix = new float[16];
-          surfaceTexture.getTransformMatrix(samplingMatrix);
-          rotatedSamplingMatrix =
-              RendererCommon.rotateTextureMatrix(samplingMatrix, pendingFrame.rotationDegree);
-
+          // External texture rendering. Make a deep copy of the external texture.
           // Reallocate offscreen texture if necessary.
           textureCopy.setSize(pendingFrame.rotatedWidth(), pendingFrame.rotatedHeight());
 
@@ -33,6 +33,7 @@
 #include "talk/app/webrtc/java/jni/androidmediacodeccommon.h"
 #include "talk/app/webrtc/java/jni/classreferenceholder.h"
 #include "talk/app/webrtc/java/jni/native_handle_impl.h"
+#include "talk/app/webrtc/java/jni/surfacetexturehelper_jni.h"
 #include "webrtc/base/bind.h"
 #include "webrtc/base/checks.h"
 #include "webrtc/base/logging.h"
@@ -112,7 +113,7 @@ class MediaCodecVideoDecoder : public webrtc::VideoDecoder,
   bool use_surface_;
   VideoCodec codec_;
   webrtc::I420BufferPool decoded_frame_pool_;
-  NativeHandleImpl native_handle_;
+  rtc::scoped_refptr<SurfaceTextureHelper> surface_texture_helper_;
   DecodedImageCallback* callback_;
   int frames_received_;  // Number of frames received by decoder.
   int frames_decoded_;  // Number of frames decoded by decoder.
@@ -123,8 +124,6 @@ class MediaCodecVideoDecoder : public webrtc::VideoDecoder,
   uint32_t max_pending_frames_;  // Maximum number of pending input frames
   std::vector<int32_t> timestamps_;
   std::vector<int64_t> ntp_times_ms_;
-  std::vector<int64_t> frame_rtc_times_ms_;  // Time when video frame is sent to
-                                             // decoder input.
 
   // State that is constant for the lifetime of this object once the ctor
   // returns.
@@ -135,7 +134,8 @@ class MediaCodecVideoDecoder : public webrtc::VideoDecoder,
   jmethodID j_release_method_;
   jmethodID j_dequeue_input_buffer_method_;
   jmethodID j_queue_input_buffer_method_;
-  jmethodID j_dequeue_output_buffer_method_;
+  jmethodID j_dequeue_byte_buffer_method_;
+  jmethodID j_dequeue_texture_buffer_method_;
   jmethodID j_return_decoded_byte_buffer_method_;
   // MediaCodecVideoDecoder fields.
   jfieldID j_input_buffers_field_;
@@ -145,20 +145,21 @@ class MediaCodecVideoDecoder : public webrtc::VideoDecoder,
   jfieldID j_height_field_;
   jfieldID j_stride_field_;
   jfieldID j_slice_height_field_;
-  jfieldID j_surface_texture_field_;
   // MediaCodecVideoDecoder.DecodedTextureBuffer fields.
-  jfieldID j_textureID_field_;
+  jfieldID j_texture_id_field_;
+  jfieldID j_transform_matrix_field_;
   jfieldID j_texture_presentation_timestamp_us_field_;
-  // MediaCodecVideoDecoder.DecodedByteBuffer fields.
+  jfieldID j_texture_decode_time_ms_field_;
+  jfieldID j_texture_frame_delay_ms_field_;
+  // MediaCodecVideoDecoder.DecodedOutputBuffer fields.
   jfieldID j_info_index_field_;
   jfieldID j_info_offset_field_;
   jfieldID j_info_size_field_;
   jfieldID j_info_presentation_timestamp_us_field_;
+  jfieldID j_byte_buffer_decode_time_ms_field_;
 
   // Global references; must be deleted in Release().
   std::vector<jobject> input_buffers_;
-  jobject surface_texture_;
-  jobject previous_surface_texture_;
 
   // Render EGL context - owned by factory, should not be allocated/destroyed
   // by VideoDecoder.
@@ -172,8 +173,6 @@ MediaCodecVideoDecoder::MediaCodecVideoDecoder(
       key_frame_required_(true),
       inited_(false),
       sw_fallback_required_(false),
-      surface_texture_(NULL),
-      previous_surface_texture_(NULL),
       codec_thread_(new Thread()),
       j_media_codec_video_decoder_class_(
           jni,
@@ -192,19 +191,22 @@ MediaCodecVideoDecoder::MediaCodecVideoDecoder(
   j_init_decode_method_ = GetMethodID(
       jni, *j_media_codec_video_decoder_class_, "initDecode",
       "(Lorg/webrtc/MediaCodecVideoDecoder$VideoCodecType;"
-      "IILjavax/microedition/khronos/egl/EGLContext;)Z");
+      "IILorg/webrtc/SurfaceTextureHelper;)Z");
   j_release_method_ =
       GetMethodID(jni, *j_media_codec_video_decoder_class_, "release", "()V");
   j_dequeue_input_buffer_method_ = GetMethodID(
       jni, *j_media_codec_video_decoder_class_, "dequeueInputBuffer", "()I");
   j_queue_input_buffer_method_ = GetMethodID(
       jni, *j_media_codec_video_decoder_class_, "queueInputBuffer", "(IIJ)Z");
-  j_dequeue_output_buffer_method_ = GetMethodID(
+  j_dequeue_byte_buffer_method_ = GetMethodID(
       jni, *j_media_codec_video_decoder_class_, "dequeueOutputBuffer",
-      "(I)Ljava/lang/Object;");
+      "(I)Lorg/webrtc/MediaCodecVideoDecoder$DecodedOutputBuffer;");
+  j_dequeue_texture_buffer_method_ = GetMethodID(
+      jni, *j_media_codec_video_decoder_class_, "dequeueTextureBuffer",
+      "(I)Lorg/webrtc/MediaCodecVideoDecoder$DecodedTextureBuffer;");
   j_return_decoded_byte_buffer_method_ =
       GetMethodID(jni, *j_media_codec_video_decoder_class_,
-                  "returnDecodedByteBuffer", "(I)V");
+                  "returnDecodedOutputBuffer", "(I)V");
 
   j_input_buffers_field_ = GetFieldID(
       jni, *j_media_codec_video_decoder_class_,
@@ -222,28 +224,32 @@ MediaCodecVideoDecoder::MediaCodecVideoDecoder(
       jni, *j_media_codec_video_decoder_class_, "stride", "I");
   j_slice_height_field_ = GetFieldID(
       jni, *j_media_codec_video_decoder_class_, "sliceHeight", "I");
-  j_surface_texture_field_ = GetFieldID(
-      jni, *j_media_codec_video_decoder_class_, "surfaceTexture",
-      "Landroid/graphics/SurfaceTexture;");
 
-  jclass j_decoder_decoded_texture_buffer_class = FindClass(jni,
+  jclass j_decoded_texture_buffer_class = FindClass(jni,
       "org/webrtc/MediaCodecVideoDecoder$DecodedTextureBuffer");
-  j_textureID_field_ = GetFieldID(
-      jni, j_decoder_decoded_texture_buffer_class, "textureID", "I");
-  j_texture_presentation_timestamp_us_field_ =
-      GetFieldID(jni, j_decoder_decoded_texture_buffer_class,
-                 "presentationTimestampUs", "J");
+  j_texture_id_field_ = GetFieldID(
+      jni, j_decoded_texture_buffer_class, "textureID", "I");
+  j_transform_matrix_field_ = GetFieldID(
+      jni, j_decoded_texture_buffer_class, "transformMatrix", "[F");
+  j_texture_presentation_timestamp_us_field_ = GetFieldID(
+      jni, j_decoded_texture_buffer_class, "presentationTimestampUs", "J");
+  j_texture_decode_time_ms_field_ = GetFieldID(
+      jni, j_decoded_texture_buffer_class, "decodeTimeMs", "J");
+  j_texture_frame_delay_ms_field_ = GetFieldID(
+      jni, j_decoded_texture_buffer_class, "frameDelayMs", "J");
 
-  jclass j_decoder_decoded_byte_buffer_class = FindClass(jni,
-      "org/webrtc/MediaCodecVideoDecoder$DecodedByteBuffer");
+  jclass j_decoded_output_buffer_class = FindClass(jni,
+      "org/webrtc/MediaCodecVideoDecoder$DecodedOutputBuffer");
   j_info_index_field_ = GetFieldID(
-      jni, j_decoder_decoded_byte_buffer_class, "index", "I");
+      jni, j_decoded_output_buffer_class, "index", "I");
   j_info_offset_field_ = GetFieldID(
-      jni, j_decoder_decoded_byte_buffer_class, "offset", "I");
+      jni, j_decoded_output_buffer_class, "offset", "I");
   j_info_size_field_ = GetFieldID(
-      jni, j_decoder_decoded_byte_buffer_class, "size", "I");
+      jni, j_decoded_output_buffer_class, "size", "I");
   j_info_presentation_timestamp_us_field_ = GetFieldID(
-      jni, j_decoder_decoded_byte_buffer_class, "presentationTimestampUs", "J");
+      jni, j_decoded_output_buffer_class, "presentationTimestampUs", "J");
+  j_byte_buffer_decode_time_ms_field_ = GetFieldID(
+      jni, j_decoded_output_buffer_class, "decodeTimeMs", "J");
 
   CHECK_EXCEPTION(jni) << "MediaCodecVideoDecoder ctor failed";
   use_surface_ = (render_egl_context_ != NULL);
@@ -255,14 +261,6 @@ MediaCodecVideoDecoder::MediaCodecVideoDecoder(
 MediaCodecVideoDecoder::~MediaCodecVideoDecoder() {
   // Call Release() to ensure no more callbacks to us after we are deleted.
   Release();
-  // Delete global references.
-  JNIEnv* jni = AttachCurrentThreadIfNeeded();
-  if (previous_surface_texture_ != NULL) {
-    jni->DeleteGlobalRef(previous_surface_texture_);
-  }
-  if (surface_texture_ != NULL) {
-    jni->DeleteGlobalRef(surface_texture_);
-  }
 }
 
 int32_t MediaCodecVideoDecoder::InitDecode(const VideoCodec* inst,
@@ -313,6 +311,11 @@ int32_t MediaCodecVideoDecoder::InitDecodeOnCodecThread() {
   frames_received_ = 0;
   frames_decoded_ = 0;
 
+  if (use_surface_) {
+    surface_texture_helper_ = new rtc::RefCountedObject<SurfaceTextureHelper>(
+        jni, render_egl_context_);
+  }
+
   jobject j_video_codec_enum = JavaEnumFromIndex(
       jni, "MediaCodecVideoDecoder$VideoCodecType", codecType_);
   bool success = jni->CallBooleanMethod(
@@ -321,7 +324,8 @@ int32_t MediaCodecVideoDecoder::InitDecodeOnCodecThread() {
       j_video_codec_enum,
       codec_.width,
       codec_.height,
-      use_surface_ ? render_egl_context_ : nullptr);
+      use_surface_ ? surface_texture_helper_->GetJavaSurfaceTextureHelper()
+                   : nullptr);
   if (CheckException(jni) || !success) {
     ALOGE << "Codec initialization error - fallback to SW codec.";
     sw_fallback_required_ = true;
@@ -348,7 +352,6 @@ int32_t MediaCodecVideoDecoder::InitDecodeOnCodecThread() {
   current_decoding_time_ms_ = 0;
   timestamps_.clear();
   ntp_times_ms_.clear();
-  frame_rtc_times_ms_.clear();
 
   jobjectArray input_buffers = (jobjectArray)GetObjectField(
       jni, *j_media_codec_video_decoder_, j_input_buffers_field_);
@@ -365,15 +368,6 @@ int32_t MediaCodecVideoDecoder::InitDecodeOnCodecThread() {
     }
   }
 
-  if (use_surface_) {
-    jobject surface_texture = GetObjectField(
-        jni, *j_media_codec_video_decoder_, j_surface_texture_field_);
-    if (previous_surface_texture_ != NULL) {
-      jni->DeleteGlobalRef(previous_surface_texture_);
-    }
-    previous_surface_texture_ = surface_texture_;
-    surface_texture_ = jni->NewGlobalRef(surface_texture);
-  }
   codec_thread_->PostDelayed(kMediaCodecPollMs, this);
 
   return WEBRTC_VIDEO_CODEC_OK;
@@ -399,6 +393,7 @@ int32_t MediaCodecVideoDecoder::ReleaseOnCodecThread() {
   }
   input_buffers_.clear();
   jni->CallVoidMethod(*j_media_codec_video_decoder_, j_release_method_);
+  surface_texture_helper_ = nullptr;
   inited_ = false;
   rtc::MessageQueueManager::Clear(this);
   if (CheckException(jni)) {
@@ -508,7 +503,7 @@ int32_t MediaCodecVideoDecoder::DecodeOnCodecThread(
   if (frames_received_ > frames_decoded_ + max_pending_frames_) {
     ALOGV("Received: %d. Decoded: %d. Wait for output...",
           frames_received_, frames_decoded_);
-    if (!DeliverPendingOutputs(jni, kMediaCodecTimeoutMs * 1000)) {
+    if (!DeliverPendingOutputs(jni, kMediaCodecTimeoutMs)) {
       ALOGE << "DeliverPendingOutputs error. Frames received: " <<
           frames_received_ << ". Frames decoded: " << frames_decoded_;
       return ProcessHWErrorOnCodecThread();
@@ -553,7 +548,6 @@ int32_t MediaCodecVideoDecoder::DecodeOnCodecThread(
   current_bytes_ += inputImage._length;
   timestamps_.push_back(inputImage._timeStamp);
   ntp_times_ms_.push_back(inputImage.ntp_time_ms_);
-  frame_rtc_times_ms_.push_back(GetCurrentTimeMs());
 
   // Feed input to decoder.
   bool success = jni->CallBooleanMethod(*j_media_codec_video_decoder_,
@@ -576,16 +570,18 @@ int32_t MediaCodecVideoDecoder::DecodeOnCodecThread(
 }
 
 bool MediaCodecVideoDecoder::DeliverPendingOutputs(
-    JNIEnv* jni, int dequeue_timeout_us) {
+    JNIEnv* jni, int dequeue_timeout_ms) {
   if (frames_received_ <= frames_decoded_) {
     // No need to query for output buffers - decoder is drained.
     return true;
   }
   // Get decoder output.
-  jobject j_decoder_output_buffer = jni->CallObjectMethod(
-      *j_media_codec_video_decoder_,
-      j_dequeue_output_buffer_method_,
-      dequeue_timeout_us);
+  jobject j_decoder_output_buffer =
+      jni->CallObjectMethod(*j_media_codec_video_decoder_,
+          use_surface_ ? j_dequeue_texture_buffer_method_
+                       : j_dequeue_byte_buffer_method_,
+          dequeue_timeout_ms);
+
   if (CheckException(jni)) {
     ALOGE << "dequeueOutputBuffer() error";
     return false;
@@ -605,19 +601,30 @@ bool MediaCodecVideoDecoder::DeliverPendingOutputs(
                                         j_slice_height_field_);
 
   rtc::scoped_refptr<webrtc::VideoFrameBuffer> frame_buffer;
-  long output_timestamps_ms = 0;
+  int64_t output_timestamps_ms = 0;
+  int decode_time_ms = 0;
+  int64_t frame_delayed_ms = 0;
   if (use_surface_) {
     // Extract data from Java DecodedTextureBuffer.
     const int texture_id =
-        GetIntField(jni, j_decoder_output_buffer, j_textureID_field_);
-    const int64_t timestamp_us =
-        GetLongField(jni, j_decoder_output_buffer,
-                     j_texture_presentation_timestamp_us_field_);
-    output_timestamps_ms = timestamp_us / rtc::kNumMicrosecsPerMillisec;
-    // Create webrtc::VideoFrameBuffer with native texture handle.
-    native_handle_.SetTextureObject(surface_texture_, texture_id);
-    frame_buffer = new rtc::RefCountedObject<JniNativeHandleBuffer>(
-        &native_handle_, width, height);
+        GetIntField(jni, j_decoder_output_buffer, j_texture_id_field_);
+    if (texture_id != 0) {  // |texture_id| == 0 represents a dropped frame.
+      const jfloatArray j_transform_matrix =
+          reinterpret_cast<jfloatArray>(GetObjectField(
+              jni, j_decoder_output_buffer, j_transform_matrix_field_));
+      const int64_t timestamp_us =
+          GetLongField(jni, j_decoder_output_buffer,
+                       j_texture_presentation_timestamp_us_field_);
+      output_timestamps_ms = timestamp_us / rtc::kNumMicrosecsPerMillisec;
+      decode_time_ms = GetLongField(jni, j_decoder_output_buffer,
+                                    j_texture_decode_time_ms_field_);
+      frame_delayed_ms = GetLongField(jni, j_decoder_output_buffer,
+                                      j_texture_frame_delay_ms_field_);
+
+      // Create webrtc::VideoFrameBuffer with native texture handle.
+      frame_buffer = surface_texture_helper_->CreateTextureFrame(
+          width, height, NativeHandleImpl(jni, texture_id, j_transform_matrix));
+    }
   } else {
     // Extract data from Java ByteBuffer and create output yuv420 frame -
     // for non surface decoding only.
@@ -630,6 +637,8 @@ bool MediaCodecVideoDecoder::DeliverPendingOutputs(
     const int64_t timestamp_us = GetLongField(
         jni, j_decoder_output_buffer, j_info_presentation_timestamp_us_field_);
     output_timestamps_ms = timestamp_us / rtc::kNumMicrosecsPerMillisec;
+    decode_time_ms = GetLongField(jni, j_decoder_output_buffer,
+                                  j_byte_buffer_decode_time_ms_field_);
 
     if (output_buffer_size < width * height * 3 / 2) {
       ALOGE << "Insufficient output buffer size: " << output_buffer_size;
@@ -687,7 +696,7 @@ bool MediaCodecVideoDecoder::DeliverPendingOutputs(
                         j_return_decoded_byte_buffer_method_,
                         output_buffer_index);
     if (CheckException(jni)) {
-      ALOGE << "returnDecodedByteBuffer error";
+      ALOGE << "returnDecodedOutputBuffer error";
       return false;
     }
   }
@@ -702,26 +711,24 @@ bool MediaCodecVideoDecoder::DeliverPendingOutputs(
     decoded_frame.set_ntp_time_ms(ntp_times_ms_.front());
     ntp_times_ms_.erase(ntp_times_ms_.begin());
   }
-  int64_t frame_decoding_time_ms = 0;
-  if (frame_rtc_times_ms_.size() > 0) {
-    frame_decoding_time_ms = GetCurrentTimeMs() - frame_rtc_times_ms_.front();
-    frame_rtc_times_ms_.erase(frame_rtc_times_ms_.begin());
-  }
   if (frames_decoded_ < kMaxDecodedLogFrames) {
     ALOGD << "Decoder frame out # " << frames_decoded_ << ". " << width <<
         " x " << height << ". " << stride << " x " << slice_height <<
         ". Color: " << color_format << ". TS:" << (int)output_timestamps_ms <<
-        ". DecTime: " << (int)frame_decoding_time_ms;
+        ". DecTime: " << (int)decode_time_ms <<
+        ". DelayTime: " << (int)frame_delayed_ms;
   }
 
   // Calculate and print decoding statistics - every 3 seconds.
   frames_decoded_++;
   current_frames_++;
-  current_decoding_time_ms_ += frame_decoding_time_ms;
+  current_decoding_time_ms_ += decode_time_ms;
   int statistic_time_ms = GetCurrentTimeMs() - start_time_ms_;
   if (statistic_time_ms >= kMediaCodecStatisticsIntervalMs &&
       current_frames_ > 0) {
-    ALOGD << "Decoded frames: " << frames_decoded_ << ". Bitrate: " <<
+    ALOGD << "Decoded frames: " << frames_decoded_ << ". Received frames: "
+        << frames_received_ << ". Bitrate: " <<
         (current_bytes_ * 8 / statistic_time_ms) << " kbps, fps: " <<
         ((current_frames_ * 1000 + statistic_time_ms / 2) / statistic_time_ms)
         << ". decTime: " << (current_decoding_time_ms_ / current_frames_) <<
|||||||
current_decoding_time_ms_ = 0;
|
current_decoding_time_ms_ = 0;
|
||||||
}
|
}
|
||||||
|
|
||||||
// Callback - output decoded frame.
|
// |.IsZeroSize())| returns true when a frame has been dropped.
|
||||||
const int32_t callback_status = callback_->Decoded(decoded_frame);
|
if (!decoded_frame.IsZeroSize()) {
|
||||||
if (callback_status > 0) {
|
// Callback - output decoded frame.
|
||||||
ALOGE << "callback error";
|
const int32_t callback_status =
|
||||||
|
callback_->Decoded(decoded_frame, decode_time_ms);
|
||||||
|
if (callback_status > 0) {
|
||||||
|
ALOGE << "callback error";
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
return true;
|
return true;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
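Note how the two sides now signal a dropped frame across the JNI boundary: Java reports a DecodedTextureBuffer whose textureID is zero, the C++ side then leaves frame_buffer null, so decoded_frame.IsZeroSize() is true and no callback is issued. A hedged sketch of the Java-side construction (the call site shown is hypothetical; the zero-textureID convention itself comes from the hunks above):

    // Hypothetical drop path: when too many output buffers are queued, release
    // the buffer unrendered and report it with textureID == 0, which the C++
    // side treats as "no frame" (decoded_frame.IsZeroSize()).
    mediaCodec.releaseOutputBuffer(buffer.index, false /* render */);
    DecodedTextureBuffer droppedFrameMarker = new DecodedTextureBuffer(
        0 /* textureID: zero marks a dropped frame */, null /* transformMatrix */,
        buffer.presentationTimestampUs, buffer.decodeTimeMs,
        SystemClock.elapsedRealtime() - buffer.endDecodeTimeMs /* frameDelay */);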
@@ -180,16 +180,10 @@ void AndroidVideoCapturerJni::OnMemoryBufferFrame(void* video_frame,
       buffer, rotation, timestamp_ns);
 }
 
-void AndroidVideoCapturerJni::OnTextureFrame(
-    int width,
-    int height,
-    int64_t timestamp_ns,
-    const NativeTextureHandleImpl& handle) {
-  // TODO(magjed): Fix this. See bug webrtc:4993.
-  RTC_NOTREACHED()
-      << "The rest of the stack for Android expects the native "
-         "handle to be a NativeHandleImpl with a SurfaceTexture, not a "
-         "NativeTextureHandleImpl";
+void AndroidVideoCapturerJni::OnTextureFrame(int width,
+                                             int height,
+                                             int64_t timestamp_ns,
+                                             const NativeHandleImpl& handle) {
   rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer(
       new rtc::RefCountedObject<AndroidTextureBuffer>(
           width, height, handle,
@@ -234,8 +228,8 @@ JOW(void, VideoCapturerAndroid_00024NativeObserver_nativeOnTextureFrameCaptured)
     jlong j_timestamp) {
   reinterpret_cast<AndroidVideoCapturerJni*>(j_capturer)
       ->OnTextureFrame(j_width, j_height, j_timestamp,
-                       NativeTextureHandleImpl(jni, j_oes_texture_id,
-                                               j_transform_matrix));
+                       NativeHandleImpl(jni, j_oes_texture_id,
+                                        j_transform_matrix));
 }
 
 JOW(void, VideoCapturerAndroid_00024NativeObserver_nativeCapturerStarted)
@@ -39,7 +39,7 @@
 
 namespace webrtc_jni {
 
-class NativeTextureHandleImpl;
+class NativeHandleImpl;
 
 // AndroidVideoCapturerJni implements AndroidVideoCapturerDelegate.
 // The purpose of the delegate is to hide the JNI specifics from the C++ only
@@ -61,7 +61,7 @@ class AndroidVideoCapturerJni : public webrtc::AndroidVideoCapturerDelegate {
   void OnMemoryBufferFrame(void* video_frame, int length, int width,
                            int height, int rotation, int64_t timestamp_ns);
   void OnTextureFrame(int width, int height, int64_t timestamp_ns,
-                      const NativeTextureHandleImpl& handle);
+                      const NativeHandleImpl& handle);
   void OnOutputFormatRequest(int width, int height, int fps);
 
  protected:
@@ -85,7 +85,7 @@ ClassReferenceHolder::ClassReferenceHolder(JNIEnv* jni) {
   LoadClass(jni, "org/webrtc/MediaCodecVideoEncoder$VideoCodecType");
   LoadClass(jni, "org/webrtc/MediaCodecVideoDecoder");
   LoadClass(jni, "org/webrtc/MediaCodecVideoDecoder$DecodedTextureBuffer");
-  LoadClass(jni, "org/webrtc/MediaCodecVideoDecoder$DecodedByteBuffer");
+  LoadClass(jni, "org/webrtc/MediaCodecVideoDecoder$DecodedOutputBuffer");
   LoadClass(jni, "org/webrtc/MediaCodecVideoDecoder$VideoCodecType");
   LoadClass(jni, "org/webrtc/SurfaceTextureHelper");
 #endif
@@ -31,9 +31,9 @@
 
 namespace webrtc_jni {
 
-NativeTextureHandleImpl::NativeTextureHandleImpl(JNIEnv* jni,
+NativeHandleImpl::NativeHandleImpl(JNIEnv* jni,
                                    jint j_oes_texture_id,
                                    jfloatArray j_transform_matrix)
     : oes_texture_id(j_oes_texture_id) {
   RTC_CHECK_EQ(16, jni->GetArrayLength(j_transform_matrix));
   jfloat* transform_matrix_ptr =
@@ -44,38 +44,10 @@ NativeTextureHandleImpl::NativeTextureHandleImpl(JNIEnv* jni,
   jni->ReleaseFloatArrayElements(j_transform_matrix, transform_matrix_ptr, 0);
 }
 
-NativeHandleImpl::NativeHandleImpl() : texture_object_(NULL), texture_id_(-1) {}
-
-void* NativeHandleImpl::GetHandle() {
-  return texture_object_;
-}
-
-int NativeHandleImpl::GetTextureId() {
-  return texture_id_;
-}
-
-void NativeHandleImpl::SetTextureObject(void* texture_object, int texture_id) {
-  texture_object_ = reinterpret_cast<jobject>(texture_object);
-  texture_id_ = texture_id;
-}
-
-JniNativeHandleBuffer::JniNativeHandleBuffer(void* native_handle,
-                                             int width,
-                                             int height)
-    : NativeHandleBuffer(native_handle, width, height) {}
-
-rtc::scoped_refptr<webrtc::VideoFrameBuffer>
-JniNativeHandleBuffer::NativeToI420Buffer() {
-  // TODO(pbos): Implement before using this in the encoder pipeline (or
-  // remove the RTC_CHECK() in VideoCapture).
-  RTC_NOTREACHED();
-  return nullptr;
-}
-
 AndroidTextureBuffer::AndroidTextureBuffer(
     int width,
     int height,
-    const NativeTextureHandleImpl& native_handle,
+    const NativeHandleImpl& native_handle,
     const rtc::Callback0<void>& no_longer_used)
     : webrtc::NativeHandleBuffer(&native_handle_, width, height),
       native_handle_(native_handle),
@@ -36,51 +36,26 @@
 namespace webrtc_jni {
 
 // Wrapper for texture object.
-struct NativeTextureHandleImpl {
-  NativeTextureHandleImpl(JNIEnv* jni,
+struct NativeHandleImpl {
+  NativeHandleImpl(JNIEnv* jni,
                    jint j_oes_texture_id,
                    jfloatArray j_transform_matrix);
 
   const int oes_texture_id;
   float sampling_matrix[16];
 };
 
-// Native handle for SurfaceTexture + texture id.
-class NativeHandleImpl {
- public:
-  NativeHandleImpl();
-
-  void* GetHandle();
-  int GetTextureId();
-  void SetTextureObject(void* texture_object, int texture_id);
-
- private:
-  jobject texture_object_;
-  int32_t texture_id_;
-};
-
-class JniNativeHandleBuffer : public webrtc::NativeHandleBuffer {
- public:
-  JniNativeHandleBuffer(void* native_handle, int width, int height);
-
-  // TODO(pbos): Override destructor to release native handle, at the moment the
-  // native handle is not released based on refcount.
-
- private:
-  rtc::scoped_refptr<webrtc::VideoFrameBuffer> NativeToI420Buffer() override;
-};
-
 class AndroidTextureBuffer : public webrtc::NativeHandleBuffer {
  public:
   AndroidTextureBuffer(int width,
                        int height,
-                       const NativeTextureHandleImpl& native_handle,
+                       const NativeHandleImpl& native_handle,
                        const rtc::Callback0<void>& no_longer_used);
   ~AndroidTextureBuffer();
   rtc::scoped_refptr<VideoFrameBuffer> NativeToI420Buffer() override;
 
  private:
-  NativeTextureHandleImpl native_handle_;
+  NativeHandleImpl native_handle_;
   rtc::Callback0<void> no_longer_used_cb_;
 };
 
@@ -773,7 +773,7 @@ class JavaVideoRendererWrapper : public VideoRendererInterface {
         jni, *j_frame_class_, "<init>", "(III[I[Ljava/nio/ByteBuffer;J)V")),
     j_texture_frame_ctor_id_(GetMethodID(
         jni, *j_frame_class_, "<init>",
-        "(IIILjava/lang/Object;IJ)V")),
+        "(IIII[FJ)V")),
     j_byte_buffer_class_(jni, FindClass(jni, "java/nio/ByteBuffer")) {
     CHECK_EXCEPTION(jni);
   }
@@ -829,13 +829,13 @@ class JavaVideoRendererWrapper : public VideoRendererInterface {
   jobject CricketToJavaTextureFrame(const cricket::VideoFrame* frame) {
     NativeHandleImpl* handle =
        reinterpret_cast<NativeHandleImpl*>(frame->GetNativeHandle());
-    jobject texture_object = reinterpret_cast<jobject>(handle->GetHandle());
-    int texture_id = handle->GetTextureId();
+    jfloatArray sampling_matrix = jni()->NewFloatArray(16);
+    jni()->SetFloatArrayRegion(sampling_matrix, 0, 16, handle->sampling_matrix);
     return jni()->NewObject(
         *j_frame_class_, j_texture_frame_ctor_id_,
         frame->GetWidth(), frame->GetHeight(),
         static_cast<int>(frame->GetVideoRotation()),
-        texture_object, texture_id, javaShallowCopy(frame));
+        handle->oes_texture_id, sampling_matrix, javaShallowCopy(frame));
   }
 
   JNIEnv* jni() {
@@ -70,7 +70,7 @@ void SurfaceTextureHelper::ReturnTextureFrame() const {
 
 rtc::scoped_refptr<webrtc::VideoFrameBuffer>
 SurfaceTextureHelper::CreateTextureFrame(int width, int height,
-    const NativeTextureHandleImpl& native_handle) {
+    const NativeHandleImpl& native_handle) {
   return new rtc::RefCountedObject<AndroidTextureBuffer>(
       width, height, native_handle,
       rtc::Bind(&SurfaceTextureHelper::ReturnTextureFrame, this));
@@ -66,7 +66,7 @@ class SurfaceTextureHelper : public rtc::RefCountInterface {
   rtc::scoped_refptr<webrtc::VideoFrameBuffer> CreateTextureFrame(
       int width,
      int height,
-      const NativeTextureHandleImpl& native_handle);
+      const NativeHandleImpl& native_handle);
 
  protected:
  ~SurfaceTextureHelper();
@@ -27,25 +27,24 @@
 
 package org.webrtc;
 
-import android.graphics.SurfaceTexture;
 import android.media.MediaCodec;
 import android.media.MediaCodecInfo;
 import android.media.MediaCodecInfo.CodecCapabilities;
 import android.media.MediaCodecList;
 import android.media.MediaFormat;
-import android.opengl.GLES11Ext;
-import android.opengl.GLES20;
 import android.os.Build;
+import android.os.SystemClock;
 import android.view.Surface;
 
 import org.webrtc.Logging;
 
 import java.nio.ByteBuffer;
 import java.util.Arrays;
+import java.util.LinkedList;
 import java.util.List;
 import java.util.concurrent.CountDownLatch;
-import javax.microedition.khronos.egl.EGLContext;
+import java.util.Queue;
+import java.util.concurrent.TimeUnit;
 
 // Java-side of peerconnection_jni.cc:MediaCodecVideoDecoder.
 // This class is an implementation detail of the Java PeerConnection API.
@@ -103,14 +102,21 @@ public class MediaCodecVideoDecoder {
   private int height;
   private int stride;
   private int sliceHeight;
+  private boolean hasDecodedFirstFrame;
+  private final Queue<Long> decodeStartTimeMs = new LinkedList<Long>();
   private boolean useSurface;
-  private int textureID = 0;
-  private SurfaceTexture surfaceTexture = null;
-  private Surface surface = null;
-  private EglBase eglBase;
 
-  private MediaCodecVideoDecoder() {
-  }
+  // The below variables are only used when decoding to a Surface.
+  private TextureListener textureListener;
+  // Max number of output buffers queued before starting to drop decoded frames.
+  private static final int MAX_QUEUED_OUTPUTBUFFERS = 3;
+  private int droppedFrames;
+  // |isWaitingForTexture| is true when waiting for the transition:
+  // MediaCodec.releaseOutputBuffer() -> onTextureFrameAvailable().
+  private boolean isWaitingForTexture;
+  private Surface surface = null;
+  private final Queue<DecodedOutputBuffer>
+      dequeuedSurfaceOutputBuffers = new LinkedList<DecodedOutputBuffer>();
 
   // MediaCodec error handler - invoked when critical error happens which may prevent
   // further use of media codec API. Now it means that one of media codec instances
@@ -222,12 +228,13 @@ public class MediaCodecVideoDecoder {
     }
   }
 
-  // Pass null in |sharedContext| to configure the codec for ByteBuffer output.
-  private boolean initDecode(VideoCodecType type, int width, int height, EGLContext sharedContext) {
+  // Pass null in |surfaceTextureHelper| to configure the codec for ByteBuffer output.
+  private boolean initDecode(
+      VideoCodecType type, int width, int height, SurfaceTextureHelper surfaceTextureHelper) {
     if (mediaCodecThread != null) {
       throw new RuntimeException("Forgot to release()?");
     }
-    useSurface = (sharedContext != null);
+    useSurface = (surfaceTextureHelper != null);
     String mime = null;
     String[] supportedCodecPrefixes = null;
     if (type == VideoCodecType.VIDEO_CODEC_VP8) {
@@ -249,9 +256,6 @@ public class MediaCodecVideoDecoder {
     Logging.d(TAG, "Java initDecode: " + type + " : "+ width + " x " + height +
        ". Color: 0x" + Integer.toHexString(properties.colorFormat) +
        ". Use Surface: " + useSurface);
-    if (sharedContext != null) {
-      Logging.d(TAG, "Decoder shared EGL Context: " + sharedContext);
-    }
     runningInstance = this; // Decoder is now running and can be queried for stack traces.
     mediaCodecThread = Thread.currentThread();
     try {
@@ -261,16 +265,8 @@ public class MediaCodecVideoDecoder {
       sliceHeight = height;
 
       if (useSurface) {
-        // Create shared EGL context.
-        eglBase = new EglBase(sharedContext, EglBase.ConfigType.PIXEL_BUFFER);
-        eglBase.createDummyPbufferSurface();
-        eglBase.makeCurrent();
-
-        // Create output surface
-        textureID = GlUtil.generateTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES);
-        Logging.d(TAG, "Video decoder TextureID = " + textureID);
-        surfaceTexture = new SurfaceTexture(textureID);
-        surface = new Surface(surfaceTexture);
+        textureListener = new TextureListener(surfaceTextureHelper);
+        surface = new Surface(surfaceTextureHelper.getSurfaceTexture());
       }
 
       MediaFormat format = MediaFormat.createVideoFormat(mime, width, height);
@@ -289,6 +285,11 @@ public class MediaCodecVideoDecoder {
       colorFormat = properties.colorFormat;
       outputBuffers = mediaCodec.getOutputBuffers();
      inputBuffers = mediaCodec.getInputBuffers();
+      decodeStartTimeMs.clear();
+      hasDecodedFirstFrame = false;
+      dequeuedSurfaceOutputBuffers.clear();
+      droppedFrames = 0;
+      isWaitingForTexture = false;
      Logging.d(TAG, "Input buffers: " + inputBuffers.length +
          ". Output buffers: " + outputBuffers.length);
      return true;
@@ -299,7 +300,7 @@ public class MediaCodecVideoDecoder {
   }
 
   private void release() {
-    Logging.d(TAG, "Java releaseDecoder");
+    Logging.d(TAG, "Java releaseDecoder. Total number of dropped frames: " + droppedFrames);
     checkOnMediaCodecThread();
 
     // Run Mediacodec stop() and release() on separate thread since sometime
@@ -337,11 +338,7 @@ public class MediaCodecVideoDecoder {
     if (useSurface) {
       surface.release();
       surface = null;
-      Logging.d(TAG, "Delete video decoder TextureID " + textureID);
-      GLES20.glDeleteTextures(1, new int[] {textureID}, 0);
-      textureID = 0;
-      eglBase.release();
-      eglBase = null;
+      textureListener.release();
     }
     Logging.d(TAG, "Java releaseDecoder done");
   }
@@ -364,6 +361,7 @@ public class MediaCodecVideoDecoder {
     try {
      inputBuffers[inputBufferIndex].position(0);
      inputBuffers[inputBufferIndex].limit(size);
+      decodeStartTimeMs.add(SystemClock.elapsedRealtime());
      mediaCodec.queueInputBuffer(inputBufferIndex, 0, size, timestampUs, 0);
      return true;
    }
@@ -373,57 +371,156 @@ public class MediaCodecVideoDecoder {
     }
   }

-  // Helper structs for dequeueOutputBuffer() below.
-  private static class DecodedByteBuffer {
-    public DecodedByteBuffer(int index, int offset, int size, long presentationTimestampUs) {
+  // Helper struct for dequeueOutputBuffer() below.
+  private static class DecodedOutputBuffer {
+    public DecodedOutputBuffer(int index, int offset, int size, long presentationTimestampUs,
+        long decodeTime, long endDecodeTime) {
       this.index = index;
       this.offset = offset;
       this.size = size;
       this.presentationTimestampUs = presentationTimestampUs;
+      this.decodeTimeMs = decodeTime;
+      this.endDecodeTimeMs = endDecodeTime;
     }

     private final int index;
     private final int offset;
     private final int size;
     private final long presentationTimestampUs;
+    // Number of ms it took to decode this frame.
+    private final long decodeTimeMs;
+    // System time when this frame finished decoding.
+    private final long endDecodeTimeMs;
   }

+  // Helper struct for dequeueTextureBuffer() below.
   private static class DecodedTextureBuffer {
     private final int textureID;
+    private final float[] transformMatrix;
     private final long presentationTimestampUs;
+    private final long decodeTimeMs;
+    // Interval from when the frame finished decoding until this buffer has been created.
+    // Since there is only one texture, this interval depends on the time from when
+    // a frame is decoded and provided to C++ and until that frame is returned to the MediaCodec
+    // so that the texture can be updated with the next decoded frame.
+    private final long frameDelayMs;

-    public DecodedTextureBuffer(int textureID, long presentationTimestampUs) {
+    // A DecodedTextureBuffer with zero |textureID| has special meaning and represents a frame
+    // that was dropped.
+    public DecodedTextureBuffer(int textureID, float[] transformMatrix,
+        long presentationTimestampUs, long decodeTimeMs, long frameDelay) {
       this.textureID = textureID;
+      this.transformMatrix = transformMatrix;
       this.presentationTimestampUs = presentationTimestampUs;
+      this.decodeTimeMs = decodeTimeMs;
+      this.frameDelayMs = frameDelay;
     }
   }
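The two new timing fields split a frame's journey in two: decodeTimeMs covers queueInputBuffer() to dequeueOutputBuffer(), while frameDelayMs covers the wait from endDecodeTimeMs until the texture for that frame becomes available. A worked example with made-up elapsedRealtime() values (FrameTimingExample is an illustrative name):

// Worked example of the two timing fields, using made-up clock values.
public class FrameTimingExample {
  public static void main(String[] args) {
    long queuedAtMs = 1000;        // decodeStartTimeMs entry, recorded at queueInputBuffer()
    long dequeuedAtMs = 1025;      // dequeueOutputBuffer() returns the frame (endDecodeTimeMs)
    long textureReadyAtMs = 1040;  // onTextureFrameAvailable() fires for this frame

    long decodeTimeMs = dequeuedAtMs - queuedAtMs;        // 25 ms inside the codec
    long frameDelayMs = textureReadyAtMs - dequeuedAtMs;  // 15 ms waiting for the texture
    System.out.println("decodeTimeMs=" + decodeTimeMs + " frameDelayMs=" + frameDelayMs);
  }
}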
-  // Returns null if no decoded buffer is available, and otherwise either a DecodedByteBuffer or
-  // DecodedTexturebuffer depending on |useSurface| configuration.
+  // Poll based texture listener.
+  private static class TextureListener
+      implements SurfaceTextureHelper.OnTextureFrameAvailableListener {
+    public static class TextureInfo {
+      private final int textureID;
+      private final float[] transformMatrix;
+
+      TextureInfo(int textureId, float[] transformMatrix) {
+        this.textureID = textureId;
+        this.transformMatrix = transformMatrix;
+      }
+    }
+    private final SurfaceTextureHelper surfaceTextureHelper;
+    private TextureInfo textureInfo;
+    // |newFrameLock| is used to synchronize arrival of new frames with wait()/notifyAll().
+    private final Object newFrameLock = new Object();
+
+    public TextureListener(SurfaceTextureHelper surfaceTextureHelper) {
+      this.surfaceTextureHelper = surfaceTextureHelper;
+      surfaceTextureHelper.setListener(this);
+    }
+
+    // Callback from |surfaceTextureHelper|. May be called on an arbitrary thread.
+    @Override
+    public void onTextureFrameAvailable(
+        int oesTextureId, float[] transformMatrix, long timestampNs) {
+      synchronized (newFrameLock) {
+        if (textureInfo != null) {
+          Logging.e(TAG,
+              "Unexpected onTextureFrameAvailable() called while already holding a texture.");
+          throw new IllegalStateException("Already holding a texture.");
+        }
+        // |timestampNs| is always zero on some Android versions.
+        textureInfo = new TextureInfo(oesTextureId, transformMatrix);
+        newFrameLock.notifyAll();
+      }
+    }
+
+    // Dequeues and returns a TextureInfo if available, or null otherwise.
+    public TextureInfo dequeueTextureInfo(int timeoutMs) {
+      synchronized (newFrameLock) {
+        if (textureInfo == null && timeoutMs > 0) {
+          try {
+            newFrameLock.wait(timeoutMs);
+          } catch(InterruptedException e) {
+            // Restore the interrupted status by reinterrupting the thread.
+            Thread.currentThread().interrupt();
+          }
+        }
+        TextureInfo returnedInfo = textureInfo;
+        textureInfo = null;
+        return returnedInfo;
+      }
+    }
+
+    public void release() {
+      // SurfaceTextureHelper.disconnect() will block until any onTextureFrameAvailable() in
+      // progress is done. Therefore, the call to disconnect() must be outside any synchronized
+      // statement that is also used in the onTextureFrameAvailable() above to avoid deadlocks.
+      surfaceTextureHelper.disconnect();
+      synchronized (newFrameLock) {
+        if (textureInfo != null) {
+          surfaceTextureHelper.returnTextureFrame();
+          textureInfo = null;
+        }
+      }
+    }
+  }
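TextureListener is a single-slot handoff: the SurfaceTexture callback thread deposits at most one TextureInfo and calls notifyAll(), while the decoder thread polls with a bounded wait(). The same pattern in isolation, as a generic sketch (SingleSlotHandoff and Item are illustrative names, not part of the patch):

// Sketch of the single-slot, poll-based handoff used by TextureListener:
// one producer thread deposits at most one item; a consumer polls with a timeout.
public class SingleSlotHandoff<Item> {
  private final Object lock = new Object();
  private Item slot; // at most one pending item, like |textureInfo|

  // Producer side, e.g. onTextureFrameAvailable(): fails loudly if the slot is occupied.
  public void deposit(Item item) {
    synchronized (lock) {
      if (slot != null) {
        throw new IllegalStateException("Already holding an item.");
      }
      slot = item;
      lock.notifyAll();
    }
  }

  // Consumer side, e.g. dequeueTextureInfo(): waits up to timeoutMs, then returns
  // whatever is there (possibly null), clearing the slot.
  public Item poll(long timeoutMs) {
    synchronized (lock) {
      if (slot == null && timeoutMs > 0) {
        try {
          lock.wait(timeoutMs);
        } catch (InterruptedException e) {
          Thread.currentThread().interrupt(); // preserve interrupt status, as the patch does
        }
      }
      Item taken = slot;
      slot = null;
      return taken;
    }
  }
}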
+  // Returns null if no decoded buffer is available, and otherwise a DecodedByteBuffer.
   // Throws IllegalStateException if call is made on the wrong thread, if color format changes to an
   // unsupported format, or if |mediaCodec| is not in the Executing state. Throws CodecException
   // upon codec error.
-  private Object dequeueOutputBuffer(int dequeueTimeoutUs)
-      throws IllegalStateException, MediaCodec.CodecException {
+  private DecodedOutputBuffer dequeueOutputBuffer(int dequeueTimeoutMs) {
     checkOnMediaCodecThread();
+    if (decodeStartTimeMs.isEmpty()) {
+      return null;
+    }
     // Drain the decoder until receiving a decoded buffer or hitting
     // MediaCodec.INFO_TRY_AGAIN_LATER.
     final MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
     while (true) {
-      final int result = mediaCodec.dequeueOutputBuffer(info, dequeueTimeoutUs);
+      final int result = mediaCodec.dequeueOutputBuffer(
+          info, TimeUnit.MILLISECONDS.toMicros(dequeueTimeoutMs));
       switch (result) {
-        case MediaCodec.INFO_TRY_AGAIN_LATER:
-          return null;
         case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
           outputBuffers = mediaCodec.getOutputBuffers();
           Logging.d(TAG, "Decoder output buffers changed: " + outputBuffers.length);
+          if (hasDecodedFirstFrame) {
+            throw new RuntimeException("Unexpected output buffer change event.");
+          }
           break;
         case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
           MediaFormat format = mediaCodec.getOutputFormat();
           Logging.d(TAG, "Decoder format changed: " + format.toString());
+          int new_width = format.getInteger(MediaFormat.KEY_WIDTH);
+          int new_height = format.getInteger(MediaFormat.KEY_HEIGHT);
+          if (hasDecodedFirstFrame && (new_width != width || new_height != height)) {
+            throw new RuntimeException("Unexpected size change. Configured " + width + "*" +
+                height + ". New " + new_width + "*" + new_height);
+          }
           width = format.getInteger(MediaFormat.KEY_WIDTH);
           height = format.getInteger(MediaFormat.KEY_HEIGHT);

           if (!useSurface && format.containsKey(MediaFormat.KEY_COLOR_FORMAT)) {
             colorFormat = format.getInteger(MediaFormat.KEY_COLOR_FORMAT);
             Logging.d(TAG, "Color: 0x" + Integer.toHexString(colorFormat));

@@ -441,30 +538,87 @@ public class MediaCodecVideoDecoder {
           stride = Math.max(width, stride);
           sliceHeight = Math.max(height, sliceHeight);
           break;
+        case MediaCodec.INFO_TRY_AGAIN_LATER:
+          return null;
         default:
-          // Output buffer decoded.
-          if (useSurface) {
-            mediaCodec.releaseOutputBuffer(result, true /* render */);
-            // TODO(magjed): Wait for SurfaceTexture.onFrameAvailable() before returning a texture
-            // frame.
-            return new DecodedTextureBuffer(textureID, info.presentationTimeUs);
-          } else {
-            return new DecodedByteBuffer(result, info.offset, info.size, info.presentationTimeUs);
-          }
+          hasDecodedFirstFrame = true;
+          return new DecodedOutputBuffer(result, info.offset, info.size, info.presentationTimeUs,
+              SystemClock.elapsedRealtime() - decodeStartTimeMs.remove(),
+              SystemClock.elapsedRealtime());
       }
     }
   }

+  // Returns null if no decoded buffer is available, and otherwise a DecodedTextureBuffer.
+  // Throws IllegalStateException if call is made on the wrong thread, if color format changes to an
+  // unsupported format, or if |mediaCodec| is not in the Executing state. Throws CodecException
+  // upon codec error.
+  private DecodedTextureBuffer dequeueTextureBuffer(int dequeueTimeoutMs) {
+    checkOnMediaCodecThread();
+    if (!useSurface) {
+      throw new IllegalStateException("dequeueTexture() called for byte buffer decoding.");
+    }
+
+    DecodedOutputBuffer outputBuffer = dequeueOutputBuffer(dequeueTimeoutMs);
+    if (outputBuffer != null) {
+      if (dequeuedSurfaceOutputBuffers.size() >= Math.min(
+          MAX_QUEUED_OUTPUTBUFFERS, outputBuffers.length)) {
+        ++droppedFrames;
+        Logging.w(TAG, "Too many output buffers. Dropping frame. Total number of dropped frames: "
+            + droppedFrames);
+        // Drop the newest frame. Don't drop the oldest since if |isWaitingForTexture|
+        // releaseOutputBuffer has already been called. Dropping the newest frame will lead to a
+        // shift of timestamps by one frame in MediaCodecVideoDecoder::DeliverPendingOutputs.
+        mediaCodec.releaseOutputBuffer(outputBuffer.index, false /* render */);
+        return new DecodedTextureBuffer(0, null, outputBuffer.presentationTimestampUs,
+            outputBuffer.decodeTimeMs,
+            SystemClock.elapsedRealtime() - outputBuffer.endDecodeTimeMs);
+      }
+      dequeuedSurfaceOutputBuffers.add(outputBuffer);
+    }
+
+    if (dequeuedSurfaceOutputBuffers.isEmpty()) {
+      return null;
+    }
+
+    if (!isWaitingForTexture) {
+      // Get the first frame in the queue and render to the decoder output surface.
+      mediaCodec.releaseOutputBuffer(dequeuedSurfaceOutputBuffers.peek().index, true /* render */);
+      isWaitingForTexture = true;
+    }
+
+    // We are waiting for a frame to be rendered to the decoder surface.
+    // Check if it is ready now by waiting max |dequeueTimeoutMs|. There can only be one frame
+    // rendered at a time.
+    TextureListener.TextureInfo info = textureListener.dequeueTextureInfo(dequeueTimeoutMs);
+    if (info != null) {
+      isWaitingForTexture = false;
+      final DecodedOutputBuffer renderedBuffer =
+          dequeuedSurfaceOutputBuffers.remove();
+      if (!dequeuedSurfaceOutputBuffers.isEmpty()) {
+        // Get the next frame in the queue and render to the decoder output surface.
+        mediaCodec.releaseOutputBuffer(
+            dequeuedSurfaceOutputBuffers.peek().index, true /* render */);
+        isWaitingForTexture = true;
+      }
+
+      return new DecodedTextureBuffer(info.textureID, info.transformMatrix,
+          renderedBuffer.presentationTimestampUs, renderedBuffer.decodeTimeMs,
+          SystemClock.elapsedRealtime() - renderedBuffer.endDecodeTimeMs);
+    }
+    return null;
+  }
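dequeueTextureBuffer() keeps exactly one frame in flight toward the shared SurfaceTexture: decoded buffers wait in dequeuedSurfaceOutputBuffers, one at a time is released for rendering, and on overflow the newest buffer is dropped because the oldest may already have been released to the surface. A stand-alone sketch of that policy, decoupled from MediaCodec (SurfaceFrameQueue and Frame are illustrative names):

import java.util.ArrayDeque;
import java.util.Deque;

// Sketch of the one-frame-in-flight policy behind dequeueTextureBuffer():
// decoded buffers wait in a queue, exactly one is rendered to the surface at a
// time, and on overflow the *newest* buffer is dropped (the oldest may already
// be in flight toward the texture).
public class SurfaceFrameQueue<Frame> {
  private final int maxQueued;
  private final Deque<Frame> pending = new ArrayDeque<>();
  private boolean waitingForTexture = false;
  private int droppedFrames = 0;

  public SurfaceFrameQueue(int maxQueued) {
    this.maxQueued = maxQueued;
  }

  // Returns false if the frame was dropped because too many are already pending.
  public boolean offer(Frame frame) {
    if (pending.size() >= maxQueued) {
      droppedFrames++; // drop the newest, keeping timestamps aligned for older frames
      return false;
    }
    pending.add(frame);
    return true;
  }

  // Returns the frame to render to the surface now, or null if one is already in flight.
  public Frame frameToRender() {
    if (waitingForTexture || pending.isEmpty()) {
      return null;
    }
    waitingForTexture = true;
    return pending.peek();
  }

  // Called when the texture for the in-flight frame has arrived; hands back that frame.
  public Frame onTextureArrived() {
    waitingForTexture = false;
    return pending.remove();
  }

  public int droppedFrames() {
    return droppedFrames;
  }
}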
   // Release a dequeued output byte buffer back to the codec for re-use. Should only be called for
   // non-surface decoding.
   // Throws IllegalStateException if the call is made on the wrong thread, if codec is configured
   // for surface decoding, or if |mediaCodec| is not in the Executing state. Throws
   // MediaCodec.CodecException upon codec error.
-  private void returnDecodedByteBuffer(int index)
+  private void returnDecodedOutputBuffer(int index)
       throws IllegalStateException, MediaCodec.CodecException {
     checkOnMediaCodecThread();
     if (useSurface) {
-      throw new IllegalStateException("returnDecodedByteBuffer() called for surface decoding.");
+      throw new IllegalStateException("returnDecodedOutputBuffer() called for surface decoding.");
     }
     mediaCodec.releaseOutputBuffer(index, false /* render */);
   }

@@ -46,7 +46,11 @@ public class VideoRenderer {
     public final int[] yuvStrides;
     public ByteBuffer[] yuvPlanes;
     public final boolean yuvFrame;
-    public Object textureObject;
+    // Matrix that transforms standard coordinates to their proper sampling locations in
+    // the texture. This transform compensates for any properties of the video source that
+    // cause it to appear different from a normalized texture. This matrix does not take
+    // |rotationDegree| into account.
+    public final float[] samplingMatrix;
     public int textureId;
     // Frame pointer in C++.
     private long nativeFramePointer;

@@ -70,19 +74,27 @@ public class VideoRenderer {
       if (rotationDegree % 90 != 0) {
         throw new IllegalArgumentException("Rotation degree not multiple of 90: " + rotationDegree);
       }
+      // The convention in WebRTC is that the first element in a ByteBuffer corresponds to the
+      // top-left corner of the image, but in glTexImage2D() the first element corresponds to the
+      // bottom-left corner. This discrepancy is corrected by setting a vertical flip as sampling
+      // matrix.
+      samplingMatrix = new float[] {
+          1, 0, 0, 0,
+          0, -1, 0, 0,
+          0, 0, 1, 0,
+          0, 1, 0, 1};
     }

     /**
      * Construct a texture frame of the given dimensions with data in SurfaceTexture
      */
-    I420Frame(
-        int width, int height, int rotationDegree,
-        Object textureObject, int textureId, long nativeFramePointer) {
+    I420Frame(int width, int height, int rotationDegree, int textureId, float[] samplingMatrix,
+        long nativeFramePointer) {
       this.width = width;
       this.height = height;
       this.yuvStrides = null;
       this.yuvPlanes = null;
-      this.textureObject = textureObject;
+      this.samplingMatrix = samplingMatrix;
       this.textureId = textureId;
       this.yuvFrame = false;
       this.rotationDegree = rotationDegree;
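The samplingMatrix literal added above is a column-major 4x4 transform in the layout android.opengl.Matrix uses; applied to homogeneous texture coordinates (u, v, 0, 1) it yields (u, 1 - v, 0, 1), which is exactly the vertical flip the comment describes. A quick self-contained check (SamplingMatrixCheck is an illustrative name):

// Quick check that the new samplingMatrix maps (u, v) to (u, 1 - v).
// Column-major 4x4 multiply, the same layout OpenGL and android.opengl.Matrix use.
public class SamplingMatrixCheck {
  static float[] multiply(float[] m, float[] v) {
    float[] out = new float[4];
    for (int row = 0; row < 4; row++) {
      for (int col = 0; col < 4; col++) {
        out[row] += m[col * 4 + row] * v[col];
      }
    }
    return out;
  }

  public static void main(String[] args) {
    float[] samplingMatrix = {
        1, 0, 0, 0,
        0, -1, 0, 0,
        0, 0, 1, 0,
        0, 1, 0, 1};
    float[] uv = {0.25f, 0.75f, 0, 1};
    float[] flipped = multiply(samplingMatrix, uv);
    System.out.println(flipped[0] + ", " + flipped[1]); // 0.25, 0.25 -- v is flipped
  }
}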
@@ -125,7 +137,6 @@ public class VideoRenderer {
      */
     public static void renderFrameDone(I420Frame frame) {
       frame.yuvPlanes = null;
-      frame.textureObject = null;
       frame.textureId = 0;
       if (frame.nativeFramePointer != 0) {
         releaseNativeFrame(frame.nativeFramePointer);