Revert of Android MediaCodecVideoDecoder: Manage lifetime of texture frames (patchset #12 id:320001 of https://codereview.webrtc.org/1422963003/ )

Reason for revert:
Causes fallback to SW decoder if a renderer is put in the background.

Original issue's description:
> Patchset 1 is a pure
> revert of "Revert of "Android MediaCodecVideoDecoder: Manage lifetime of texture frames" https://codereview.webrtc.org/1378033003/
>
> Following patchsets move the responsibility of calculating the decode time to Java.
>
> TESTED= Apprtc loopback using H264 and VP8 on N5, N6, N7, S5
>
> Committed: https://crrev.com/9cb8982e64f08d3d630bf7c3d2bcc78c10db88e2
> Cr-Commit-Position: refs/heads/master@{#10597}

TBR=magjed@webrtc.org,glaznev@webrtc.org
NOPRESUBMIT=true
NOTREECHECKS=true

Review URL: https://codereview.webrtc.org/1441363002 .

Cr-Commit-Position: refs/heads/master@{#10637}
Author: Per
Date: 2015-11-13 16:58:26 +01:00
parent f8506cbdd8
commit c01c25434b
14 changed files with 269 additions and 350 deletions


@ -107,8 +107,9 @@ public class VideoCapturerAndroidTest extends ActivityTestCase {
VideoCapturerAndroidTestFixtures.startCapturerAndRender(capturer); VideoCapturerAndroidTestFixtures.startCapturerAndRender(capturer);
} }
// TODO(perkj): Enable once VideoCapture to texture support has landed in C++.
@SmallTest @SmallTest
public void testStartVideoCapturerUsingTextures() throws InterruptedException { public void DISABLED_testStartVideoCapturerUsingTextures() throws InterruptedException {
VideoCapturerAndroid capturer = VideoCapturerAndroid capturer =
VideoCapturerAndroid.create("", null, EGL10.EGL_NO_CONTEXT); VideoCapturerAndroid.create("", null, EGL10.EGL_NO_CONTEXT);
VideoCapturerAndroidTestFixtures.startCapturerAndRender(capturer); VideoCapturerAndroidTestFixtures.startCapturerAndRender(capturer);
@ -149,8 +150,9 @@ public class VideoCapturerAndroidTest extends ActivityTestCase {
VideoCapturerAndroidTestFixtures.switchCamera(capturer); VideoCapturerAndroidTestFixtures.switchCamera(capturer);
} }
// TODO(perkj): Enable once VideoCapture to texture support has landed in C++.
@SmallTest @SmallTest
public void testSwitchVideoCapturerUsingTextures() throws InterruptedException { public void DISABLED_testSwitchVideoCapturerUsingTextures() throws InterruptedException {
VideoCapturerAndroid capturer = VideoCapturerAndroid.create("", null, EGL10.EGL_NO_CONTEXT); VideoCapturerAndroid capturer = VideoCapturerAndroid.create("", null, EGL10.EGL_NO_CONTEXT);
VideoCapturerAndroidTestFixtures.switchCamera(capturer); VideoCapturerAndroidTestFixtures.switchCamera(capturer);
} }
@ -192,8 +194,9 @@ public class VideoCapturerAndroidTest extends ActivityTestCase {
VideoCapturerAndroidTestFixtures.stopRestartVideoSource(capturer); VideoCapturerAndroidTestFixtures.stopRestartVideoSource(capturer);
} }
// TODO(perkj): Enable once VideoCapture to texture support has landed in C++.
@SmallTest @SmallTest
public void testStopRestartVideoSourceUsingTextures() throws InterruptedException { public void DISABLED_testStopRestartVideoSourceUsingTextures() throws InterruptedException {
VideoCapturerAndroid capturer = VideoCapturerAndroid.create("", null, EGL10.EGL_NO_CONTEXT); VideoCapturerAndroid capturer = VideoCapturerAndroid.create("", null, EGL10.EGL_NO_CONTEXT);
VideoCapturerAndroidTestFixtures.stopRestartVideoSource(capturer); VideoCapturerAndroidTestFixtures.stopRestartVideoSource(capturer);
} }
@ -282,8 +285,9 @@ public class VideoCapturerAndroidTest extends ActivityTestCase {
VideoCapturerAndroidTestFixtures.returnBufferLateEndToEnd(capturer); VideoCapturerAndroidTestFixtures.returnBufferLateEndToEnd(capturer);
} }
// TODO(perkj): Enable once VideoCapture to texture support has landed in C++.
@MediumTest @MediumTest
public void testReturnBufferLateEndToEndUsingTextures() throws InterruptedException { public void DISABLED_testReturnBufferLateEndToEndUsingTextures() throws InterruptedException {
final VideoCapturerAndroid capturer = final VideoCapturerAndroid capturer =
VideoCapturerAndroid.create("", null, EGL10.EGL_NO_CONTEXT); VideoCapturerAndroid.create("", null, EGL10.EGL_NO_CONTEXT);
VideoCapturerAndroidTestFixtures.returnBufferLateEndToEnd(capturer); VideoCapturerAndroidTestFixtures.returnBufferLateEndToEnd(capturer);
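The hunks above re-disable the texture-path capturer tests by renaming them with a DISABLED_ prefix: the JUnit3-style Android instrumentation runner only executes public no-argument methods whose names start with "test", so the renamed methods are skipped without deleting the code. A minimal sketch of that convention (class and fixture names are illustrative, not from this change):

import android.test.ActivityTestCase;
import android.test.suitebuilder.annotation.SmallTest;

public class ExampleCapturerTest extends ActivityTestCase {
  // Runs: the method name starts with "test".
  @SmallTest
  public void testStartCapturerUsingByteBuffers() throws InterruptedException {
    // Exercise the ByteBuffer capture path here.
  }

  // Skipped by the instrumentation runner: the name no longer starts with "test".
  // TODO: re-enable once the texture path is supported end-to-end.
  @SmallTest
  public void DISABLED_testStartCapturerUsingTextures() throws InterruptedException {
    // Exercise the texture capture path here.
  }
}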


@ -30,7 +30,9 @@ package org.webrtc;
import android.content.Context; import android.content.Context;
import android.content.res.Resources.NotFoundException; import android.content.res.Resources.NotFoundException;
import android.graphics.Point; import android.graphics.Point;
import android.graphics.SurfaceTexture;
import android.opengl.GLES20; import android.opengl.GLES20;
import android.opengl.Matrix;
import android.os.Handler; import android.os.Handler;
import android.os.HandlerThread; import android.os.HandlerThread;
import android.util.AttributeSet; import android.util.AttributeSet;
@ -455,10 +457,25 @@ public class SurfaceViewRenderer extends SurfaceView
} }
final long startTimeNs = System.nanoTime(); final long startTimeNs = System.nanoTime();
final float[] samplingMatrix;
if (frame.yuvFrame) {
// The convention in WebRTC is that the first element in a ByteBuffer corresponds to the
// top-left corner of the image, but in glTexImage2D() the first element corresponds to the
// bottom-left corner. We correct this discrepancy by setting a vertical flip as sampling
// matrix.
samplingMatrix = RendererCommon.verticalFlipMatrix();
} else {
// TODO(magjed): Move updateTexImage() to the video source instead.
SurfaceTexture surfaceTexture = (SurfaceTexture) frame.textureObject;
surfaceTexture.updateTexImage();
samplingMatrix = new float[16];
surfaceTexture.getTransformMatrix(samplingMatrix);
}
final float[] texMatrix; final float[] texMatrix;
synchronized (layoutLock) { synchronized (layoutLock) {
final float[] rotatedSamplingMatrix = final float[] rotatedSamplingMatrix =
RendererCommon.rotateTextureMatrix(frame.samplingMatrix, frame.rotationDegree); RendererCommon.rotateTextureMatrix(samplingMatrix, frame.rotationDegree);
final float[] layoutMatrix = RendererCommon.getLayoutMatrix( final float[] layoutMatrix = RendererCommon.getLayoutMatrix(
mirror, frameAspectRatio(), (float) layoutWidth / layoutHeight); mirror, frameAspectRatio(), (float) layoutWidth / layoutHeight);
texMatrix = RendererCommon.multiplyMatrices(rotatedSamplingMatrix, layoutMatrix); texMatrix = RendererCommon.multiplyMatrices(rotatedSamplingMatrix, layoutMatrix);
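The SurfaceViewRenderer change above moves the sampling-matrix choice back into the renderer: YUV frames get a vertical flip (a ByteBuffer's first element is the top-left pixel, while glTexImage2D() treats the first element as bottom-left), whereas texture frames take the transform reported by the SurfaceTexture after updateTexImage(). A condensed sketch of that decision, assuming the RendererCommon helpers shown in the diff (the wrapper class and method name are illustrative):

import android.graphics.SurfaceTexture;
import org.webrtc.RendererCommon;
import org.webrtc.VideoRenderer;

// Illustrative helper: picks the sampling matrix for a frame, before rotation is applied.
final class SamplingMatrixHelper {
  static float[] samplingMatrixFor(VideoRenderer.I420Frame frame) {
    if (frame.yuvFrame) {
      // I420 planes are uploaded with glTexImage2D(): the ByteBuffer starts at the top-left
      // pixel but GL treats the first texel as bottom-left, so compensate with a vertical flip.
      return RendererCommon.verticalFlipMatrix();
    }
    // Texture frame: latch the newest image and ask the SurfaceTexture how to sample it.
    // updateTexImage() must run on the thread that owns the GL context of the texture.
    SurfaceTexture surfaceTexture = (SurfaceTexture) frame.textureObject;
    surfaceTexture.updateTexImage();
    final float[] samplingMatrix = new float[16];
    surfaceTexture.getTransformMatrix(samplingMatrix);
    return samplingMatrix;
  }
}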


@ -38,6 +38,7 @@ import javax.microedition.khronos.opengles.GL10;
import android.annotation.SuppressLint; import android.annotation.SuppressLint;
import android.graphics.Point; import android.graphics.Point;
import android.graphics.Rect; import android.graphics.Rect;
import android.graphics.SurfaceTexture;
import android.opengl.GLES20; import android.opengl.GLES20;
import android.opengl.GLSurfaceView; import android.opengl.GLSurfaceView;
@ -240,15 +241,29 @@ public class VideoRendererGui implements GLSurfaceView.Renderer {
} }
if (isNewFrame) { if (isNewFrame) {
rotatedSamplingMatrix = RendererCommon.rotateTextureMatrix(
pendingFrame.samplingMatrix, pendingFrame.rotationDegree);
if (pendingFrame.yuvFrame) { if (pendingFrame.yuvFrame) {
rendererType = RendererType.RENDERER_YUV; rendererType = RendererType.RENDERER_YUV;
drawer.uploadYuvData(yuvTextures, pendingFrame.width, pendingFrame.height, drawer.uploadYuvData(yuvTextures, pendingFrame.width, pendingFrame.height,
pendingFrame.yuvStrides, pendingFrame.yuvPlanes); pendingFrame.yuvStrides, pendingFrame.yuvPlanes);
// The convention in WebRTC is that the first element in a ByteBuffer corresponds to the
// top-left corner of the image, but in glTexImage2D() the first element corresponds to
// the bottom-left corner. We correct this discrepancy by setting a vertical flip as
// sampling matrix.
final float[] samplingMatrix = RendererCommon.verticalFlipMatrix();
rotatedSamplingMatrix =
RendererCommon.rotateTextureMatrix(samplingMatrix, pendingFrame.rotationDegree);
} else { } else {
rendererType = RendererType.RENDERER_TEXTURE; rendererType = RendererType.RENDERER_TEXTURE;
// External texture rendering. Make a deep copy of the external texture. // External texture rendering. Update texture image to latest and make a deep copy of
// the external texture.
// TODO(magjed): Move updateTexImage() to the video source instead.
final SurfaceTexture surfaceTexture = (SurfaceTexture) pendingFrame.textureObject;
surfaceTexture.updateTexImage();
final float[] samplingMatrix = new float[16];
surfaceTexture.getTransformMatrix(samplingMatrix);
rotatedSamplingMatrix =
RendererCommon.rotateTextureMatrix(samplingMatrix, pendingFrame.rotationDegree);
// Reallocate offscreen texture if necessary. // Reallocate offscreen texture if necessary.
textureCopy.setSize(pendingFrame.rotatedWidth(), pendingFrame.rotatedHeight()); textureCopy.setSize(pendingFrame.rotatedWidth(), pendingFrame.rotatedHeight());
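In both renderers the per-frame sampling matrix is then composed with the frame rotation and the view's layout matrix before drawing. A small sketch of that composition using the RendererCommon helpers referenced in the diff (the wrapper class is illustrative):

import org.webrtc.RendererCommon;

// Illustrative helper: composes the final texture matrix used to draw one frame.
final class TexMatrixHelper {
  static float[] finalTexMatrix(float[] samplingMatrix, int rotationDegree, boolean mirror,
      float frameAspectRatio, float layoutAspectRatio) {
    // Apply the frame rotation (a multiple of 90 degrees) on top of the sampling matrix.
    final float[] rotatedSamplingMatrix =
        RendererCommon.rotateTextureMatrix(samplingMatrix, rotationDegree);
    // Scale/crop so the rotated frame fills the layout with the requested mirroring.
    final float[] layoutMatrix =
        RendererCommon.getLayoutMatrix(mirror, frameAspectRatio, layoutAspectRatio);
    return RendererCommon.multiplyMatrices(rotatedSamplingMatrix, layoutMatrix);
  }
}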


@ -33,7 +33,6 @@
#include "talk/app/webrtc/java/jni/androidmediacodeccommon.h" #include "talk/app/webrtc/java/jni/androidmediacodeccommon.h"
#include "talk/app/webrtc/java/jni/classreferenceholder.h" #include "talk/app/webrtc/java/jni/classreferenceholder.h"
#include "talk/app/webrtc/java/jni/native_handle_impl.h" #include "talk/app/webrtc/java/jni/native_handle_impl.h"
#include "talk/app/webrtc/java/jni/surfacetexturehelper_jni.h"
#include "webrtc/base/bind.h" #include "webrtc/base/bind.h"
#include "webrtc/base/checks.h" #include "webrtc/base/checks.h"
#include "webrtc/base/logging.h" #include "webrtc/base/logging.h"
@ -113,7 +112,7 @@ class MediaCodecVideoDecoder : public webrtc::VideoDecoder,
bool use_surface_; bool use_surface_;
VideoCodec codec_; VideoCodec codec_;
webrtc::I420BufferPool decoded_frame_pool_; webrtc::I420BufferPool decoded_frame_pool_;
rtc::scoped_refptr<SurfaceTextureHelper> surface_texture_helper_; NativeHandleImpl native_handle_;
DecodedImageCallback* callback_; DecodedImageCallback* callback_;
int frames_received_; // Number of frames received by decoder. int frames_received_; // Number of frames received by decoder.
int frames_decoded_; // Number of frames decoded by decoder. int frames_decoded_; // Number of frames decoded by decoder.
@ -124,6 +123,8 @@ class MediaCodecVideoDecoder : public webrtc::VideoDecoder,
uint32_t max_pending_frames_; // Maximum number of pending input frames uint32_t max_pending_frames_; // Maximum number of pending input frames
std::vector<int32_t> timestamps_; std::vector<int32_t> timestamps_;
std::vector<int64_t> ntp_times_ms_; std::vector<int64_t> ntp_times_ms_;
std::vector<int64_t> frame_rtc_times_ms_; // Time when video frame is sent to
// decoder input.
// State that is constant for the lifetime of this object once the ctor // State that is constant for the lifetime of this object once the ctor
// returns. // returns.
@ -134,8 +135,7 @@ class MediaCodecVideoDecoder : public webrtc::VideoDecoder,
jmethodID j_release_method_; jmethodID j_release_method_;
jmethodID j_dequeue_input_buffer_method_; jmethodID j_dequeue_input_buffer_method_;
jmethodID j_queue_input_buffer_method_; jmethodID j_queue_input_buffer_method_;
jmethodID j_dequeue_byte_buffer_method_; jmethodID j_dequeue_output_buffer_method_;
jmethodID j_dequeue_texture_buffer_method_;
jmethodID j_return_decoded_byte_buffer_method_; jmethodID j_return_decoded_byte_buffer_method_;
// MediaCodecVideoDecoder fields. // MediaCodecVideoDecoder fields.
jfieldID j_input_buffers_field_; jfieldID j_input_buffers_field_;
@ -145,21 +145,20 @@ class MediaCodecVideoDecoder : public webrtc::VideoDecoder,
jfieldID j_height_field_; jfieldID j_height_field_;
jfieldID j_stride_field_; jfieldID j_stride_field_;
jfieldID j_slice_height_field_; jfieldID j_slice_height_field_;
jfieldID j_surface_texture_field_;
// MediaCodecVideoDecoder.DecodedTextureBuffer fields. // MediaCodecVideoDecoder.DecodedTextureBuffer fields.
jfieldID j_texture_id_field_; jfieldID j_textureID_field_;
jfieldID j_transform_matrix_field_;
jfieldID j_texture_presentation_timestamp_us_field_; jfieldID j_texture_presentation_timestamp_us_field_;
jfieldID j_texture_decode_time_ms_field_; // MediaCodecVideoDecoder.DecodedByteBuffer fields.
jfieldID j_texture_frame_delay_ms_field_;
// MediaCodecVideoDecoder.DecodedOutputBuffer fields.
jfieldID j_info_index_field_; jfieldID j_info_index_field_;
jfieldID j_info_offset_field_; jfieldID j_info_offset_field_;
jfieldID j_info_size_field_; jfieldID j_info_size_field_;
jfieldID j_info_presentation_timestamp_us_field_; jfieldID j_info_presentation_timestamp_us_field_;
jfieldID j_byte_buffer_decode_time_ms_field_;
// Global references; must be deleted in Release(). // Global references; must be deleted in Release().
std::vector<jobject> input_buffers_; std::vector<jobject> input_buffers_;
jobject surface_texture_;
jobject previous_surface_texture_;
// Render EGL context - owned by factory, should not be allocated/destroyed // Render EGL context - owned by factory, should not be allocated/destroyed
// by VideoDecoder. // by VideoDecoder.
@ -173,6 +172,8 @@ MediaCodecVideoDecoder::MediaCodecVideoDecoder(
key_frame_required_(true), key_frame_required_(true),
inited_(false), inited_(false),
sw_fallback_required_(false), sw_fallback_required_(false),
surface_texture_(NULL),
previous_surface_texture_(NULL),
codec_thread_(new Thread()), codec_thread_(new Thread()),
j_media_codec_video_decoder_class_( j_media_codec_video_decoder_class_(
jni, jni,
@ -191,22 +192,19 @@ MediaCodecVideoDecoder::MediaCodecVideoDecoder(
j_init_decode_method_ = GetMethodID( j_init_decode_method_ = GetMethodID(
jni, *j_media_codec_video_decoder_class_, "initDecode", jni, *j_media_codec_video_decoder_class_, "initDecode",
"(Lorg/webrtc/MediaCodecVideoDecoder$VideoCodecType;" "(Lorg/webrtc/MediaCodecVideoDecoder$VideoCodecType;"
"IILorg/webrtc/SurfaceTextureHelper;)Z"); "IILjavax/microedition/khronos/egl/EGLContext;)Z");
j_release_method_ = j_release_method_ =
GetMethodID(jni, *j_media_codec_video_decoder_class_, "release", "()V"); GetMethodID(jni, *j_media_codec_video_decoder_class_, "release", "()V");
j_dequeue_input_buffer_method_ = GetMethodID( j_dequeue_input_buffer_method_ = GetMethodID(
jni, *j_media_codec_video_decoder_class_, "dequeueInputBuffer", "()I"); jni, *j_media_codec_video_decoder_class_, "dequeueInputBuffer", "()I");
j_queue_input_buffer_method_ = GetMethodID( j_queue_input_buffer_method_ = GetMethodID(
jni, *j_media_codec_video_decoder_class_, "queueInputBuffer", "(IIJ)Z"); jni, *j_media_codec_video_decoder_class_, "queueInputBuffer", "(IIJ)Z");
j_dequeue_byte_buffer_method_ = GetMethodID( j_dequeue_output_buffer_method_ = GetMethodID(
jni, *j_media_codec_video_decoder_class_, "dequeueOutputBuffer", jni, *j_media_codec_video_decoder_class_, "dequeueOutputBuffer",
"(I)Lorg/webrtc/MediaCodecVideoDecoder$DecodedOutputBuffer;"); "(I)Ljava/lang/Object;");
j_dequeue_texture_buffer_method_ = GetMethodID(
jni, *j_media_codec_video_decoder_class_, "dequeueTextureBuffer",
"(I)Lorg/webrtc/MediaCodecVideoDecoder$DecodedTextureBuffer;");
j_return_decoded_byte_buffer_method_ = j_return_decoded_byte_buffer_method_ =
GetMethodID(jni, *j_media_codec_video_decoder_class_, GetMethodID(jni, *j_media_codec_video_decoder_class_,
"returnDecodedOutputBuffer", "(I)V"); "returnDecodedByteBuffer", "(I)V");
j_input_buffers_field_ = GetFieldID( j_input_buffers_field_ = GetFieldID(
jni, *j_media_codec_video_decoder_class_, jni, *j_media_codec_video_decoder_class_,
@ -224,32 +222,28 @@ MediaCodecVideoDecoder::MediaCodecVideoDecoder(
jni, *j_media_codec_video_decoder_class_, "stride", "I"); jni, *j_media_codec_video_decoder_class_, "stride", "I");
j_slice_height_field_ = GetFieldID( j_slice_height_field_ = GetFieldID(
jni, *j_media_codec_video_decoder_class_, "sliceHeight", "I"); jni, *j_media_codec_video_decoder_class_, "sliceHeight", "I");
j_surface_texture_field_ = GetFieldID(
jni, *j_media_codec_video_decoder_class_, "surfaceTexture",
"Landroid/graphics/SurfaceTexture;");
jclass j_decoded_texture_buffer_class = FindClass(jni, jclass j_decoder_decoded_texture_buffer_class = FindClass(jni,
"org/webrtc/MediaCodecVideoDecoder$DecodedTextureBuffer"); "org/webrtc/MediaCodecVideoDecoder$DecodedTextureBuffer");
j_texture_id_field_ = GetFieldID( j_textureID_field_ = GetFieldID(
jni, j_decoded_texture_buffer_class, "textureID", "I"); jni, j_decoder_decoded_texture_buffer_class, "textureID", "I");
j_transform_matrix_field_ = GetFieldID( j_texture_presentation_timestamp_us_field_ =
jni, j_decoded_texture_buffer_class, "transformMatrix", "[F"); GetFieldID(jni, j_decoder_decoded_texture_buffer_class,
j_texture_presentation_timestamp_us_field_ = GetFieldID( "presentationTimestampUs", "J");
jni, j_decoded_texture_buffer_class, "presentationTimestampUs", "J");
j_texture_decode_time_ms_field_ = GetFieldID(
jni, j_decoded_texture_buffer_class, "decodeTimeMs", "J");
j_texture_frame_delay_ms_field_ = GetFieldID(
jni, j_decoded_texture_buffer_class, "frameDelayMs", "J");
jclass j_decoded_output_buffer_class = FindClass(jni, jclass j_decoder_decoded_byte_buffer_class = FindClass(jni,
"org/webrtc/MediaCodecVideoDecoder$DecodedOutputBuffer"); "org/webrtc/MediaCodecVideoDecoder$DecodedByteBuffer");
j_info_index_field_ = GetFieldID( j_info_index_field_ = GetFieldID(
jni, j_decoded_output_buffer_class, "index", "I"); jni, j_decoder_decoded_byte_buffer_class, "index", "I");
j_info_offset_field_ = GetFieldID( j_info_offset_field_ = GetFieldID(
jni, j_decoded_output_buffer_class, "offset", "I"); jni, j_decoder_decoded_byte_buffer_class, "offset", "I");
j_info_size_field_ = GetFieldID( j_info_size_field_ = GetFieldID(
jni, j_decoded_output_buffer_class, "size", "I"); jni, j_decoder_decoded_byte_buffer_class, "size", "I");
j_info_presentation_timestamp_us_field_ = GetFieldID( j_info_presentation_timestamp_us_field_ = GetFieldID(
jni, j_decoded_output_buffer_class, "presentationTimestampUs", "J"); jni, j_decoder_decoded_byte_buffer_class, "presentationTimestampUs", "J");
j_byte_buffer_decode_time_ms_field_ = GetFieldID(
jni, j_decoded_output_buffer_class, "decodeTimeMs", "J");
CHECK_EXCEPTION(jni) << "MediaCodecVideoDecoder ctor failed"; CHECK_EXCEPTION(jni) << "MediaCodecVideoDecoder ctor failed";
use_surface_ = (render_egl_context_ != NULL); use_surface_ = (render_egl_context_ != NULL);
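The constructor hunks above cache jmethodID/jfieldID handles using JNI type descriptors, and the revert swaps the initDecode descriptor back to the EGLContext-based Java API. For reference, the two descriptors correspond roughly to the following Java signatures (a sketch of the descriptor-to-signature mapping only; the stand-in class, enum, and Object parameter are illustrative, not the real WebRTC types):

import javax.microedition.khronos.egl.EGLContext;

// Illustrates how the JNI descriptors in the C++ constructor map onto Java signatures:
// L<class>; denotes an object parameter, I an int, and the trailing Z a boolean return.
final class JniDescriptorExample {
  enum VideoCodecType { VIDEO_CODEC_VP8, VIDEO_CODEC_H264 }

  // "(Lorg/webrtc/MediaCodecVideoDecoder$VideoCodecType;IILorg/webrtc/SurfaceTextureHelper;)Z"
  // (the signature being reverted away; Object stands in for SurfaceTextureHelper here).
  boolean initDecode(VideoCodecType type, int width, int height, Object surfaceTextureHelper) {
    return false;
  }

  // "(Lorg/webrtc/MediaCodecVideoDecoder$VideoCodecType;IILjavax/microedition/khronos/egl/EGLContext;)Z"
  // (the signature being restored by the revert).
  boolean initDecode(VideoCodecType type, int width, int height, EGLContext sharedContext) {
    return false;
  }
}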
@ -261,6 +255,14 @@ MediaCodecVideoDecoder::MediaCodecVideoDecoder(
MediaCodecVideoDecoder::~MediaCodecVideoDecoder() { MediaCodecVideoDecoder::~MediaCodecVideoDecoder() {
// Call Release() to ensure no more callbacks to us after we are deleted. // Call Release() to ensure no more callbacks to us after we are deleted.
Release(); Release();
// Delete global references.
JNIEnv* jni = AttachCurrentThreadIfNeeded();
if (previous_surface_texture_ != NULL) {
jni->DeleteGlobalRef(previous_surface_texture_);
}
if (surface_texture_ != NULL) {
jni->DeleteGlobalRef(surface_texture_);
}
} }
int32_t MediaCodecVideoDecoder::InitDecode(const VideoCodec* inst, int32_t MediaCodecVideoDecoder::InitDecode(const VideoCodec* inst,
@ -311,11 +313,6 @@ int32_t MediaCodecVideoDecoder::InitDecodeOnCodecThread() {
frames_received_ = 0; frames_received_ = 0;
frames_decoded_ = 0; frames_decoded_ = 0;
if (use_surface_) {
surface_texture_helper_ = new rtc::RefCountedObject<SurfaceTextureHelper>(
jni, render_egl_context_);
}
jobject j_video_codec_enum = JavaEnumFromIndex( jobject j_video_codec_enum = JavaEnumFromIndex(
jni, "MediaCodecVideoDecoder$VideoCodecType", codecType_); jni, "MediaCodecVideoDecoder$VideoCodecType", codecType_);
bool success = jni->CallBooleanMethod( bool success = jni->CallBooleanMethod(
@ -324,8 +321,7 @@ int32_t MediaCodecVideoDecoder::InitDecodeOnCodecThread() {
j_video_codec_enum, j_video_codec_enum,
codec_.width, codec_.width,
codec_.height, codec_.height,
use_surface_ ? surface_texture_helper_->GetJavaSurfaceTextureHelper() use_surface_ ? render_egl_context_ : nullptr);
: nullptr);
if (CheckException(jni) || !success) { if (CheckException(jni) || !success) {
ALOGE << "Codec initialization error - fallback to SW codec."; ALOGE << "Codec initialization error - fallback to SW codec.";
sw_fallback_required_ = true; sw_fallback_required_ = true;
@ -352,6 +348,7 @@ int32_t MediaCodecVideoDecoder::InitDecodeOnCodecThread() {
current_decoding_time_ms_ = 0; current_decoding_time_ms_ = 0;
timestamps_.clear(); timestamps_.clear();
ntp_times_ms_.clear(); ntp_times_ms_.clear();
frame_rtc_times_ms_.clear();
jobjectArray input_buffers = (jobjectArray)GetObjectField( jobjectArray input_buffers = (jobjectArray)GetObjectField(
jni, *j_media_codec_video_decoder_, j_input_buffers_field_); jni, *j_media_codec_video_decoder_, j_input_buffers_field_);
@ -368,6 +365,15 @@ int32_t MediaCodecVideoDecoder::InitDecodeOnCodecThread() {
} }
} }
if (use_surface_) {
jobject surface_texture = GetObjectField(
jni, *j_media_codec_video_decoder_, j_surface_texture_field_);
if (previous_surface_texture_ != NULL) {
jni->DeleteGlobalRef(previous_surface_texture_);
}
previous_surface_texture_ = surface_texture_;
surface_texture_ = jni->NewGlobalRef(surface_texture);
}
codec_thread_->PostDelayed(kMediaCodecPollMs, this); codec_thread_->PostDelayed(kMediaCodecPollMs, this);
return WEBRTC_VIDEO_CODEC_OK; return WEBRTC_VIDEO_CODEC_OK;
@ -393,7 +399,6 @@ int32_t MediaCodecVideoDecoder::ReleaseOnCodecThread() {
} }
input_buffers_.clear(); input_buffers_.clear();
jni->CallVoidMethod(*j_media_codec_video_decoder_, j_release_method_); jni->CallVoidMethod(*j_media_codec_video_decoder_, j_release_method_);
surface_texture_helper_ = nullptr;
inited_ = false; inited_ = false;
rtc::MessageQueueManager::Clear(this); rtc::MessageQueueManager::Clear(this);
if (CheckException(jni)) { if (CheckException(jni)) {
@ -503,7 +508,7 @@ int32_t MediaCodecVideoDecoder::DecodeOnCodecThread(
if (frames_received_ > frames_decoded_ + max_pending_frames_) { if (frames_received_ > frames_decoded_ + max_pending_frames_) {
ALOGV("Received: %d. Decoded: %d. Wait for output...", ALOGV("Received: %d. Decoded: %d. Wait for output...",
frames_received_, frames_decoded_); frames_received_, frames_decoded_);
if (!DeliverPendingOutputs(jni, kMediaCodecTimeoutMs)) { if (!DeliverPendingOutputs(jni, kMediaCodecTimeoutMs * 1000)) {
ALOGE << "DeliverPendingOutputs error. Frames received: " << ALOGE << "DeliverPendingOutputs error. Frames received: " <<
frames_received_ << ". Frames decoded: " << frames_decoded_; frames_received_ << ". Frames decoded: " << frames_decoded_;
return ProcessHWErrorOnCodecThread(); return ProcessHWErrorOnCodecThread();
@ -548,6 +553,7 @@ int32_t MediaCodecVideoDecoder::DecodeOnCodecThread(
current_bytes_ += inputImage._length; current_bytes_ += inputImage._length;
timestamps_.push_back(inputImage._timeStamp); timestamps_.push_back(inputImage._timeStamp);
ntp_times_ms_.push_back(inputImage.ntp_time_ms_); ntp_times_ms_.push_back(inputImage.ntp_time_ms_);
frame_rtc_times_ms_.push_back(GetCurrentTimeMs());
// Feed input to decoder. // Feed input to decoder.
bool success = jni->CallBooleanMethod(*j_media_codec_video_decoder_, bool success = jni->CallBooleanMethod(*j_media_codec_video_decoder_,
@ -570,18 +576,16 @@ int32_t MediaCodecVideoDecoder::DecodeOnCodecThread(
} }
bool MediaCodecVideoDecoder::DeliverPendingOutputs( bool MediaCodecVideoDecoder::DeliverPendingOutputs(
JNIEnv* jni, int dequeue_timeout_ms) { JNIEnv* jni, int dequeue_timeout_us) {
if (frames_received_ <= frames_decoded_) { if (frames_received_ <= frames_decoded_) {
// No need to query for output buffers - decoder is drained. // No need to query for output buffers - decoder is drained.
return true; return true;
} }
// Get decoder output. // Get decoder output.
jobject j_decoder_output_buffer = jobject j_decoder_output_buffer = jni->CallObjectMethod(
jni->CallObjectMethod(*j_media_codec_video_decoder_, *j_media_codec_video_decoder_,
use_surface_ ? j_dequeue_texture_buffer_method_ j_dequeue_output_buffer_method_,
: j_dequeue_byte_buffer_method_, dequeue_timeout_us);
dequeue_timeout_ms);
if (CheckException(jni)) { if (CheckException(jni)) {
ALOGE << "dequeueOutputBuffer() error"; ALOGE << "dequeueOutputBuffer() error";
return false; return false;
@ -601,30 +605,19 @@ bool MediaCodecVideoDecoder::DeliverPendingOutputs(
j_slice_height_field_); j_slice_height_field_);
rtc::scoped_refptr<webrtc::VideoFrameBuffer> frame_buffer; rtc::scoped_refptr<webrtc::VideoFrameBuffer> frame_buffer;
int64_t output_timestamps_ms = 0; long output_timestamps_ms = 0;
int decode_time_ms = 0;
int64_t frame_delayed_ms = 0;
if (use_surface_) { if (use_surface_) {
// Extract data from Java DecodedTextureBuffer. // Extract data from Java DecodedTextureBuffer.
const int texture_id = const int texture_id =
GetIntField(jni, j_decoder_output_buffer, j_texture_id_field_); GetIntField(jni, j_decoder_output_buffer, j_textureID_field_);
if (texture_id != 0) { // |texture_id| == 0 represents a dropped frame.
const jfloatArray j_transform_matrix =
reinterpret_cast<jfloatArray>(GetObjectField(
jni, j_decoder_output_buffer, j_transform_matrix_field_));
const int64_t timestamp_us = const int64_t timestamp_us =
GetLongField(jni, j_decoder_output_buffer, GetLongField(jni, j_decoder_output_buffer,
j_texture_presentation_timestamp_us_field_); j_texture_presentation_timestamp_us_field_);
output_timestamps_ms = timestamp_us / rtc::kNumMicrosecsPerMillisec; output_timestamps_ms = timestamp_us / rtc::kNumMicrosecsPerMillisec;
decode_time_ms = GetLongField(jni, j_decoder_output_buffer,
j_texture_decode_time_ms_field_);
frame_delayed_ms = GetLongField(jni, j_decoder_output_buffer,
j_texture_frame_delay_ms_field_);
// Create webrtc::VideoFrameBuffer with native texture handle. // Create webrtc::VideoFrameBuffer with native texture handle.
frame_buffer = surface_texture_helper_->CreateTextureFrame( native_handle_.SetTextureObject(surface_texture_, texture_id);
width, height, NativeHandleImpl(jni, texture_id, j_transform_matrix)); frame_buffer = new rtc::RefCountedObject<JniNativeHandleBuffer>(
} &native_handle_, width, height);
} else { } else {
// Extract data from Java ByteBuffer and create output yuv420 frame - // Extract data from Java ByteBuffer and create output yuv420 frame -
// for non surface decoding only. // for non surface decoding only.
@ -637,8 +630,6 @@ bool MediaCodecVideoDecoder::DeliverPendingOutputs(
const int64_t timestamp_us = GetLongField( const int64_t timestamp_us = GetLongField(
jni, j_decoder_output_buffer, j_info_presentation_timestamp_us_field_); jni, j_decoder_output_buffer, j_info_presentation_timestamp_us_field_);
output_timestamps_ms = timestamp_us / rtc::kNumMicrosecsPerMillisec; output_timestamps_ms = timestamp_us / rtc::kNumMicrosecsPerMillisec;
decode_time_ms = GetLongField(jni, j_decoder_output_buffer,
j_byte_buffer_decode_time_ms_field_);
if (output_buffer_size < width * height * 3 / 2) { if (output_buffer_size < width * height * 3 / 2) {
ALOGE << "Insufficient output buffer size: " << output_buffer_size; ALOGE << "Insufficient output buffer size: " << output_buffer_size;
@ -696,7 +687,7 @@ bool MediaCodecVideoDecoder::DeliverPendingOutputs(
j_return_decoded_byte_buffer_method_, j_return_decoded_byte_buffer_method_,
output_buffer_index); output_buffer_index);
if (CheckException(jni)) { if (CheckException(jni)) {
ALOGE << "returnDecodedOutputBuffer error"; ALOGE << "returnDecodedByteBuffer error";
return false; return false;
} }
} }
@ -711,24 +702,26 @@ bool MediaCodecVideoDecoder::DeliverPendingOutputs(
decoded_frame.set_ntp_time_ms(ntp_times_ms_.front()); decoded_frame.set_ntp_time_ms(ntp_times_ms_.front());
ntp_times_ms_.erase(ntp_times_ms_.begin()); ntp_times_ms_.erase(ntp_times_ms_.begin());
} }
int64_t frame_decoding_time_ms = 0;
if (frame_rtc_times_ms_.size() > 0) {
frame_decoding_time_ms = GetCurrentTimeMs() - frame_rtc_times_ms_.front();
frame_rtc_times_ms_.erase(frame_rtc_times_ms_.begin());
}
if (frames_decoded_ < kMaxDecodedLogFrames) { if (frames_decoded_ < kMaxDecodedLogFrames) {
ALOGD << "Decoder frame out # " << frames_decoded_ << ". " << width << ALOGD << "Decoder frame out # " << frames_decoded_ << ". " << width <<
" x " << height << ". " << stride << " x " << slice_height << " x " << height << ". " << stride << " x " << slice_height <<
". Color: " << color_format << ". TS:" << (int)output_timestamps_ms << ". Color: " << color_format << ". TS:" << (int)output_timestamps_ms <<
". DecTime: " << (int)decode_time_ms << ". DecTime: " << (int)frame_decoding_time_ms;
". DelayTime: " << (int)frame_delayed_ms;
} }
// Calculate and print decoding statistics - every 3 seconds. // Calculate and print decoding statistics - every 3 seconds.
frames_decoded_++; frames_decoded_++;
current_frames_++; current_frames_++;
current_decoding_time_ms_ += decode_time_ms; current_decoding_time_ms_ += frame_decoding_time_ms;
int statistic_time_ms = GetCurrentTimeMs() - start_time_ms_; int statistic_time_ms = GetCurrentTimeMs() - start_time_ms_;
if (statistic_time_ms >= kMediaCodecStatisticsIntervalMs && if (statistic_time_ms >= kMediaCodecStatisticsIntervalMs &&
current_frames_ > 0) { current_frames_ > 0) {
ALOGD << "Decoded frames: " << frames_decoded_ << ". Received frames: " ALOGD << "Decoded frames: " << frames_decoded_ << ". Bitrate: " <<
<< frames_received_ << ". Bitrate: " <<
(current_bytes_ * 8 / statistic_time_ms) << " kbps, fps: " << (current_bytes_ * 8 / statistic_time_ms) << " kbps, fps: " <<
((current_frames_ * 1000 + statistic_time_ms / 2) / statistic_time_ms) ((current_frames_ * 1000 + statistic_time_ms / 2) / statistic_time_ms)
<< ". decTime: " << (current_decoding_time_ms_ / current_frames_) << << ". decTime: " << (current_decoding_time_ms_ / current_frames_) <<
@ -739,15 +732,12 @@ bool MediaCodecVideoDecoder::DeliverPendingOutputs(
current_decoding_time_ms_ = 0; current_decoding_time_ms_ = 0;
} }
// |.IsZeroSize())| returns true when a frame has been dropped.
if (!decoded_frame.IsZeroSize()) {
// Callback - output decoded frame. // Callback - output decoded frame.
const int32_t callback_status = const int32_t callback_status = callback_->Decoded(decoded_frame);
callback_->Decoded(decoded_frame, decode_time_ms);
if (callback_status > 0) { if (callback_status > 0) {
ALOGE << "callback error"; ALOGE << "callback error";
} }
}
return true; return true;
} }


@ -180,10 +180,16 @@ void AndroidVideoCapturerJni::OnMemoryBufferFrame(void* video_frame,
buffer, rotation, timestamp_ns); buffer, rotation, timestamp_ns);
} }
void AndroidVideoCapturerJni::OnTextureFrame(int width, void AndroidVideoCapturerJni::OnTextureFrame(
int width,
int height, int height,
int64_t timestamp_ns, int64_t timestamp_ns,
const NativeHandleImpl& handle) { const NativeTextureHandleImpl& handle) {
// TODO(magjed): Fix this. See bug webrtc:4993.
RTC_NOTREACHED()
<< "The rest of the stack for Android expects the native "
"handle to be a NativeHandleImpl with a SurfaceTexture, not a "
"NativeTextureHandleImpl";
rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer( rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer(
new rtc::RefCountedObject<AndroidTextureBuffer>( new rtc::RefCountedObject<AndroidTextureBuffer>(
width, height, handle, width, height, handle,
@ -228,7 +234,7 @@ JOW(void, VideoCapturerAndroid_00024NativeObserver_nativeOnTextureFrameCaptured)
jlong j_timestamp) { jlong j_timestamp) {
reinterpret_cast<AndroidVideoCapturerJni*>(j_capturer) reinterpret_cast<AndroidVideoCapturerJni*>(j_capturer)
->OnTextureFrame(j_width, j_height, j_timestamp, ->OnTextureFrame(j_width, j_height, j_timestamp,
NativeHandleImpl(jni, j_oes_texture_id, NativeTextureHandleImpl(jni, j_oes_texture_id,
j_transform_matrix)); j_transform_matrix));
} }


@ -39,7 +39,7 @@
namespace webrtc_jni { namespace webrtc_jni {
class NativeHandleImpl; class NativeTextureHandleImpl;
// AndroidVideoCapturerJni implements AndroidVideoCapturerDelegate. // AndroidVideoCapturerJni implements AndroidVideoCapturerDelegate.
// The purpose of the delegate is to hide the JNI specifics from the C++ only // The purpose of the delegate is to hide the JNI specifics from the C++ only
@ -61,7 +61,7 @@ class AndroidVideoCapturerJni : public webrtc::AndroidVideoCapturerDelegate {
void OnMemoryBufferFrame(void* video_frame, int length, int width, void OnMemoryBufferFrame(void* video_frame, int length, int width,
int height, int rotation, int64_t timestamp_ns); int height, int rotation, int64_t timestamp_ns);
void OnTextureFrame(int width, int height, int64_t timestamp_ns, void OnTextureFrame(int width, int height, int64_t timestamp_ns,
const NativeHandleImpl& handle); const NativeTextureHandleImpl& handle);
void OnOutputFormatRequest(int width, int height, int fps); void OnOutputFormatRequest(int width, int height, int fps);
protected: protected:


@ -85,7 +85,7 @@ ClassReferenceHolder::ClassReferenceHolder(JNIEnv* jni) {
LoadClass(jni, "org/webrtc/MediaCodecVideoEncoder$VideoCodecType"); LoadClass(jni, "org/webrtc/MediaCodecVideoEncoder$VideoCodecType");
LoadClass(jni, "org/webrtc/MediaCodecVideoDecoder"); LoadClass(jni, "org/webrtc/MediaCodecVideoDecoder");
LoadClass(jni, "org/webrtc/MediaCodecVideoDecoder$DecodedTextureBuffer"); LoadClass(jni, "org/webrtc/MediaCodecVideoDecoder$DecodedTextureBuffer");
LoadClass(jni, "org/webrtc/MediaCodecVideoDecoder$DecodedOutputBuffer"); LoadClass(jni, "org/webrtc/MediaCodecVideoDecoder$DecodedByteBuffer");
LoadClass(jni, "org/webrtc/MediaCodecVideoDecoder$VideoCodecType"); LoadClass(jni, "org/webrtc/MediaCodecVideoDecoder$VideoCodecType");
LoadClass(jni, "org/webrtc/SurfaceTextureHelper"); LoadClass(jni, "org/webrtc/SurfaceTextureHelper");
#endif #endif


@ -31,7 +31,7 @@
namespace webrtc_jni { namespace webrtc_jni {
NativeHandleImpl::NativeHandleImpl(JNIEnv* jni, NativeTextureHandleImpl::NativeTextureHandleImpl(JNIEnv* jni,
jint j_oes_texture_id, jint j_oes_texture_id,
jfloatArray j_transform_matrix) jfloatArray j_transform_matrix)
: oes_texture_id(j_oes_texture_id) { : oes_texture_id(j_oes_texture_id) {
@ -44,10 +44,38 @@ NativeHandleImpl::NativeHandleImpl(JNIEnv* jni,
jni->ReleaseFloatArrayElements(j_transform_matrix, transform_matrix_ptr, 0); jni->ReleaseFloatArrayElements(j_transform_matrix, transform_matrix_ptr, 0);
} }
NativeHandleImpl::NativeHandleImpl() : texture_object_(NULL), texture_id_(-1) {}
void* NativeHandleImpl::GetHandle() {
return texture_object_;
}
int NativeHandleImpl::GetTextureId() {
return texture_id_;
}
void NativeHandleImpl::SetTextureObject(void* texture_object, int texture_id) {
texture_object_ = reinterpret_cast<jobject>(texture_object);
texture_id_ = texture_id;
}
JniNativeHandleBuffer::JniNativeHandleBuffer(void* native_handle,
int width,
int height)
: NativeHandleBuffer(native_handle, width, height) {}
rtc::scoped_refptr<webrtc::VideoFrameBuffer>
JniNativeHandleBuffer::NativeToI420Buffer() {
// TODO(pbos): Implement before using this in the encoder pipeline (or
// remove the RTC_CHECK() in VideoCapture).
RTC_NOTREACHED();
return nullptr;
}
AndroidTextureBuffer::AndroidTextureBuffer( AndroidTextureBuffer::AndroidTextureBuffer(
int width, int width,
int height, int height,
const NativeHandleImpl& native_handle, const NativeTextureHandleImpl& native_handle,
const rtc::Callback0<void>& no_longer_used) const rtc::Callback0<void>& no_longer_used)
: webrtc::NativeHandleBuffer(&native_handle_, width, height), : webrtc::NativeHandleBuffer(&native_handle_, width, height),
native_handle_(native_handle), native_handle_(native_handle),


@ -36,8 +36,8 @@
namespace webrtc_jni { namespace webrtc_jni {
// Wrapper for texture object. // Wrapper for texture object.
struct NativeHandleImpl { struct NativeTextureHandleImpl {
NativeHandleImpl(JNIEnv* jni, NativeTextureHandleImpl(JNIEnv* jni,
jint j_oes_texture_id, jint j_oes_texture_id,
jfloatArray j_transform_matrix); jfloatArray j_transform_matrix);
@ -45,17 +45,42 @@ struct NativeHandleImpl {
float sampling_matrix[16]; float sampling_matrix[16];
}; };
// Native handle for SurfaceTexture + texture id.
class NativeHandleImpl {
public:
NativeHandleImpl();
void* GetHandle();
int GetTextureId();
void SetTextureObject(void* texture_object, int texture_id);
private:
jobject texture_object_;
int32_t texture_id_;
};
class JniNativeHandleBuffer : public webrtc::NativeHandleBuffer {
public:
JniNativeHandleBuffer(void* native_handle, int width, int height);
// TODO(pbos): Override destructor to release native handle, at the moment the
// native handle is not released based on refcount.
private:
rtc::scoped_refptr<webrtc::VideoFrameBuffer> NativeToI420Buffer() override;
};
class AndroidTextureBuffer : public webrtc::NativeHandleBuffer { class AndroidTextureBuffer : public webrtc::NativeHandleBuffer {
public: public:
AndroidTextureBuffer(int width, AndroidTextureBuffer(int width,
int height, int height,
const NativeHandleImpl& native_handle, const NativeTextureHandleImpl& native_handle,
const rtc::Callback0<void>& no_longer_used); const rtc::Callback0<void>& no_longer_used);
~AndroidTextureBuffer(); ~AndroidTextureBuffer();
rtc::scoped_refptr<VideoFrameBuffer> NativeToI420Buffer() override; rtc::scoped_refptr<VideoFrameBuffer> NativeToI420Buffer() override;
private: private:
NativeHandleImpl native_handle_; NativeTextureHandleImpl native_handle_;
rtc::Callback0<void> no_longer_used_cb_; rtc::Callback0<void> no_longer_used_cb_;
}; };


@ -773,7 +773,7 @@ class JavaVideoRendererWrapper : public VideoRendererInterface {
jni, *j_frame_class_, "<init>", "(III[I[Ljava/nio/ByteBuffer;J)V")), jni, *j_frame_class_, "<init>", "(III[I[Ljava/nio/ByteBuffer;J)V")),
j_texture_frame_ctor_id_(GetMethodID( j_texture_frame_ctor_id_(GetMethodID(
jni, *j_frame_class_, "<init>", jni, *j_frame_class_, "<init>",
"(IIII[FJ)V")), "(IIILjava/lang/Object;IJ)V")),
j_byte_buffer_class_(jni, FindClass(jni, "java/nio/ByteBuffer")) { j_byte_buffer_class_(jni, FindClass(jni, "java/nio/ByteBuffer")) {
CHECK_EXCEPTION(jni); CHECK_EXCEPTION(jni);
} }
@ -829,13 +829,13 @@ class JavaVideoRendererWrapper : public VideoRendererInterface {
jobject CricketToJavaTextureFrame(const cricket::VideoFrame* frame) { jobject CricketToJavaTextureFrame(const cricket::VideoFrame* frame) {
NativeHandleImpl* handle = NativeHandleImpl* handle =
reinterpret_cast<NativeHandleImpl*>(frame->GetNativeHandle()); reinterpret_cast<NativeHandleImpl*>(frame->GetNativeHandle());
jfloatArray sampling_matrix = jni()->NewFloatArray(16); jobject texture_object = reinterpret_cast<jobject>(handle->GetHandle());
jni()->SetFloatArrayRegion(sampling_matrix, 0, 16, handle->sampling_matrix); int texture_id = handle->GetTextureId();
return jni()->NewObject( return jni()->NewObject(
*j_frame_class_, j_texture_frame_ctor_id_, *j_frame_class_, j_texture_frame_ctor_id_,
frame->GetWidth(), frame->GetHeight(), frame->GetWidth(), frame->GetHeight(),
static_cast<int>(frame->GetVideoRotation()), static_cast<int>(frame->GetVideoRotation()),
handle->oes_texture_id, sampling_matrix, javaShallowCopy(frame)); texture_object, texture_id, javaShallowCopy(frame));
} }
JNIEnv* jni() { JNIEnv* jni() {


@ -70,7 +70,7 @@ void SurfaceTextureHelper::ReturnTextureFrame() const {
rtc::scoped_refptr<webrtc::VideoFrameBuffer> rtc::scoped_refptr<webrtc::VideoFrameBuffer>
SurfaceTextureHelper::CreateTextureFrame(int width, int height, SurfaceTextureHelper::CreateTextureFrame(int width, int height,
const NativeHandleImpl& native_handle) { const NativeTextureHandleImpl& native_handle) {
return new rtc::RefCountedObject<AndroidTextureBuffer>( return new rtc::RefCountedObject<AndroidTextureBuffer>(
width, height, native_handle, width, height, native_handle,
rtc::Bind(&SurfaceTextureHelper::ReturnTextureFrame, this)); rtc::Bind(&SurfaceTextureHelper::ReturnTextureFrame, this));


@ -66,7 +66,7 @@ class SurfaceTextureHelper : public rtc::RefCountInterface {
rtc::scoped_refptr<webrtc::VideoFrameBuffer> CreateTextureFrame( rtc::scoped_refptr<webrtc::VideoFrameBuffer> CreateTextureFrame(
int width, int width,
int height, int height,
const NativeHandleImpl& native_handle); const NativeTextureHandleImpl& native_handle);
protected: protected:
~SurfaceTextureHelper(); ~SurfaceTextureHelper();


@ -33,19 +33,19 @@ import android.media.MediaCodecInfo;
import android.media.MediaCodecInfo.CodecCapabilities; import android.media.MediaCodecInfo.CodecCapabilities;
import android.media.MediaCodecList; import android.media.MediaCodecList;
import android.media.MediaFormat; import android.media.MediaFormat;
import android.opengl.GLES11Ext;
import android.opengl.GLES20;
import android.os.Build; import android.os.Build;
import android.os.SystemClock;
import android.view.Surface; import android.view.Surface;
import org.webrtc.Logging; import org.webrtc.Logging;
import java.nio.ByteBuffer; import java.nio.ByteBuffer;
import java.util.Arrays; import java.util.Arrays;
import java.util.LinkedList;
import java.util.List; import java.util.List;
import java.util.concurrent.CountDownLatch; import java.util.concurrent.CountDownLatch;
import java.util.Queue;
import java.util.concurrent.TimeUnit; import javax.microedition.khronos.egl.EGLContext;
// Java-side of peerconnection_jni.cc:MediaCodecVideoDecoder. // Java-side of peerconnection_jni.cc:MediaCodecVideoDecoder.
// This class is an implementation detail of the Java PeerConnection API. // This class is an implementation detail of the Java PeerConnection API.
@ -104,21 +104,14 @@ public class MediaCodecVideoDecoder {
private int height; private int height;
private int stride; private int stride;
private int sliceHeight; private int sliceHeight;
private boolean hasDecodedFirstFrame;
private final Queue<Long> decodeStartTimeMs = new LinkedList<Long>();
private boolean useSurface; private boolean useSurface;
private int textureID = 0;
// The below variables are only used when decoding to a Surface. private SurfaceTexture surfaceTexture = null;
private TextureListener textureListener;
// Max number of output buffers queued before starting to drop decoded frames.
private static final int MAX_QUEUED_OUTPUTBUFFERS = 3;
private int droppedFrames;
// |isWaitingForTexture| is true when waiting for the transition:
// MediaCodec.releaseOutputBuffer() -> onTextureFrameAvailable().
private boolean isWaitingForTexture;
private Surface surface = null; private Surface surface = null;
private final Queue<DecodedOutputBuffer> private EglBase eglBase;
dequeuedSurfaceOutputBuffers = new LinkedList<DecodedOutputBuffer>();
private MediaCodecVideoDecoder() {
}
// MediaCodec error handler - invoked when critical error happens which may prevent // MediaCodec error handler - invoked when critical error happens which may prevent
// further use of media codec API. Now it means that one of media codec instances // further use of media codec API. Now it means that one of media codec instances
@ -230,13 +223,12 @@ public class MediaCodecVideoDecoder {
} }
} }
// Pass null in |surfaceTextureHelper| to configure the codec for ByteBuffer output. // Pass null in |sharedContext| to configure the codec for ByteBuffer output.
private boolean initDecode( private boolean initDecode(VideoCodecType type, int width, int height, EGLContext sharedContext) {
VideoCodecType type, int width, int height, SurfaceTextureHelper surfaceTextureHelper) {
if (mediaCodecThread != null) { if (mediaCodecThread != null) {
throw new RuntimeException("Forgot to release()?"); throw new RuntimeException("Forgot to release()?");
} }
useSurface = (surfaceTextureHelper != null); useSurface = (sharedContext != null);
String mime = null; String mime = null;
String[] supportedCodecPrefixes = null; String[] supportedCodecPrefixes = null;
if (type == VideoCodecType.VIDEO_CODEC_VP8) { if (type == VideoCodecType.VIDEO_CODEC_VP8) {
@ -258,6 +250,9 @@ public class MediaCodecVideoDecoder {
Logging.d(TAG, "Java initDecode: " + type + " : "+ width + " x " + height + Logging.d(TAG, "Java initDecode: " + type + " : "+ width + " x " + height +
". Color: 0x" + Integer.toHexString(properties.colorFormat) + ". Color: 0x" + Integer.toHexString(properties.colorFormat) +
". Use Surface: " + useSurface); ". Use Surface: " + useSurface);
if (sharedContext != null) {
Logging.d(TAG, "Decoder shared EGL Context: " + sharedContext);
}
runningInstance = this; // Decoder is now running and can be queried for stack traces. runningInstance = this; // Decoder is now running and can be queried for stack traces.
mediaCodecThread = Thread.currentThread(); mediaCodecThread = Thread.currentThread();
try { try {
@ -267,8 +262,16 @@ public class MediaCodecVideoDecoder {
sliceHeight = height; sliceHeight = height;
if (useSurface) { if (useSurface) {
textureListener = new TextureListener(surfaceTextureHelper); // Create shared EGL context.
surface = new Surface(surfaceTextureHelper.getSurfaceTexture()); eglBase = new EglBase(sharedContext, EglBase.ConfigType.PIXEL_BUFFER);
eglBase.createDummyPbufferSurface();
eglBase.makeCurrent();
// Create output surface
textureID = GlUtil.generateTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES);
Logging.d(TAG, "Video decoder TextureID = " + textureID);
surfaceTexture = new SurfaceTexture(textureID);
surface = new Surface(surfaceTexture);
} }
MediaFormat format = MediaFormat.createVideoFormat(mime, width, height); MediaFormat format = MediaFormat.createVideoFormat(mime, width, height);
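The restored initDecode() path above decodes straight to a Surface: it creates a shared EGL context with a dummy pbuffer surface, generates an external OES texture, wraps it in a SurfaceTexture, and hands the resulting Surface to MediaCodec.configure(). A minimal sketch of that wiring, assuming EglBase and GlUtil helpers equivalent to the ones used in the diff (the wrapper class and use of createDecoderByType are illustrative):

import android.graphics.SurfaceTexture;
import android.media.MediaCodec;
import android.media.MediaFormat;
import android.opengl.GLES11Ext;
import android.view.Surface;
import javax.microedition.khronos.egl.EGLContext;

// Sketch: set up a MediaCodec video decoder that renders decoded frames into a SurfaceTexture.
final class SurfaceDecoderSetup {
  static MediaCodec createSurfaceDecoder(String mime, int width, int height,
      EGLContext sharedContext) throws Exception {
    // Sharing the EGL context lets the renderer sample the decoder's OES texture.
    EglBase eglBase = new EglBase(sharedContext, EglBase.ConfigType.PIXEL_BUFFER);
    eglBase.createDummyPbufferSurface();  // A tiny pbuffer just to make the context current.
    eglBase.makeCurrent();

    // External (OES) texture that MediaCodec will render decoded frames into.
    int textureId = GlUtil.generateTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES);
    SurfaceTexture surfaceTexture = new SurfaceTexture(textureId);
    Surface surface = new Surface(surfaceTexture);

    MediaCodec codec = MediaCodec.createDecoderByType(mime);
    MediaFormat format = MediaFormat.createVideoFormat(mime, width, height);
    codec.configure(format, surface, null /* crypto */, 0 /* flags */);
    codec.start();
    return codec;
  }
}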
@ -287,11 +290,6 @@ public class MediaCodecVideoDecoder {
colorFormat = properties.colorFormat; colorFormat = properties.colorFormat;
outputBuffers = mediaCodec.getOutputBuffers(); outputBuffers = mediaCodec.getOutputBuffers();
inputBuffers = mediaCodec.getInputBuffers(); inputBuffers = mediaCodec.getInputBuffers();
decodeStartTimeMs.clear();
hasDecodedFirstFrame = false;
dequeuedSurfaceOutputBuffers.clear();
droppedFrames = 0;
isWaitingForTexture = false;
Logging.d(TAG, "Input buffers: " + inputBuffers.length + Logging.d(TAG, "Input buffers: " + inputBuffers.length +
". Output buffers: " + outputBuffers.length); ". Output buffers: " + outputBuffers.length);
return true; return true;
@ -302,7 +300,7 @@ public class MediaCodecVideoDecoder {
} }
private void release() { private void release() {
Logging.d(TAG, "Java releaseDecoder. Total number of dropped frames: " + droppedFrames); Logging.d(TAG, "Java releaseDecoder");
checkOnMediaCodecThread(); checkOnMediaCodecThread();
// Run Mediacodec stop() and release() on separate thread since sometime // Run Mediacodec stop() and release() on separate thread since sometime
@ -340,7 +338,11 @@ public class MediaCodecVideoDecoder {
if (useSurface) { if (useSurface) {
surface.release(); surface.release();
surface = null; surface = null;
textureListener.release(); Logging.d(TAG, "Delete video decoder TextureID " + textureID);
GLES20.glDeleteTextures(1, new int[] {textureID}, 0);
textureID = 0;
eglBase.release();
eglBase = null;
} }
Logging.d(TAG, "Java releaseDecoder done"); Logging.d(TAG, "Java releaseDecoder done");
} }
@ -363,7 +365,6 @@ public class MediaCodecVideoDecoder {
try { try {
inputBuffers[inputBufferIndex].position(0); inputBuffers[inputBufferIndex].position(0);
inputBuffers[inputBufferIndex].limit(size); inputBuffers[inputBufferIndex].limit(size);
decodeStartTimeMs.add(SystemClock.elapsedRealtime());
mediaCodec.queueInputBuffer(inputBufferIndex, 0, size, timestampUs, 0); mediaCodec.queueInputBuffer(inputBufferIndex, 0, size, timestampUs, 0);
return true; return true;
} }
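The decodeStartTimeMs bookkeeping removed above is how the reverted change measured decode time on the Java side: a wall-clock timestamp is pushed when an input buffer is queued and popped when the matching output buffer is dequeued (MediaCodec preserves frame ordering for a single stream). A minimal sketch of the pattern, with an illustrative class and method names:

import android.os.SystemClock;
import java.util.LinkedList;
import java.util.Queue;

// Tracks per-frame decode time by pairing queueInputBuffer() with dequeueOutputBuffer().
final class DecodeTimeTracker {
  private final Queue<Long> decodeStartTimeMs = new LinkedList<Long>();

  // Call right before MediaCodec.queueInputBuffer().
  void onInputQueued() {
    decodeStartTimeMs.add(SystemClock.elapsedRealtime());
  }

  // Call when MediaCodec.dequeueOutputBuffer() returns a decoded buffer.
  // Returns the elapsed decode time for that frame in milliseconds.
  long onOutputDequeued() {
    return SystemClock.elapsedRealtime() - decodeStartTimeMs.remove();
  }
}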
@ -373,156 +374,57 @@ public class MediaCodecVideoDecoder {
} }
} }
// Helper struct for dequeueOutputBuffer() below. // Helper structs for dequeueOutputBuffer() below.
private static class DecodedOutputBuffer { private static class DecodedByteBuffer {
public DecodedOutputBuffer(int index, int offset, int size, long presentationTimestampUs, public DecodedByteBuffer(int index, int offset, int size, long presentationTimestampUs) {
long decodeTime, long endDecodeTime) {
this.index = index; this.index = index;
this.offset = offset; this.offset = offset;
this.size = size; this.size = size;
this.presentationTimestampUs = presentationTimestampUs; this.presentationTimestampUs = presentationTimestampUs;
this.decodeTimeMs = decodeTime;
this.endDecodeTimeMs = endDecodeTime;
} }
private final int index; private final int index;
private final int offset; private final int offset;
private final int size; private final int size;
private final long presentationTimestampUs; private final long presentationTimestampUs;
// Number of ms it took to decode this frame.
private final long decodeTimeMs;
// System time when this frame finished decoding.
private final long endDecodeTimeMs;
} }
// Helper struct for dequeueTextureBuffer() below.
private static class DecodedTextureBuffer { private static class DecodedTextureBuffer {
private final int textureID; private final int textureID;
private final float[] transformMatrix;
private final long presentationTimestampUs; private final long presentationTimestampUs;
private final long decodeTimeMs;
// Interval from when the frame finished decoding until this buffer has been created.
// Since there is only one texture, this interval depend on the time from when
// a frame is decoded and provided to C++ and until that frame is returned to the MediaCodec
// so that the texture can be updated with the next decoded frame.
private final long frameDelayMs;
// A DecodedTextureBuffer with zero |textureID| has special meaning and represents a frame public DecodedTextureBuffer(int textureID, long presentationTimestampUs) {
// that was dropped.
public DecodedTextureBuffer(int textureID, float[] transformMatrix,
long presentationTimestampUs, long decodeTimeMs, long frameDelay) {
this.textureID = textureID; this.textureID = textureID;
this.transformMatrix = transformMatrix;
this.presentationTimestampUs = presentationTimestampUs; this.presentationTimestampUs = presentationTimestampUs;
this.decodeTimeMs = decodeTimeMs;
this.frameDelayMs = frameDelay;
} }
} }
// Poll based texture listener. // Returns null if no decoded buffer is available, and otherwise either a DecodedByteBuffer or
private static class TextureListener // DecodedTexturebuffer depending on |useSurface| configuration.
implements SurfaceTextureHelper.OnTextureFrameAvailableListener {
public static class TextureInfo {
private final int textureID;
private final float[] transformMatrix;
TextureInfo(int textureId, float[] transformMatrix) {
this.textureID = textureId;
this.transformMatrix = transformMatrix;
}
}
private final SurfaceTextureHelper surfaceTextureHelper;
private TextureInfo textureInfo;
// |newFrameLock| is used to synchronize arrival of new frames with wait()/notifyAll().
private final Object newFrameLock = new Object();
public TextureListener(SurfaceTextureHelper surfaceTextureHelper) {
this.surfaceTextureHelper = surfaceTextureHelper;
surfaceTextureHelper.setListener(this);
}
// Callback from |surfaceTextureHelper|. May be called on an arbitrary thread.
@Override
public void onTextureFrameAvailable(
int oesTextureId, float[] transformMatrix, long timestampNs) {
synchronized (newFrameLock) {
if (textureInfo != null) {
Logging.e(TAG,
"Unexpected onTextureFrameAvailable() called while already holding a texture.");
throw new IllegalStateException("Already holding a texture.");
}
// |timestampNs| is always zero on some Android versions.
textureInfo = new TextureInfo(oesTextureId, transformMatrix);
newFrameLock.notifyAll();
}
}
// Dequeues and returns a TextureInfo if available, or null otherwise.
public TextureInfo dequeueTextureInfo(int timeoutMs) {
synchronized (newFrameLock) {
if (textureInfo == null && timeoutMs > 0) {
try {
newFrameLock.wait(timeoutMs);
} catch(InterruptedException e) {
// Restore the interrupted status by reinterrupting the thread.
Thread.currentThread().interrupt();
}
}
TextureInfo returnedInfo = textureInfo;
textureInfo = null;
return returnedInfo;
}
}
public void release() {
// SurfaceTextureHelper.disconnect() will block until any onTextureFrameAvailable() in
// progress is done. Therefore, the call to disconnect() must be outside any synchronized
// statement that is also used in the onTextureFrameAvailable() above to avoid deadlocks.
surfaceTextureHelper.disconnect();
synchronized (newFrameLock) {
if (textureInfo != null) {
surfaceTextureHelper.returnTextureFrame();
textureInfo = null;
}
}
}
}
// Returns null if no decoded buffer is available, and otherwise a DecodedByteBuffer.
// Throws IllegalStateException if call is made on the wrong thread, if color format changes to an // Throws IllegalStateException if call is made on the wrong thread, if color format changes to an
// unsupported format, or if |mediaCodec| is not in the Executing state. Throws CodecException // unsupported format, or if |mediaCodec| is not in the Executing state. Throws CodecException
// upon codec error. // upon codec error.
private DecodedOutputBuffer dequeueOutputBuffer(int dequeueTimeoutMs) { private Object dequeueOutputBuffer(int dequeueTimeoutUs)
throws IllegalStateException, MediaCodec.CodecException {
checkOnMediaCodecThread(); checkOnMediaCodecThread();
if (decodeStartTimeMs.isEmpty()) {
return null;
}
// Drain the decoder until receiving a decoded buffer or hitting // Drain the decoder until receiving a decoded buffer or hitting
// MediaCodec.INFO_TRY_AGAIN_LATER. // MediaCodec.INFO_TRY_AGAIN_LATER.
final MediaCodec.BufferInfo info = new MediaCodec.BufferInfo(); final MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
while (true) { while (true) {
final int result = mediaCodec.dequeueOutputBuffer( final int result = mediaCodec.dequeueOutputBuffer(info, dequeueTimeoutUs);
info, TimeUnit.MILLISECONDS.toMicros(dequeueTimeoutMs));
switch (result) { switch (result) {
case MediaCodec.INFO_TRY_AGAIN_LATER:
return null;
case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED: case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
outputBuffers = mediaCodec.getOutputBuffers(); outputBuffers = mediaCodec.getOutputBuffers();
Logging.d(TAG, "Decoder output buffers changed: " + outputBuffers.length); Logging.d(TAG, "Decoder output buffers changed: " + outputBuffers.length);
if (hasDecodedFirstFrame) {
throw new RuntimeException("Unexpected output buffer change event.");
}
break; break;
case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED: case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
MediaFormat format = mediaCodec.getOutputFormat(); MediaFormat format = mediaCodec.getOutputFormat();
Logging.d(TAG, "Decoder format changed: " + format.toString()); Logging.d(TAG, "Decoder format changed: " + format.toString());
int new_width = format.getInteger(MediaFormat.KEY_WIDTH);
int new_height = format.getInteger(MediaFormat.KEY_HEIGHT);
if (hasDecodedFirstFrame && (new_width != width || new_height != height)) {
throw new RuntimeException("Unexpected size change. Configured " + width + "*" +
height + ". New " + new_width + "*" + new_height);
}
width = format.getInteger(MediaFormat.KEY_WIDTH); width = format.getInteger(MediaFormat.KEY_WIDTH);
height = format.getInteger(MediaFormat.KEY_HEIGHT); height = format.getInteger(MediaFormat.KEY_HEIGHT);
if (!useSurface && format.containsKey(MediaFormat.KEY_COLOR_FORMAT)) { if (!useSurface && format.containsKey(MediaFormat.KEY_COLOR_FORMAT)) {
colorFormat = format.getInteger(MediaFormat.KEY_COLOR_FORMAT); colorFormat = format.getInteger(MediaFormat.KEY_COLOR_FORMAT);
Logging.d(TAG, "Color: 0x" + Integer.toHexString(colorFormat)); Logging.d(TAG, "Color: 0x" + Integer.toHexString(colorFormat));
@ -540,75 +442,18 @@ public class MediaCodecVideoDecoder {
stride = Math.max(width, stride); stride = Math.max(width, stride);
sliceHeight = Math.max(height, sliceHeight); sliceHeight = Math.max(height, sliceHeight);
break; break;
case MediaCodec.INFO_TRY_AGAIN_LATER:
return null;
default: default:
hasDecodedFirstFrame = true; // Output buffer decoded.
return new DecodedOutputBuffer(result, info.offset, info.size, info.presentationTimeUs, if (useSurface) {
SystemClock.elapsedRealtime() - decodeStartTimeMs.remove(), mediaCodec.releaseOutputBuffer(result, true /* render */);
SystemClock.elapsedRealtime()); // TODO(magjed): Wait for SurfaceTexture.onFrameAvailable() before returning a texture
// frame.
return new DecodedTextureBuffer(textureID, info.presentationTimeUs);
} else {
return new DecodedByteBuffer(result, info.offset, info.size, info.presentationTimeUs);
} }
} }
} }
// Returns null if no decoded buffer is available, and otherwise a DecodedTextureBuffer.
// Throws IllegalStateException if call is made on the wrong thread, if color format changes to an
// unsupported format, or if |mediaCodec| is not in the Executing state. Throws CodecException
// upon codec error.
private DecodedTextureBuffer dequeueTextureBuffer(int dequeueTimeoutMs) {
checkOnMediaCodecThread();
if (!useSurface) {
throw new IllegalStateException("dequeueTexture() called for byte buffer decoding.");
}
DecodedOutputBuffer outputBuffer = dequeueOutputBuffer(dequeueTimeoutMs);
if (outputBuffer != null) {
if (dequeuedSurfaceOutputBuffers.size() >= Math.min(
MAX_QUEUED_OUTPUTBUFFERS, outputBuffers.length)) {
++droppedFrames;
Logging.w(TAG, "Too many output buffers. Dropping frame. Total number of dropped frames: "
+ droppedFrames);
// Drop the newest frame. Don't drop the oldest since if |isWaitingForTexture|
// releaseOutputBuffer has already been called. Dropping the newest frame will lead to a
// shift of timestamps by one frame in MediaCodecVideoDecoder::DeliverPendingOutputs.
mediaCodec.releaseOutputBuffer(outputBuffer.index, false /* render */);
return new DecodedTextureBuffer(0, null, outputBuffer.presentationTimestampUs,
outputBuffer.decodeTimeMs,
SystemClock.elapsedRealtime() - outputBuffer.endDecodeTimeMs);
}
dequeuedSurfaceOutputBuffers.add(outputBuffer);
}
if (dequeuedSurfaceOutputBuffers.isEmpty()) {
return null;
}
if (!isWaitingForTexture) {
// Get the first frame in the queue and render to the decoder output surface.
mediaCodec.releaseOutputBuffer(dequeuedSurfaceOutputBuffers.peek().index, true /* render */);
isWaitingForTexture = true;
}
// We are waiting for a frame to be rendered to the decoder surface.
// Check if it is ready now by waiting max |dequeueTimeoutMs|. There can only be one frame
// rendered at a time.
TextureListener.TextureInfo info = textureListener.dequeueTextureInfo(dequeueTimeoutMs);
if (info != null) {
isWaitingForTexture = false;
final DecodedOutputBuffer renderedBuffer =
dequeuedSurfaceOutputBuffers.remove();
if (!dequeuedSurfaceOutputBuffers.isEmpty()) {
// Get the next frame in the queue and render to the decoder output surface.
mediaCodec.releaseOutputBuffer(
dequeuedSurfaceOutputBuffers.peek().index, true /* render */);
isWaitingForTexture = true;
}
return new DecodedTextureBuffer(info.textureID, info.transformMatrix,
renderedBuffer.presentationTimestampUs, renderedBuffer.decodeTimeMs,
SystemClock.elapsedRealtime() - renderedBuffer.endDecodeTimeMs);
}
return null;
} }
// Release a dequeued output byte buffer back to the codec for re-use. Should only be called for // Release a dequeued output byte buffer back to the codec for re-use. Should only be called for
@ -616,11 +461,11 @@ public class MediaCodecVideoDecoder {
// Throws IllegalStateException if the call is made on the wrong thread, if codec is configured // Throws IllegalStateException if the call is made on the wrong thread, if codec is configured
// for surface decoding, or if |mediaCodec| is not in the Executing state. Throws // for surface decoding, or if |mediaCodec| is not in the Executing state. Throws
// MediaCodec.CodecException upon codec error. // MediaCodec.CodecException upon codec error.
private void returnDecodedOutputBuffer(int index) private void returnDecodedByteBuffer(int index)
throws IllegalStateException, MediaCodec.CodecException { throws IllegalStateException, MediaCodec.CodecException {
checkOnMediaCodecThread(); checkOnMediaCodecThread();
if (useSurface) { if (useSurface) {
throw new IllegalStateException("returnDecodedOutputBuffer() called for surface decoding."); throw new IllegalStateException("returnDecodedByteBuffer() called for surface decoding.");
} }
mediaCodec.releaseOutputBuffer(index, false /* render */); mediaCodec.releaseOutputBuffer(index, false /* render */);
} }


@ -46,11 +46,7 @@ public class VideoRenderer {
public final int[] yuvStrides; public final int[] yuvStrides;
public ByteBuffer[] yuvPlanes; public ByteBuffer[] yuvPlanes;
public final boolean yuvFrame; public final boolean yuvFrame;
// Matrix that transforms standard coordinates to their proper sampling locations in public Object textureObject;
// the texture. This transform compensates for any properties of the video source that
// cause it to appear different from a normalized texture. This matrix does not take
// |rotationDegree| into account.
public final float[] samplingMatrix;
public int textureId; public int textureId;
// Frame pointer in C++. // Frame pointer in C++.
private long nativeFramePointer; private long nativeFramePointer;
@ -74,27 +70,19 @@ public class VideoRenderer {
if (rotationDegree % 90 != 0) { if (rotationDegree % 90 != 0) {
throw new IllegalArgumentException("Rotation degree not multiple of 90: " + rotationDegree); throw new IllegalArgumentException("Rotation degree not multiple of 90: " + rotationDegree);
} }
// The convention in WebRTC is that the first element in a ByteBuffer corresponds to the
// top-left corner of the image, but in glTexImage2D() the first element corresponds to the
// bottom-left corner. This discrepancy is corrected by setting a vertical flip as sampling
// matrix.
samplingMatrix = new float[] {
1, 0, 0, 0,
0, -1, 0, 0,
0, 0, 1, 0,
0, 1, 0, 1};
} }
/** /**
* Construct a texture frame of the given dimensions with data in SurfaceTexture * Construct a texture frame of the given dimensions with data in SurfaceTexture
*/ */
I420Frame(int width, int height, int rotationDegree, int textureId, float[] samplingMatrix, I420Frame(
long nativeFramePointer) { int width, int height, int rotationDegree,
Object textureObject, int textureId, long nativeFramePointer) {
this.width = width; this.width = width;
this.height = height; this.height = height;
this.yuvStrides = null; this.yuvStrides = null;
this.yuvPlanes = null; this.yuvPlanes = null;
this.samplingMatrix = samplingMatrix; this.textureObject = textureObject;
this.textureId = textureId; this.textureId = textureId;
this.yuvFrame = false; this.yuvFrame = false;
this.rotationDegree = rotationDegree; this.rotationDegree = rotationDegree;
@ -137,6 +125,7 @@ public class VideoRenderer {
*/ */
public static void renderFrameDone(I420Frame frame) { public static void renderFrameDone(I420Frame frame) {
frame.yuvPlanes = null; frame.yuvPlanes = null;
frame.textureObject = null;
frame.textureId = 0; frame.textureId = 0;
if (frame.nativeFramePointer != 0) { if (frame.nativeFramePointer != 0) {
releaseNativeFrame(frame.nativeFramePointer); releaseNativeFrame(frame.nativeFramePointer);
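With the revert, I420Frame again carries a raw textureObject/textureId pair instead of a sampling matrix, and renderFrameDone() clears both when a renderer is finished with the frame. A hedged sketch of a frame consumer honoring that contract (the handler class is illustrative; only the I420Frame fields and renderFrameDone() shown in the diff are assumed):

import org.webrtc.VideoRenderer;
import org.webrtc.VideoRenderer.I420Frame;

// Illustrative frame handler: consume a frame, then always return it via renderFrameDone().
final class ExampleFrameHandler {
  public void handleFrame(I420Frame frame) {
    try {
      if (frame.yuvFrame) {
        // Upload frame.yuvPlanes with frame.yuvStrides to YUV textures and draw.
      } else {
        // Draw using frame.textureId; frame.textureObject holds the SurfaceTexture.
      }
    } finally {
      // Clears yuvPlanes, textureObject and textureId, and releases the native frame pointer.
      VideoRenderer.renderFrameDone(frame);
    }
  }
}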