diff --git a/talk/app/webrtc/java/jni/androidmediadecoder_jni.cc b/talk/app/webrtc/java/jni/androidmediadecoder_jni.cc
index a5a25f00cf..f859410acb 100644
--- a/talk/app/webrtc/java/jni/androidmediadecoder_jni.cc
+++ b/talk/app/webrtc/java/jni/androidmediadecoder_jni.cc
@@ -134,7 +134,7 @@ class MediaCodecVideoDecoder : public webrtc::VideoDecoder,
   jmethodID j_dequeue_input_buffer_method_;
   jmethodID j_queue_input_buffer_method_;
   jmethodID j_dequeue_output_buffer_method_;
-  jmethodID j_release_output_buffer_method_;
+  jmethodID j_return_decoded_byte_buffer_method_;
   // MediaCodecVideoDecoder fields.
   jfieldID j_input_buffers_field_;
   jfieldID j_output_buffers_field_;
@@ -144,8 +144,10 @@ class MediaCodecVideoDecoder : public webrtc::VideoDecoder,
   jfieldID j_stride_field_;
   jfieldID j_slice_height_field_;
   jfieldID j_surface_texture_field_;
+  // MediaCodecVideoDecoder.DecodedTextureBuffer fields.
   jfieldID j_textureID_field_;
-  // MediaCodecVideoDecoder.DecoderOutputBufferInfo fields.
+  jfieldID j_texture_presentation_timestamp_us_field_;
+  // MediaCodecVideoDecoder.DecodedByteBuffer fields.
   jfieldID j_info_index_field_;
   jfieldID j_info_offset_field_;
   jfieldID j_info_size_field_;
@@ -197,9 +199,10 @@ MediaCodecVideoDecoder::MediaCodecVideoDecoder(
       jni, *j_media_codec_video_decoder_class_, "queueInputBuffer", "(IIJ)Z");
   j_dequeue_output_buffer_method_ = GetMethodID(
       jni, *j_media_codec_video_decoder_class_, "dequeueOutputBuffer",
-      "(I)Lorg/webrtc/MediaCodecVideoDecoder$DecoderOutputBufferInfo;");
-  j_release_output_buffer_method_ = GetMethodID(
-      jni, *j_media_codec_video_decoder_class_, "releaseOutputBuffer", "(I)V");
+      "(I)Ljava/lang/Object;");
+  j_return_decoded_byte_buffer_method_ =
+      GetMethodID(jni, *j_media_codec_video_decoder_class_,
+                  "returnDecodedByteBuffer", "(I)V");
   j_input_buffers_field_ = GetFieldID(
       jni, *j_media_codec_video_decoder_class_,
@@ -217,22 +220,28 @@ MediaCodecVideoDecoder::MediaCodecVideoDecoder(
       jni, *j_media_codec_video_decoder_class_, "stride", "I");
   j_slice_height_field_ = GetFieldID(
       jni, *j_media_codec_video_decoder_class_, "sliceHeight", "I");
-  j_textureID_field_ = GetFieldID(
-      jni, *j_media_codec_video_decoder_class_, "textureID", "I");
   j_surface_texture_field_ = GetFieldID(
       jni, *j_media_codec_video_decoder_class_, "surfaceTexture",
       "Landroid/graphics/SurfaceTexture;");
-  jclass j_decoder_output_buffer_info_class = FindClass(jni,
-      "org/webrtc/MediaCodecVideoDecoder$DecoderOutputBufferInfo");
+  jclass j_decoder_decoded_texture_buffer_class = FindClass(jni,
+      "org/webrtc/MediaCodecVideoDecoder$DecodedTextureBuffer");
+  j_textureID_field_ = GetFieldID(
+      jni, j_decoder_decoded_texture_buffer_class, "textureID", "I");
+  j_texture_presentation_timestamp_us_field_ =
+      GetFieldID(jni, j_decoder_decoded_texture_buffer_class,
+                 "presentationTimestampUs", "J");
+
+  jclass j_decoder_decoded_byte_buffer_class = FindClass(jni,
+      "org/webrtc/MediaCodecVideoDecoder$DecodedByteBuffer");
   j_info_index_field_ = GetFieldID(
-      jni, j_decoder_output_buffer_info_class, "index", "I");
+      jni, j_decoder_decoded_byte_buffer_class, "index", "I");
   j_info_offset_field_ = GetFieldID(
-      jni, j_decoder_output_buffer_info_class, "offset", "I");
+      jni, j_decoder_decoded_byte_buffer_class, "offset", "I");
   j_info_size_field_ = GetFieldID(
-      jni, j_decoder_output_buffer_info_class, "size", "I");
+      jni, j_decoder_decoded_byte_buffer_class, "size", "I");
   j_info_presentation_timestamp_us_field_ = GetFieldID(
-      jni, j_decoder_output_buffer_info_class, "presentationTimestampUs", "J");
+      jni,
+      j_decoder_decoded_byte_buffer_class, "presentationTimestampUs", "J");
   CHECK_EXCEPTION(jni) << "MediaCodecVideoDecoder ctor failed";
   use_surface_ = (render_egl_context_ != NULL);
@@ -559,31 +568,19 @@ bool MediaCodecVideoDecoder::DeliverPendingOutputs(
     return true;
   }
   // Get decoder output.
-  jobject j_decoder_output_buffer_info = jni->CallObjectMethod(
+  jobject j_decoder_output_buffer = jni->CallObjectMethod(
       *j_media_codec_video_decoder_,
       j_dequeue_output_buffer_method_,
       dequeue_timeout_us);
   if (CheckException(jni)) {
+    ALOGE("dequeueOutputBuffer() error");
     return false;
   }
-  if (IsNull(jni, j_decoder_output_buffer_info)) {
+  if (IsNull(jni, j_decoder_output_buffer)) {
+    // No decoded frame ready.
     return true;
   }
-  // Extract output buffer info from Java DecoderOutputBufferInfo.
-  int output_buffer_index =
-      GetIntField(jni, j_decoder_output_buffer_info, j_info_index_field_);
-  RTC_CHECK_GE(output_buffer_index, 0);
-  int output_buffer_offset =
-      GetIntField(jni, j_decoder_output_buffer_info, j_info_offset_field_);
-  int output_buffer_size =
-      GetIntField(jni, j_decoder_output_buffer_info, j_info_size_field_);
-  long output_timestamps_ms = GetLongField(jni, j_decoder_output_buffer_info,
-      j_info_presentation_timestamp_us_field_) / rtc::kNumMicrosecsPerMillisec;
-  if (CheckException(jni)) {
-    return false;
-  }
-
   // Get decoded video frame properties.
   int color_format = GetIntField(jni, *j_media_codec_video_decoder_,
       j_color_format_field_);
@@ -592,17 +589,34 @@ bool MediaCodecVideoDecoder::DeliverPendingOutputs(
   int stride = GetIntField(jni, *j_media_codec_video_decoder_, j_stride_field_);
   int slice_height = GetIntField(jni, *j_media_codec_video_decoder_,
       j_slice_height_field_);
-  int texture_id = GetIntField(jni, *j_media_codec_video_decoder_,
-      j_textureID_field_);
   rtc::scoped_refptr<webrtc::VideoFrameBuffer> frame_buffer;
+  long output_timestamps_ms = 0;
   if (use_surface_) {
+    // Extract data from Java DecodedTextureBuffer.
+    const int texture_id =
+        GetIntField(jni, j_decoder_output_buffer, j_textureID_field_);
+    const int64_t timestamp_us =
+        GetLongField(jni, j_decoder_output_buffer,
+                     j_texture_presentation_timestamp_us_field_);
+    output_timestamps_ms = timestamp_us / rtc::kNumMicrosecsPerMillisec;
+    // Create webrtc::VideoFrameBuffer with native texture handle.
     native_handle_.SetTextureObject(surface_texture_, texture_id);
     frame_buffer = new rtc::RefCountedObject<JniNativeHandleBuffer>(
         &native_handle_, width, height);
   } else {
     // Extract data from Java ByteBuffer and create output yuv420 frame
-    // for non surface decoding only.
+    const int output_buffer_index =
+        GetIntField(jni, j_decoder_output_buffer, j_info_index_field_);
+    const int output_buffer_offset =
+        GetIntField(jni, j_decoder_output_buffer, j_info_offset_field_);
+    const int output_buffer_size =
+        GetIntField(jni, j_decoder_output_buffer, j_info_size_field_);
+    const int64_t timestamp_us = GetLongField(
+        jni, j_decoder_output_buffer, j_info_presentation_timestamp_us_field_);
+    output_timestamps_ms = timestamp_us / rtc::kNumMicrosecsPerMillisec;
+
     if (output_buffer_size < width * height * 3 / 2) {
       ALOGE("Insufficient output buffer size: %d", output_buffer_size);
       return false;
@@ -653,6 +667,15 @@ bool MediaCodecVideoDecoder::DeliverPendingOutputs(
                      frame_buffer->stride(webrtc::kVPlane),
                      width, height);
     }
+    // Return output byte buffer back to codec.
+    jni->CallVoidMethod(
+        *j_media_codec_video_decoder_,
+        j_return_decoded_byte_buffer_method_,
+        output_buffer_index);
+    if (CheckException(jni)) {
+      ALOGE("returnDecodedByteBuffer error");
+      return false;
+    }
   }

   VideoFrame decoded_frame(frame_buffer, 0, 0, webrtc::kVideoRotation_0);
@@ -674,16 +697,6 @@ bool MediaCodecVideoDecoder::DeliverPendingOutputs(
       " DecTime: %lld", frames_decoded_, width, height, stride, slice_height,
       color_format, output_timestamps_ms, frame_decoding_time_ms);

-  // Return output buffer back to codec.
-  jni->CallVoidMethod(
-      *j_media_codec_video_decoder_,
-      j_release_output_buffer_method_,
-      output_buffer_index);
-  if (CheckException(jni)) {
-    ALOGE("releaseOutputBuffer error");
-    return false;
-  }
-
   // Calculate and print decoding statistics - every 3 seconds.
   frames_decoded_++;
   current_frames_++;
diff --git a/talk/app/webrtc/java/jni/classreferenceholder.cc b/talk/app/webrtc/java/jni/classreferenceholder.cc
index bc4339deab..426e76b719 100644
--- a/talk/app/webrtc/java/jni/classreferenceholder.cc
+++ b/talk/app/webrtc/java/jni/classreferenceholder.cc
@@ -81,7 +81,8 @@ ClassReferenceHolder::ClassReferenceHolder(JNIEnv* jni) {
   LoadClass(jni, "org/webrtc/MediaCodecVideoEncoder$OutputBufferInfo");
   LoadClass(jni, "org/webrtc/MediaCodecVideoEncoder$VideoCodecType");
   LoadClass(jni, "org/webrtc/MediaCodecVideoDecoder");
-  LoadClass(jni, "org/webrtc/MediaCodecVideoDecoder$DecoderOutputBufferInfo");
+  LoadClass(jni, "org/webrtc/MediaCodecVideoDecoder$DecodedTextureBuffer");
+  LoadClass(jni, "org/webrtc/MediaCodecVideoDecoder$DecodedByteBuffer");
   LoadClass(jni, "org/webrtc/MediaCodecVideoDecoder$VideoCodecType");
   LoadClass(jni, "org/webrtc/SurfaceTextureHelper");
   jclass j_egl_base_class = GetClass("org/webrtc/EglBase");
diff --git a/talk/app/webrtc/java/src/org/webrtc/MediaCodecVideoDecoder.java b/talk/app/webrtc/java/src/org/webrtc/MediaCodecVideoDecoder.java
index 7221a36190..ef2055645d 100644
--- a/talk/app/webrtc/java/src/org/webrtc/MediaCodecVideoDecoder.java
+++ b/talk/app/webrtc/java/src/org/webrtc/MediaCodecVideoDecoder.java
@@ -300,10 +300,9 @@ public class MediaCodecVideoDecoder {
     }
   }

-  // Helper struct for dequeueOutputBuffer() below.
-  private static class DecoderOutputBufferInfo {
-    public DecoderOutputBufferInfo(
-        int index, int offset, int size, long presentationTimestampUs) {
+  // Helper structs for dequeueOutputBuffer() below.
+  private static class DecodedByteBuffer {
+    public DecodedByteBuffer(int index, int offset, int size, long presentationTimestampUs) {
       this.index = index;
       this.offset = offset;
       this.size = size;
@@ -316,11 +315,22 @@ public class MediaCodecVideoDecoder {
     private final long presentationTimestampUs;
   }

-  // Dequeue and return a DecoderOutputBufferInfo, or null if no decoded buffer is ready.
+  private static class DecodedTextureBuffer {
+    private final int textureID;
+    private final long presentationTimestampUs;
+
+    public DecodedTextureBuffer(int textureID, long presentationTimestampUs) {
+      this.textureID = textureID;
+      this.presentationTimestampUs = presentationTimestampUs;
+    }
+  }
+
+  // Returns null if no decoded buffer is available, and otherwise either a DecodedByteBuffer or
+  // DecodedTextureBuffer depending on |useSurface| configuration.
   // Throws IllegalStateException if call is made on the wrong thread, if color format changes to an
   // unsupported format, or if |mediaCodec| is not in the Executing state. Throws CodecException
   // upon codec error.
-  private DecoderOutputBufferInfo dequeueOutputBuffer(int dequeueTimeoutUs)
+  private Object dequeueOutputBuffer(int dequeueTimeoutUs)
       throws IllegalStateException, MediaCodec.CodecException {
     checkOnMediaCodecThread();
     // Drain the decoder until receiving a decoded buffer or hitting
@@ -359,18 +369,29 @@ public class MediaCodecVideoDecoder {
           break;
         default:
           // Output buffer decoded.
-          return new DecoderOutputBufferInfo(
-              result, info.offset, info.size, info.presentationTimeUs);
+          if (useSurface) {
+            mediaCodec.releaseOutputBuffer(result, true /* render */);
+            // TODO(magjed): Wait for SurfaceTexture.onFrameAvailable() before returning a texture
+            // frame.
+            return new DecodedTextureBuffer(textureID, info.presentationTimeUs);
+          } else {
+            return new DecodedByteBuffer(result, info.offset, info.size, info.presentationTimeUs);
+          }
       }
     }
   }

-  // Release a dequeued output buffer back to the codec for re-use.
-  // Throws IllegalStateException if the call is made on the wrong thread or if |mediaCodec| is not
-  // in the Executing state. Throws MediaCodec.CodecException upon codec error.
-  private void releaseOutputBuffer(int index)
+  // Release a dequeued output byte buffer back to the codec for re-use. Should only be called for
+  // non-surface decoding.
+  // Throws IllegalStateException if the call is made on the wrong thread, if codec is configured
+  // for surface decoding, or if |mediaCodec| is not in the Executing state. Throws
+  // MediaCodec.CodecException upon codec error.
+  private void returnDecodedByteBuffer(int index)
       throws IllegalStateException, MediaCodec.CodecException {
     checkOnMediaCodecThread();
-    mediaCodec.releaseOutputBuffer(index, useSurface);
+    if (useSurface) {
+      throw new IllegalStateException("returnDecodedByteBuffer() called for surface decoding.");
+    }
+    mediaCodec.releaseOutputBuffer(index, false /* render */);
   }
 }
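Note on usage (editor's sketch, not part of the patch): with this change, dequeueOutputBuffer() hands its caller an opaque Object that is either a DecodedByteBuffer or a DecodedTextureBuffer, and only byte buffers must be handed back through returnDecodedByteBuffer(). In the real code that caller is the C++ side, MediaCodecVideoDecoder::DeliverPendingOutputs(), reached over JNI. The hypothetical Java sketch below mirrors the same calling pattern for reference; its class and method names (DecodedFrameDispatchSketch, onOutputBuffer, deliverTextureFrame, deliverByteFrame) are illustrative stand-ins, and the nested classes only mimic the patch's private helper classes.

// Hypothetical, self-contained sketch -- not part of the patch.
class DecodedFrameDispatchSketch {
  // Stand-in for MediaCodecVideoDecoder.DecodedByteBuffer.
  static class DecodedByteBuffer {
    final int index;
    final int offset;
    final int size;
    final long presentationTimestampUs;

    DecodedByteBuffer(int index, int offset, int size, long presentationTimestampUs) {
      this.index = index;
      this.offset = offset;
      this.size = size;
      this.presentationTimestampUs = presentationTimestampUs;
    }
  }

  // Stand-in for MediaCodecVideoDecoder.DecodedTextureBuffer.
  static class DecodedTextureBuffer {
    final int textureID;
    final long presentationTimestampUs;

    DecodedTextureBuffer(int textureID, long presentationTimestampUs) {
      this.textureID = textureID;
      this.presentationTimestampUs = presentationTimestampUs;
    }
  }

  // Dispatch on the concrete type of the dequeued object. Only byte buffers are handed back
  // explicitly; a texture buffer was already released inside dequeueOutputBuffer() with
  // releaseOutputBuffer(index, true /* render */).
  void onOutputBuffer(Object buffer) {
    if (buffer == null) {
      return;  // No decoded frame ready yet.
    }
    if (buffer instanceof DecodedTextureBuffer) {
      DecodedTextureBuffer textureBuffer = (DecodedTextureBuffer) buffer;
      deliverTextureFrame(textureBuffer.textureID, textureBuffer.presentationTimestampUs);
    } else {
      DecodedByteBuffer byteBuffer = (DecodedByteBuffer) buffer;
      deliverByteFrame(byteBuffer.index, byteBuffer.offset, byteBuffer.size,
          byteBuffer.presentationTimestampUs);
      returnDecodedByteBuffer(byteBuffer.index);  // Non-surface decoding only.
    }
  }

  void deliverTextureFrame(int textureID, long presentationTimestampUs) {
    // Hand the texture to the renderer (the patch wraps it in a native-handle frame buffer).
  }

  void deliverByteFrame(int index, int offset, int size, long presentationTimestampUs) {
    // Copy the YUV data out of the codec's output ByteBuffer (done on the C++ side in the patch).
  }

  void returnDecodedByteBuffer(int index) {
    // In the real code this maps to mediaCodec.releaseOutputBuffer(index, false /* render */).
  }
}

Because dequeueOutputBuffer() already renders texture frames via mediaCodec.releaseOutputBuffer(result, true /* render */), the explicit return call is needed only on the byte-buffer path, which is why the patch moves the release out of the shared tail of DeliverPendingOutputs() and into its non-surface branch.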