diff --git a/sdk/android/BUILD.gn b/sdk/android/BUILD.gn index f75cfdb072..f205c08c23 100644 --- a/sdk/android/BUILD.gn +++ b/sdk/android/BUILD.gn @@ -124,6 +124,8 @@ rtc_static_library("null_audio_jni") { generate_jni("generated_video_jni") { sources = [ "api/org/webrtc/EncodedImage.java", + "api/org/webrtc/MediaCodecVideoDecoder.java", + "api/org/webrtc/MediaCodecVideoEncoder.java", "api/org/webrtc/SurfaceTextureHelper.java", "api/org/webrtc/VideoCodecStatus.java", "api/org/webrtc/VideoDecoder.java", @@ -609,6 +611,7 @@ rtc_android_library("libjingle_peerconnection_java") { "src/java/org/webrtc/BaseBitrateAdjuster.java", "src/java/org/webrtc/BitrateAdjuster.java", "src/java/org/webrtc/CalledByNative.java", + "src/java/org/webrtc/CalledByNativeUnchecked.java", "src/java/org/webrtc/Camera1Session.java", "src/java/org/webrtc/Camera2Session.java", "src/java/org/webrtc/CameraCapturer.java", diff --git a/sdk/android/api/org/webrtc/MediaCodecVideoDecoder.java b/sdk/android/api/org/webrtc/MediaCodecVideoDecoder.java index 82957edd8e..53308717ae 100644 --- a/sdk/android/api/org/webrtc/MediaCodecVideoDecoder.java +++ b/sdk/android/api/org/webrtc/MediaCodecVideoDecoder.java @@ -49,7 +49,16 @@ public class MediaCodecVideoDecoder { private static final String FORMAT_KEY_CROP_BOTTOM = "crop-bottom"; // Tracks webrtc::VideoCodecType. - public enum VideoCodecType { VIDEO_CODEC_VP8, VIDEO_CODEC_VP9, VIDEO_CODEC_H264 } + public enum VideoCodecType { + VIDEO_CODEC_VP8, + VIDEO_CODEC_VP9, + VIDEO_CODEC_H264; + + @CalledByNative("VideoCodecType") + static VideoCodecType fromNativeIndex(int nativeIndex) { + return values()[nativeIndex]; + } + } // Timeout for input buffer dequeue. private static final int DEQUEUE_INPUT_TIMEOUT = 500000; @@ -144,21 +153,25 @@ public class MediaCodecVideoDecoder { } // Functions to query if HW decoding is supported. 
+ @CalledByNativeUnchecked public static boolean isVp8HwSupported() { return !hwDecoderDisabledTypes.contains(VP8_MIME_TYPE) && (findDecoder(VP8_MIME_TYPE, supportedVp8HwCodecPrefixes) != null); } + @CalledByNativeUnchecked public static boolean isVp9HwSupported() { return !hwDecoderDisabledTypes.contains(VP9_MIME_TYPE) && (findDecoder(VP9_MIME_TYPE, supportedVp9HwCodecPrefixes) != null); } + @CalledByNativeUnchecked public static boolean isH264HwSupported() { return !hwDecoderDisabledTypes.contains(H264_MIME_TYPE) && (findDecoder(H264_MIME_TYPE, supportedH264HwCodecPrefixes) != null); } + @CalledByNative public static boolean isH264HighProfileHwSupported() { if (hwDecoderDisabledTypes.contains(H264_MIME_TYPE)) { return false; @@ -265,6 +278,9 @@ public class MediaCodecVideoDecoder { return null; // No HW decoder. } + @CalledByNative + MediaCodecVideoDecoder() {} + private void checkOnMediaCodecThread() throws IllegalStateException { if (mediaCodecThread.getId() != Thread.currentThread().getId()) { throw new IllegalStateException("MediaCodecVideoDecoder previously operated on " @@ -273,6 +289,7 @@ public class MediaCodecVideoDecoder { } // Pass null in |surfaceTextureHelper| to configure the codec for ByteBuffer output. + @CalledByNativeUnchecked private boolean initDecode( VideoCodecType type, int width, int height, SurfaceTextureHelper surfaceTextureHelper) { if (mediaCodecThread != null) { @@ -346,6 +363,7 @@ public class MediaCodecVideoDecoder { // Resets the decoder so it can start decoding frames with new resolution. // Flushes MediaCodec and clears decoder output buffers. + @CalledByNativeUnchecked private void reset(int width, int height) { if (mediaCodecThread == null || mediaCodec == null) { throw new RuntimeException("Incorrect reset call for non-initialized decoder."); @@ -362,6 +380,7 @@ public class MediaCodecVideoDecoder { droppedFrames = 0; } + @CalledByNativeUnchecked private void release() { Logging.d(TAG, "Java releaseDecoder. 
Total number of dropped frames: " + droppedFrames); checkOnMediaCodecThread(); @@ -408,6 +427,7 @@ public class MediaCodecVideoDecoder { // Dequeue an input buffer and return its index, -1 if no input buffer is // available, or -2 if the codec is no longer operative. + @CalledByNativeUnchecked private int dequeueInputBuffer() { checkOnMediaCodecThread(); try { @@ -418,6 +438,7 @@ public class MediaCodecVideoDecoder { } } + @CalledByNativeUnchecked private boolean queueInputBuffer(int inputBufferIndex, int size, long presentationTimeStamUs, long timeStampMs, long ntpTimeStamp) { checkOnMediaCodecThread(); @@ -475,6 +496,41 @@ public class MediaCodecVideoDecoder { private final long decodeTimeMs; // System time when this frame decoding finished. private final long endDecodeTimeMs; + + @CalledByNative("DecodedOutputBuffer") + int getIndex() { + return index; + } + + @CalledByNative("DecodedOutputBuffer") + int getOffset() { + return offset; + } + + @CalledByNative("DecodedOutputBuffer") + int getSize() { + return size; + } + + @CalledByNative("DecodedOutputBuffer") + long getPresentationTimestampMs() { + return presentationTimeStampMs; + } + + @CalledByNative("DecodedOutputBuffer") + long getTimestampMs() { + return timeStampMs; + } + + @CalledByNative("DecodedOutputBuffer") + long getNtpTimestampMs() { + return ntpTimeStampMs; + } + + @CalledByNative("DecodedOutputBuffer") + long getDecodeTimeMs() { + return decodeTimeMs; + } } // Helper struct for dequeueTextureBuffer() below. 
@@ -508,6 +564,41 @@ public class MediaCodecVideoDecoder { this.decodeTimeMs = decodeTimeMs; this.frameDelayMs = frameDelay; } + + @CalledByNative("DecodedTextureBuffer") + int getTextureId() { + return textureID; + } + + @CalledByNative("DecodedTextureBuffer") + float[] getTransformMatrix() { + return transformMatrix; + } + + @CalledByNative("DecodedTextureBuffer") + long getPresentationTimestampMs() { + return presentationTimeStampMs; + } + + @CalledByNative("DecodedTextureBuffer") + long getTimeStampMs() { + return timeStampMs; + } + + @CalledByNative("DecodedTextureBuffer") + long getNtpTimestampMs() { + return ntpTimeStampMs; + } + + @CalledByNative("DecodedTextureBuffer") + long getDecodeTimeMs() { + return decodeTimeMs; + } + + @CalledByNative("DecodedTextureBuffer") + long getFrameDelayMs() { + return frameDelayMs; + } } // Poll based texture listener. @@ -596,6 +687,7 @@ public class MediaCodecVideoDecoder { // Throws IllegalStateException if call is made on the wrong thread, if color format changes to an // unsupported format, or if |mediaCodec| is not in the Executing state. Throws CodecException // upon codec error. + @CalledByNativeUnchecked private DecodedOutputBuffer dequeueOutputBuffer(int dequeueTimeoutMs) { checkOnMediaCodecThread(); if (decodeStartTimeMs.isEmpty()) { @@ -679,6 +771,7 @@ public class MediaCodecVideoDecoder { // unsupported format, or if |mediaCodec| is not in the Executing state. Throws CodecException // upon codec error. If |dequeueTimeoutMs| > 0, the oldest decoded frame will be dropped if // a frame can't be returned. + @CalledByNativeUnchecked private DecodedTextureBuffer dequeueTextureBuffer(int dequeueTimeoutMs) { checkOnMediaCodecThread(); if (!useSurface) { @@ -740,6 +833,7 @@ public class MediaCodecVideoDecoder { // Throws IllegalStateException if the call is made on the wrong thread, if codec is configured // for surface decoding, or if |mediaCodec| is not in the Executing state. 
Throws // MediaCodec.CodecException upon codec error. + @CalledByNativeUnchecked private void returnDecodedOutputBuffer(int index) throws IllegalStateException, MediaCodec.CodecException { checkOnMediaCodecThread(); @@ -748,4 +842,39 @@ public class MediaCodecVideoDecoder { } mediaCodec.releaseOutputBuffer(index, false /* render */); } + + @CalledByNative + ByteBuffer[] getInputBuffers() { + return inputBuffers; + } + + @CalledByNative + ByteBuffer[] getOutputBuffers() { + return outputBuffers; + } + + @CalledByNative + int getColorFormat() { + return colorFormat; + } + + @CalledByNative + int getWidth() { + return width; + } + + @CalledByNative + int getHeight() { + return height; + } + + @CalledByNative + int getStride() { + return stride; + } + + @CalledByNative + int getSliceHeight() { + return sliceHeight; + } } diff --git a/sdk/android/api/org/webrtc/MediaCodecVideoEncoder.java b/sdk/android/api/org/webrtc/MediaCodecVideoEncoder.java index b009893ba7..054930cafa 100644 --- a/sdk/android/api/org/webrtc/MediaCodecVideoEncoder.java +++ b/sdk/android/api/org/webrtc/MediaCodecVideoEncoder.java @@ -29,6 +29,8 @@ import java.util.List; import java.util.Set; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; +import org.webrtc.EglBase14; +import org.webrtc.VideoFrame; // Java-side of peerconnection_jni.cc:MediaCodecVideoEncoder. // This class is an implementation detail of the Java PeerConnection API. @@ -43,7 +45,16 @@ public class MediaCodecVideoEncoder { private static final String TAG = "MediaCodecVideoEncoder"; // Tracks webrtc::VideoCodecType. 
- public enum VideoCodecType { VIDEO_CODEC_VP8, VIDEO_CODEC_VP9, VIDEO_CODEC_H264 } + public enum VideoCodecType { + VIDEO_CODEC_VP8, + VIDEO_CODEC_VP9, + VIDEO_CODEC_H264; + + @CalledByNative("VideoCodecType") + static VideoCodecType fromNativeIndex(int nativeIndex) { + return values()[nativeIndex]; + } + } private static final int MEDIA_CODEC_RELEASE_TIMEOUT_MS = 5000; // Timeout for codec releasing. private static final int DEQUEUE_TIMEOUT = 0; // Non-blocking, no wait. @@ -193,7 +204,7 @@ public class MediaCodecVideoEncoder { COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m}; private static final int[] supportedSurfaceColorList = {CodecCapabilities.COLOR_FormatSurface}; private VideoCodecType type; - private int colorFormat; // Used by native code. + private int colorFormat; // Variables used for dynamic bitrate adjustment. private BitrateAdjustmentType bitrateAdjustmentType = BitrateAdjustmentType.NO_ADJUSTMENT; @@ -242,6 +253,7 @@ public class MediaCodecVideoEncoder { } // Functions to query if HW encoding is supported. + @CalledByNative public static boolean isVp8HwSupported() { return !hwEncoderDisabledTypes.contains(VP8_MIME_TYPE) && (findHwEncoder(VP8_MIME_TYPE, vp8HwList(), supportedColorList) != null); @@ -255,11 +267,13 @@ public class MediaCodecVideoEncoder { } } + @CalledByNative public static boolean isVp9HwSupported() { return !hwEncoderDisabledTypes.contains(VP9_MIME_TYPE) && (findHwEncoder(VP9_MIME_TYPE, vp9HwList, supportedColorList) != null); } + @CalledByNative public static boolean isH264HwSupported() { return !hwEncoderDisabledTypes.contains(H264_MIME_TYPE) && (findHwEncoder(H264_MIME_TYPE, h264HwList, supportedColorList) != null); @@ -387,6 +401,9 @@ public class MediaCodecVideoEncoder { return null; // No HW encoder. 
} + @CalledByNative + MediaCodecVideoEncoder() {} + private void checkOnMediaCodecThread() { if (mediaCodecThread.getId() != Thread.currentThread().getId()) { throw new RuntimeException("MediaCodecVideoEncoder previously operated on " + mediaCodecThread @@ -416,6 +433,7 @@ public class MediaCodecVideoEncoder { } } + @CalledByNativeUnchecked boolean initEncode(VideoCodecType type, int profile, int width, int height, int kbps, int fps, EglBase14.Context sharedContext) { final boolean useSurface = sharedContext != null; @@ -535,6 +553,7 @@ public class MediaCodecVideoEncoder { return true; } + @CalledByNativeUnchecked ByteBuffer[] getInputBuffers() { ByteBuffer[] inputBuffers = mediaCodec.getInputBuffers(); Logging.d(TAG, "Input buffers: " + inputBuffers.length); @@ -568,6 +587,7 @@ public class MediaCodecVideoEncoder { } } + @CalledByNativeUnchecked boolean encodeBuffer( boolean isKeyframe, int inputBuffer, int size, long presentationTimestampUs) { checkOnMediaCodecThread(); @@ -581,6 +601,7 @@ public class MediaCodecVideoEncoder { } } + @CalledByNativeUnchecked boolean encodeTexture(boolean isKeyframe, int oesTextureId, float[] transformationMatrix, long presentationTimestampUs) { checkOnMediaCodecThread(); @@ -600,9 +621,9 @@ public class MediaCodecVideoEncoder { } /** - * Encodes a new style VideoFrame. Called by JNI. |bufferIndex| is -1 if we are not encoding in - * surface mode. + * Encodes a new style VideoFrame. |bufferIndex| is -1 if we are not encoding in surface mode. 
*/ + @CalledByNativeUnchecked boolean encodeFrame(long nativeEncoder, boolean isKeyframe, VideoFrame frame, int bufferIndex) { checkOnMediaCodecThread(); try { @@ -637,7 +658,7 @@ public class MediaCodecVideoEncoder { if (dataV.capacity() < strideV * chromaHeight) { throw new RuntimeException("V-plane buffer size too small."); } - nativeFillBuffer( + fillNativeBuffer( nativeEncoder, bufferIndex, dataY, strideY, dataU, strideU, dataV, strideV); i420Buffer.release(); // I420 consists of one full-resolution and two half-resolution planes. @@ -652,6 +673,7 @@ public class MediaCodecVideoEncoder { } } + @CalledByNativeUnchecked void release() { Logging.d(TAG, "Java releaseEncoder"); checkOnMediaCodecThread(); @@ -733,6 +755,7 @@ public class MediaCodecVideoEncoder { Logging.d(TAG, "Java releaseEncoder done"); } + @CalledByNativeUnchecked private boolean setRates(int kbps, int frameRate) { checkOnMediaCodecThread(); @@ -775,6 +798,7 @@ public class MediaCodecVideoEncoder { // Dequeue an input buffer and return its index, -1 if no input buffer is // available, or -2 if the codec is no longer operative. + @CalledByNativeUnchecked int dequeueInputBuffer() { checkOnMediaCodecThread(); try { @@ -799,10 +823,31 @@ public class MediaCodecVideoEncoder { public final ByteBuffer buffer; public final boolean isKeyFrame; public final long presentationTimestampUs; + + @CalledByNative("OutputBufferInfo") + int getIndex() { + return index; + } + + @CalledByNative("OutputBufferInfo") + ByteBuffer getBuffer() { + return buffer; + } + + @CalledByNative("OutputBufferInfo") + boolean isKeyFrame() { + return isKeyFrame; + } + + @CalledByNative("OutputBufferInfo") + long getPresentationTimestampUs() { + return presentationTimestampUs; + } } // Dequeue and return an output buffer, or null if no output is ready. Return // a fake OutputBufferInfo with index -1 if the codec is no longer operable. 
+ @CalledByNativeUnchecked OutputBufferInfo dequeueOutputBuffer() { checkOnMediaCodecThread(); try { @@ -925,6 +970,7 @@ public class MediaCodecVideoEncoder { // Release a dequeued output buffer back to the codec for re-use. Return // false if the codec is no longer operable. + @CalledByNativeUnchecked boolean releaseOutputBuffer(int index) { checkOnMediaCodecThread(); try { @@ -936,7 +982,17 @@ public class MediaCodecVideoEncoder { } } + @CalledByNative + int getColorFormat() { + return colorFormat; + } + + @CalledByNative + static boolean isTextureBuffer(VideoFrame.Buffer buffer) { + return buffer instanceof VideoFrame.TextureBuffer; + } + /** Fills an inputBuffer with the given index with data from the byte buffers. */ - private static native void nativeFillBuffer(long nativeEncoder, int inputBuffer, ByteBuffer dataY, + private static native void fillNativeBuffer(long nativeEncoder, int inputBuffer, ByteBuffer dataY, int strideY, ByteBuffer dataU, int strideU, ByteBuffer dataV, int strideV); } diff --git a/sdk/android/src/java/org/webrtc/CalledByNativeUnchecked.java b/sdk/android/src/java/org/webrtc/CalledByNativeUnchecked.java new file mode 100644 index 0000000000..8a00a7fadb --- /dev/null +++ b/sdk/android/src/java/org/webrtc/CalledByNativeUnchecked.java @@ -0,0 +1,33 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +package org.webrtc; + +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +/** + * @CalledByNativeUnchecked is used to generate JNI bindings that do not check for exceptions. + * It only makes sense to use this annotation on methods that declare a throws... spec. + * However, note that the exception received native side may be an 'unchecked' (RuntimeException) + * such as NullPointerException, so the native code should differentiate these cases. + * Usage of this should be very rare; where possible handle exceptions in the Java side and use a + * return value to indicate success / failure. + */ +@Target(ElementType.METHOD) +@Retention(RetentionPolicy.CLASS) +public @interface CalledByNativeUnchecked { + /* + * If present, tells which inner class the method belongs to. + */ + public String value() default ""; +} diff --git a/sdk/android/src/jni/androidmediadecoder_jni.cc b/sdk/android/src/jni/androidmediadecoder_jni.cc index fb8b0993d9..59e63250d7 100644 --- a/sdk/android/src/jni/androidmediadecoder_jni.cc +++ b/sdk/android/src/jni/androidmediadecoder_jni.cc @@ -27,8 +27,8 @@ #include "rtc_base/scoped_ref_ptr.h" #include "rtc_base/thread.h" #include "rtc_base/timeutils.h" +#include "sdk/android/generated_video_jni/jni/MediaCodecVideoDecoder_jni.h" #include "sdk/android/src/jni/androidmediacodeccommon.h" -#include "sdk/android/src/jni/classreferenceholder.h" #include "sdk/android/src/jni/surfacetexturehelper_jni.h" #include "sdk/android/src/jni/videoframe.h" #include "third_party/libyuv/include/libyuv/convert.h" @@ -130,43 +130,10 @@ class MediaCodecVideoDecoder : public VideoDecoder, public rtc::MessageHandler { // returns. std::unique_ptr codec_thread_; // Thread on which to operate MediaCodec.
- ScopedGlobalRef j_media_codec_video_decoder_class_; ScopedGlobalRef j_media_codec_video_decoder_; - jmethodID j_init_decode_method_; - jmethodID j_reset_method_; - jmethodID j_release_method_; - jmethodID j_dequeue_input_buffer_method_; - jmethodID j_queue_input_buffer_method_; - jmethodID j_dequeue_byte_buffer_method_; - jmethodID j_dequeue_texture_buffer_method_; - jmethodID j_return_decoded_byte_buffer_method_; - // MediaCodecVideoDecoder fields. - jfieldID j_input_buffers_field_; - jfieldID j_output_buffers_field_; - jfieldID j_color_format_field_; - jfieldID j_width_field_; - jfieldID j_height_field_; - jfieldID j_stride_field_; - jfieldID j_slice_height_field_; - // MediaCodecVideoDecoder.DecodedTextureBuffer fields. - jfieldID j_texture_id_field_; - jfieldID j_transform_matrix_field_; - jfieldID j_texture_presentation_timestamp_ms_field_; - jfieldID j_texture_timestamp_ms_field_; - jfieldID j_texture_ntp_timestamp_ms_field_; - jfieldID j_texture_decode_time_ms_field_; - jfieldID j_texture_frame_delay_ms_field_; - // MediaCodecVideoDecoder.DecodedOutputBuffer fields. - jfieldID j_info_index_field_; - jfieldID j_info_offset_field_; - jfieldID j_info_size_field_; - jfieldID j_presentation_timestamp_ms_field_; - jfieldID j_timestamp_ms_field_; - jfieldID j_ntp_timestamp_ms_field_; - jfieldID j_byte_buffer_decode_time_ms_field_; // Global references; must be deleted in Release(). 
- std::vector input_buffers_; + std::vector> input_buffers_; }; MediaCodecVideoDecoder::MediaCodecVideoDecoder(JNIEnv* jni, @@ -178,93 +145,12 @@ MediaCodecVideoDecoder::MediaCodecVideoDecoder(JNIEnv* jni, inited_(false), sw_fallback_required_(false), codec_thread_(Thread::Create()), - j_media_codec_video_decoder_class_( - jni, - FindClass(jni, "org/webrtc/MediaCodecVideoDecoder")), j_media_codec_video_decoder_( jni, - jni->NewObject(*j_media_codec_video_decoder_class_, - GetMethodID(jni, - *j_media_codec_video_decoder_class_, - "", - "()V"))) { + Java_MediaCodecVideoDecoder_Constructor(jni)) { codec_thread_->SetName("MediaCodecVideoDecoder", NULL); RTC_CHECK(codec_thread_->Start()) << "Failed to start MediaCodecVideoDecoder"; - j_init_decode_method_ = GetMethodID( - jni, *j_media_codec_video_decoder_class_, "initDecode", - "(Lorg/webrtc/MediaCodecVideoDecoder$VideoCodecType;" - "IILorg/webrtc/SurfaceTextureHelper;)Z"); - j_reset_method_ = - GetMethodID(jni, *j_media_codec_video_decoder_class_, "reset", "(II)V"); - j_release_method_ = - GetMethodID(jni, *j_media_codec_video_decoder_class_, "release", "()V"); - j_dequeue_input_buffer_method_ = GetMethodID( - jni, *j_media_codec_video_decoder_class_, "dequeueInputBuffer", "()I"); - j_queue_input_buffer_method_ = GetMethodID( - jni, *j_media_codec_video_decoder_class_, "queueInputBuffer", "(IIJJJ)Z"); - j_dequeue_byte_buffer_method_ = GetMethodID( - jni, *j_media_codec_video_decoder_class_, "dequeueOutputBuffer", - "(I)Lorg/webrtc/MediaCodecVideoDecoder$DecodedOutputBuffer;"); - j_dequeue_texture_buffer_method_ = GetMethodID( - jni, *j_media_codec_video_decoder_class_, "dequeueTextureBuffer", - "(I)Lorg/webrtc/MediaCodecVideoDecoder$DecodedTextureBuffer;"); - j_return_decoded_byte_buffer_method_ = - GetMethodID(jni, *j_media_codec_video_decoder_class_, - "returnDecodedOutputBuffer", "(I)V"); - - j_input_buffers_field_ = GetFieldID( - jni, *j_media_codec_video_decoder_class_, - "inputBuffers", 
"[Ljava/nio/ByteBuffer;"); - j_output_buffers_field_ = GetFieldID( - jni, *j_media_codec_video_decoder_class_, - "outputBuffers", "[Ljava/nio/ByteBuffer;"); - j_color_format_field_ = GetFieldID( - jni, *j_media_codec_video_decoder_class_, "colorFormat", "I"); - j_width_field_ = GetFieldID( - jni, *j_media_codec_video_decoder_class_, "width", "I"); - j_height_field_ = GetFieldID( - jni, *j_media_codec_video_decoder_class_, "height", "I"); - j_stride_field_ = GetFieldID( - jni, *j_media_codec_video_decoder_class_, "stride", "I"); - j_slice_height_field_ = GetFieldID( - jni, *j_media_codec_video_decoder_class_, "sliceHeight", "I"); - - jclass j_decoded_texture_buffer_class = FindClass(jni, - "org/webrtc/MediaCodecVideoDecoder$DecodedTextureBuffer"); - j_texture_id_field_ = GetFieldID( - jni, j_decoded_texture_buffer_class, "textureID", "I"); - j_transform_matrix_field_ = GetFieldID( - jni, j_decoded_texture_buffer_class, "transformMatrix", "[F"); - j_texture_presentation_timestamp_ms_field_ = GetFieldID( - jni, j_decoded_texture_buffer_class, "presentationTimeStampMs", "J"); - j_texture_timestamp_ms_field_ = GetFieldID( - jni, j_decoded_texture_buffer_class, "timeStampMs", "J"); - j_texture_ntp_timestamp_ms_field_ = GetFieldID( - jni, j_decoded_texture_buffer_class, "ntpTimeStampMs", "J"); - j_texture_decode_time_ms_field_ = GetFieldID( - jni, j_decoded_texture_buffer_class, "decodeTimeMs", "J"); - j_texture_frame_delay_ms_field_ = GetFieldID( - jni, j_decoded_texture_buffer_class, "frameDelayMs", "J"); - - jclass j_decoded_output_buffer_class = FindClass(jni, - "org/webrtc/MediaCodecVideoDecoder$DecodedOutputBuffer"); - j_info_index_field_ = GetFieldID( - jni, j_decoded_output_buffer_class, "index", "I"); - j_info_offset_field_ = GetFieldID( - jni, j_decoded_output_buffer_class, "offset", "I"); - j_info_size_field_ = GetFieldID( - jni, j_decoded_output_buffer_class, "size", "I"); - j_presentation_timestamp_ms_field_ = GetFieldID( - jni, j_decoded_output_buffer_class, 
"presentationTimeStampMs", "J"); - j_timestamp_ms_field_ = GetFieldID( - jni, j_decoded_output_buffer_class, "timeStampMs", "J"); - j_ntp_timestamp_ms_field_ = GetFieldID( - jni, j_decoded_output_buffer_class, "ntpTimeStampMs", "J"); - j_byte_buffer_decode_time_ms_field_ = GetFieldID( - jni, j_decoded_output_buffer_class, "decodeTimeMs", "J"); - - CHECK_EXCEPTION(jni) << "MediaCodecVideoDecoder ctor failed"; use_surface_ = (render_egl_context_ != NULL); ALOGD << "MediaCodecVideoDecoder ctor. Use surface: " << use_surface_; memset(&codec_, 0, sizeof(codec_)); @@ -347,13 +233,10 @@ int32_t MediaCodecVideoDecoder::InitDecodeOnCodecThread() { } } - jobject j_video_codec_enum = JavaEnumFromIndexAndClassName( - jni, "MediaCodecVideoDecoder$VideoCodecType", codecType_); - bool success = jni->CallBooleanMethod( - *j_media_codec_video_decoder_, - j_init_decode_method_, - j_video_codec_enum, - codec_.width, + jobject j_video_codec_enum = + Java_VideoCodecType_fromNativeIndex(jni, codecType_); + bool success = Java_MediaCodecVideoDecoder_initDecode( + jni, *j_media_codec_video_decoder_, j_video_codec_enum, codec_.width, codec_.height, use_surface_ ? 
surface_texture_helper_->GetJavaSurfaceTextureHelper() : nullptr); @@ -380,19 +263,11 @@ int32_t MediaCodecVideoDecoder::InitDecodeOnCodecThread() { } ALOGD << "Maximum amount of pending frames: " << max_pending_frames_; - jobjectArray input_buffers = (jobjectArray)GetObjectField( - jni, *j_media_codec_video_decoder_, j_input_buffers_field_); - size_t num_input_buffers = jni->GetArrayLength(input_buffers); - input_buffers_.resize(num_input_buffers); - for (size_t i = 0; i < num_input_buffers; ++i) { - input_buffers_[i] = - jni->NewGlobalRef(jni->GetObjectArrayElement(input_buffers, i)); - if (CheckException(jni)) { - ALOGE << "NewGlobalRef error - fallback to SW codec."; - sw_fallback_required_ = true; - return WEBRTC_VIDEO_CODEC_ERROR; - } - } + jobjectArray input_buffers = Java_MediaCodecVideoDecoder_getInputBuffers( + jni, *j_media_codec_video_decoder_); + input_buffers_ = JavaToNativeVector>( + jni, input_buffers, + [](JNIEnv* env, jobject o) { return ScopedGlobalRef(env, o); }); codec_thread_->PostDelayed(RTC_FROM_HERE, kMediaCodecPollMs, this); @@ -412,11 +287,8 @@ int32_t MediaCodecVideoDecoder::ResetDecodeOnCodecThread() { rtc::MessageQueueManager::Clear(this); ResetVariables(); - jni->CallVoidMethod( - *j_media_codec_video_decoder_, - j_reset_method_, - codec_.width, - codec_.height); + Java_MediaCodecVideoDecoder_reset(jni, *j_media_codec_video_decoder_, + codec_.width, codec_.height); if (CheckException(jni)) { ALOGE << "Soft reset error - fallback to SW codec."; @@ -445,11 +317,8 @@ int32_t MediaCodecVideoDecoder::ReleaseOnCodecThread() { ALOGD << "DecoderReleaseOnCodecThread: Frames received: " << frames_received_ << ". 
Frames decoded: " << frames_decoded_; ScopedLocalRefFrame local_ref_frame(jni); - for (size_t i = 0; i < input_buffers_.size(); i++) { - jni->DeleteGlobalRef(input_buffers_[i]); - } input_buffers_.clear(); - jni->CallVoidMethod(*j_media_codec_video_decoder_, j_release_method_); + Java_MediaCodecVideoDecoder_release(jni, *j_media_codec_video_decoder_); surface_texture_helper_ = nullptr; inited_ = false; rtc::MessageQueueManager::Clear(this); @@ -600,8 +469,8 @@ int32_t MediaCodecVideoDecoder::DecodeOnCodecThread( } // Get input buffer. - int j_input_buffer_index = jni->CallIntMethod( - *j_media_codec_video_decoder_, j_dequeue_input_buffer_method_); + int j_input_buffer_index = Java_MediaCodecVideoDecoder_dequeueInputBuffer( + jni, *j_media_codec_video_decoder_); if (CheckException(jni) || j_input_buffer_index < 0) { ALOGE << "dequeueInputBuffer error: " << j_input_buffer_index << ". Retry DeliverPendingOutputs."; @@ -613,8 +482,8 @@ int32_t MediaCodecVideoDecoder::DecodeOnCodecThread( return ProcessHWErrorOnCodecThread(); } // Try dequeue input buffer one last time. - j_input_buffer_index = jni->CallIntMethod( - *j_media_codec_video_decoder_, j_dequeue_input_buffer_method_); + j_input_buffer_index = Java_MediaCodecVideoDecoder_dequeueInputBuffer( + jni, *j_media_codec_video_decoder_); if (CheckException(jni) || j_input_buffer_index < 0) { ALOGE << "dequeueInputBuffer critical error: " << j_input_buffer_index; return ProcessHWErrorOnCodecThread(); @@ -622,7 +491,7 @@ int32_t MediaCodecVideoDecoder::DecodeOnCodecThread( } // Copy encoded data to Java ByteBuffer. - jobject j_input_buffer = input_buffers_[j_input_buffer_index]; + jobject j_input_buffer = *input_buffers_[j_input_buffer_index]; uint8_t* buffer = reinterpret_cast(jni->GetDirectBufferAddress(j_input_buffer)); RTC_CHECK(buffer) << "Indirect buffer??"; @@ -664,14 +533,10 @@ int32_t MediaCodecVideoDecoder::DecodeOnCodecThread( pending_frame_qps_.push_back(qp); // Feed input to decoder. 
- bool success = jni->CallBooleanMethod( - *j_media_codec_video_decoder_, - j_queue_input_buffer_method_, - j_input_buffer_index, - inputImage._length, - presentation_timestamp_us, - static_cast (inputImage._timeStamp), - inputImage.ntp_time_ms_); + bool success = Java_MediaCodecVideoDecoder_queueInputBuffer( + jni, *j_media_codec_video_decoder_, j_input_buffer_index, + inputImage._length, presentation_timestamp_us, + static_cast(inputImage._timeStamp), inputImage.ntp_time_ms_); if (CheckException(jni) || !success) { ALOGE << "queueInputBuffer error"; return ProcessHWErrorOnCodecThread(); @@ -695,11 +560,9 @@ bool MediaCodecVideoDecoder::DeliverPendingOutputs( } // Get decoder output. jobject j_decoder_output_buffer = - jni->CallObjectMethod(*j_media_codec_video_decoder_, - use_surface_ ? j_dequeue_texture_buffer_method_ - : j_dequeue_byte_buffer_method_, - dequeue_timeout_ms); - + (use_surface_ ? &Java_MediaCodecVideoDecoder_dequeueTextureBuffer + : &Java_MediaCodecVideoDecoder_dequeueOutputBuffer)( + jni, *j_media_codec_video_decoder_, dequeue_timeout_ms); if (CheckException(jni)) { ALOGE << "dequeueOutputBuffer() error"; return false; @@ -710,10 +573,12 @@ bool MediaCodecVideoDecoder::DeliverPendingOutputs( } // Get decoded video frame properties. 
- int color_format = GetIntField(jni, *j_media_codec_video_decoder_, - j_color_format_field_); - int width = GetIntField(jni, *j_media_codec_video_decoder_, j_width_field_); - int height = GetIntField(jni, *j_media_codec_video_decoder_, j_height_field_); + int color_format = Java_MediaCodecVideoDecoder_getColorFormat( + jni, *j_media_codec_video_decoder_); + int width = + Java_MediaCodecVideoDecoder_getWidth(jni, *j_media_codec_video_decoder_); + int height = + Java_MediaCodecVideoDecoder_getHeight(jni, *j_media_codec_video_decoder_); rtc::scoped_refptr frame_buffer; int64_t presentation_timestamps_ms = 0; @@ -723,24 +588,24 @@ bool MediaCodecVideoDecoder::DeliverPendingOutputs( int64_t frame_delayed_ms = 0; if (use_surface_) { // Extract data from Java DecodedTextureBuffer. - presentation_timestamps_ms = GetLongField( - jni, j_decoder_output_buffer, - j_texture_presentation_timestamp_ms_field_); - output_timestamps_ms = GetLongField( - jni, j_decoder_output_buffer, j_texture_timestamp_ms_field_); - output_ntp_timestamps_ms = GetLongField( - jni, j_decoder_output_buffer, j_texture_ntp_timestamp_ms_field_); - decode_time_ms = GetLongField( - jni, j_decoder_output_buffer, j_texture_decode_time_ms_field_); + presentation_timestamps_ms = + Java_DecodedTextureBuffer_getPresentationTimestampMs( + jni, j_decoder_output_buffer); + output_timestamps_ms = + Java_DecodedTextureBuffer_getTimeStampMs(jni, j_decoder_output_buffer); + output_ntp_timestamps_ms = Java_DecodedTextureBuffer_getNtpTimestampMs( + jni, j_decoder_output_buffer); + decode_time_ms = + Java_DecodedTextureBuffer_getDecodeTimeMs(jni, j_decoder_output_buffer); const int texture_id = - GetIntField(jni, j_decoder_output_buffer, j_texture_id_field_); + Java_DecodedTextureBuffer_getTextureId(jni, j_decoder_output_buffer); if (texture_id != 0) { // |texture_id| == 0 represents a dropped frame. 
const jfloatArray j_transform_matrix = - reinterpret_cast(GetObjectField( - jni, j_decoder_output_buffer, j_transform_matrix_field_)); - frame_delayed_ms = GetLongField( - jni, j_decoder_output_buffer, j_texture_frame_delay_ms_field_); + Java_DecodedTextureBuffer_getTransformMatrix(jni, + j_decoder_output_buffer); + frame_delayed_ms = Java_DecodedTextureBuffer_getFrameDelayMs( + jni, j_decoder_output_buffer); // Create VideoFrameBuffer with native texture handle. frame_buffer = surface_texture_helper_->CreateTextureFrame( @@ -751,25 +616,26 @@ bool MediaCodecVideoDecoder::DeliverPendingOutputs( } else { // Extract data from Java ByteBuffer and create output yuv420 frame - // for non surface decoding only. - int stride = - GetIntField(jni, *j_media_codec_video_decoder_, j_stride_field_); - const int slice_height = - GetIntField(jni, *j_media_codec_video_decoder_, j_slice_height_field_); - const int output_buffer_index = GetIntField( - jni, j_decoder_output_buffer, j_info_index_field_); - const int output_buffer_offset = GetIntField( - jni, j_decoder_output_buffer, j_info_offset_field_); - const int output_buffer_size = GetIntField( - jni, j_decoder_output_buffer, j_info_size_field_); - presentation_timestamps_ms = GetLongField( - jni, j_decoder_output_buffer, j_presentation_timestamp_ms_field_); - output_timestamps_ms = GetLongField( - jni, j_decoder_output_buffer, j_timestamp_ms_field_); - output_ntp_timestamps_ms = GetLongField( - jni, j_decoder_output_buffer, j_ntp_timestamp_ms_field_); + int stride = Java_MediaCodecVideoDecoder_getStride( + jni, *j_media_codec_video_decoder_); + const int slice_height = Java_MediaCodecVideoDecoder_getSliceHeight( + jni, *j_media_codec_video_decoder_); + const int output_buffer_index = + Java_DecodedOutputBuffer_getIndex(jni, j_decoder_output_buffer); + const int output_buffer_offset = + Java_DecodedOutputBuffer_getOffset(jni, j_decoder_output_buffer); + const int output_buffer_size = + Java_DecodedOutputBuffer_getSize(jni, 
j_decoder_output_buffer); + presentation_timestamps_ms = + Java_DecodedOutputBuffer_getPresentationTimestampMs( + jni, j_decoder_output_buffer); + output_timestamps_ms = + Java_DecodedOutputBuffer_getTimestampMs(jni, j_decoder_output_buffer); + output_ntp_timestamps_ms = Java_DecodedOutputBuffer_getNtpTimestampMs( + jni, j_decoder_output_buffer); - decode_time_ms = GetLongField(jni, j_decoder_output_buffer, - j_byte_buffer_decode_time_ms_field_); + decode_time_ms = + Java_DecodedOutputBuffer_getDecodeTimeMs(jni, j_decoder_output_buffer); RTC_CHECK_GE(slice_height, height); if (output_buffer_size < width * height * 3 / 2) { @@ -782,8 +648,8 @@ bool MediaCodecVideoDecoder::DeliverPendingOutputs( // output byte buffer, so actual stride value need to be corrected. stride = output_buffer_size * 2 / (height * 3); } - jobjectArray output_buffers = reinterpret_cast(GetObjectField( - jni, *j_media_codec_video_decoder_, j_output_buffers_field_)); + jobjectArray output_buffers = Java_MediaCodecVideoDecoder_getOutputBuffers( + jni, *j_media_codec_video_decoder_); jobject output_buffer = jni->GetObjectArrayElement(output_buffers, output_buffer_index); uint8_t* payload = reinterpret_cast(jni->GetDirectBufferAddress( @@ -847,10 +713,8 @@ bool MediaCodecVideoDecoder::DeliverPendingOutputs( frame_buffer = i420_buffer; // Return output byte buffer back to codec. 
- jni->CallVoidMethod( - *j_media_codec_video_decoder_, - j_return_decoded_byte_buffer_method_, - output_buffer_index); + Java_MediaCodecVideoDecoder_returnDecodedOutputBuffer( + jni, *j_media_codec_video_decoder_, output_buffer_index); if (CheckException(jni)) { ALOGE << "returnDecodedOutputBuffer error"; return false; @@ -934,38 +798,22 @@ MediaCodecVideoDecoderFactory::MediaCodecVideoDecoderFactory() ALOGD << "MediaCodecVideoDecoderFactory ctor"; JNIEnv* jni = AttachCurrentThreadIfNeeded(); ScopedLocalRefFrame local_ref_frame(jni); - jclass j_decoder_class = FindClass(jni, "org/webrtc/MediaCodecVideoDecoder"); supported_codec_types_.clear(); - bool is_vp8_hw_supported = jni->CallStaticBooleanMethod( - j_decoder_class, - GetStaticMethodID(jni, j_decoder_class, "isVp8HwSupported", "()Z")); - if (CheckException(jni)) { - is_vp8_hw_supported = false; - } - if (is_vp8_hw_supported) { + if (Java_MediaCodecVideoDecoder_isVp8HwSupported(jni) && + !CheckException(jni)) { ALOGD << "VP8 HW Decoder supported."; supported_codec_types_.push_back(kVideoCodecVP8); } - bool is_vp9_hw_supported = jni->CallStaticBooleanMethod( - j_decoder_class, - GetStaticMethodID(jni, j_decoder_class, "isVp9HwSupported", "()Z")); - if (CheckException(jni)) { - is_vp9_hw_supported = false; - } - if (is_vp9_hw_supported) { + if (Java_MediaCodecVideoDecoder_isVp9HwSupported(jni) && + !CheckException(jni)) { ALOGD << "VP9 HW Decoder supported."; supported_codec_types_.push_back(kVideoCodecVP9); } - bool is_h264_hw_supported = jni->CallStaticBooleanMethod( - j_decoder_class, - GetStaticMethodID(jni, j_decoder_class, "isH264HwSupported", "()Z")); - if (CheckException(jni)) { - is_h264_hw_supported = false; - } - if (is_h264_hw_supported) { + if (Java_MediaCodecVideoDecoder_isH264HwSupported(jni) && + !CheckException(jni)) { ALOGD << "H264 HW Decoder supported."; supported_codec_types_.push_back(kVideoCodecH264); } @@ -1015,6 +863,10 @@ void 
MediaCodecVideoDecoderFactory::DestroyVideoDecoder(VideoDecoder* decoder) { delete decoder; } +bool MediaCodecVideoDecoderFactory::IsH264HighProfileSupported(JNIEnv* env) { + return Java_MediaCodecVideoDecoder_isH264HighProfileHwSupported(env); +} + const char* MediaCodecVideoDecoder::ImplementationName() const { return "MediaCodec"; } diff --git a/sdk/android/src/jni/androidmediadecoder_jni.h b/sdk/android/src/jni/androidmediadecoder_jni.h index bcdf9cc0cd..d76512ded5 100644 --- a/sdk/android/src/jni/androidmediadecoder_jni.h +++ b/sdk/android/src/jni/androidmediadecoder_jni.h @@ -31,6 +31,8 @@ class MediaCodecVideoDecoderFactory void DestroyVideoDecoder(VideoDecoder* decoder) override; + static bool IsH264HighProfileSupported(JNIEnv* env); + private: jobject egl_context_; std::vector supported_codec_types_; diff --git a/sdk/android/src/jni/androidmediaencoder_jni.cc b/sdk/android/src/jni/androidmediaencoder_jni.cc index 9b804c6432..09e0937d64 100644 --- a/sdk/android/src/jni/androidmediaencoder_jni.cc +++ b/sdk/android/src/jni/androidmediaencoder_jni.cc @@ -37,8 +37,9 @@ #include "rtc_base/thread.h" #include "rtc_base/timeutils.h" #include "rtc_base/weak_ptr.h" +#include "sdk/android/generated_video_jni/jni/MediaCodecVideoEncoder_jni.h" #include "sdk/android/src/jni/androidmediacodeccommon.h" -#include "sdk/android/src/jni/classreferenceholder.h" +#include "sdk/android/src/jni/androidmediadecoder_jni.h" #include "sdk/android/src/jni/jni_helpers.h" #include "sdk/android/src/jni/videoframe.h" #include "system_wrappers/include/field_trial.h" @@ -178,13 +179,6 @@ class MediaCodecVideoEncoder : public VideoEncoder { jobject frame, int input_buffer_index); - // Helper accessors for MediaCodecVideoEncoder$OutputBufferInfo members. 
- int GetOutputBufferInfoIndex(JNIEnv* jni, jobject j_output_buffer_info); - jobject GetOutputBufferInfoBuffer(JNIEnv* jni, jobject j_output_buffer_info); - bool GetOutputBufferInfoIsKeyFrame(JNIEnv* jni, jobject j_output_buffer_info); - jlong GetOutputBufferInfoPresentationTimestampUs( - JNIEnv* jni, jobject j_output_buffer_info); - // Deliver any outputs pending in the MediaCodec to our |callback_| and return // true on success. bool DeliverPendingOutputs(JNIEnv* jni); @@ -212,25 +206,7 @@ class MediaCodecVideoEncoder : public VideoEncoder { // State that is constant for the lifetime of this object once the ctor // returns. rtc::SequencedTaskChecker encoder_queue_checker_; - ScopedGlobalRef j_media_codec_video_encoder_class_; ScopedGlobalRef j_media_codec_video_encoder_; - jmethodID j_init_encode_method_; - jmethodID j_get_input_buffers_method_; - jmethodID j_dequeue_input_buffer_method_; - jmethodID j_encode_buffer_method_; - jmethodID j_encode_texture_method_; - jmethodID j_encode_frame_method_; - jmethodID j_release_method_; - jmethodID j_set_rates_method_; - jmethodID j_dequeue_output_buffer_method_; - jmethodID j_release_output_buffer_method_; - jfieldID j_color_format_field_; - jfieldID j_info_index_field_; - jfieldID j_info_buffer_field_; - jfieldID j_info_is_key_frame_field_; - jfieldID j_info_presentation_timestamp_us_field_; - - ScopedGlobalRef j_video_frame_texture_buffer_class_; // State that is valid only between InitEncode() and the next Release(). int width_; // Frame width in pixels. @@ -289,7 +265,7 @@ class MediaCodecVideoEncoder : public VideoEncoder { bool scale_; H264::Profile profile_; // Global references; must be deleted in Release(). - std::vector input_buffers_; + std::vector> input_buffers_; H264BitstreamParser h264_bitstream_parser_; // VP9 variables to populate codec specific structure. 
@@ -331,73 +307,15 @@ MediaCodecVideoEncoder::MediaCodecVideoEncoder(JNIEnv* jni, jobject egl_context) : codec_(codec), callback_(NULL), - j_media_codec_video_encoder_class_( - jni, - FindClass(jni, "org/webrtc/MediaCodecVideoEncoder")), j_media_codec_video_encoder_( jni, - jni->NewObject(*j_media_codec_video_encoder_class_, - GetMethodID(jni, - *j_media_codec_video_encoder_class_, - "", - "()V"))), - j_video_frame_texture_buffer_class_( - jni, - FindClass(jni, "org/webrtc/VideoFrame$TextureBuffer")), + Java_MediaCodecVideoEncoder_Constructor(jni)), inited_(false), use_surface_(false), egl_context_(egl_context), sw_fallback_required_(false) { encoder_queue_checker_.Detach(); - jclass j_output_buffer_info_class = - FindClass(jni, "org/webrtc/MediaCodecVideoEncoder$OutputBufferInfo"); - j_init_encode_method_ = - GetMethodID(jni, *j_media_codec_video_encoder_class_, "initEncode", - "(Lorg/webrtc/MediaCodecVideoEncoder$VideoCodecType;" - "IIIIILorg/webrtc/EglBase14$Context;)Z"); - j_get_input_buffers_method_ = GetMethodID( - jni, - *j_media_codec_video_encoder_class_, - "getInputBuffers", - "()[Ljava/nio/ByteBuffer;"); - j_dequeue_input_buffer_method_ = GetMethodID( - jni, *j_media_codec_video_encoder_class_, "dequeueInputBuffer", "()I"); - j_encode_buffer_method_ = GetMethodID( - jni, *j_media_codec_video_encoder_class_, "encodeBuffer", "(ZIIJ)Z"); - j_encode_texture_method_ = GetMethodID( - jni, *j_media_codec_video_encoder_class_, "encodeTexture", - "(ZI[FJ)Z"); - j_encode_frame_method_ = - GetMethodID(jni, *j_media_codec_video_encoder_class_, "encodeFrame", - "(JZLorg/webrtc/VideoFrame;I)Z"); - j_release_method_ = - GetMethodID(jni, *j_media_codec_video_encoder_class_, "release", "()V"); - j_set_rates_method_ = GetMethodID( - jni, *j_media_codec_video_encoder_class_, "setRates", "(II)Z"); - j_dequeue_output_buffer_method_ = GetMethodID( - jni, - *j_media_codec_video_encoder_class_, - "dequeueOutputBuffer", - 
"()Lorg/webrtc/MediaCodecVideoEncoder$OutputBufferInfo;"); - j_release_output_buffer_method_ = GetMethodID( - jni, *j_media_codec_video_encoder_class_, "releaseOutputBuffer", "(I)Z"); - - j_color_format_field_ = - GetFieldID(jni, *j_media_codec_video_encoder_class_, "colorFormat", "I"); - j_info_index_field_ = - GetFieldID(jni, j_output_buffer_info_class, "index", "I"); - j_info_buffer_field_ = GetFieldID( - jni, j_output_buffer_info_class, "buffer", "Ljava/nio/ByteBuffer;"); - j_info_is_key_frame_field_ = - GetFieldID(jni, j_output_buffer_info_class, "isKeyFrame", "Z"); - j_info_presentation_timestamp_us_field_ = GetFieldID( - jni, j_output_buffer_info_class, "presentationTimestampUs", "J"); - if (CheckException(jni)) { - ALOGW << "MediaCodecVideoEncoder ctor failed."; - ProcessHWError(true /* reset_if_fallback_unavailable */); - } - Random random(rtc::TimeMicros()); picture_id_ = random.Rand() & 0x7FFF; tl0_pic_idx_ = random.Rand(); @@ -603,12 +521,11 @@ int32_t MediaCodecVideoEncoder::InitEncodeInternal(int width, frames_received_since_last_key_ = kMinKeyFrameInterval; // We enforce no extra stride/padding in the format creation step. - jobject j_video_codec_enum = JavaEnumFromIndexAndClassName( - jni, "MediaCodecVideoEncoder$VideoCodecType", codec_type); - const bool encode_status = jni->CallBooleanMethod( - *j_media_codec_video_encoder_, j_init_encode_method_, j_video_codec_enum, - profile_, width, height, kbps, fps, - (use_surface ? egl_context_ : nullptr)); + jobject j_video_codec_enum = + Java_VideoCodecType_fromNativeIndex(jni, codec_type); + const bool encode_status = Java_MediaCodecVideoEncoder_initEncode( + jni, *j_media_codec_video_encoder_, j_video_codec_enum, profile_, width, + height, kbps, fps, (use_surface ? 
egl_context_ : nullptr)); if (!encode_status) { ALOGE << "Failed to configure encoder."; ProcessHWError(false /* reset_if_fallback_unavailable */); @@ -621,9 +538,8 @@ int32_t MediaCodecVideoEncoder::InitEncodeInternal(int width, } if (!use_surface) { - jobjectArray input_buffers = reinterpret_cast( - jni->CallObjectMethod(*j_media_codec_video_encoder_, - j_get_input_buffers_method_)); + jobjectArray input_buffers = Java_MediaCodecVideoEncoder_getInputBuffers( + jni, *j_media_codec_video_encoder_); if (CheckException(jni)) { ALOGE << "Exception in get input buffers."; ProcessHWError(false /* reset_if_fallback_unavailable */); @@ -635,8 +551,8 @@ int32_t MediaCodecVideoEncoder::InitEncodeInternal(int width, return WEBRTC_VIDEO_CODEC_ERROR; } - switch (GetIntField(jni, *j_media_codec_video_encoder_, - j_color_format_field_)) { + switch (Java_MediaCodecVideoEncoder_getColorFormat( + jni, *j_media_codec_video_encoder_)) { case COLOR_FormatYUV420Planar: encoder_fourcc_ = libyuv::FOURCC_YU12; break; @@ -650,15 +566,15 @@ int32_t MediaCodecVideoEncoder::InitEncodeInternal(int width, ProcessHWError(false /* reset_if_fallback_unavailable */); return WEBRTC_VIDEO_CODEC_ERROR; } - size_t num_input_buffers = jni->GetArrayLength(input_buffers); + RTC_CHECK(input_buffers_.empty()) << "Unexpected double InitEncode without Release"; - input_buffers_.resize(num_input_buffers); - for (size_t i = 0; i < num_input_buffers; ++i) { - input_buffers_[i] = - jni->NewGlobalRef(jni->GetObjectArrayElement(input_buffers, i)); - int64_t yuv_buffer_capacity = - jni->GetDirectBufferCapacity(input_buffers_[i]); + input_buffers_ = JavaToNativeVector>( + jni, input_buffers, [](JNIEnv* env, jobject o) { + return ScopedGlobalRef(env, o); + }); + for (const ScopedGlobalRef& buffer : input_buffers_) { + int64_t yuv_buffer_capacity = jni->GetDirectBufferCapacity(*buffer); if (CheckException(jni)) { ALOGE << "Exception in get direct buffer capacity."; ProcessHWError(false /* reset_if_fallback_unavailable 
*/); @@ -772,8 +688,8 @@ int32_t MediaCodecVideoEncoder::Encode( int j_input_buffer_index = -1; if (!use_surface_) { - j_input_buffer_index = jni->CallIntMethod(*j_media_codec_video_encoder_, - j_dequeue_input_buffer_method_); + j_input_buffer_index = Java_MediaCodecVideoEncoder_dequeueInputBuffer( + jni, *j_media_codec_video_encoder_); if (CheckException(jni)) { ALOGE << "Exception in dequeu input buffer."; return ProcessHWErrorOnEncode(); @@ -892,9 +808,9 @@ bool MediaCodecVideoEncoder::IsTextureFrame(JNIEnv* jni, case AndroidVideoFrameBuffer::AndroidType::kTextureBuffer: return true; case AndroidVideoFrameBuffer::AndroidType::kJavaBuffer: - return jni->IsInstanceOf(static_cast(android_buffer) - ->video_frame_buffer(), - *j_video_frame_texture_buffer_class_); + return Java_MediaCodecVideoEncoder_isTextureBuffer( + jni, static_cast(android_buffer) + ->video_frame_buffer()); default: RTC_NOTREACHED(); return false; @@ -916,9 +832,9 @@ bool MediaCodecVideoEncoder::EncodeByteBuffer(JNIEnv* jni, i420_buffer->StrideV())) { return false; } - bool encode_status = jni->CallBooleanMethod( - *j_media_codec_video_encoder_, j_encode_buffer_method_, key_frame, - input_buffer_index, yuv_size_, current_timestamp_us_); + bool encode_status = Java_MediaCodecVideoEncoder_encodeBuffer( + jni, *j_media_codec_video_encoder_, key_frame, input_buffer_index, + yuv_size_, current_timestamp_us_); if (CheckException(jni)) { ALOGE << "Exception in encode buffer."; ProcessHWError(true /* reset_if_fallback_unavailable */); @@ -935,7 +851,7 @@ bool MediaCodecVideoEncoder::FillInputBuffer(JNIEnv* jni, int stride_u, uint8_t const* buffer_v, int stride_v) { - jobject j_input_buffer = input_buffers_[input_buffer_index]; + jobject j_input_buffer = *input_buffers_[input_buffer_index]; uint8_t* yuv_buffer = reinterpret_cast(jni->GetDirectBufferAddress(j_input_buffer)); if (CheckException(jni)) { @@ -962,9 +878,9 @@ bool MediaCodecVideoEncoder::EncodeTexture(JNIEnv* jni, ->native_handle_impl(); 
jfloatArray sampling_matrix = handle.sampling_matrix.ToJava(jni); - bool encode_status = jni->CallBooleanMethod( - *j_media_codec_video_encoder_, j_encode_texture_method_, key_frame, - handle.oes_texture_id, sampling_matrix, current_timestamp_us_); + bool encode_status = Java_MediaCodecVideoEncoder_encodeTexture( + jni, *j_media_codec_video_encoder_, key_frame, handle.oes_texture_id, + sampling_matrix, current_timestamp_us_); if (CheckException(jni)) { ALOGE << "Exception in encode texture."; ProcessHWError(true /* reset_if_fallback_unavailable */); @@ -977,9 +893,9 @@ bool MediaCodecVideoEncoder::EncodeJavaFrame(JNIEnv* jni, bool key_frame, jobject frame, int input_buffer_index) { - bool encode_status = jni->CallBooleanMethod( - *j_media_codec_video_encoder_, j_encode_frame_method_, - jlongFromPointer(this), key_frame, frame, input_buffer_index); + bool encode_status = Java_MediaCodecVideoEncoder_encodeFrame( + jni, *j_media_codec_video_encoder_, jlongFromPointer(this), key_frame, + frame, input_buffer_index); if (CheckException(jni)) { ALOGE << "Exception in encode frame."; ProcessHWError(true /* reset_if_fallback_unavailable */); @@ -1009,10 +925,8 @@ int32_t MediaCodecVideoEncoder::Release() { encode_task_.reset(nullptr); weak_factory_.reset(nullptr); ScopedLocalRefFrame local_ref_frame(jni); - for (size_t i = 0; i < input_buffers_.size(); ++i) - jni->DeleteGlobalRef(input_buffers_[i]); input_buffers_.clear(); - jni->CallVoidMethod(*j_media_codec_video_encoder_, j_release_method_); + Java_MediaCodecVideoEncoder_release(jni, *j_media_codec_video_encoder_); if (CheckException(jni)) { ALOGE << "Exception in release."; ProcessHWError(false /* reset_if_fallback_unavailable */); @@ -1051,9 +965,9 @@ int32_t MediaCodecVideoEncoder::SetRateAllocation( if (frame_rate > 0) { last_set_fps_ = frame_rate; } - bool ret = - jni->CallBooleanMethod(*j_media_codec_video_encoder_, j_set_rates_method_, - last_set_bitrate_kbps_, last_set_fps_); + bool ret = 
Java_MediaCodecVideoEncoder_setRates( + jni, *j_media_codec_video_encoder_, last_set_bitrate_kbps_, + last_set_fps_); if (CheckException(jni) || !ret) { ProcessHWError(true /* reset_if_fallback_unavailable */); return sw_fallback_required_ ? WEBRTC_VIDEO_CODEC_OK @@ -1062,37 +976,13 @@ int32_t MediaCodecVideoEncoder::SetRateAllocation( return WEBRTC_VIDEO_CODEC_OK; } -int MediaCodecVideoEncoder::GetOutputBufferInfoIndex( - JNIEnv* jni, - jobject j_output_buffer_info) { - return GetIntField(jni, j_output_buffer_info, j_info_index_field_); -} - -jobject MediaCodecVideoEncoder::GetOutputBufferInfoBuffer( - JNIEnv* jni, - jobject j_output_buffer_info) { - return GetObjectField(jni, j_output_buffer_info, j_info_buffer_field_); -} - -bool MediaCodecVideoEncoder::GetOutputBufferInfoIsKeyFrame( - JNIEnv* jni, - jobject j_output_buffer_info) { - return GetBooleanField(jni, j_output_buffer_info, j_info_is_key_frame_field_); -} - -jlong MediaCodecVideoEncoder::GetOutputBufferInfoPresentationTimestampUs( - JNIEnv* jni, - jobject j_output_buffer_info) { - return GetLongField( - jni, j_output_buffer_info, j_info_presentation_timestamp_us_field_); -} - bool MediaCodecVideoEncoder::DeliverPendingOutputs(JNIEnv* jni) { RTC_DCHECK_CALLED_SEQUENTIALLY(&encoder_queue_checker_); while (true) { - jobject j_output_buffer_info = jni->CallObjectMethod( - *j_media_codec_video_encoder_, j_dequeue_output_buffer_method_); + jobject j_output_buffer_info = + Java_MediaCodecVideoEncoder_dequeueOutputBuffer( + jni, *j_media_codec_video_encoder_); if (CheckException(jni)) { ALOGE << "Exception in set dequeue output buffer."; ProcessHWError(true /* reset_if_fallback_unavailable */); @@ -1103,7 +993,7 @@ bool MediaCodecVideoEncoder::DeliverPendingOutputs(JNIEnv* jni) { } int output_buffer_index = - GetOutputBufferInfoIndex(jni, j_output_buffer_info); + Java_OutputBufferInfo_getIndex(jni, j_output_buffer_info); if (output_buffer_index == -1) { ProcessHWError(true /* reset_if_fallback_unavailable */); 
return false; @@ -1111,14 +1001,16 @@ bool MediaCodecVideoEncoder::DeliverPendingOutputs(JNIEnv* jni) { // Get key and config frame flags. jobject j_output_buffer = - GetOutputBufferInfoBuffer(jni, j_output_buffer_info); - bool key_frame = GetOutputBufferInfoIsKeyFrame(jni, j_output_buffer_info); + Java_OutputBufferInfo_getBuffer(jni, j_output_buffer_info); + bool key_frame = + Java_OutputBufferInfo_isKeyFrame(jni, j_output_buffer_info); // Get frame timestamps from a queue - for non config frames only. int64_t encoding_start_time_ms = 0; int64_t frame_encoding_time_ms = 0; last_output_timestamp_ms_ = - GetOutputBufferInfoPresentationTimestampUs(jni, j_output_buffer_info) / + Java_OutputBufferInfo_getPresentationTimestampUs(jni, + j_output_buffer_info) / rtc::kNumMicrosecsPerMillisec; if (!input_frame_infos_.empty()) { const InputFrameInfo& frame_info = input_frame_infos_.front(); @@ -1247,9 +1139,8 @@ bool MediaCodecVideoEncoder::DeliverPendingOutputs(JNIEnv* jni) { } // Return output buffer back to the encoder. 
- bool success = jni->CallBooleanMethod(*j_media_codec_video_encoder_, - j_release_output_buffer_method_, - output_buffer_index); + bool success = Java_MediaCodecVideoEncoder_releaseOutputBuffer( + jni, *j_media_codec_video_encoder_, output_buffer_index); if (CheckException(jni) || !success) { ProcessHWError(true /* reset_if_fallback_unavailable */); return false; @@ -1348,23 +1239,15 @@ MediaCodecVideoEncoderFactory::MediaCodecVideoEncoderFactory() : egl_context_(nullptr) { JNIEnv* jni = AttachCurrentThreadIfNeeded(); ScopedLocalRefFrame local_ref_frame(jni); - jclass j_encoder_class = FindClass(jni, "org/webrtc/MediaCodecVideoEncoder"); - jclass j_decoder_class = FindClass(jni, "org/webrtc/MediaCodecVideoDecoder"); supported_codecs_.clear(); - bool is_vp8_hw_supported = jni->CallStaticBooleanMethod( - j_encoder_class, - GetStaticMethodID(jni, j_encoder_class, "isVp8HwSupported", "()Z")); - CHECK_EXCEPTION(jni); + bool is_vp8_hw_supported = Java_MediaCodecVideoEncoder_isVp8HwSupported(jni); if (is_vp8_hw_supported) { ALOGD << "VP8 HW Encoder supported."; supported_codecs_.push_back(cricket::VideoCodec(cricket::kVp8CodecName)); } - bool is_vp9_hw_supported = jni->CallStaticBooleanMethod( - j_encoder_class, - GetStaticMethodID(jni, j_encoder_class, "isVp9HwSupported", "()Z")); - CHECK_EXCEPTION(jni); + bool is_vp9_hw_supported = Java_MediaCodecVideoEncoder_isVp9HwSupported(jni); if (is_vp9_hw_supported) { ALOGD << "VP9 HW Encoder supported."; supported_codecs_.push_back(cricket::VideoCodec(cricket::kVp9CodecName)); @@ -1373,11 +1256,8 @@ MediaCodecVideoEncoderFactory::MediaCodecVideoEncoderFactory() // Check if high profile is supported by decoder. If yes, encoder can always // fall back to baseline profile as a subset as high profile. 
- bool is_h264_high_profile_hw_supported = jni->CallStaticBooleanMethod( - j_decoder_class, - GetStaticMethodID(jni, j_decoder_class, "isH264HighProfileHwSupported", - "()Z")); - CHECK_EXCEPTION(jni); + bool is_h264_high_profile_hw_supported = + MediaCodecVideoDecoderFactory::IsH264HighProfileSupported(jni); if (is_h264_high_profile_hw_supported) { ALOGD << "H.264 High Profile HW Encoder supported."; // TODO(magjed): Enumerate actual level instead of using hardcoded level @@ -1393,10 +1273,8 @@ MediaCodecVideoEncoderFactory::MediaCodecVideoEncoderFactory() supported_codecs_with_h264_hp_.push_back(constrained_high); } - bool is_h264_hw_supported = jni->CallStaticBooleanMethod( - j_encoder_class, - GetStaticMethodID(jni, j_encoder_class, "isH264HwSupported", "()Z")); - CHECK_EXCEPTION(jni); + bool is_h264_hw_supported = + Java_MediaCodecVideoEncoder_isH264HwSupported(jni); if (is_h264_hw_supported) { ALOGD << "H.264 HW Encoder supported."; // TODO(magjed): Push Constrained High profile as well when negotiation is @@ -1468,7 +1346,7 @@ void MediaCodecVideoEncoderFactory::DestroyVideoEncoder(VideoEncoder* encoder) { } JNI_FUNCTION_DECLARATION(void, - MediaCodecVideoEncoder_nativeFillBuffer, + MediaCodecVideoEncoder_fillNativeBuffer, JNIEnv* jni, jclass, jlong native_encoder, diff --git a/sdk/android/src/jni/classreferenceholder.cc b/sdk/android/src/jni/classreferenceholder.cc index b1f8fdaaf6..4dd9a6e26f 100644 --- a/sdk/android/src/jni/classreferenceholder.cc +++ b/sdk/android/src/jni/classreferenceholder.cc @@ -63,13 +63,6 @@ ClassReferenceHolder::ClassReferenceHolder(JNIEnv* jni) { LoadClass(jni, "org/webrtc/EglBase14$Context"); LoadClass(jni, "org/webrtc/EncodedImage"); LoadClass(jni, "org/webrtc/EncodedImage$FrameType"); - LoadClass(jni, "org/webrtc/MediaCodecVideoDecoder"); - LoadClass(jni, "org/webrtc/MediaCodecVideoDecoder$DecodedOutputBuffer"); - LoadClass(jni, "org/webrtc/MediaCodecVideoDecoder$DecodedTextureBuffer"); - LoadClass(jni, 
"org/webrtc/MediaCodecVideoDecoder$VideoCodecType"); - LoadClass(jni, "org/webrtc/MediaCodecVideoEncoder"); - LoadClass(jni, "org/webrtc/MediaCodecVideoEncoder$OutputBufferInfo"); - LoadClass(jni, "org/webrtc/MediaCodecVideoEncoder$VideoCodecType"); LoadClass(jni, "org/webrtc/MediaSource$State"); LoadClass(jni, "org/webrtc/NetworkMonitor"); LoadClass(jni, "org/webrtc/NetworkMonitorAutoDetect$ConnectionType");