Android: Generate JNI code for MediaCodecVideoEncoder/Decoder

Bug: webrtc:8278
Change-Id: I19cff18b5d110720ea50d16254ddc4377adc3dbe
Reviewed-on: https://webrtc-review.googlesource.com/31261
Reviewed-by: Sami Kalliomäki <sakal@webrtc.org>
Commit-Queue: Magnus Jedvert <magjed@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#21204}
Magnus Jedvert authored 2017-12-08 11:05:22 +01:00, committed by Commit Bot
commit 655e1967ea, parent f58353ea19
8 changed files with 367 additions and 421 deletions
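The pattern throughout the diff: hand-written jmethodID/jfieldID lookups and raw JNIEnv calls are replaced by typed wrappers generated from the Java classes into sdk/android/generated_video_jni/jni/. Below is a minimal sketch of the two call styles against plain jni.h, using the dequeueInputBuffer method that appears in the diff; the second function is a hand-written stand-in that only mimics the shape of a generated wrapper, not the actual generated code.

#include <jni.h>

// Hand-rolled binding (the style this commit removes): resolve the method ID
// through the raw JNIEnv and invoke it directly.
static jint DequeueInputBufferManual(JNIEnv* jni, jobject j_decoder) {
  jclass decoder_class = jni->GetObjectClass(j_decoder);
  jmethodID method =
      jni->GetMethodID(decoder_class, "dequeueInputBuffer", "()I");
  return jni->CallIntMethod(j_decoder, method);
}

// Generated-style binding (the style this commit adopts): one typed C++
// function per exposed Java method, emitted at build time into
// MediaCodecVideoDecoder_jni.h. This stand-in only mimics the shape of the
// generated wrapper so the call sites in the diff are easier to read.
static jint Java_MediaCodecVideoDecoder_dequeueInputBuffer(JNIEnv* jni,
                                                           jobject j_decoder) {
  return DequeueInputBufferManual(jni, j_decoder);
}

The main payoff is that the JNI signature strings are generated from the Java source instead of being maintained by hand at every call site.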


@ -27,8 +27,8 @@
#include "rtc_base/scoped_ref_ptr.h"
#include "rtc_base/thread.h"
#include "rtc_base/timeutils.h"
#include "sdk/android/generated_video_jni/jni/MediaCodecVideoDecoder_jni.h"
#include "sdk/android/src/jni/androidmediacodeccommon.h"
#include "sdk/android/src/jni/classreferenceholder.h"
#include "sdk/android/src/jni/surfacetexturehelper_jni.h"
#include "sdk/android/src/jni/videoframe.h"
#include "third_party/libyuv/include/libyuv/convert.h"
@ -130,43 +130,10 @@ class MediaCodecVideoDecoder : public VideoDecoder, public rtc::MessageHandler {
// returns.
std::unique_ptr<Thread>
codec_thread_; // Thread on which to operate MediaCodec.
ScopedGlobalRef<jclass> j_media_codec_video_decoder_class_;
ScopedGlobalRef<jobject> j_media_codec_video_decoder_;
jmethodID j_init_decode_method_;
jmethodID j_reset_method_;
jmethodID j_release_method_;
jmethodID j_dequeue_input_buffer_method_;
jmethodID j_queue_input_buffer_method_;
jmethodID j_dequeue_byte_buffer_method_;
jmethodID j_dequeue_texture_buffer_method_;
jmethodID j_return_decoded_byte_buffer_method_;
// MediaCodecVideoDecoder fields.
jfieldID j_input_buffers_field_;
jfieldID j_output_buffers_field_;
jfieldID j_color_format_field_;
jfieldID j_width_field_;
jfieldID j_height_field_;
jfieldID j_stride_field_;
jfieldID j_slice_height_field_;
// MediaCodecVideoDecoder.DecodedTextureBuffer fields.
jfieldID j_texture_id_field_;
jfieldID j_transform_matrix_field_;
jfieldID j_texture_presentation_timestamp_ms_field_;
jfieldID j_texture_timestamp_ms_field_;
jfieldID j_texture_ntp_timestamp_ms_field_;
jfieldID j_texture_decode_time_ms_field_;
jfieldID j_texture_frame_delay_ms_field_;
// MediaCodecVideoDecoder.DecodedOutputBuffer fields.
jfieldID j_info_index_field_;
jfieldID j_info_offset_field_;
jfieldID j_info_size_field_;
jfieldID j_presentation_timestamp_ms_field_;
jfieldID j_timestamp_ms_field_;
jfieldID j_ntp_timestamp_ms_field_;
jfieldID j_byte_buffer_decode_time_ms_field_;
// Global references; must be deleted in Release().
std::vector<jobject> input_buffers_;
std::vector<ScopedGlobalRef<jobject>> input_buffers_;
};
MediaCodecVideoDecoder::MediaCodecVideoDecoder(JNIEnv* jni,
@ -178,93 +145,12 @@ MediaCodecVideoDecoder::MediaCodecVideoDecoder(JNIEnv* jni,
inited_(false),
sw_fallback_required_(false),
codec_thread_(Thread::Create()),
j_media_codec_video_decoder_class_(
jni,
FindClass(jni, "org/webrtc/MediaCodecVideoDecoder")),
j_media_codec_video_decoder_(
jni,
jni->NewObject(*j_media_codec_video_decoder_class_,
GetMethodID(jni,
*j_media_codec_video_decoder_class_,
"<init>",
"()V"))) {
Java_MediaCodecVideoDecoder_Constructor(jni)) {
codec_thread_->SetName("MediaCodecVideoDecoder", NULL);
RTC_CHECK(codec_thread_->Start()) << "Failed to start MediaCodecVideoDecoder";
j_init_decode_method_ = GetMethodID(
jni, *j_media_codec_video_decoder_class_, "initDecode",
"(Lorg/webrtc/MediaCodecVideoDecoder$VideoCodecType;"
"IILorg/webrtc/SurfaceTextureHelper;)Z");
j_reset_method_ =
GetMethodID(jni, *j_media_codec_video_decoder_class_, "reset", "(II)V");
j_release_method_ =
GetMethodID(jni, *j_media_codec_video_decoder_class_, "release", "()V");
j_dequeue_input_buffer_method_ = GetMethodID(
jni, *j_media_codec_video_decoder_class_, "dequeueInputBuffer", "()I");
j_queue_input_buffer_method_ = GetMethodID(
jni, *j_media_codec_video_decoder_class_, "queueInputBuffer", "(IIJJJ)Z");
j_dequeue_byte_buffer_method_ = GetMethodID(
jni, *j_media_codec_video_decoder_class_, "dequeueOutputBuffer",
"(I)Lorg/webrtc/MediaCodecVideoDecoder$DecodedOutputBuffer;");
j_dequeue_texture_buffer_method_ = GetMethodID(
jni, *j_media_codec_video_decoder_class_, "dequeueTextureBuffer",
"(I)Lorg/webrtc/MediaCodecVideoDecoder$DecodedTextureBuffer;");
j_return_decoded_byte_buffer_method_ =
GetMethodID(jni, *j_media_codec_video_decoder_class_,
"returnDecodedOutputBuffer", "(I)V");
j_input_buffers_field_ = GetFieldID(
jni, *j_media_codec_video_decoder_class_,
"inputBuffers", "[Ljava/nio/ByteBuffer;");
j_output_buffers_field_ = GetFieldID(
jni, *j_media_codec_video_decoder_class_,
"outputBuffers", "[Ljava/nio/ByteBuffer;");
j_color_format_field_ = GetFieldID(
jni, *j_media_codec_video_decoder_class_, "colorFormat", "I");
j_width_field_ = GetFieldID(
jni, *j_media_codec_video_decoder_class_, "width", "I");
j_height_field_ = GetFieldID(
jni, *j_media_codec_video_decoder_class_, "height", "I");
j_stride_field_ = GetFieldID(
jni, *j_media_codec_video_decoder_class_, "stride", "I");
j_slice_height_field_ = GetFieldID(
jni, *j_media_codec_video_decoder_class_, "sliceHeight", "I");
jclass j_decoded_texture_buffer_class = FindClass(jni,
"org/webrtc/MediaCodecVideoDecoder$DecodedTextureBuffer");
j_texture_id_field_ = GetFieldID(
jni, j_decoded_texture_buffer_class, "textureID", "I");
j_transform_matrix_field_ = GetFieldID(
jni, j_decoded_texture_buffer_class, "transformMatrix", "[F");
j_texture_presentation_timestamp_ms_field_ = GetFieldID(
jni, j_decoded_texture_buffer_class, "presentationTimeStampMs", "J");
j_texture_timestamp_ms_field_ = GetFieldID(
jni, j_decoded_texture_buffer_class, "timeStampMs", "J");
j_texture_ntp_timestamp_ms_field_ = GetFieldID(
jni, j_decoded_texture_buffer_class, "ntpTimeStampMs", "J");
j_texture_decode_time_ms_field_ = GetFieldID(
jni, j_decoded_texture_buffer_class, "decodeTimeMs", "J");
j_texture_frame_delay_ms_field_ = GetFieldID(
jni, j_decoded_texture_buffer_class, "frameDelayMs", "J");
jclass j_decoded_output_buffer_class = FindClass(jni,
"org/webrtc/MediaCodecVideoDecoder$DecodedOutputBuffer");
j_info_index_field_ = GetFieldID(
jni, j_decoded_output_buffer_class, "index", "I");
j_info_offset_field_ = GetFieldID(
jni, j_decoded_output_buffer_class, "offset", "I");
j_info_size_field_ = GetFieldID(
jni, j_decoded_output_buffer_class, "size", "I");
j_presentation_timestamp_ms_field_ = GetFieldID(
jni, j_decoded_output_buffer_class, "presentationTimeStampMs", "J");
j_timestamp_ms_field_ = GetFieldID(
jni, j_decoded_output_buffer_class, "timeStampMs", "J");
j_ntp_timestamp_ms_field_ = GetFieldID(
jni, j_decoded_output_buffer_class, "ntpTimeStampMs", "J");
j_byte_buffer_decode_time_ms_field_ = GetFieldID(
jni, j_decoded_output_buffer_class, "decodeTimeMs", "J");
CHECK_EXCEPTION(jni) << "MediaCodecVideoDecoder ctor failed";
use_surface_ = (render_egl_context_ != NULL);
ALOGD << "MediaCodecVideoDecoder ctor. Use surface: " << use_surface_;
memset(&codec_, 0, sizeof(codec_));
@ -347,13 +233,10 @@ int32_t MediaCodecVideoDecoder::InitDecodeOnCodecThread() {
}
}
jobject j_video_codec_enum = JavaEnumFromIndexAndClassName(
jni, "MediaCodecVideoDecoder$VideoCodecType", codecType_);
bool success = jni->CallBooleanMethod(
*j_media_codec_video_decoder_,
j_init_decode_method_,
j_video_codec_enum,
codec_.width,
jobject j_video_codec_enum =
Java_VideoCodecType_fromNativeIndex(jni, codecType_);
bool success = Java_MediaCodecVideoDecoder_initDecode(
jni, *j_media_codec_video_decoder_, j_video_codec_enum, codec_.width,
codec_.height,
use_surface_ ? surface_texture_helper_->GetJavaSurfaceTextureHelper()
: nullptr);
@ -380,19 +263,11 @@ int32_t MediaCodecVideoDecoder::InitDecodeOnCodecThread() {
}
ALOGD << "Maximum amount of pending frames: " << max_pending_frames_;
jobjectArray input_buffers = (jobjectArray)GetObjectField(
jni, *j_media_codec_video_decoder_, j_input_buffers_field_);
size_t num_input_buffers = jni->GetArrayLength(input_buffers);
input_buffers_.resize(num_input_buffers);
for (size_t i = 0; i < num_input_buffers; ++i) {
input_buffers_[i] =
jni->NewGlobalRef(jni->GetObjectArrayElement(input_buffers, i));
if (CheckException(jni)) {
ALOGE << "NewGlobalRef error - fallback to SW codec.";
sw_fallback_required_ = true;
return WEBRTC_VIDEO_CODEC_ERROR;
}
}
jobjectArray input_buffers = Java_MediaCodecVideoDecoder_getInputBuffers(
jni, *j_media_codec_video_decoder_);
input_buffers_ = JavaToNativeVector<ScopedGlobalRef<jobject>>(
jni, input_buffers,
[](JNIEnv* env, jobject o) { return ScopedGlobalRef<jobject>(env, o); });
codec_thread_->PostDelayed(RTC_FROM_HERE, kMediaCodecPollMs, this);
@ -412,11 +287,8 @@ int32_t MediaCodecVideoDecoder::ResetDecodeOnCodecThread() {
rtc::MessageQueueManager::Clear(this);
ResetVariables();
jni->CallVoidMethod(
*j_media_codec_video_decoder_,
j_reset_method_,
codec_.width,
codec_.height);
Java_MediaCodecVideoDecoder_reset(jni, *j_media_codec_video_decoder_,
codec_.width, codec_.height);
if (CheckException(jni)) {
ALOGE << "Soft reset error - fallback to SW codec.";
@ -445,11 +317,8 @@ int32_t MediaCodecVideoDecoder::ReleaseOnCodecThread() {
ALOGD << "DecoderReleaseOnCodecThread: Frames received: " <<
frames_received_ << ". Frames decoded: " << frames_decoded_;
ScopedLocalRefFrame local_ref_frame(jni);
for (size_t i = 0; i < input_buffers_.size(); i++) {
jni->DeleteGlobalRef(input_buffers_[i]);
}
input_buffers_.clear();
jni->CallVoidMethod(*j_media_codec_video_decoder_, j_release_method_);
Java_MediaCodecVideoDecoder_release(jni, *j_media_codec_video_decoder_);
surface_texture_helper_ = nullptr;
inited_ = false;
rtc::MessageQueueManager::Clear(this);
@ -600,8 +469,8 @@ int32_t MediaCodecVideoDecoder::DecodeOnCodecThread(
}
// Get input buffer.
int j_input_buffer_index = jni->CallIntMethod(
*j_media_codec_video_decoder_, j_dequeue_input_buffer_method_);
int j_input_buffer_index = Java_MediaCodecVideoDecoder_dequeueInputBuffer(
jni, *j_media_codec_video_decoder_);
if (CheckException(jni) || j_input_buffer_index < 0) {
ALOGE << "dequeueInputBuffer error: " << j_input_buffer_index <<
". Retry DeliverPendingOutputs.";
@ -613,8 +482,8 @@ int32_t MediaCodecVideoDecoder::DecodeOnCodecThread(
return ProcessHWErrorOnCodecThread();
}
// Try dequeue input buffer one last time.
j_input_buffer_index = jni->CallIntMethod(
*j_media_codec_video_decoder_, j_dequeue_input_buffer_method_);
j_input_buffer_index = Java_MediaCodecVideoDecoder_dequeueInputBuffer(
jni, *j_media_codec_video_decoder_);
if (CheckException(jni) || j_input_buffer_index < 0) {
ALOGE << "dequeueInputBuffer critical error: " << j_input_buffer_index;
return ProcessHWErrorOnCodecThread();
@ -622,7 +491,7 @@ int32_t MediaCodecVideoDecoder::DecodeOnCodecThread(
}
// Copy encoded data to Java ByteBuffer.
jobject j_input_buffer = input_buffers_[j_input_buffer_index];
jobject j_input_buffer = *input_buffers_[j_input_buffer_index];
uint8_t* buffer =
reinterpret_cast<uint8_t*>(jni->GetDirectBufferAddress(j_input_buffer));
RTC_CHECK(buffer) << "Indirect buffer??";
@ -664,14 +533,10 @@ int32_t MediaCodecVideoDecoder::DecodeOnCodecThread(
pending_frame_qps_.push_back(qp);
// Feed input to decoder.
bool success = jni->CallBooleanMethod(
*j_media_codec_video_decoder_,
j_queue_input_buffer_method_,
j_input_buffer_index,
inputImage._length,
presentation_timestamp_us,
static_cast<int64_t> (inputImage._timeStamp),
inputImage.ntp_time_ms_);
bool success = Java_MediaCodecVideoDecoder_queueInputBuffer(
jni, *j_media_codec_video_decoder_, j_input_buffer_index,
inputImage._length, presentation_timestamp_us,
static_cast<int64_t>(inputImage._timeStamp), inputImage.ntp_time_ms_);
if (CheckException(jni) || !success) {
ALOGE << "queueInputBuffer error";
return ProcessHWErrorOnCodecThread();
@ -695,11 +560,9 @@ bool MediaCodecVideoDecoder::DeliverPendingOutputs(
}
// Get decoder output.
jobject j_decoder_output_buffer =
jni->CallObjectMethod(*j_media_codec_video_decoder_,
use_surface_ ? j_dequeue_texture_buffer_method_
: j_dequeue_byte_buffer_method_,
dequeue_timeout_ms);
(use_surface_ ? &Java_MediaCodecVideoDecoder_dequeueTextureBuffer
: &Java_MediaCodecVideoDecoder_dequeueOutputBuffer)(
jni, *j_media_codec_video_decoder_, dequeue_timeout_ms);
if (CheckException(jni)) {
ALOGE << "dequeueOutputBuffer() error";
return false;
@ -710,10 +573,12 @@ bool MediaCodecVideoDecoder::DeliverPendingOutputs(
}
// Get decoded video frame properties.
int color_format = GetIntField(jni, *j_media_codec_video_decoder_,
j_color_format_field_);
int width = GetIntField(jni, *j_media_codec_video_decoder_, j_width_field_);
int height = GetIntField(jni, *j_media_codec_video_decoder_, j_height_field_);
int color_format = Java_MediaCodecVideoDecoder_getColorFormat(
jni, *j_media_codec_video_decoder_);
int width =
Java_MediaCodecVideoDecoder_getWidth(jni, *j_media_codec_video_decoder_);
int height =
Java_MediaCodecVideoDecoder_getHeight(jni, *j_media_codec_video_decoder_);
rtc::scoped_refptr<VideoFrameBuffer> frame_buffer;
int64_t presentation_timestamps_ms = 0;
@ -723,24 +588,24 @@ bool MediaCodecVideoDecoder::DeliverPendingOutputs(
int64_t frame_delayed_ms = 0;
if (use_surface_) {
// Extract data from Java DecodedTextureBuffer.
presentation_timestamps_ms = GetLongField(
jni, j_decoder_output_buffer,
j_texture_presentation_timestamp_ms_field_);
output_timestamps_ms = GetLongField(
jni, j_decoder_output_buffer, j_texture_timestamp_ms_field_);
output_ntp_timestamps_ms = GetLongField(
jni, j_decoder_output_buffer, j_texture_ntp_timestamp_ms_field_);
decode_time_ms = GetLongField(
jni, j_decoder_output_buffer, j_texture_decode_time_ms_field_);
presentation_timestamps_ms =
Java_DecodedTextureBuffer_getPresentationTimestampMs(
jni, j_decoder_output_buffer);
output_timestamps_ms =
Java_DecodedTextureBuffer_getTimeStampMs(jni, j_decoder_output_buffer);
output_ntp_timestamps_ms = Java_DecodedTextureBuffer_getNtpTimestampMs(
jni, j_decoder_output_buffer);
decode_time_ms =
Java_DecodedTextureBuffer_getDecodeTimeMs(jni, j_decoder_output_buffer);
const int texture_id =
GetIntField(jni, j_decoder_output_buffer, j_texture_id_field_);
Java_DecodedTextureBuffer_getTextureId(jni, j_decoder_output_buffer);
if (texture_id != 0) { // |texture_id| == 0 represents a dropped frame.
const jfloatArray j_transform_matrix =
reinterpret_cast<jfloatArray>(GetObjectField(
jni, j_decoder_output_buffer, j_transform_matrix_field_));
frame_delayed_ms = GetLongField(
jni, j_decoder_output_buffer, j_texture_frame_delay_ms_field_);
Java_DecodedTextureBuffer_getTransformMatrix(jni,
j_decoder_output_buffer);
frame_delayed_ms = Java_DecodedTextureBuffer_getFrameDelayMs(
jni, j_decoder_output_buffer);
// Create VideoFrameBuffer with native texture handle.
frame_buffer = surface_texture_helper_->CreateTextureFrame(
@ -751,25 +616,26 @@ bool MediaCodecVideoDecoder::DeliverPendingOutputs(
} else {
// Extract data from Java ByteBuffer and create output yuv420 frame -
// for non surface decoding only.
int stride =
GetIntField(jni, *j_media_codec_video_decoder_, j_stride_field_);
const int slice_height =
GetIntField(jni, *j_media_codec_video_decoder_, j_slice_height_field_);
const int output_buffer_index = GetIntField(
jni, j_decoder_output_buffer, j_info_index_field_);
const int output_buffer_offset = GetIntField(
jni, j_decoder_output_buffer, j_info_offset_field_);
const int output_buffer_size = GetIntField(
jni, j_decoder_output_buffer, j_info_size_field_);
presentation_timestamps_ms = GetLongField(
jni, j_decoder_output_buffer, j_presentation_timestamp_ms_field_);
output_timestamps_ms = GetLongField(
jni, j_decoder_output_buffer, j_timestamp_ms_field_);
output_ntp_timestamps_ms = GetLongField(
jni, j_decoder_output_buffer, j_ntp_timestamp_ms_field_);
int stride = Java_MediaCodecVideoDecoder_getStride(
jni, *j_media_codec_video_decoder_);
const int slice_height = Java_MediaCodecVideoDecoder_getSliceHeight(
jni, *j_media_codec_video_decoder_);
const int output_buffer_index =
Java_DecodedOutputBuffer_getIndex(jni, j_decoder_output_buffer);
const int output_buffer_offset =
Java_DecodedOutputBuffer_getOffset(jni, j_decoder_output_buffer);
const int output_buffer_size =
Java_DecodedOutputBuffer_getSize(jni, j_decoder_output_buffer);
presentation_timestamps_ms =
Java_DecodedOutputBuffer_getPresentationTimestampMs(
jni, j_decoder_output_buffer);
output_timestamps_ms =
Java_DecodedOutputBuffer_getTimestampMs(jni, j_decoder_output_buffer);
output_ntp_timestamps_ms = Java_DecodedOutputBuffer_getNtpTimestampMs(
jni, j_decoder_output_buffer);
decode_time_ms = GetLongField(jni, j_decoder_output_buffer,
j_byte_buffer_decode_time_ms_field_);
decode_time_ms =
Java_DecodedOutputBuffer_getDecodeTimeMs(jni, j_decoder_output_buffer);
RTC_CHECK_GE(slice_height, height);
if (output_buffer_size < width * height * 3 / 2) {
@ -782,8 +648,8 @@ bool MediaCodecVideoDecoder::DeliverPendingOutputs(
// output byte buffer, so the actual stride value needs to be corrected.
stride = output_buffer_size * 2 / (height * 3);
}
jobjectArray output_buffers = reinterpret_cast<jobjectArray>(GetObjectField(
jni, *j_media_codec_video_decoder_, j_output_buffers_field_));
jobjectArray output_buffers = Java_MediaCodecVideoDecoder_getOutputBuffers(
jni, *j_media_codec_video_decoder_);
jobject output_buffer =
jni->GetObjectArrayElement(output_buffers, output_buffer_index);
uint8_t* payload = reinterpret_cast<uint8_t*>(jni->GetDirectBufferAddress(
@ -847,10 +713,8 @@ bool MediaCodecVideoDecoder::DeliverPendingOutputs(
frame_buffer = i420_buffer;
// Return output byte buffer back to codec.
jni->CallVoidMethod(
*j_media_codec_video_decoder_,
j_return_decoded_byte_buffer_method_,
output_buffer_index);
Java_MediaCodecVideoDecoder_returnDecodedOutputBuffer(
jni, *j_media_codec_video_decoder_, output_buffer_index);
if (CheckException(jni)) {
ALOGE << "returnDecodedOutputBuffer error";
return false;
@ -934,38 +798,22 @@ MediaCodecVideoDecoderFactory::MediaCodecVideoDecoderFactory()
ALOGD << "MediaCodecVideoDecoderFactory ctor";
JNIEnv* jni = AttachCurrentThreadIfNeeded();
ScopedLocalRefFrame local_ref_frame(jni);
jclass j_decoder_class = FindClass(jni, "org/webrtc/MediaCodecVideoDecoder");
supported_codec_types_.clear();
bool is_vp8_hw_supported = jni->CallStaticBooleanMethod(
j_decoder_class,
GetStaticMethodID(jni, j_decoder_class, "isVp8HwSupported", "()Z"));
if (CheckException(jni)) {
is_vp8_hw_supported = false;
}
if (is_vp8_hw_supported) {
if (Java_MediaCodecVideoDecoder_isVp8HwSupported(jni) &&
!CheckException(jni)) {
ALOGD << "VP8 HW Decoder supported.";
supported_codec_types_.push_back(kVideoCodecVP8);
}
bool is_vp9_hw_supported = jni->CallStaticBooleanMethod(
j_decoder_class,
GetStaticMethodID(jni, j_decoder_class, "isVp9HwSupported", "()Z"));
if (CheckException(jni)) {
is_vp9_hw_supported = false;
}
if (is_vp9_hw_supported) {
if (Java_MediaCodecVideoDecoder_isVp9HwSupported(jni) &&
!CheckException(jni)) {
ALOGD << "VP9 HW Decoder supported.";
supported_codec_types_.push_back(kVideoCodecVP9);
}
bool is_h264_hw_supported = jni->CallStaticBooleanMethod(
j_decoder_class,
GetStaticMethodID(jni, j_decoder_class, "isH264HwSupported", "()Z"));
if (CheckException(jni)) {
is_h264_hw_supported = false;
}
if (is_h264_hw_supported) {
if (Java_MediaCodecVideoDecoder_isH264HwSupported(jni) &&
!CheckException(jni)) {
ALOGD << "H264 HW Decoder supported.";
supported_codec_types_.push_back(kVideoCodecH264);
}
@ -1015,6 +863,10 @@ void MediaCodecVideoDecoderFactory::DestroyVideoDecoder(VideoDecoder* decoder) {
delete decoder;
}
bool MediaCodecVideoDecoderFactory::IsH264HighProfileSupported(JNIEnv* env) {
return Java_MediaCodecVideoDecoder_isH264HighProfileHwSupported(env);
}
const char* MediaCodecVideoDecoder::ImplementationName() const {
return "MediaCodec";
}
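
A second recurring change: the manually managed NewGlobalRef/DeleteGlobalRef vectors for MediaCodec input buffers become std::vector<ScopedGlobalRef<jobject>> filled via JavaToNativeVector. A rough stand-in for that conversion is sketched below, assuming a ScopedGlobalRef-like type that takes one global reference in its constructor and releases it in its destructor; the real ScopedGlobalRef and JavaToNativeVector live in WebRTC's JNI helpers and may differ in detail.

#include <jni.h>
#include <vector>

// Hypothetical stand-in for webrtc's ScopedGlobalRef: owns one JNI global
// reference and releases it when destroyed. Assumes the stored JNIEnv* stays
// valid on the owning thread.
class ScopedGlobalRefSketch {
 public:
  ScopedGlobalRefSketch(JNIEnv* jni, jobject obj)
      : jni_(jni), obj_(jni->NewGlobalRef(obj)) {}
  ScopedGlobalRefSketch(ScopedGlobalRefSketch&& other)
      : jni_(other.jni_), obj_(other.obj_) {
    other.obj_ = nullptr;
  }
  ~ScopedGlobalRefSketch() {
    if (obj_) jni_->DeleteGlobalRef(obj_);
  }
  jobject operator*() const { return obj_; }

 private:
  JNIEnv* jni_;
  jobject obj_;
};

// Sketch of converting a Java object[] into owning global references, in the
// spirit of the JavaToNativeVector call sites above.
static std::vector<ScopedGlobalRefSketch> ToGlobalRefVector(JNIEnv* jni,
                                                            jobjectArray array) {
  std::vector<ScopedGlobalRefSketch> result;
  const jsize length = jni->GetArrayLength(array);
  result.reserve(length);
  for (jsize i = 0; i < length; ++i) {
    // GetObjectArrayElement returns a local reference; wrap it in a global
    // reference so the buffer outlives the current JNI frame, then drop the
    // local reference.
    jobject element = jni->GetObjectArrayElement(array, i);
    result.emplace_back(jni, element);
    jni->DeleteLocalRef(element);
  }
  return result;
}

With the RAII wrapper, Release() no longer needs the explicit DeleteGlobalRef loop the old code carried; clearing input_buffers_ frees the references.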


@ -31,6 +31,8 @@ class MediaCodecVideoDecoderFactory
void DestroyVideoDecoder(VideoDecoder* decoder) override;
static bool IsH264HighProfileSupported(JNIEnv* env);
private:
jobject egl_context_;
std::vector<VideoCodecType> supported_codec_types_;


@ -37,8 +37,9 @@
#include "rtc_base/thread.h"
#include "rtc_base/timeutils.h"
#include "rtc_base/weak_ptr.h"
#include "sdk/android/generated_video_jni/jni/MediaCodecVideoEncoder_jni.h"
#include "sdk/android/src/jni/androidmediacodeccommon.h"
#include "sdk/android/src/jni/classreferenceholder.h"
#include "sdk/android/src/jni/androidmediadecoder_jni.h"
#include "sdk/android/src/jni/jni_helpers.h"
#include "sdk/android/src/jni/videoframe.h"
#include "system_wrappers/include/field_trial.h"
@ -178,13 +179,6 @@ class MediaCodecVideoEncoder : public VideoEncoder {
jobject frame,
int input_buffer_index);
// Helper accessors for MediaCodecVideoEncoder$OutputBufferInfo members.
int GetOutputBufferInfoIndex(JNIEnv* jni, jobject j_output_buffer_info);
jobject GetOutputBufferInfoBuffer(JNIEnv* jni, jobject j_output_buffer_info);
bool GetOutputBufferInfoIsKeyFrame(JNIEnv* jni, jobject j_output_buffer_info);
jlong GetOutputBufferInfoPresentationTimestampUs(
JNIEnv* jni, jobject j_output_buffer_info);
// Deliver any outputs pending in the MediaCodec to our |callback_| and return
// true on success.
bool DeliverPendingOutputs(JNIEnv* jni);
@ -212,25 +206,7 @@ class MediaCodecVideoEncoder : public VideoEncoder {
// State that is constant for the lifetime of this object once the ctor
// returns.
rtc::SequencedTaskChecker encoder_queue_checker_;
ScopedGlobalRef<jclass> j_media_codec_video_encoder_class_;
ScopedGlobalRef<jobject> j_media_codec_video_encoder_;
jmethodID j_init_encode_method_;
jmethodID j_get_input_buffers_method_;
jmethodID j_dequeue_input_buffer_method_;
jmethodID j_encode_buffer_method_;
jmethodID j_encode_texture_method_;
jmethodID j_encode_frame_method_;
jmethodID j_release_method_;
jmethodID j_set_rates_method_;
jmethodID j_dequeue_output_buffer_method_;
jmethodID j_release_output_buffer_method_;
jfieldID j_color_format_field_;
jfieldID j_info_index_field_;
jfieldID j_info_buffer_field_;
jfieldID j_info_is_key_frame_field_;
jfieldID j_info_presentation_timestamp_us_field_;
ScopedGlobalRef<jclass> j_video_frame_texture_buffer_class_;
// State that is valid only between InitEncode() and the next Release().
int width_; // Frame width in pixels.
@ -289,7 +265,7 @@ class MediaCodecVideoEncoder : public VideoEncoder {
bool scale_;
H264::Profile profile_;
// Global references; must be deleted in Release().
std::vector<jobject> input_buffers_;
std::vector<ScopedGlobalRef<jobject>> input_buffers_;
H264BitstreamParser h264_bitstream_parser_;
// VP9 variables to populate codec specific structure.
@ -331,73 +307,15 @@ MediaCodecVideoEncoder::MediaCodecVideoEncoder(JNIEnv* jni,
jobject egl_context)
: codec_(codec),
callback_(NULL),
j_media_codec_video_encoder_class_(
jni,
FindClass(jni, "org/webrtc/MediaCodecVideoEncoder")),
j_media_codec_video_encoder_(
jni,
jni->NewObject(*j_media_codec_video_encoder_class_,
GetMethodID(jni,
*j_media_codec_video_encoder_class_,
"<init>",
"()V"))),
j_video_frame_texture_buffer_class_(
jni,
FindClass(jni, "org/webrtc/VideoFrame$TextureBuffer")),
Java_MediaCodecVideoEncoder_Constructor(jni)),
inited_(false),
use_surface_(false),
egl_context_(egl_context),
sw_fallback_required_(false) {
encoder_queue_checker_.Detach();
jclass j_output_buffer_info_class =
FindClass(jni, "org/webrtc/MediaCodecVideoEncoder$OutputBufferInfo");
j_init_encode_method_ =
GetMethodID(jni, *j_media_codec_video_encoder_class_, "initEncode",
"(Lorg/webrtc/MediaCodecVideoEncoder$VideoCodecType;"
"IIIIILorg/webrtc/EglBase14$Context;)Z");
j_get_input_buffers_method_ = GetMethodID(
jni,
*j_media_codec_video_encoder_class_,
"getInputBuffers",
"()[Ljava/nio/ByteBuffer;");
j_dequeue_input_buffer_method_ = GetMethodID(
jni, *j_media_codec_video_encoder_class_, "dequeueInputBuffer", "()I");
j_encode_buffer_method_ = GetMethodID(
jni, *j_media_codec_video_encoder_class_, "encodeBuffer", "(ZIIJ)Z");
j_encode_texture_method_ = GetMethodID(
jni, *j_media_codec_video_encoder_class_, "encodeTexture",
"(ZI[FJ)Z");
j_encode_frame_method_ =
GetMethodID(jni, *j_media_codec_video_encoder_class_, "encodeFrame",
"(JZLorg/webrtc/VideoFrame;I)Z");
j_release_method_ =
GetMethodID(jni, *j_media_codec_video_encoder_class_, "release", "()V");
j_set_rates_method_ = GetMethodID(
jni, *j_media_codec_video_encoder_class_, "setRates", "(II)Z");
j_dequeue_output_buffer_method_ = GetMethodID(
jni,
*j_media_codec_video_encoder_class_,
"dequeueOutputBuffer",
"()Lorg/webrtc/MediaCodecVideoEncoder$OutputBufferInfo;");
j_release_output_buffer_method_ = GetMethodID(
jni, *j_media_codec_video_encoder_class_, "releaseOutputBuffer", "(I)Z");
j_color_format_field_ =
GetFieldID(jni, *j_media_codec_video_encoder_class_, "colorFormat", "I");
j_info_index_field_ =
GetFieldID(jni, j_output_buffer_info_class, "index", "I");
j_info_buffer_field_ = GetFieldID(
jni, j_output_buffer_info_class, "buffer", "Ljava/nio/ByteBuffer;");
j_info_is_key_frame_field_ =
GetFieldID(jni, j_output_buffer_info_class, "isKeyFrame", "Z");
j_info_presentation_timestamp_us_field_ = GetFieldID(
jni, j_output_buffer_info_class, "presentationTimestampUs", "J");
if (CheckException(jni)) {
ALOGW << "MediaCodecVideoEncoder ctor failed.";
ProcessHWError(true /* reset_if_fallback_unavailable */);
}
Random random(rtc::TimeMicros());
picture_id_ = random.Rand<uint16_t>() & 0x7FFF;
tl0_pic_idx_ = random.Rand<uint8_t>();
@ -603,12 +521,11 @@ int32_t MediaCodecVideoEncoder::InitEncodeInternal(int width,
frames_received_since_last_key_ = kMinKeyFrameInterval;
// We enforce no extra stride/padding in the format creation step.
jobject j_video_codec_enum = JavaEnumFromIndexAndClassName(
jni, "MediaCodecVideoEncoder$VideoCodecType", codec_type);
const bool encode_status = jni->CallBooleanMethod(
*j_media_codec_video_encoder_, j_init_encode_method_, j_video_codec_enum,
profile_, width, height, kbps, fps,
(use_surface ? egl_context_ : nullptr));
jobject j_video_codec_enum =
Java_VideoCodecType_fromNativeIndex(jni, codec_type);
const bool encode_status = Java_MediaCodecVideoEncoder_initEncode(
jni, *j_media_codec_video_encoder_, j_video_codec_enum, profile_, width,
height, kbps, fps, (use_surface ? egl_context_ : nullptr));
if (!encode_status) {
ALOGE << "Failed to configure encoder.";
ProcessHWError(false /* reset_if_fallback_unavailable */);
@ -621,9 +538,8 @@ int32_t MediaCodecVideoEncoder::InitEncodeInternal(int width,
}
if (!use_surface) {
jobjectArray input_buffers = reinterpret_cast<jobjectArray>(
jni->CallObjectMethod(*j_media_codec_video_encoder_,
j_get_input_buffers_method_));
jobjectArray input_buffers = Java_MediaCodecVideoEncoder_getInputBuffers(
jni, *j_media_codec_video_encoder_);
if (CheckException(jni)) {
ALOGE << "Exception in get input buffers.";
ProcessHWError(false /* reset_if_fallback_unavailable */);
@ -635,8 +551,8 @@ int32_t MediaCodecVideoEncoder::InitEncodeInternal(int width,
return WEBRTC_VIDEO_CODEC_ERROR;
}
switch (GetIntField(jni, *j_media_codec_video_encoder_,
j_color_format_field_)) {
switch (Java_MediaCodecVideoEncoder_getColorFormat(
jni, *j_media_codec_video_encoder_)) {
case COLOR_FormatYUV420Planar:
encoder_fourcc_ = libyuv::FOURCC_YU12;
break;
@ -650,15 +566,15 @@ int32_t MediaCodecVideoEncoder::InitEncodeInternal(int width,
ProcessHWError(false /* reset_if_fallback_unavailable */);
return WEBRTC_VIDEO_CODEC_ERROR;
}
size_t num_input_buffers = jni->GetArrayLength(input_buffers);
RTC_CHECK(input_buffers_.empty())
<< "Unexpected double InitEncode without Release";
input_buffers_.resize(num_input_buffers);
for (size_t i = 0; i < num_input_buffers; ++i) {
input_buffers_[i] =
jni->NewGlobalRef(jni->GetObjectArrayElement(input_buffers, i));
int64_t yuv_buffer_capacity =
jni->GetDirectBufferCapacity(input_buffers_[i]);
input_buffers_ = JavaToNativeVector<ScopedGlobalRef<jobject>>(
jni, input_buffers, [](JNIEnv* env, jobject o) {
return ScopedGlobalRef<jobject>(env, o);
});
for (const ScopedGlobalRef<jobject>& buffer : input_buffers_) {
int64_t yuv_buffer_capacity = jni->GetDirectBufferCapacity(*buffer);
if (CheckException(jni)) {
ALOGE << "Exception in get direct buffer capacity.";
ProcessHWError(false /* reset_if_fallback_unavailable */);
@ -772,8 +688,8 @@ int32_t MediaCodecVideoEncoder::Encode(
int j_input_buffer_index = -1;
if (!use_surface_) {
j_input_buffer_index = jni->CallIntMethod(*j_media_codec_video_encoder_,
j_dequeue_input_buffer_method_);
j_input_buffer_index = Java_MediaCodecVideoEncoder_dequeueInputBuffer(
jni, *j_media_codec_video_encoder_);
if (CheckException(jni)) {
ALOGE << "Exception in dequeu input buffer.";
return ProcessHWErrorOnEncode();
@ -892,9 +808,9 @@ bool MediaCodecVideoEncoder::IsTextureFrame(JNIEnv* jni,
case AndroidVideoFrameBuffer::AndroidType::kTextureBuffer:
return true;
case AndroidVideoFrameBuffer::AndroidType::kJavaBuffer:
return jni->IsInstanceOf(static_cast<AndroidVideoBuffer*>(android_buffer)
->video_frame_buffer(),
*j_video_frame_texture_buffer_class_);
return Java_MediaCodecVideoEncoder_isTextureBuffer(
jni, static_cast<AndroidVideoBuffer*>(android_buffer)
->video_frame_buffer());
default:
RTC_NOTREACHED();
return false;
@ -916,9 +832,9 @@ bool MediaCodecVideoEncoder::EncodeByteBuffer(JNIEnv* jni,
i420_buffer->StrideV())) {
return false;
}
bool encode_status = jni->CallBooleanMethod(
*j_media_codec_video_encoder_, j_encode_buffer_method_, key_frame,
input_buffer_index, yuv_size_, current_timestamp_us_);
bool encode_status = Java_MediaCodecVideoEncoder_encodeBuffer(
jni, *j_media_codec_video_encoder_, key_frame, input_buffer_index,
yuv_size_, current_timestamp_us_);
if (CheckException(jni)) {
ALOGE << "Exception in encode buffer.";
ProcessHWError(true /* reset_if_fallback_unavailable */);
@ -935,7 +851,7 @@ bool MediaCodecVideoEncoder::FillInputBuffer(JNIEnv* jni,
int stride_u,
uint8_t const* buffer_v,
int stride_v) {
jobject j_input_buffer = input_buffers_[input_buffer_index];
jobject j_input_buffer = *input_buffers_[input_buffer_index];
uint8_t* yuv_buffer =
reinterpret_cast<uint8_t*>(jni->GetDirectBufferAddress(j_input_buffer));
if (CheckException(jni)) {
@ -962,9 +878,9 @@ bool MediaCodecVideoEncoder::EncodeTexture(JNIEnv* jni,
->native_handle_impl();
jfloatArray sampling_matrix = handle.sampling_matrix.ToJava(jni);
bool encode_status = jni->CallBooleanMethod(
*j_media_codec_video_encoder_, j_encode_texture_method_, key_frame,
handle.oes_texture_id, sampling_matrix, current_timestamp_us_);
bool encode_status = Java_MediaCodecVideoEncoder_encodeTexture(
jni, *j_media_codec_video_encoder_, key_frame, handle.oes_texture_id,
sampling_matrix, current_timestamp_us_);
if (CheckException(jni)) {
ALOGE << "Exception in encode texture.";
ProcessHWError(true /* reset_if_fallback_unavailable */);
@ -977,9 +893,9 @@ bool MediaCodecVideoEncoder::EncodeJavaFrame(JNIEnv* jni,
bool key_frame,
jobject frame,
int input_buffer_index) {
bool encode_status = jni->CallBooleanMethod(
*j_media_codec_video_encoder_, j_encode_frame_method_,
jlongFromPointer(this), key_frame, frame, input_buffer_index);
bool encode_status = Java_MediaCodecVideoEncoder_encodeFrame(
jni, *j_media_codec_video_encoder_, jlongFromPointer(this), key_frame,
frame, input_buffer_index);
if (CheckException(jni)) {
ALOGE << "Exception in encode frame.";
ProcessHWError(true /* reset_if_fallback_unavailable */);
@ -1009,10 +925,8 @@ int32_t MediaCodecVideoEncoder::Release() {
encode_task_.reset(nullptr);
weak_factory_.reset(nullptr);
ScopedLocalRefFrame local_ref_frame(jni);
for (size_t i = 0; i < input_buffers_.size(); ++i)
jni->DeleteGlobalRef(input_buffers_[i]);
input_buffers_.clear();
jni->CallVoidMethod(*j_media_codec_video_encoder_, j_release_method_);
Java_MediaCodecVideoEncoder_release(jni, *j_media_codec_video_encoder_);
if (CheckException(jni)) {
ALOGE << "Exception in release.";
ProcessHWError(false /* reset_if_fallback_unavailable */);
@ -1051,9 +965,9 @@ int32_t MediaCodecVideoEncoder::SetRateAllocation(
if (frame_rate > 0) {
last_set_fps_ = frame_rate;
}
bool ret =
jni->CallBooleanMethod(*j_media_codec_video_encoder_, j_set_rates_method_,
last_set_bitrate_kbps_, last_set_fps_);
bool ret = Java_MediaCodecVideoEncoder_setRates(
jni, *j_media_codec_video_encoder_, last_set_bitrate_kbps_,
last_set_fps_);
if (CheckException(jni) || !ret) {
ProcessHWError(true /* reset_if_fallback_unavailable */);
return sw_fallback_required_ ? WEBRTC_VIDEO_CODEC_OK
@ -1062,37 +976,13 @@ int32_t MediaCodecVideoEncoder::SetRateAllocation(
return WEBRTC_VIDEO_CODEC_OK;
}
int MediaCodecVideoEncoder::GetOutputBufferInfoIndex(
JNIEnv* jni,
jobject j_output_buffer_info) {
return GetIntField(jni, j_output_buffer_info, j_info_index_field_);
}
jobject MediaCodecVideoEncoder::GetOutputBufferInfoBuffer(
JNIEnv* jni,
jobject j_output_buffer_info) {
return GetObjectField(jni, j_output_buffer_info, j_info_buffer_field_);
}
bool MediaCodecVideoEncoder::GetOutputBufferInfoIsKeyFrame(
JNIEnv* jni,
jobject j_output_buffer_info) {
return GetBooleanField(jni, j_output_buffer_info, j_info_is_key_frame_field_);
}
jlong MediaCodecVideoEncoder::GetOutputBufferInfoPresentationTimestampUs(
JNIEnv* jni,
jobject j_output_buffer_info) {
return GetLongField(
jni, j_output_buffer_info, j_info_presentation_timestamp_us_field_);
}
bool MediaCodecVideoEncoder::DeliverPendingOutputs(JNIEnv* jni) {
RTC_DCHECK_CALLED_SEQUENTIALLY(&encoder_queue_checker_);
while (true) {
jobject j_output_buffer_info = jni->CallObjectMethod(
*j_media_codec_video_encoder_, j_dequeue_output_buffer_method_);
jobject j_output_buffer_info =
Java_MediaCodecVideoEncoder_dequeueOutputBuffer(
jni, *j_media_codec_video_encoder_);
if (CheckException(jni)) {
ALOGE << "Exception in set dequeue output buffer.";
ProcessHWError(true /* reset_if_fallback_unavailable */);
@ -1103,7 +993,7 @@ bool MediaCodecVideoEncoder::DeliverPendingOutputs(JNIEnv* jni) {
}
int output_buffer_index =
GetOutputBufferInfoIndex(jni, j_output_buffer_info);
Java_OutputBufferInfo_getIndex(jni, j_output_buffer_info);
if (output_buffer_index == -1) {
ProcessHWError(true /* reset_if_fallback_unavailable */);
return false;
@ -1111,14 +1001,16 @@ bool MediaCodecVideoEncoder::DeliverPendingOutputs(JNIEnv* jni) {
// Get key and config frame flags.
jobject j_output_buffer =
GetOutputBufferInfoBuffer(jni, j_output_buffer_info);
bool key_frame = GetOutputBufferInfoIsKeyFrame(jni, j_output_buffer_info);
Java_OutputBufferInfo_getBuffer(jni, j_output_buffer_info);
bool key_frame =
Java_OutputBufferInfo_isKeyFrame(jni, j_output_buffer_info);
// Get frame timestamps from a queue - for non config frames only.
int64_t encoding_start_time_ms = 0;
int64_t frame_encoding_time_ms = 0;
last_output_timestamp_ms_ =
GetOutputBufferInfoPresentationTimestampUs(jni, j_output_buffer_info) /
Java_OutputBufferInfo_getPresentationTimestampUs(jni,
j_output_buffer_info) /
rtc::kNumMicrosecsPerMillisec;
if (!input_frame_infos_.empty()) {
const InputFrameInfo& frame_info = input_frame_infos_.front();
@ -1247,9 +1139,8 @@ bool MediaCodecVideoEncoder::DeliverPendingOutputs(JNIEnv* jni) {
}
// Return output buffer back to the encoder.
bool success = jni->CallBooleanMethod(*j_media_codec_video_encoder_,
j_release_output_buffer_method_,
output_buffer_index);
bool success = Java_MediaCodecVideoEncoder_releaseOutputBuffer(
jni, *j_media_codec_video_encoder_, output_buffer_index);
if (CheckException(jni) || !success) {
ProcessHWError(true /* reset_if_fallback_unavailable */);
return false;
@ -1348,23 +1239,15 @@ MediaCodecVideoEncoderFactory::MediaCodecVideoEncoderFactory()
: egl_context_(nullptr) {
JNIEnv* jni = AttachCurrentThreadIfNeeded();
ScopedLocalRefFrame local_ref_frame(jni);
jclass j_encoder_class = FindClass(jni, "org/webrtc/MediaCodecVideoEncoder");
jclass j_decoder_class = FindClass(jni, "org/webrtc/MediaCodecVideoDecoder");
supported_codecs_.clear();
bool is_vp8_hw_supported = jni->CallStaticBooleanMethod(
j_encoder_class,
GetStaticMethodID(jni, j_encoder_class, "isVp8HwSupported", "()Z"));
CHECK_EXCEPTION(jni);
bool is_vp8_hw_supported = Java_MediaCodecVideoEncoder_isVp8HwSupported(jni);
if (is_vp8_hw_supported) {
ALOGD << "VP8 HW Encoder supported.";
supported_codecs_.push_back(cricket::VideoCodec(cricket::kVp8CodecName));
}
bool is_vp9_hw_supported = jni->CallStaticBooleanMethod(
j_encoder_class,
GetStaticMethodID(jni, j_encoder_class, "isVp9HwSupported", "()Z"));
CHECK_EXCEPTION(jni);
bool is_vp9_hw_supported = Java_MediaCodecVideoEncoder_isVp9HwSupported(jni);
if (is_vp9_hw_supported) {
ALOGD << "VP9 HW Encoder supported.";
supported_codecs_.push_back(cricket::VideoCodec(cricket::kVp9CodecName));
@ -1373,11 +1256,8 @@ MediaCodecVideoEncoderFactory::MediaCodecVideoEncoderFactory()
// Check if high profile is supported by decoder. If yes, encoder can always
// fall back to baseline profile as a subset of high profile.
bool is_h264_high_profile_hw_supported = jni->CallStaticBooleanMethod(
j_decoder_class,
GetStaticMethodID(jni, j_decoder_class, "isH264HighProfileHwSupported",
"()Z"));
CHECK_EXCEPTION(jni);
bool is_h264_high_profile_hw_supported =
MediaCodecVideoDecoderFactory::IsH264HighProfileSupported(jni);
if (is_h264_high_profile_hw_supported) {
ALOGD << "H.264 High Profile HW Encoder supported.";
// TODO(magjed): Enumerate actual level instead of using hardcoded level
@ -1393,10 +1273,8 @@ MediaCodecVideoEncoderFactory::MediaCodecVideoEncoderFactory()
supported_codecs_with_h264_hp_.push_back(constrained_high);
}
bool is_h264_hw_supported = jni->CallStaticBooleanMethod(
j_encoder_class,
GetStaticMethodID(jni, j_encoder_class, "isH264HwSupported", "()Z"));
CHECK_EXCEPTION(jni);
bool is_h264_hw_supported =
Java_MediaCodecVideoEncoder_isH264HwSupported(jni);
if (is_h264_hw_supported) {
ALOGD << "H.264 HW Encoder supported.";
// TODO(magjed): Push Constrained High profile as well when negotiation is
@ -1468,7 +1346,7 @@ void MediaCodecVideoEncoderFactory::DestroyVideoEncoder(VideoEncoder* encoder) {
}
JNI_FUNCTION_DECLARATION(void,
MediaCodecVideoEncoder_nativeFillBuffer,
MediaCodecVideoEncoder_fillNativeBuffer,
JNIEnv* jni,
jclass,
jlong native_encoder,


@ -63,13 +63,6 @@ ClassReferenceHolder::ClassReferenceHolder(JNIEnv* jni) {
LoadClass(jni, "org/webrtc/EglBase14$Context");
LoadClass(jni, "org/webrtc/EncodedImage");
LoadClass(jni, "org/webrtc/EncodedImage$FrameType");
LoadClass(jni, "org/webrtc/MediaCodecVideoDecoder");
LoadClass(jni, "org/webrtc/MediaCodecVideoDecoder$DecodedOutputBuffer");
LoadClass(jni, "org/webrtc/MediaCodecVideoDecoder$DecodedTextureBuffer");
LoadClass(jni, "org/webrtc/MediaCodecVideoDecoder$VideoCodecType");
LoadClass(jni, "org/webrtc/MediaCodecVideoEncoder");
LoadClass(jni, "org/webrtc/MediaCodecVideoEncoder$OutputBufferInfo");
LoadClass(jni, "org/webrtc/MediaCodecVideoEncoder$VideoCodecType");
LoadClass(jni, "org/webrtc/MediaSource$State");
LoadClass(jni, "org/webrtc/NetworkMonitor");
LoadClass(jni, "org/webrtc/NetworkMonitorAutoDetect$ConnectionType");