diff --git a/webrtc/sdk/android/api/org/webrtc/MediaCodecVideoEncoder.java b/webrtc/sdk/android/api/org/webrtc/MediaCodecVideoEncoder.java index b3decbea9f..808b43d6df 100644 --- a/webrtc/sdk/android/api/org/webrtc/MediaCodecVideoEncoder.java +++ b/webrtc/sdk/android/api/org/webrtc/MediaCodecVideoEncoder.java @@ -11,6 +11,7 @@ package org.webrtc; import android.annotation.TargetApi; +import android.graphics.Matrix; import android.media.MediaCodec; import android.media.MediaCodecInfo; import android.media.MediaCodecInfo.CodecCapabilities; @@ -598,6 +599,46 @@ public class MediaCodecVideoEncoder { } } + /** + * Encodes a new style VideoFrame. Called by JNI. |bufferIndex| is -1 if we are not encoding in + * surface mode. + */ + boolean encodeFrame(long nativeEncoder, boolean isKeyframe, VideoFrame frame, int bufferIndex) { + checkOnMediaCodecThread(); + try { + long presentationTimestampUs = TimeUnit.NANOSECONDS.toMicros(frame.getTimestampNs()); + checkKeyFrameRequired(isKeyframe, presentationTimestampUs); + + VideoFrame.Buffer buffer = frame.getBuffer(); + if (buffer instanceof VideoFrame.TextureBuffer) { + VideoFrame.TextureBuffer textureBuffer = (VideoFrame.TextureBuffer) buffer; + eglBase.makeCurrent(); + // TODO(perkj): glClear() shouldn't be necessary since every pixel is covered anyway, + // but it's a workaround for bug webrtc:5147. + GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT); + drawer.drawOes(textureBuffer.getTextureId(), + RendererCommon.convertMatrixFromAndroidGraphicsMatrix( + textureBuffer.getTransformMatrix()), + width, height, 0, 0, width, height); + eglBase.swapBuffers(frame.getTimestampNs()); + } else { + VideoFrame.I420Buffer i420Buffer = buffer.toI420(); + nativeFillBuffer(nativeEncoder, bufferIndex, i420Buffer.getDataY(), i420Buffer.getStrideY(), + i420Buffer.getDataU(), i420Buffer.getStrideU(), i420Buffer.getDataV(), + i420Buffer.getStrideV()); + i420Buffer.release(); + // I420 consists of one full-resolution and two half-resolution planes. 
+ // 1 + 1 / 4 + 1 / 4 = 3 / 2 + int yuvSize = width * height * 3 / 2; + mediaCodec.queueInputBuffer(bufferIndex, 0, yuvSize, presentationTimestampUs, 0); + } + return true; + } catch (RuntimeException e) { + Logging.e(TAG, "encodeFrame failed", e); + return false; + } + } + void release() { Logging.d(TAG, "Java releaseEncoder"); checkOnMediaCodecThread(); @@ -881,4 +922,8 @@ public class MediaCodecVideoEncoder { return false; } } + + /** Fills an inputBuffer with the given index with data from the byte buffers. */ + private static native void nativeFillBuffer(long nativeEncoder, int inputBuffer, ByteBuffer dataY, + int strideY, ByteBuffer dataU, int strideU, ByteBuffer dataV, int strideV); } diff --git a/webrtc/sdk/android/src/jni/androidmediaencoder_jni.cc b/webrtc/sdk/android/src/jni/androidmediaencoder_jni.cc index 8f192e4fb1..8181e3dfba 100644 --- a/webrtc/sdk/android/src/jni/androidmediaencoder_jni.cc +++ b/webrtc/sdk/android/src/jni/androidmediaencoder_jni.cc @@ -124,6 +124,16 @@ class MediaCodecVideoEncoder : public webrtc::VideoEncoder { bool SupportsNativeHandle() const override { return egl_context_ != nullptr; } const char* ImplementationName() const override; + // Fills the input buffer with data from the buffers passed as parameters. + bool FillInputBuffer(JNIEnv* jni, + int input_buffer_index, + uint8_t const* buffer_y, + int stride_y, + uint8_t const* buffer_u, + int stride_u, + uint8_t const* buffer_v, + int stride_v); + private: class EncodeTask : public rtc::QueuedTask { public: @@ -160,7 +170,12 @@ class MediaCodecVideoEncoder : public webrtc::VideoEncoder { // Reconfigure to match |frame| in width, height. Also reconfigures the // encoder if |frame| is a texture/byte buffer and the encoder is initialized // for byte buffer/texture. Returns false if reconfiguring fails. 
- bool MaybeReconfigureEncoder(const webrtc::VideoFrame& frame); + bool MaybeReconfigureEncoder(JNIEnv* jni, const webrtc::VideoFrame& frame); + + // Returns true if the frame is a texture frame and we should use surface + // based encoding. + bool IsTextureFrame(JNIEnv* jni, const webrtc::VideoFrame& frame); + bool EncodeByteBuffer(JNIEnv* jni, bool key_frame, const webrtc::VideoFrame& frame, @@ -168,6 +183,12 @@ bool MediaCodecVideoEncoder::EncodeTexture(JNIEnv* jni, bool key_frame, const webrtc::VideoFrame& frame); + // Encodes a new style org.webrtc.VideoFrame. Might be a I420 or a texture + // frame. + bool EncodeJavaFrame(JNIEnv* jni, + bool key_frame, + jobject frame, + int input_buffer_index); // Helper accessors for MediaCodecVideoEncoder$OutputBufferInfo members. int GetOutputBufferInfoIndex(JNIEnv* jni, jobject j_output_buffer_info); @@ -210,6 +231,7 @@ class MediaCodecVideoEncoder : public webrtc::VideoEncoder { jmethodID j_dequeue_input_buffer_method_; jmethodID j_encode_buffer_method_; jmethodID j_encode_texture_method_; + jmethodID j_encode_frame_method_; jmethodID j_release_method_; jmethodID j_set_rates_method_; jmethodID j_dequeue_output_buffer_method_; @@ -220,6 +242,9 @@ class MediaCodecVideoEncoder : public webrtc::VideoEncoder { jfieldID j_info_is_key_frame_field_; jfieldID j_info_presentation_timestamp_us_field_; + const JavaVideoFrameFactory video_frame_factory_; + ScopedGlobalRef<jclass> j_video_frame_texture_buffer_class_; + // State that is valid only between InitEncode() and the next Release(). int width_; // Frame width in pixels. int height_; // Frame height in pixels. 
@@ -329,6 +354,10 @@ MediaCodecVideoEncoder::MediaCodecVideoEncoder(JNIEnv* jni, *j_media_codec_video_encoder_class_, "<init>", "()V"))), + video_frame_factory_(jni), + j_video_frame_texture_buffer_class_( + jni, + FindClass(jni, "org/webrtc/VideoFrame$TextureBuffer")), inited_(false), use_surface_(false), egl_context_(egl_context), @@ -353,6 +382,9 @@ MediaCodecVideoEncoder::MediaCodecVideoEncoder(JNIEnv* jni, j_encode_texture_method_ = GetMethodID( jni, *j_media_codec_video_encoder_class_, "encodeTexture", "(ZI[FJ)Z"); + j_encode_frame_method_ = + GetMethodID(jni, *j_media_codec_video_encoder_class_, "encodeFrame", + "(JZLorg/webrtc/VideoFrame;I)Z"); j_release_method_ = GetMethodID(jni, *j_media_codec_video_encoder_class_, "release", "()V"); j_set_rates_method_ = GetMethodID( @@ -736,7 +768,7 @@ int32_t MediaCodecVideoEncoder::Encode( VideoFrame input_frame(input_buffer, frame.timestamp(), frame.render_time_ms(), frame.rotation()); - if (!MaybeReconfigureEncoder(input_frame)) { + if (!MaybeReconfigureEncoder(jni, input_frame)) { ALOGE << "Failed to reconfigure encoder."; return WEBRTC_VIDEO_CODEC_ERROR; } @@ -744,10 +776,11 @@ int32_t MediaCodecVideoEncoder::Encode( const bool key_frame = frame_types->front() != webrtc::kVideoFrameDelta || send_key_frame; bool encode_status = true; - if (input_frame.video_frame_buffer()->type() != - webrtc::VideoFrameBuffer::Type::kNative) { - int j_input_buffer_index = jni->CallIntMethod( - *j_media_codec_video_encoder_, j_dequeue_input_buffer_method_); + + int j_input_buffer_index = -1; + if (!use_surface_) { + j_input_buffer_index = jni->CallIntMethod(*j_media_codec_video_encoder_, + j_dequeue_input_buffer_method_); if (CheckException(jni)) { ALOGE << "Exception in dequeu input buffer."; return ProcessHWErrorOnEncode(); } @@ -768,10 +801,29 @@ } else if (j_input_buffer_index == -2) { return ProcessHWErrorOnEncode(); } + } + + if (input_frame.video_frame_buffer()->type() != + 
webrtc::VideoFrameBuffer::Type::kNative) { encode_status = EncodeByteBuffer(jni, key_frame, input_frame, j_input_buffer_index); } else { - encode_status = EncodeTexture(jni, key_frame, input_frame); + AndroidVideoFrameBuffer* android_buffer = + static_cast<AndroidVideoFrameBuffer*>( + input_frame.video_frame_buffer().get()); + switch (android_buffer->android_type()) { + case AndroidVideoFrameBuffer::AndroidType::kTextureBuffer: + encode_status = EncodeTexture(jni, key_frame, input_frame); + break; + case AndroidVideoFrameBuffer::AndroidType::kJavaBuffer: + encode_status = EncodeJavaFrame( + jni, key_frame, video_frame_factory_.ToJavaFrame(jni, input_frame), + j_input_buffer_index); + break; + default: + RTC_NOTREACHED(); + return WEBRTC_VIDEO_CODEC_ERROR; + } } if (!encode_status) { @@ -802,10 +854,12 @@ } bool MediaCodecVideoEncoder::MaybeReconfigureEncoder( + JNIEnv* jni, const webrtc::VideoFrame& frame) { RTC_DCHECK_CALLED_SEQUENTIALLY(&encoder_queue_checker_); - const bool reconfigure_due_to_format = frame.is_texture() != use_surface_; + bool is_texture = IsTextureFrame(jni, frame); + const bool reconfigure_due_to_format = is_texture != use_surface_; const bool reconfigure_due_to_size = frame.width() != width_ || frame.height() != height_; @@ -830,10 +884,32 @@ bool MediaCodecVideoEncoder::MaybeReconfigureEncoder( Release(); - return InitEncodeInternal(width_, height_, 0, 0, frame.is_texture()) == + return InitEncodeInternal(width_, height_, 0, 0, is_texture) == WEBRTC_VIDEO_CODEC_OK; } +bool MediaCodecVideoEncoder::IsTextureFrame(JNIEnv* jni, + const webrtc::VideoFrame& frame) { + if (frame.video_frame_buffer()->type() != + webrtc::VideoFrameBuffer::Type::kNative) { + return false; + } + + AndroidVideoFrameBuffer* android_buffer = + static_cast<AndroidVideoFrameBuffer*>(frame.video_frame_buffer().get()); + switch (android_buffer->android_type()) { + case AndroidVideoFrameBuffer::AndroidType::kTextureBuffer: + return true; + case 
AndroidVideoFrameBuffer::AndroidType::kJavaBuffer: + return jni->IsInstanceOf(static_cast<AndroidVideoBuffer*>(android_buffer) + ->video_frame_buffer(), + *j_video_frame_texture_buffer_class_); + default: + RTC_NOTREACHED(); + return false; + } +} + bool MediaCodecVideoEncoder::EncodeByteBuffer(JNIEnv* jni, bool key_frame, const webrtc::VideoFrame& frame, @@ -841,6 +917,33 @@ bool MediaCodecVideoEncoder::EncodeByteBuffer(JNIEnv* jni, RTC_DCHECK_CALLED_SEQUENTIALLY(&encoder_queue_checker_); RTC_CHECK(!use_surface_); + rtc::scoped_refptr<webrtc::I420BufferInterface> i420_buffer = + frame.video_frame_buffer()->ToI420(); + if (!FillInputBuffer(jni, input_buffer_index, i420_buffer->DataY(), + i420_buffer->StrideY(), i420_buffer->DataU(), + i420_buffer->StrideU(), i420_buffer->DataV(), + i420_buffer->StrideV())) { + return false; + } + bool encode_status = jni->CallBooleanMethod( + *j_media_codec_video_encoder_, j_encode_buffer_method_, key_frame, + input_buffer_index, yuv_size_, current_timestamp_us_); + if (CheckException(jni)) { + ALOGE << "Exception in encode buffer."; + ProcessHWError(true /* reset_if_fallback_unavailable */); + return false; + } + return encode_status; +} + +bool MediaCodecVideoEncoder::FillInputBuffer(JNIEnv* jni, + int input_buffer_index, + uint8_t const* buffer_y, + int stride_y, + uint8_t const* buffer_u, + int stride_u, + uint8_t const* buffer_v, + int stride_v) { jobject j_input_buffer = input_buffers_[input_buffer_index]; uint8_t* yuv_buffer = reinterpret_cast<uint8_t*>(jni->GetDirectBufferAddress(j_input_buffer)); @@ -850,26 +953,12 @@ bool MediaCodecVideoEncoder::EncodeByteBuffer(JNIEnv* jni, return false; } RTC_CHECK(yuv_buffer) << "Indirect buffer??"; - rtc::scoped_refptr<webrtc::I420BufferInterface> i420_buffer = - frame.video_frame_buffer()->ToI420(); - RTC_CHECK(!libyuv::ConvertFromI420( - i420_buffer->DataY(), i420_buffer->StrideY(), i420_buffer->DataU(), - i420_buffer->StrideU(), i420_buffer->DataV(), i420_buffer->StrideV(), - yuv_buffer, width_, width_, height_, encoder_fourcc_)) - << "ConvertFromI420 failed"; - bool 
encode_status = jni->CallBooleanMethod(*j_media_codec_video_encoder_, - j_encode_buffer_method_, - key_frame, - input_buffer_index, - yuv_size_, - current_timestamp_us_); - if (CheckException(jni)) { - ALOGE << "Exception in encode buffer."; - ProcessHWError(true /* reset_if_fallback_unavailable */); - return false; - } - return encode_status; + RTC_CHECK(!libyuv::ConvertFromI420(buffer_y, stride_y, buffer_u, stride_u, + buffer_v, stride_v, yuv_buffer, width_, + width_, height_, encoder_fourcc_)) + << "ConvertFromI420 failed"; + return true; } bool MediaCodecVideoEncoder::EncodeTexture(JNIEnv* jni, @@ -893,6 +982,21 @@ bool MediaCodecVideoEncoder::EncodeTexture(JNIEnv* jni, return encode_status; } +bool MediaCodecVideoEncoder::EncodeJavaFrame(JNIEnv* jni, + bool key_frame, + jobject frame, + int input_buffer_index) { + bool encode_status = jni->CallBooleanMethod( + *j_media_codec_video_encoder_, j_encode_frame_method_, + jlongFromPointer(this), key_frame, frame, input_buffer_index); + if (CheckException(jni)) { + ALOGE << "Exception in encode frame."; + ProcessHWError(true /* reset_if_fallback_unavailable */); + return false; + } + return encode_status; +} + int32_t MediaCodecVideoEncoder::RegisterEncodeCompleteCallback( webrtc::EncodedImageCallback* callback) { RTC_DCHECK_CALLED_SEQUENTIALLY(&encoder_queue_checker_); @@ -1373,4 +1477,26 @@ void MediaCodecVideoEncoderFactory::DestroyVideoEncoder( delete encoder; } +JOW(void, MediaCodecVideoEncoder_nativeFillBuffer) +(JNIEnv* jni, + jlong native_encoder, + jint input_buffer, + jobject j_buffer_y, + jint stride_y, + jobject j_buffer_u, + jint stride_u, + jobject j_buffer_v, + jint stride_v) { + uint8_t* buffer_y = + static_cast<uint8_t*>(jni->GetDirectBufferAddress(j_buffer_y)); + uint8_t* buffer_u = + static_cast<uint8_t*>(jni->GetDirectBufferAddress(j_buffer_u)); + uint8_t* buffer_v = + static_cast<uint8_t*>(jni->GetDirectBufferAddress(j_buffer_v)); + + reinterpret_cast<MediaCodecVideoEncoder*>(native_encoder) + ->FillInputBuffer(jni, input_buffer, buffer_y, stride_y, 
buffer_u, + stride_u, buffer_v, stride_v); +} + } // namespace webrtc_jni diff --git a/webrtc/sdk/android/src/jni/classreferenceholder.cc b/webrtc/sdk/android/src/jni/classreferenceholder.cc index 1f453881b5..dd5cfe74cf 100644 --- a/webrtc/sdk/android/src/jni/classreferenceholder.cc +++ b/webrtc/sdk/android/src/jni/classreferenceholder.cc @@ -111,6 +111,7 @@ ClassReferenceHolder::ClassReferenceHolder(JNIEnv* jni) { LoadClass(jni, "org/webrtc/VideoFrame"); LoadClass(jni, "org/webrtc/VideoFrame$Buffer"); LoadClass(jni, "org/webrtc/VideoFrame$I420Buffer"); + LoadClass(jni, "org/webrtc/VideoFrame$TextureBuffer"); LoadClass(jni, "org/webrtc/VideoRenderer$I420Frame"); LoadClass(jni, "org/webrtc/VideoTrack"); LoadClass(jni, "org/webrtc/WrappedNativeI420Buffer"); diff --git a/webrtc/sdk/android/src/jni/native_handle_impl.cc b/webrtc/sdk/android/src/jni/native_handle_impl.cc index f0070b9ee9..eebfba4035 100644 --- a/webrtc/sdk/android/src/jni/native_handle_impl.cc +++ b/webrtc/sdk/android/src/jni/native_handle_impl.cc @@ -375,8 +375,7 @@ rtc::scoped_refptr<webrtc::I420BufferInterface> AndroidVideoBuffer::ToI420() { height_, j_i420_buffer); } -jobject AndroidVideoBuffer::ToJavaI420Frame(JNIEnv* jni, - int rotation) { +jobject AndroidVideoBuffer::ToJavaI420Frame(JNIEnv* jni, int rotation) { jclass j_byte_buffer_class = jni->FindClass("java/nio/ByteBuffer"); jclass j_i420_frame_class = FindClass(jni, "org/webrtc/VideoRenderer$I420Frame"); @@ -438,4 +437,29 @@ rtc::scoped_refptr<AndroidVideoBuffer> AndroidVideoBufferFactory::CreateBuffer( jni, j_retain_id_, j_release_id_, width, height, j_video_frame_buffer); } +JavaVideoFrameFactory::JavaVideoFrameFactory(JNIEnv* jni) + : j_video_frame_class_(jni, FindClass(jni, "org/webrtc/VideoFrame")) { + j_video_frame_constructor_id_ = + GetMethodID(jni, *j_video_frame_class_, "<init>", + "(Lorg/webrtc/VideoFrame$Buffer;IJ)V"); +} + +jobject JavaVideoFrameFactory::ToJavaFrame( + JNIEnv* jni, + const webrtc::VideoFrame& frame) const { + RTC_DCHECK(frame.video_frame_buffer()->type() == + 
webrtc::VideoFrameBuffer::Type::kNative); + AndroidVideoFrameBuffer* android_buffer = + static_cast<AndroidVideoFrameBuffer*>(frame.video_frame_buffer().get()); + RTC_DCHECK(android_buffer->android_type() == + AndroidVideoFrameBuffer::AndroidType::kJavaBuffer); + AndroidVideoBuffer* android_video_buffer = + static_cast<AndroidVideoBuffer*>(android_buffer); + jobject buffer = android_video_buffer->video_frame_buffer(); + return jni->NewObject( + *j_video_frame_class_, j_video_frame_constructor_id_, buffer, + static_cast<jint>(frame.rotation()), + static_cast<jlong>(frame.timestamp_us() * rtc::kNumNanosecsPerMicrosec)); +} + } // namespace webrtc_jni diff --git a/webrtc/sdk/android/src/jni/native_handle_impl.h b/webrtc/sdk/android/src/jni/native_handle_impl.h index 65da9f7f0e..834441a7c6 100644 --- a/webrtc/sdk/android/src/jni/native_handle_impl.h +++ b/webrtc/sdk/android/src/jni/native_handle_impl.h @@ -156,6 +156,17 @@ class AndroidVideoBufferFactory { jmethodID j_get_height_id_; }; +class JavaVideoFrameFactory { + public: + JavaVideoFrameFactory(JNIEnv* jni); + + jobject ToJavaFrame(JNIEnv* jni, const webrtc::VideoFrame& frame) const; + + private: + ScopedGlobalRef<jclass> j_video_frame_class_; + jmethodID j_video_frame_constructor_id_; +}; + } // namespace webrtc_jni #endif // WEBRTC_SDK_ANDROID_SRC_JNI_NATIVE_HANDLE_IMPL_H_