Android: Annotate and generate JNI code for VideoFrame.java

This CL also merges native_handle_impl.cc and videoframe_jni.cc, so that all
JNI code for the same Java class lives in a single file, and renames that file
to jni/videoframe.cc.

The classes AndroidVideoBufferFactory and JavaVideoFrameFactory are now
unnecessary, since the generated JNI code caches everything; they are
simplified into the global functions JavaToNativeFrame() and
NativeToJavaFrame() instead.

Bug: webrtc:8278
Change-Id: I03d7b0bbde64cfb407cd6210478ddf9d5599cd8c
Reviewed-on: https://webrtc-review.googlesource.com/22923
Commit-Queue: Magnus Jedvert <magjed@webrtc.org>
Reviewed-by: Sami Kalliomäki <sakal@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#20677}
Author: Magnus Jedvert <magjed@webrtc.org>
Date: 2017-11-14 17:08:59 +01:00
Committed by: Commit Bot
Commit: c2ac3c663f (parent 651707bdf0)
17 changed files with 170 additions and 349 deletions


@@ -106,6 +106,7 @@ generate_jni("generated_video_jni") {
     "api/org/webrtc/SurfaceTextureHelper.java",
     "api/org/webrtc/VideoCodecStatus.java",
     "api/org/webrtc/VideoEncoder.java",
+    "api/org/webrtc/VideoFrame.java",
     "api/org/webrtc/VideoSink.java",
     "src/java/org/webrtc/VideoEncoderWrapper.java",
     "src/java/org/webrtc/WrappedNativeVideoDecoder.java",
@@ -127,8 +128,6 @@ rtc_static_library("video_jni") {
     "src/jni/androidvideotracksource_jni.cc",
     "src/jni/defaultvideoencoderfactory.cc",
     "src/jni/jni_generator_helper.h",
-    "src/jni/native_handle_impl.cc",
-    "src/jni/native_handle_impl.h",
     "src/jni/nv12buffer_jni.cc",
     "src/jni/nv21buffer_jni.cc",
     "src/jni/pc/video_jni.cc",
@@ -147,7 +146,8 @@ rtc_static_library("video_jni") {
     "src/jni/videoencoderwrapper.cc",
     "src/jni/videoencoderwrapper.h",
     "src/jni/videofilerenderer_jni.cc",
-    "src/jni/videoframe_jni.cc",
+    "src/jni/videoframe.cc",
+    "src/jni/videoframe.h",
     "src/jni/videotrack_jni.cc",
     "src/jni/vp8codec.cc",
     "src/jni/vp9codec.cc",


@@ -30,22 +30,22 @@ public class VideoFrame {
     /**
      * Resolution of the buffer in pixels.
      */
-    int getWidth();
-    int getHeight();
+    @CalledByNative("Buffer") int getWidth();
+    @CalledByNative("Buffer") int getHeight();
 
     /**
      * Returns a memory-backed frame in I420 format. If the pixel data is in another format, a
      * conversion will take place. All implementations must provide a fallback to I420 for
      * compatibility with e.g. the internal WebRTC software encoders.
      */
-    I420Buffer toI420();
+    @CalledByNative("Buffer") I420Buffer toI420();
 
     /**
      * Reference counting is needed since a video buffer can be shared between multiple VideoSinks,
      * and the buffer needs to be returned to the VideoSource as soon as all references are gone.
      */
-    void retain();
-    void release();
+    @CalledByNative("Buffer") void retain();
+    @CalledByNative("Buffer") void release();
 
     /**
      * Crops a region defined by |cropx|, |cropY|, |cropWidth| and |cropHeight|. Scales it to size
@@ -65,25 +65,25 @@ public class VideoFrame {
      * be 0. Callers may mutate the ByteBuffer (eg. through relative-read operations), so
      * implementations must return a new ByteBuffer or slice for each call.
      */
-    ByteBuffer getDataY();
+    @CalledByNative("I420Buffer") ByteBuffer getDataY();
 
     /**
      * Returns a direct ByteBuffer containing U-plane data. The buffer capacity is at least
      * getStrideU() * ((getHeight() + 1) / 2) bytes. The position of the returned buffer is ignored
      * and must be 0. Callers may mutate the ByteBuffer (eg. through relative-read operations), so
      * implementations must return a new ByteBuffer or slice for each call.
      */
-    ByteBuffer getDataU();
+    @CalledByNative("I420Buffer") ByteBuffer getDataU();
 
     /**
      * Returns a direct ByteBuffer containing V-plane data. The buffer capacity is at least
      * getStrideV() * ((getHeight() + 1) / 2) bytes. The position of the returned buffer is ignored
      * and must be 0. Callers may mutate the ByteBuffer (eg. through relative-read operations), so
      * implementations must return a new ByteBuffer or slice for each call.
      */
-    ByteBuffer getDataV();
+    @CalledByNative("I420Buffer") ByteBuffer getDataV();
 
-    int getStrideY();
-    int getStrideU();
-    int getStrideV();
+    @CalledByNative("I420Buffer") int getStrideY();
+    @CalledByNative("I420Buffer") int getStrideU();
+    @CalledByNative("I420Buffer") int getStrideV();
   }
 
   /**
@@ -132,6 +132,7 @@ public class VideoFrame {
     this.timestampNs = timestampNs;
   }
 
+  @CalledByNative
   public Buffer getBuffer() {
     return buffer;
   }
@@ -139,6 +140,7 @@ public class VideoFrame {
   /**
    * Rotation of the frame in degrees.
    */
+  @CalledByNative
   public int getRotation() {
     return rotation;
   }
@@ -146,6 +148,7 @@ public class VideoFrame {
   /**
    * Timestamp of the frame in nano seconds.
   */
+  @CalledByNative
   public long getTimestampNs() {
     return timestampNs;
   }
@@ -194,7 +197,7 @@ public class VideoFrame {
     }
 
     JavaI420Buffer newBuffer = JavaI420Buffer.allocate(scaleWidth, scaleHeight);
-    nativeCropAndScaleI420(buffer.getDataY(), buffer.getStrideY(), buffer.getDataU(),
+    cropAndScaleI420Native(buffer.getDataY(), buffer.getStrideY(), buffer.getDataU(),
         buffer.getStrideU(), buffer.getDataV(), buffer.getStrideV(), cropX, cropY, cropWidth,
         cropHeight, newBuffer.getDataY(), newBuffer.getStrideY(), newBuffer.getDataU(),
         newBuffer.getStrideU(), newBuffer.getDataV(), newBuffer.getStrideV(), scaleWidth,
@@ -202,7 +205,13 @@ public class VideoFrame {
     return newBuffer;
   }
 
-  private static native void nativeCropAndScaleI420(ByteBuffer srcY, int srcStrideY,
+  // TODO(bugs.webrtc.org/8278): Add a way to generate JNI code for constructors directly.
+  @CalledByNative
+  static VideoFrame create(Buffer buffer, int rotation, long timestampNs) {
+    return new VideoFrame(buffer, rotation, timestampNs);
+  }
+
+  private static native void cropAndScaleI420Native(ByteBuffer srcY, int srcStrideY,
       ByteBuffer srcU, int srcStrideU, ByteBuffer srcV, int srcStrideV, int cropX, int cropY,
       int cropWidth, int cropHeight, ByteBuffer dstY, int dstStrideY, ByteBuffer dstU,
       int dstStrideU, ByteBuffer dstV, int dstStrideV, int scaleWidth, int scaleHeight);


@@ -21,4 +21,9 @@ import java.lang.annotation.Target;
  */
 @Target(ElementType.METHOD)
 @Retention(RetentionPolicy.CLASS)
-@interface CalledByNative {}
+@interface CalledByNative {
+  /*
+   * If present, tells which inner class the method belongs to.
+   */
+  public String value() default "";
+}
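For reference, a hedged sketch of what the optional value buys: annotating a
method of an inner class with @CalledByNative("I420Buffer") makes the generator
emit a helper named after that inner class (Java_I420Buffer_*), as used by
jni/videoframe.cc below. The exact generated signature is an assumption here;
only the call sites in this CL are authoritative:

    #include "sdk/android/generated_video_jni/jni/VideoFrame_jni.h"

    namespace webrtc {
    namespace jni {

    // Reads the Y-plane stride of a Java VideoFrame.I420Buffer through the
    // generated glue; the helper exists because getStrideY() is annotated
    // @CalledByNative("I420Buffer").
    int GetJavaStrideY(JNIEnv* env, jobject j_i420_buffer) {
      return Java_I420Buffer_getStrideY(env, j_i420_buffer);
    }

    }  // namespace jni
    }  // namespace webrtc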


@@ -17,9 +17,6 @@
 // androidmediacodeccommon.h to avoid build errors.
 #include "sdk/android/src/jni/androidmediadecoder_jni.h"
 
-#include "third_party/libyuv/include/libyuv/convert.h"
-#include "third_party/libyuv/include/libyuv/convert_from.h"
-#include "third_party/libyuv/include/libyuv/video_common.h"
 #include "common_video/h264/h264_bitstream_parser.h"
 #include "common_video/include/i420_buffer_pool.h"
 #include "modules/video_coding/include/video_codec_interface.h"
@@ -32,8 +29,11 @@
 #include "rtc_base/timeutils.h"
 #include "sdk/android/src/jni/androidmediacodeccommon.h"
 #include "sdk/android/src/jni/classreferenceholder.h"
-#include "sdk/android/src/jni/native_handle_impl.h"
 #include "sdk/android/src/jni/surfacetexturehelper_jni.h"
+#include "sdk/android/src/jni/videoframe.h"
+#include "third_party/libyuv/include/libyuv/convert.h"
+#include "third_party/libyuv/include/libyuv/convert_from.h"
+#include "third_party/libyuv/include/libyuv/video_common.h"
 
 using rtc::Bind;
 using rtc::Thread;


@@ -18,9 +18,6 @@
 #include <string>
 #include <utility>
 
-#include "third_party/libyuv/include/libyuv/convert.h"
-#include "third_party/libyuv/include/libyuv/convert_from.h"
-#include "third_party/libyuv/include/libyuv/video_common.h"
 #include "api/video_codecs/video_encoder.h"
 #include "common_types.h"  // NOLINT(build/include)
 #include "common_video/h264/h264_bitstream_parser.h"
@@ -43,8 +40,11 @@
 #include "sdk/android/src/jni/androidmediacodeccommon.h"
 #include "sdk/android/src/jni/classreferenceholder.h"
 #include "sdk/android/src/jni/jni_helpers.h"
-#include "sdk/android/src/jni/native_handle_impl.h"
+#include "sdk/android/src/jni/videoframe.h"
 #include "system_wrappers/include/field_trial.h"
+#include "third_party/libyuv/include/libyuv/convert.h"
+#include "third_party/libyuv/include/libyuv/convert_from.h"
+#include "third_party/libyuv/include/libyuv/video_common.h"
 
 using rtc::Bind;
 using rtc::Thread;
@@ -230,7 +230,6 @@ class MediaCodecVideoEncoder : public VideoEncoder {
   jfieldID j_info_is_key_frame_field_;
   jfieldID j_info_presentation_timestamp_us_field_;
 
-  const JavaVideoFrameFactory video_frame_factory_;
   ScopedGlobalRef<jclass> j_video_frame_texture_buffer_class_;
 
   // State that is valid only between InitEncode() and the next Release().
@@ -342,7 +341,6 @@ MediaCodecVideoEncoder::MediaCodecVideoEncoder(JNIEnv* jni,
           *j_media_codec_video_encoder_class_,
           "<init>",
          "()V"))),
-      video_frame_factory_(jni),
       j_video_frame_texture_buffer_class_(
           jni,
           FindClass(jni, "org/webrtc/VideoFrame$TextureBuffer")),
@@ -801,9 +799,9 @@ int32_t MediaCodecVideoEncoder::Encode(
       encode_status = EncodeTexture(jni, key_frame, input_frame);
       break;
     case AndroidVideoFrameBuffer::AndroidType::kJavaBuffer:
-      encode_status = EncodeJavaFrame(
-          jni, key_frame, video_frame_factory_.ToJavaFrame(jni, input_frame),
-          j_input_buffer_index);
+      encode_status =
+          EncodeJavaFrame(jni, key_frame, NativeToJavaFrame(jni, input_frame),
+                          j_input_buffer_index);
       break;
     default:
       RTC_NOTREACHED();


@@ -33,7 +33,6 @@ AndroidVideoTrackSource::AndroidVideoTrackSource(
       surface_texture_helper_(new rtc::RefCountedObject<SurfaceTextureHelper>(
           jni,
          j_surface_texture_helper)),
-      video_buffer_factory_(jni),
       is_screencast_(is_screencast) {
   RTC_LOG(LS_INFO) << "AndroidVideoTrackSource ctor";
   camera_thread_checker_.DetachFromThread();
@@ -189,7 +188,7 @@ void AndroidVideoTrackSource::OnFrameCaptured(JNIEnv* jni,
       crop_height, adapted_width, adapted_height);
 
   rtc::scoped_refptr<VideoFrameBuffer> buffer =
-      video_buffer_factory_.WrapBuffer(jni, j_adapted_video_frame_buffer);
+      AndroidVideoBuffer::Adopt(jni, j_adapted_video_frame_buffer);
 
   // AdaptedVideoTrackSource handles applying rotation for I420 frames.
   if (apply_rotation() && rotation != kVideoRotation_0) {


@@ -20,8 +20,8 @@
 #include "rtc_base/checks.h"
 #include "rtc_base/thread_checker.h"
 #include "rtc_base/timestampaligner.h"
-#include "sdk/android/src/jni/native_handle_impl.h"
 #include "sdk/android/src/jni/surfacetexturehelper_jni.h"
+#include "sdk/android/src/jni/videoframe.h"
 
 namespace webrtc {
 namespace jni {
@@ -84,7 +84,6 @@ class AndroidVideoTrackSource : public rtc::AdaptedVideoTrackSource {
   NV12ToI420Scaler nv12toi420_scaler_;
   I420BufferPool buffer_pool_;
   rtc::scoped_refptr<SurfaceTextureHelper> surface_texture_helper_;
-  AndroidVideoBufferFactory video_buffer_factory_;
   const bool is_screencast_;
 
   jmethodID j_crop_and_scale_id_;


@@ -17,7 +17,7 @@
 #include "rtc_base/refcount.h"
 #include "rtc_base/scoped_ref_ptr.h"
 #include "sdk/android/src/jni/jni_helpers.h"
-#include "sdk/android/src/jni/native_handle_impl.h"
+#include "sdk/android/src/jni/videoframe.h"
 
 namespace webrtc {
 namespace jni {


@@ -14,7 +14,7 @@
 #include "media/base/videosinkinterface.h"
 #include "sdk/android/src/jni/classreferenceholder.h"
 #include "sdk/android/src/jni/jni_helpers.h"
-#include "sdk/android/src/jni/native_handle_impl.h"
+#include "sdk/android/src/jni/videoframe.h"
 
 namespace webrtc {
 namespace jni {


@@ -22,8 +22,7 @@ namespace webrtc {
 namespace jni {
 
 VideoDecoderWrapper::VideoDecoderWrapper(JNIEnv* jni, jobject decoder)
-    : android_video_buffer_factory_(jni),
-      decoder_(jni, decoder),
+    : decoder_(jni, decoder),
       encoded_image_class_(jni, FindClass(jni, "org/webrtc/EncodedImage")),
       frame_type_class_(jni,
                         FindClass(jni, "org/webrtc/EncodedImage$FrameType")),
@@ -187,8 +186,8 @@ void VideoDecoderWrapper::OnDecodedFrame(JNIEnv* jni,
     // find a matching timestamp.
   } while (frame_extra_info.capture_time_ns != capture_time_ns);
 
-  VideoFrame frame = android_video_buffer_factory_.CreateFrame(
-      jni, jframe, frame_extra_info.timestamp_rtp);
+  VideoFrame frame =
+      JavaToNativeFrame(jni, jframe, frame_extra_info.timestamp_rtp);
 
   rtc::Optional<int32_t> decoding_time_ms;
   if (jdecode_time_ms != nullptr) {


@@ -17,7 +17,7 @@
 #include "api/video_codecs/video_decoder.h"
 #include "common_video/h264/h264_bitstream_parser.h"
 #include "sdk/android/src/jni/jni_helpers.h"
-#include "sdk/android/src/jni/native_handle_impl.h"
+#include "sdk/android/src/jni/videoframe.h"
 
 namespace webrtc {
 namespace jni {
@@ -78,7 +78,6 @@ class VideoDecoderWrapper : public VideoDecoder {
   int32_t number_of_cores_;
   bool initialized_;
 
-  AndroidVideoBufferFactory android_video_buffer_factory_;
   std::deque<FrameExtraInfo> frame_extra_infos_;
   bool qp_parsing_enabled_;
   H264BitstreamParser h264_bitstream_parser_;


@@ -35,8 +35,7 @@ VideoEncoderWrapper::VideoEncoderWrapper(JNIEnv* jni, jobject j_encoder)
     : encoder_(jni, j_encoder),
       frame_type_class_(jni,
                         GetClass(jni, "org/webrtc/EncodedImage$FrameType")),
-      int_array_class_(jni, jni->FindClass("[I")),
-      video_frame_factory_(jni) {
+      int_array_class_(jni, jni->FindClass("[I")) {
   implementation_name_ = GetImplementationName(jni);
 
   initialized_ = false;
@@ -137,8 +136,7 @@ int32_t VideoEncoderWrapper::Encode(
   frame_extra_infos_.push_back(info);
 
   jobject ret = Java_VideoEncoder_encode(
-      jni, *encoder_, video_frame_factory_.ToJavaFrame(jni, frame),
-      encode_info);
+      jni, *encoder_, NativeToJavaFrame(jni, frame), encode_info);
 
   return HandleReturnCode(jni, ret);
 }


@@ -21,7 +21,7 @@
 #include "modules/video_coding/codecs/vp9/include/vp9_globals.h"
 #include "rtc_base/task_queue.h"
 #include "sdk/android/src/jni/jni_helpers.h"
-#include "sdk/android/src/jni/native_handle_impl.h"
+#include "sdk/android/src/jni/videoframe.h"
 
 namespace webrtc {
 namespace jni {
@@ -95,7 +95,6 @@ class VideoEncoderWrapper : public VideoEncoder {
   std::string implementation_name_;
   rtc::TaskQueue* encoder_queue_;
 
-  JavaVideoFrameFactory video_frame_factory_;
   std::deque<FrameExtraInfo> frame_extra_infos_;
   EncodedImageCallback* callback_;
   bool initialized_;


@@ -8,17 +8,19 @@
  * be found in the AUTHORS file in the root of the source tree.
  */
 
-#include "sdk/android/src/jni/native_handle_impl.h"
+#include "sdk/android/src/jni/videoframe.h"
 
 #include <memory>
 
 #include "common_video/include/video_frame_buffer.h"
+#include "libyuv/scale.h"
 #include "rtc_base/bind.h"
 #include "rtc_base/checks.h"
 #include "rtc_base/keep_ref_until_done.h"
 #include "rtc_base/logging.h"
 #include "rtc_base/scoped_ref_ptr.h"
 #include "rtc_base/timeutils.h"
+#include "sdk/android/generated_video_jni/jni/VideoFrame_jni.h"
 #include "sdk/android/src/jni/classreferenceholder.h"
 #include "sdk/android/src/jni/jni_helpers.h"
 #include "sdk/android/src/jni/wrapped_native_i420_buffer.h"
@@ -31,26 +33,16 @@ namespace {
 class AndroidVideoI420Buffer : public I420BufferInterface {
  public:
-  // Wraps an existing reference to a Java VideoBuffer. Retain will not be
-  // called but release will be called when the C++ object is destroyed.
-  static rtc::scoped_refptr<AndroidVideoI420Buffer> WrapReference(
-      JNIEnv* jni,
-      jmethodID j_release_id,
-      int width,
-      int height,
-      jobject j_video_frame_buffer);
+  // Adopts and takes ownership of the Java VideoFrame.Buffer. I.e. retain()
+  // will not be called, but release() will be called when the returned
+  // AndroidVideoBuffer is destroyed.
+  static rtc::scoped_refptr<AndroidVideoI420Buffer>
+  Adopt(JNIEnv* jni, int width, int height, jobject j_video_frame_buffer);
 
  protected:
+  // Should not be called directly. Adopts the buffer. Use Adopt() instead for
+  // clarity.
   AndroidVideoI420Buffer(JNIEnv* jni,
-                         jmethodID j_retain_id,
-                         jmethodID j_release_id,
-                         int width,
-                         int height,
-                         jobject j_video_frame_buffer);
-  // Should not be called directly. Wraps a reference. Use
-  // AndroidVideoI420Buffer::WrapReference instead for clarity.
-  AndroidVideoI420Buffer(JNIEnv* jni,
-                         jmethodID j_release_id,
                          int width,
                          int height,
                          jobject j_video_frame_buffer);
@@ -68,7 +60,6 @@ class AndroidVideoI420Buffer : public I420BufferInterface {
   int width() const override { return width_; }
   int height() const override { return height_; }
 
-  const jmethodID j_release_id_;
   const int width_;
   const int height_;
   // Holds a VideoFrame.I420Buffer.
@@ -82,73 +73,38 @@ class AndroidVideoI420Buffer : public I420BufferInterface {
   int stride_v_;
 };
 
-rtc::scoped_refptr<AndroidVideoI420Buffer>
-AndroidVideoI420Buffer::WrapReference(JNIEnv* jni,
-                                      jmethodID j_release_id,
-                                      int width,
-                                      int height,
-                                      jobject j_video_frame_buffer) {
+rtc::scoped_refptr<AndroidVideoI420Buffer> AndroidVideoI420Buffer::Adopt(
+    JNIEnv* jni,
+    int width,
+    int height,
+    jobject j_video_frame_buffer) {
   return new rtc::RefCountedObject<AndroidVideoI420Buffer>(
-      jni, j_release_id, width, height, j_video_frame_buffer);
+      jni, width, height, j_video_frame_buffer);
 }
 
 AndroidVideoI420Buffer::AndroidVideoI420Buffer(JNIEnv* jni,
-                                               jmethodID j_retain_id,
-                                               jmethodID j_release_id,
                                                int width,
                                                int height,
                                                jobject j_video_frame_buffer)
-    : AndroidVideoI420Buffer(jni,
-                             j_release_id,
-                             width,
-                             height,
-                             j_video_frame_buffer) {
-  jni->CallVoidMethod(j_video_frame_buffer, j_retain_id);
-}
-
-AndroidVideoI420Buffer::AndroidVideoI420Buffer(JNIEnv* jni,
-                                               jmethodID j_release_id,
-                                               int width,
-                                               int height,
-                                               jobject j_video_frame_buffer)
-    : j_release_id_(j_release_id),
-      width_(width),
+    : width_(width),
       height_(height),
       j_video_frame_buffer_(jni, j_video_frame_buffer) {
-  jclass j_video_frame_i420_buffer_class =
-      FindClass(jni, "org/webrtc/VideoFrame$I420Buffer");
-  jmethodID j_get_data_y_id = jni->GetMethodID(
-      j_video_frame_i420_buffer_class, "getDataY", "()Ljava/nio/ByteBuffer;");
-  jmethodID j_get_data_u_id = jni->GetMethodID(
-      j_video_frame_i420_buffer_class, "getDataU", "()Ljava/nio/ByteBuffer;");
-  jmethodID j_get_data_v_id = jni->GetMethodID(
-      j_video_frame_i420_buffer_class, "getDataV", "()Ljava/nio/ByteBuffer;");
-  jmethodID j_get_stride_y_id =
-      jni->GetMethodID(j_video_frame_i420_buffer_class, "getStrideY", "()I");
-  jmethodID j_get_stride_u_id =
-      jni->GetMethodID(j_video_frame_i420_buffer_class, "getStrideU", "()I");
-  jmethodID j_get_stride_v_id =
-      jni->GetMethodID(j_video_frame_i420_buffer_class, "getStrideV", "()I");
-
-  jobject j_data_y =
-      jni->CallObjectMethod(j_video_frame_buffer, j_get_data_y_id);
-  jobject j_data_u =
-      jni->CallObjectMethod(j_video_frame_buffer, j_get_data_u_id);
-  jobject j_data_v =
-      jni->CallObjectMethod(j_video_frame_buffer, j_get_data_v_id);
+  jobject j_data_y = Java_I420Buffer_getDataY(jni, j_video_frame_buffer);
+  jobject j_data_u = Java_I420Buffer_getDataU(jni, j_video_frame_buffer);
+  jobject j_data_v = Java_I420Buffer_getDataV(jni, j_video_frame_buffer);
 
   data_y_ = static_cast<const uint8_t*>(jni->GetDirectBufferAddress(j_data_y));
   data_u_ = static_cast<const uint8_t*>(jni->GetDirectBufferAddress(j_data_u));
   data_v_ = static_cast<const uint8_t*>(jni->GetDirectBufferAddress(j_data_v));
-  stride_y_ = jni->CallIntMethod(j_video_frame_buffer, j_get_stride_y_id);
-  stride_u_ = jni->CallIntMethod(j_video_frame_buffer, j_get_stride_u_id);
-  stride_v_ = jni->CallIntMethod(j_video_frame_buffer, j_get_stride_v_id);
+  stride_y_ = Java_I420Buffer_getStrideY(jni, j_video_frame_buffer);
+  stride_u_ = Java_I420Buffer_getStrideU(jni, j_video_frame_buffer);
+  stride_v_ = Java_I420Buffer_getStrideV(jni, j_video_frame_buffer);
 }
 
 AndroidVideoI420Buffer::~AndroidVideoI420Buffer() {
   JNIEnv* jni = AttachCurrentThreadIfNeeded();
-  jni->CallVoidMethod(*j_video_frame_buffer_, j_release_id_);
+  Java_Buffer_release(jni, *j_video_frame_buffer_);
 }
 
 }  // namespace
@@ -297,7 +253,7 @@ rtc::scoped_refptr<I420BufferInterface> AndroidTextureBuffer::ToI420() {
   // See YuvConverter.java for the required layout.
   uint8_t* y_data = yuv_data.get();
   uint8_t* u_data = y_data + height() * stride;
-  uint8_t* v_data = u_data + stride/2;
+  uint8_t* v_data = u_data + stride / 2;
 
   rtc::scoped_refptr<I420BufferInterface> copy = webrtc::WrapI420Buffer(
       width(), height(), y_data, stride, u_data, stride, v_data, stride,
@@ -326,43 +282,29 @@ rtc::scoped_refptr<I420BufferInterface> AndroidTextureBuffer::ToI420() {
   return copy;
 }
 
-rtc::scoped_refptr<AndroidVideoBuffer> AndroidVideoBuffer::WrapReference(
+rtc::scoped_refptr<AndroidVideoBuffer> AndroidVideoBuffer::Adopt(
     JNIEnv* jni,
-    jmethodID j_release_id,
-    int width,
-    int height,
     jobject j_video_frame_buffer) {
-  return new rtc::RefCountedObject<AndroidVideoBuffer>(
-      jni, j_release_id, width, height, j_video_frame_buffer);
+  return new rtc::RefCountedObject<AndroidVideoBuffer>(jni,
+                                                       j_video_frame_buffer);
+}
+
+rtc::scoped_refptr<AndroidVideoBuffer> AndroidVideoBuffer::Create(
+    JNIEnv* jni,
+    jobject j_video_frame_buffer) {
+  Java_Buffer_retain(jni, j_video_frame_buffer);
+  return Adopt(jni, j_video_frame_buffer);
 }
 
 AndroidVideoBuffer::AndroidVideoBuffer(JNIEnv* jni,
-                                       jmethodID j_retain_id,
-                                       jmethodID j_release_id,
-                                       int width,
-                                       int height,
                                        jobject j_video_frame_buffer)
-    : AndroidVideoBuffer(jni,
-                         j_release_id,
-                         width,
-                         height,
-                         j_video_frame_buffer) {
-  jni->CallVoidMethod(j_video_frame_buffer, j_retain_id);
-}
-
-AndroidVideoBuffer::AndroidVideoBuffer(JNIEnv* jni,
-                                       jmethodID j_release_id,
-                                       int width,
-                                       int height,
-                                       jobject j_video_frame_buffer)
-    : j_release_id_(j_release_id),
-      width_(width),
-      height_(height),
+    : width_(Java_Buffer_getWidth(jni, j_video_frame_buffer)),
+      height_(Java_Buffer_getHeight(jni, j_video_frame_buffer)),
       j_video_frame_buffer_(jni, j_video_frame_buffer) {}
 
 AndroidVideoBuffer::~AndroidVideoBuffer() {
   JNIEnv* jni = AttachCurrentThreadIfNeeded();
-  jni->CallVoidMethod(*j_video_frame_buffer_, j_release_id_);
+  Java_Buffer_release(jni, *j_video_frame_buffer_);
 }
 
 jobject AndroidVideoBuffer::video_frame_buffer() const {
@@ -384,20 +326,11 @@ int AndroidVideoBuffer::height() const {
 rtc::scoped_refptr<I420BufferInterface> AndroidVideoBuffer::ToI420() {
   JNIEnv* jni = AttachCurrentThreadIfNeeded();
   ScopedLocalRefFrame local_ref_frame(jni);
-
-  jclass j_video_frame_buffer_class =
-      FindClass(jni, "org/webrtc/VideoFrame$Buffer");
-  jmethodID j_to_i420_id =
-      jni->GetMethodID(j_video_frame_buffer_class, "toI420",
-                       "()Lorg/webrtc/VideoFrame$I420Buffer;");
-
-  jobject j_i420_buffer =
-      jni->CallObjectMethod(*j_video_frame_buffer_, j_to_i420_id);
+  jobject j_i420_buffer = Java_Buffer_toI420(jni, *j_video_frame_buffer_);
 
   // We don't need to retain the buffer because toI420 returns a new object that
   // we are assumed to take the ownership of.
-  return AndroidVideoI420Buffer::WrapReference(jni, j_release_id_, width_,
-                                               height_, j_i420_buffer);
+  return AndroidVideoI420Buffer::Adopt(jni, width_, height_, j_i420_buffer);
 }
 
 jobject AndroidVideoBuffer::ToJavaI420Frame(JNIEnv* jni, int rotation) {
@@ -415,69 +348,19 @@ jobject AndroidVideoBuffer::ToJavaI420Frame(JNIEnv* jni, int rotation) {
       *j_video_frame_buffer_, jlongFromPointer(native_frame));
 }
 
-AndroidVideoBufferFactory::AndroidVideoBufferFactory(JNIEnv* jni)
-    : j_video_frame_class_(jni, FindClass(jni, "org/webrtc/VideoFrame")),
-      j_get_buffer_id_(GetMethodID(jni,
-                                   *j_video_frame_class_,
-                                   "getBuffer",
-                                   "()Lorg/webrtc/VideoFrame$Buffer;")),
-      j_get_rotation_id_(
-          GetMethodID(jni, *j_video_frame_class_, "getRotation", "()I")),
-      j_get_timestamp_ns_id_(
-          GetMethodID(jni, *j_video_frame_class_, "getTimestampNs", "()J")),
-      j_video_frame_buffer_class_(
-          jni,
-          FindClass(jni, "org/webrtc/VideoFrame$Buffer")),
-      j_retain_id_(
-          GetMethodID(jni, *j_video_frame_buffer_class_, "retain", "()V")),
-      j_release_id_(
-          GetMethodID(jni, *j_video_frame_buffer_class_, "release", "()V")),
-      j_get_width_id_(
-          GetMethodID(jni, *j_video_frame_buffer_class_, "getWidth", "()I")),
-      j_get_height_id_(
-          GetMethodID(jni, *j_video_frame_buffer_class_, "getHeight", "()I")) {}
-
-VideoFrame AndroidVideoBufferFactory::CreateFrame(
-    JNIEnv* jni,
-    jobject j_video_frame,
-    uint32_t timestamp_rtp) const {
-  jobject j_video_frame_buffer =
-      jni->CallObjectMethod(j_video_frame, j_get_buffer_id_);
-  int rotation = jni->CallIntMethod(j_video_frame, j_get_rotation_id_);
-  uint32_t timestamp_ns =
-      jni->CallLongMethod(j_video_frame, j_get_timestamp_ns_id_);
+VideoFrame JavaToNativeFrame(JNIEnv* jni,
+                             jobject j_video_frame,
+                             uint32_t timestamp_rtp) {
+  jobject j_video_frame_buffer = Java_VideoFrame_getBuffer(jni, j_video_frame);
+  int rotation = Java_VideoFrame_getRotation(jni, j_video_frame);
+  uint32_t timestamp_ns = Java_VideoFrame_getTimestampNs(jni, j_video_frame);
   rtc::scoped_refptr<AndroidVideoBuffer> buffer =
-      CreateBuffer(jni, j_video_frame_buffer);
+      AndroidVideoBuffer::Create(jni, j_video_frame_buffer);
   return VideoFrame(buffer, timestamp_rtp,
                     timestamp_ns / rtc::kNumNanosecsPerMillisec,
                     static_cast<VideoRotation>(rotation));
 }
 
-rtc::scoped_refptr<AndroidVideoBuffer> AndroidVideoBufferFactory::WrapBuffer(
-    JNIEnv* jni,
-    jobject j_video_frame_buffer) const {
-  int width = jni->CallIntMethod(j_video_frame_buffer, j_get_width_id_);
-  int height = jni->CallIntMethod(j_video_frame_buffer, j_get_height_id_);
-  return AndroidVideoBuffer::WrapReference(jni, j_release_id_, width, height,
-                                           j_video_frame_buffer);
-}
-
-rtc::scoped_refptr<AndroidVideoBuffer> AndroidVideoBufferFactory::CreateBuffer(
-    JNIEnv* jni,
-    jobject j_video_frame_buffer) const {
-  int width = jni->CallIntMethod(j_video_frame_buffer, j_get_width_id_);
-  int height = jni->CallIntMethod(j_video_frame_buffer, j_get_height_id_);
-  return new rtc::RefCountedObject<AndroidVideoBuffer>(
-      jni, j_retain_id_, j_release_id_, width, height, j_video_frame_buffer);
-}
-
-JavaVideoFrameFactory::JavaVideoFrameFactory(JNIEnv* jni)
-    : j_video_frame_class_(jni, FindClass(jni, "org/webrtc/VideoFrame")) {
-  j_video_frame_constructor_id_ =
-      GetMethodID(jni, *j_video_frame_class_, "<init>",
-                  "(Lorg/webrtc/VideoFrame$Buffer;IJ)V");
-}
-
 static bool IsJavaVideoBuffer(rtc::scoped_refptr<VideoFrameBuffer> buffer) {
   if (buffer->type() != VideoFrameBuffer::Type::kNative) {
     return false;
@@ -488,8 +371,7 @@ static bool IsJavaVideoBuffer(rtc::scoped_refptr<VideoFrameBuffer> buffer) {
          AndroidVideoFrameBuffer::AndroidType::kJavaBuffer;
 }
 
-jobject JavaVideoFrameFactory::ToJavaFrame(JNIEnv* jni,
-                                           const VideoFrame& frame) const {
+jobject NativeToJavaFrame(JNIEnv* jni, const VideoFrame& frame) {
   rtc::scoped_refptr<VideoFrameBuffer> buffer = frame.video_frame_buffer();
   jobject j_buffer;
   if (IsJavaVideoBuffer(buffer)) {
@@ -504,11 +386,53 @@ jobject NativeToJavaFrame(JNIEnv* jni, const VideoFrame& frame) {
   } else {
     j_buffer = WrapI420Buffer(jni, buffer->ToI420());
   }
-  return jni->NewObject(
-      *j_video_frame_class_, j_video_frame_constructor_id_, j_buffer,
-      static_cast<jint>(frame.rotation()),
+  return Java_VideoFrame_create(
+      jni, j_buffer, static_cast<jint>(frame.rotation()),
       static_cast<jlong>(frame.timestamp_us() * rtc::kNumNanosecsPerMicrosec));
 }
 
+extern "C" JNIEXPORT void JNICALL
+Java_org_webrtc_VideoFrame_cropAndScaleI420Native(JNIEnv* jni,
+                                                  jclass,
+                                                  jobject j_src_y,
+                                                  jint src_stride_y,
+                                                  jobject j_src_u,
+                                                  jint src_stride_u,
+                                                  jobject j_src_v,
+                                                  jint src_stride_v,
+                                                  jint crop_x,
+                                                  jint crop_y,
+                                                  jint crop_width,
+                                                  jint crop_height,
+                                                  jobject j_dst_y,
+                                                  jint dst_stride_y,
+                                                  jobject j_dst_u,
+                                                  jint dst_stride_u,
+                                                  jobject j_dst_v,
+                                                  jint dst_stride_v,
+                                                  jint scale_width,
+                                                  jint scale_height) {
+  uint8_t const* src_y =
+      static_cast<uint8_t*>(jni->GetDirectBufferAddress(j_src_y));
+  uint8_t const* src_u =
+      static_cast<uint8_t*>(jni->GetDirectBufferAddress(j_src_u));
+  uint8_t const* src_v =
+      static_cast<uint8_t*>(jni->GetDirectBufferAddress(j_src_v));
+  uint8_t* dst_y = static_cast<uint8_t*>(jni->GetDirectBufferAddress(j_dst_y));
+  uint8_t* dst_u = static_cast<uint8_t*>(jni->GetDirectBufferAddress(j_dst_u));
+  uint8_t* dst_v = static_cast<uint8_t*>(jni->GetDirectBufferAddress(j_dst_v));
+
+  // Perform cropping using pointer arithmetic.
+  src_y += crop_x + crop_y * src_stride_y;
+  src_u += crop_x / 2 + crop_y / 2 * src_stride_u;
+  src_v += crop_x / 2 + crop_y / 2 * src_stride_v;
+
+  bool ret = libyuv::I420Scale(
+      src_y, src_stride_y, src_u, src_stride_u, src_v, src_stride_v, crop_width,
+      crop_height, dst_y, dst_stride_y, dst_u, dst_stride_u, dst_v,
+      dst_stride_v, scale_width, scale_height, libyuv::kFilterBox);
+  RTC_DCHECK_EQ(ret, 0) << "I420Scale failed";
+}
+
 }  // namespace jni
 }  // namespace webrtc
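A short usage sketch of the ownership contract introduced above (the caller
function and buffer variables are hypothetical; Create() and Adopt() are the
new entry points from this file):

    #include "rtc_base/scoped_ref_ptr.h"
    #include "sdk/android/src/jni/videoframe.h"

    namespace webrtc {
    namespace jni {

    void WrapJavaBuffers(JNIEnv* env, jobject j_shared, jobject j_owned) {
      // Create() calls retain() on the Java buffer first, so the caller keeps
      // its own reference; release() runs when the wrapper is destroyed.
      rtc::scoped_refptr<AndroidVideoBuffer> shared =
          AndroidVideoBuffer::Create(env, j_shared);

      // Adopt() does not retain(); it takes over the reference the caller
      // already holds and releases it when the wrapper is destroyed.
      rtc::scoped_refptr<AndroidVideoBuffer> adopted =
          AndroidVideoBuffer::Adopt(env, j_owned);
    }

    }  // namespace jni
    }  // namespace webrtc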


@@ -8,8 +8,8 @@
  * be found in the AUTHORS file in the root of the source tree.
  */
 
-#ifndef SDK_ANDROID_SRC_JNI_NATIVE_HANDLE_IMPL_H_
-#define SDK_ANDROID_SRC_JNI_NATIVE_HANDLE_IMPL_H_
+#ifndef SDK_ANDROID_SRC_JNI_VIDEOFRAME_H_
+#define SDK_ANDROID_SRC_JNI_VIDEOFRAME_H_
 
 #include <jni.h>
@@ -104,28 +104,18 @@ class AndroidTextureBuffer : public AndroidVideoFrameBuffer {
 class AndroidVideoBuffer : public AndroidVideoFrameBuffer {
  public:
-  // Wraps an existing reference to a Java VideoBuffer. Retain will not be
-  // called but release will be called when the C++ object is destroyed.
-  static rtc::scoped_refptr<AndroidVideoBuffer> WrapReference(
+  // Creates a native VideoFrameBuffer from a Java VideoFrame.Buffer.
+  static rtc::scoped_refptr<AndroidVideoBuffer> Create(
+      JNIEnv* jni,
+      jobject j_video_frame_buffer);
+
+  // Similar to the Create() above, but adopts and takes ownership of the Java
+  // VideoFrame.Buffer. I.e. retain() will not be called, but release() will be
+  // called when the returned AndroidVideoBuffer is destroyed.
+  static rtc::scoped_refptr<AndroidVideoBuffer> Adopt(
       JNIEnv* jni,
-      jmethodID j_release_id,
-      int width,
-      int height,
      jobject j_video_frame_buffer);
 
-  AndroidVideoBuffer(JNIEnv* jni,
-                     jmethodID j_retain_id,
-                     jmethodID j_release_id,
-                     int width,
-                     int height,
-                     jobject j_video_frame_buffer);
-  // Should not be called directly. Wraps a reference. Use
-  // AndroidVideoBuffer::WrapReference instead for clarity.
-  AndroidVideoBuffer(JNIEnv* jni,
-                     jmethodID j_release_id,
-                     int width,
-                     int height,
-                     jobject j_video_frame_buffer);
-
   ~AndroidVideoBuffer() override;
 
   jobject video_frame_buffer() const;
@@ -133,6 +123,11 @@ class AndroidVideoBuffer : public AndroidVideoFrameBuffer {
   // Returns an instance of VideoRenderer.I420Frame (deprecated)
   jobject ToJavaI420Frame(JNIEnv* jni, int rotation);
 
+ protected:
+  // Should not be called directly. Adopts the Java VideoFrame.Buffer. Use
+  // Create() or Adopt() instead for clarity.
+  AndroidVideoBuffer(JNIEnv* jni, jobject j_video_frame_buffer);
+
  private:
   Type type() const override;
   int width() const override;
@@ -142,56 +137,19 @@ class AndroidVideoBuffer : public AndroidVideoFrameBuffer {
   AndroidType android_type() override { return AndroidType::kJavaBuffer; }
 
-  const jmethodID j_release_id_;
   const int width_;
   const int height_;
   // Holds a VideoFrame.Buffer.
   const ScopedGlobalRef<jobject> j_video_frame_buffer_;
 };
 
-class AndroidVideoBufferFactory {
- public:
-  explicit AndroidVideoBufferFactory(JNIEnv* jni);
-
-  VideoFrame CreateFrame(JNIEnv* jni,
-                         jobject j_video_frame,
-                         uint32_t timestamp_rtp) const;
-
-  // Wraps a buffer to AndroidVideoBuffer without incrementing the reference
-  // count.
-  rtc::scoped_refptr<AndroidVideoBuffer> WrapBuffer(
-      JNIEnv* jni,
-      jobject j_video_frame_buffer) const;
-
-  rtc::scoped_refptr<AndroidVideoBuffer> CreateBuffer(
-      JNIEnv* jni,
-      jobject j_video_frame_buffer) const;
-
- private:
-  ScopedGlobalRef<jclass> j_video_frame_class_;
-  jmethodID j_get_buffer_id_;
-  jmethodID j_get_rotation_id_;
-  jmethodID j_get_timestamp_ns_id_;
-  ScopedGlobalRef<jclass> j_video_frame_buffer_class_;
-  jmethodID j_retain_id_;
-  jmethodID j_release_id_;
-  jmethodID j_get_width_id_;
-  jmethodID j_get_height_id_;
-};
-
-class JavaVideoFrameFactory {
- public:
-  JavaVideoFrameFactory(JNIEnv* jni);
-
-  jobject ToJavaFrame(JNIEnv* jni, const VideoFrame& frame) const;
-
- private:
-  ScopedGlobalRef<jclass> j_video_frame_class_;
-  jmethodID j_video_frame_constructor_id_;
-};
+VideoFrame JavaToNativeFrame(JNIEnv* jni,
+                             jobject j_video_frame,
+                             uint32_t timestamp_rtp);
+
+jobject NativeToJavaFrame(JNIEnv* jni, const VideoFrame& frame);
 
 }  // namespace jni
 }  // namespace webrtc
 
-#endif  // SDK_ANDROID_SRC_JNI_NATIVE_HANDLE_IMPL_H_
+#endif  // SDK_ANDROID_SRC_JNI_VIDEOFRAME_H_


@@ -1,64 +0,0 @@
-/*
- * Copyright 2017 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include <jni.h>
-
-#include "libyuv/scale.h"
-#include "rtc_base/checks.h"
-
-namespace webrtc {
-namespace jni {
-
-extern "C" JNIEXPORT void JNICALL
-Java_org_webrtc_VideoFrame_nativeCropAndScaleI420(JNIEnv* jni,
-                                                  jclass,
-                                                  jobject j_src_y,
-                                                  jint src_stride_y,
-                                                  jobject j_src_u,
-                                                  jint src_stride_u,
-                                                  jobject j_src_v,
-                                                  jint src_stride_v,
-                                                  jint crop_x,
-                                                  jint crop_y,
-                                                  jint crop_width,
-                                                  jint crop_height,
-                                                  jobject j_dst_y,
-                                                  jint dst_stride_y,
-                                                  jobject j_dst_u,
-                                                  jint dst_stride_u,
-                                                  jobject j_dst_v,
-                                                  jint dst_stride_v,
-                                                  jint scale_width,
-                                                  jint scale_height) {
-  uint8_t const* src_y =
-      static_cast<uint8_t*>(jni->GetDirectBufferAddress(j_src_y));
-  uint8_t const* src_u =
-      static_cast<uint8_t*>(jni->GetDirectBufferAddress(j_src_u));
-  uint8_t const* src_v =
-      static_cast<uint8_t*>(jni->GetDirectBufferAddress(j_src_v));
-  uint8_t* dst_y = static_cast<uint8_t*>(jni->GetDirectBufferAddress(j_dst_y));
-  uint8_t* dst_u = static_cast<uint8_t*>(jni->GetDirectBufferAddress(j_dst_u));
-  uint8_t* dst_v = static_cast<uint8_t*>(jni->GetDirectBufferAddress(j_dst_v));
-
-  // Perform cropping using pointer arithmetic.
-  src_y += crop_x + crop_y * src_stride_y;
-  src_u += crop_x / 2 + crop_y / 2 * src_stride_u;
-  src_v += crop_x / 2 + crop_y / 2 * src_stride_v;
-
-  bool ret = libyuv::I420Scale(
-      src_y, src_stride_y, src_u, src_stride_u, src_v, src_stride_v, crop_width,
-      crop_height, dst_y, dst_stride_y, dst_u, dst_stride_u, dst_v,
-      dst_stride_v, scale_width, scale_height, libyuv::kFilterBox);
-  RTC_DCHECK_EQ(ret, 0) << "I420Scale failed";
-}
-
-}  // namespace jni
-}  // namespace webrtc


@@ -15,7 +15,7 @@
 #include "sdk/android/generated_video_jni/jni/VideoSink_jni.h"
 #include "sdk/android/src/jni/classreferenceholder.h"
 #include "sdk/android/src/jni/jni_helpers.h"
-#include "sdk/android/src/jni/native_handle_impl.h"
+#include "sdk/android/src/jni/videoframe.h"
 
 namespace webrtc {
 namespace jni {
@@ -30,18 +30,16 @@ class VideoSinkWrapper : public rtc::VideoSinkInterface<VideoFrame> {
  private:
   void OnFrame(const VideoFrame& frame) override;
 
-  const JavaVideoFrameFactory java_video_frame_factory_;
   const ScopedGlobalRef<jobject> j_sink_;
 };
 
 VideoSinkWrapper::VideoSinkWrapper(JNIEnv* jni, jobject j_sink)
-    : java_video_frame_factory_(jni), j_sink_(jni, j_sink) {}
+    : j_sink_(jni, j_sink) {}
 
 void VideoSinkWrapper::OnFrame(const VideoFrame& frame) {
   JNIEnv* jni = AttachCurrentThreadIfNeeded();
   ScopedLocalRefFrame local_ref_frame(jni);
-  Java_VideoSink_onFrame(jni, *j_sink_,
-                         java_video_frame_factory_.ToJavaFrame(jni, frame));
+  Java_VideoSink_onFrame(jni, *j_sink_, NativeToJavaFrame(jni, frame));
 }
 
 }  // namespace