Android: Annotate and generate JNI code for VideoFrame.java

This CL also merges native_handle_impl.cc and videoframe_jni.cc to keep
all JNI code for the same Java class in the same file, and renames the
merged file to jni/videoframe.cc.

The AndroidVideoBufferFactory and JavaVideoFrameFactory classes are now
unnecessary, since we cache everything, and they have been simplified
to global static functions instead.

Bug: webrtc:8278
Change-Id: I03d7b0bbde64cfb407cd6210478ddf9d5599cd8c
Reviewed-on: https://webrtc-review.googlesource.com/22923
Commit-Queue: Magnus Jedvert <magjed@webrtc.org>
Reviewed-by: Sami Kalliomäki <sakal@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#20677}
Authored by Magnus Jedvert on 2017-11-14 17:08:59 +01:00; committed by Commit Bot
parent 651707bdf0, commit c2ac3c663f
17 changed files with 170 additions and 349 deletions
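
In practice the factory removal is a call-site change: code that previously
constructed a JavaVideoFrameFactory or AndroidVideoBufferFactory now calls the
free functions NativeToJavaFrame() and JavaToNativeFrame() declared in
src/jni/videoframe.h. A minimal sketch of the new call sites, based on the
diffs below (the caller names here are hypothetical; only the included header
and the two free functions come from this CL):

#include <jni.h>
#include <stdint.h>

#include "sdk/android/src/jni/videoframe.h"

// Hypothetical callers, for illustration only.
jobject SendFrameToJava(JNIEnv* jni, const webrtc::VideoFrame& frame) {
  // Replaces JavaVideoFrameFactory::ToJavaFrame().
  return webrtc::jni::NativeToJavaFrame(jni, frame);
}

webrtc::VideoFrame ReceiveFrameFromJava(JNIEnv* jni,
                                        jobject j_video_frame,
                                        uint32_t timestamp_rtp) {
  // Replaces AndroidVideoBufferFactory::CreateFrame().
  return webrtc::jni::JavaToNativeFrame(jni, j_video_frame, timestamp_rtp);
}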


@ -106,6 +106,7 @@ generate_jni("generated_video_jni") {
"api/org/webrtc/SurfaceTextureHelper.java",
"api/org/webrtc/VideoCodecStatus.java",
"api/org/webrtc/VideoEncoder.java",
"api/org/webrtc/VideoFrame.java",
"api/org/webrtc/VideoSink.java",
"src/java/org/webrtc/VideoEncoderWrapper.java",
"src/java/org/webrtc/WrappedNativeVideoDecoder.java",
@ -127,8 +128,6 @@ rtc_static_library("video_jni") {
"src/jni/androidvideotracksource_jni.cc",
"src/jni/defaultvideoencoderfactory.cc",
"src/jni/jni_generator_helper.h",
"src/jni/native_handle_impl.cc",
"src/jni/native_handle_impl.h",
"src/jni/nv12buffer_jni.cc",
"src/jni/nv21buffer_jni.cc",
"src/jni/pc/video_jni.cc",
@ -147,7 +146,8 @@ rtc_static_library("video_jni") {
"src/jni/videoencoderwrapper.cc",
"src/jni/videoencoderwrapper.h",
"src/jni/videofilerenderer_jni.cc",
"src/jni/videoframe_jni.cc",
"src/jni/videoframe.cc",
"src/jni/videoframe.h",
"src/jni/videotrack_jni.cc",
"src/jni/vp8codec.cc",
"src/jni/vp9codec.cc",


@ -30,22 +30,22 @@ public class VideoFrame {
/**
* Resolution of the buffer in pixels.
*/
int getWidth();
int getHeight();
@CalledByNative("Buffer") int getWidth();
@CalledByNative("Buffer") int getHeight();
/**
* Returns a memory-backed frame in I420 format. If the pixel data is in another format, a
* conversion will take place. All implementations must provide a fallback to I420 for
* compatibility with e.g. the internal WebRTC software encoders.
*/
I420Buffer toI420();
@CalledByNative("Buffer") I420Buffer toI420();
/**
* Reference counting is needed since a video buffer can be shared between multiple VideoSinks,
* and the buffer needs to be returned to the VideoSource as soon as all references are gone.
*/
void retain();
void release();
@CalledByNative("Buffer") void retain();
@CalledByNative("Buffer") void release();
/**
* Crops a region defined by |cropx|, |cropY|, |cropWidth| and |cropHeight|. Scales it to size
@ -65,25 +65,25 @@ public class VideoFrame {
* be 0. Callers may mutate the ByteBuffer (eg. through relative-read operations), so
* implementations must return a new ByteBuffer or slice for each call.
*/
ByteBuffer getDataY();
@CalledByNative("I420Buffer") ByteBuffer getDataY();
/**
* Returns a direct ByteBuffer containing U-plane data. The buffer capacity is at least
* getStrideU() * ((getHeight() + 1) / 2) bytes. The position of the returned buffer is ignored
* and must be 0. Callers may mutate the ByteBuffer (eg. through relative-read operations), so
* implementations must return a new ByteBuffer or slice for each call.
*/
ByteBuffer getDataU();
@CalledByNative("I420Buffer") ByteBuffer getDataU();
/**
* Returns a direct ByteBuffer containing V-plane data. The buffer capacity is at least
* getStrideV() * ((getHeight() + 1) / 2) bytes. The position of the returned buffer is ignored
* and must be 0. Callers may mutate the ByteBuffer (eg. through relative-read operations), so
* implementations must return a new ByteBuffer or slice for each call.
*/
ByteBuffer getDataV();
@CalledByNative("I420Buffer") ByteBuffer getDataV();
int getStrideY();
int getStrideU();
int getStrideV();
@CalledByNative("I420Buffer") int getStrideY();
@CalledByNative("I420Buffer") int getStrideU();
@CalledByNative("I420Buffer") int getStrideV();
}
/**
@ -132,6 +132,7 @@ public class VideoFrame {
this.timestampNs = timestampNs;
}
@CalledByNative
public Buffer getBuffer() {
return buffer;
}
@ -139,6 +140,7 @@ public class VideoFrame {
/**
* Rotation of the frame in degrees.
*/
@CalledByNative
public int getRotation() {
return rotation;
}
@ -146,6 +148,7 @@ public class VideoFrame {
/**
* Timestamp of the frame in nanoseconds.
*/
@CalledByNative
public long getTimestampNs() {
return timestampNs;
}
@ -194,7 +197,7 @@ public class VideoFrame {
}
JavaI420Buffer newBuffer = JavaI420Buffer.allocate(scaleWidth, scaleHeight);
nativeCropAndScaleI420(buffer.getDataY(), buffer.getStrideY(), buffer.getDataU(),
cropAndScaleI420Native(buffer.getDataY(), buffer.getStrideY(), buffer.getDataU(),
buffer.getStrideU(), buffer.getDataV(), buffer.getStrideV(), cropX, cropY, cropWidth,
cropHeight, newBuffer.getDataY(), newBuffer.getStrideY(), newBuffer.getDataU(),
newBuffer.getStrideU(), newBuffer.getDataV(), newBuffer.getStrideV(), scaleWidth,
@ -202,7 +205,13 @@ public class VideoFrame {
return newBuffer;
}
private static native void nativeCropAndScaleI420(ByteBuffer srcY, int srcStrideY,
// TODO(bugs.webrtc.org/8278): Add a way to generate JNI code for constructors directly.
@CalledByNative
static VideoFrame create(Buffer buffer, int rotation, long timestampNs) {
return new VideoFrame(buffer, rotation, timestampNs);
}
private static native void cropAndScaleI420Native(ByteBuffer srcY, int srcStrideY,
ByteBuffer srcU, int srcStrideU, ByteBuffer srcV, int srcStrideV, int cropX, int cropY,
int cropWidth, int cropHeight, ByteBuffer dstY, int dstStrideY, ByteBuffer dstU,
int dstStrideU, ByteBuffer dstV, int dstStrideV, int scaleWidth, int scaleHeight);


@ -21,4 +21,9 @@ import java.lang.annotation.Target;
*/
@Target(ElementType.METHOD)
@Retention(RetentionPolicy.CLASS)
@interface CalledByNative {}
@interface CalledByNative {
/*
* If present, tells which inner class the method belongs to.
*/
public String value() default "";
}
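
The value ties an annotated method to its inner class, so the JNI generator
can emit distinctly named C++ call wrappers such as Java_Buffer_getWidth()
and Java_VideoFrame_getRotation(), which jni/videoframe.cc calls further
down. A minimal sketch of using these generated wrappers from native code,
assuming only the call sites visible in the diffs below (the helper function
name here is hypothetical):

#include <jni.h>

// Header emitted by generate_jni() for VideoFrame.java; also included by
// jni/videoframe.cc in the diff below.
#include "sdk/android/generated_video_jni/jni/VideoFrame_jni.h"

namespace webrtc {
namespace jni {

// Illustrative helper (not part of the CL): reads a frame's buffer width
// through the generated wrappers instead of hand-written
// GetMethodID()/CallIntMethod() lookups.
int GetJavaFrameWidth(JNIEnv* jni, jobject j_video_frame) {
  jobject j_buffer = Java_VideoFrame_getBuffer(jni, j_video_frame);
  return Java_Buffer_getWidth(jni, j_buffer);
}

}  // namespace jni
}  // namespace webrtc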


@ -17,9 +17,6 @@
// androidmediacodeccommon.h to avoid build errors.
#include "sdk/android/src/jni/androidmediadecoder_jni.h"
#include "third_party/libyuv/include/libyuv/convert.h"
#include "third_party/libyuv/include/libyuv/convert_from.h"
#include "third_party/libyuv/include/libyuv/video_common.h"
#include "common_video/h264/h264_bitstream_parser.h"
#include "common_video/include/i420_buffer_pool.h"
#include "modules/video_coding/include/video_codec_interface.h"
@ -32,8 +29,11 @@
#include "rtc_base/timeutils.h"
#include "sdk/android/src/jni/androidmediacodeccommon.h"
#include "sdk/android/src/jni/classreferenceholder.h"
#include "sdk/android/src/jni/native_handle_impl.h"
#include "sdk/android/src/jni/surfacetexturehelper_jni.h"
#include "sdk/android/src/jni/videoframe.h"
#include "third_party/libyuv/include/libyuv/convert.h"
#include "third_party/libyuv/include/libyuv/convert_from.h"
#include "third_party/libyuv/include/libyuv/video_common.h"
using rtc::Bind;
using rtc::Thread;


@ -18,9 +18,6 @@
#include <string>
#include <utility>
#include "third_party/libyuv/include/libyuv/convert.h"
#include "third_party/libyuv/include/libyuv/convert_from.h"
#include "third_party/libyuv/include/libyuv/video_common.h"
#include "api/video_codecs/video_encoder.h"
#include "common_types.h" // NOLINT(build/include)
#include "common_video/h264/h264_bitstream_parser.h"
@ -43,8 +40,11 @@
#include "sdk/android/src/jni/androidmediacodeccommon.h"
#include "sdk/android/src/jni/classreferenceholder.h"
#include "sdk/android/src/jni/jni_helpers.h"
#include "sdk/android/src/jni/native_handle_impl.h"
#include "sdk/android/src/jni/videoframe.h"
#include "system_wrappers/include/field_trial.h"
#include "third_party/libyuv/include/libyuv/convert.h"
#include "third_party/libyuv/include/libyuv/convert_from.h"
#include "third_party/libyuv/include/libyuv/video_common.h"
using rtc::Bind;
using rtc::Thread;
@ -230,7 +230,6 @@ class MediaCodecVideoEncoder : public VideoEncoder {
jfieldID j_info_is_key_frame_field_;
jfieldID j_info_presentation_timestamp_us_field_;
const JavaVideoFrameFactory video_frame_factory_;
ScopedGlobalRef<jclass> j_video_frame_texture_buffer_class_;
// State that is valid only between InitEncode() and the next Release().
@ -342,7 +341,6 @@ MediaCodecVideoEncoder::MediaCodecVideoEncoder(JNIEnv* jni,
*j_media_codec_video_encoder_class_,
"<init>",
"()V"))),
video_frame_factory_(jni),
j_video_frame_texture_buffer_class_(
jni,
FindClass(jni, "org/webrtc/VideoFrame$TextureBuffer")),
@ -801,9 +799,9 @@ int32_t MediaCodecVideoEncoder::Encode(
encode_status = EncodeTexture(jni, key_frame, input_frame);
break;
case AndroidVideoFrameBuffer::AndroidType::kJavaBuffer:
encode_status = EncodeJavaFrame(
jni, key_frame, video_frame_factory_.ToJavaFrame(jni, input_frame),
j_input_buffer_index);
encode_status =
EncodeJavaFrame(jni, key_frame, NativeToJavaFrame(jni, input_frame),
j_input_buffer_index);
break;
default:
RTC_NOTREACHED();


@ -33,7 +33,6 @@ AndroidVideoTrackSource::AndroidVideoTrackSource(
surface_texture_helper_(new rtc::RefCountedObject<SurfaceTextureHelper>(
jni,
j_surface_texture_helper)),
video_buffer_factory_(jni),
is_screencast_(is_screencast) {
RTC_LOG(LS_INFO) << "AndroidVideoTrackSource ctor";
camera_thread_checker_.DetachFromThread();
@ -189,7 +188,7 @@ void AndroidVideoTrackSource::OnFrameCaptured(JNIEnv* jni,
crop_height, adapted_width, adapted_height);
rtc::scoped_refptr<VideoFrameBuffer> buffer =
video_buffer_factory_.WrapBuffer(jni, j_adapted_video_frame_buffer);
AndroidVideoBuffer::Adopt(jni, j_adapted_video_frame_buffer);
// AdaptedVideoTrackSource handles applying rotation for I420 frames.
if (apply_rotation() && rotation != kVideoRotation_0) {


@ -20,8 +20,8 @@
#include "rtc_base/checks.h"
#include "rtc_base/thread_checker.h"
#include "rtc_base/timestampaligner.h"
#include "sdk/android/src/jni/native_handle_impl.h"
#include "sdk/android/src/jni/surfacetexturehelper_jni.h"
#include "sdk/android/src/jni/videoframe.h"
namespace webrtc {
namespace jni {
@ -84,7 +84,6 @@ class AndroidVideoTrackSource : public rtc::AdaptedVideoTrackSource {
NV12ToI420Scaler nv12toi420_scaler_;
I420BufferPool buffer_pool_;
rtc::scoped_refptr<SurfaceTextureHelper> surface_texture_helper_;
AndroidVideoBufferFactory video_buffer_factory_;
const bool is_screencast_;
jmethodID j_crop_and_scale_id_;


@ -17,7 +17,7 @@
#include "rtc_base/refcount.h"
#include "rtc_base/scoped_ref_ptr.h"
#include "sdk/android/src/jni/jni_helpers.h"
#include "sdk/android/src/jni/native_handle_impl.h"
#include "sdk/android/src/jni/videoframe.h"
namespace webrtc {
namespace jni {


@ -14,7 +14,7 @@
#include "media/base/videosinkinterface.h"
#include "sdk/android/src/jni/classreferenceholder.h"
#include "sdk/android/src/jni/jni_helpers.h"
#include "sdk/android/src/jni/native_handle_impl.h"
#include "sdk/android/src/jni/videoframe.h"
namespace webrtc {
namespace jni {


@ -22,8 +22,7 @@ namespace webrtc {
namespace jni {
VideoDecoderWrapper::VideoDecoderWrapper(JNIEnv* jni, jobject decoder)
: android_video_buffer_factory_(jni),
decoder_(jni, decoder),
: decoder_(jni, decoder),
encoded_image_class_(jni, FindClass(jni, "org/webrtc/EncodedImage")),
frame_type_class_(jni,
FindClass(jni, "org/webrtc/EncodedImage$FrameType")),
@ -187,8 +186,8 @@ void VideoDecoderWrapper::OnDecodedFrame(JNIEnv* jni,
// find a matching timestamp.
} while (frame_extra_info.capture_time_ns != capture_time_ns);
VideoFrame frame = android_video_buffer_factory_.CreateFrame(
jni, jframe, frame_extra_info.timestamp_rtp);
VideoFrame frame =
JavaToNativeFrame(jni, jframe, frame_extra_info.timestamp_rtp);
rtc::Optional<int32_t> decoding_time_ms;
if (jdecode_time_ms != nullptr) {


@ -17,7 +17,7 @@
#include "api/video_codecs/video_decoder.h"
#include "common_video/h264/h264_bitstream_parser.h"
#include "sdk/android/src/jni/jni_helpers.h"
#include "sdk/android/src/jni/native_handle_impl.h"
#include "sdk/android/src/jni/videoframe.h"
namespace webrtc {
namespace jni {
@ -78,7 +78,6 @@ class VideoDecoderWrapper : public VideoDecoder {
int32_t number_of_cores_;
bool initialized_;
AndroidVideoBufferFactory android_video_buffer_factory_;
std::deque<FrameExtraInfo> frame_extra_infos_;
bool qp_parsing_enabled_;
H264BitstreamParser h264_bitstream_parser_;


@ -35,8 +35,7 @@ VideoEncoderWrapper::VideoEncoderWrapper(JNIEnv* jni, jobject j_encoder)
: encoder_(jni, j_encoder),
frame_type_class_(jni,
GetClass(jni, "org/webrtc/EncodedImage$FrameType")),
int_array_class_(jni, jni->FindClass("[I")),
video_frame_factory_(jni) {
int_array_class_(jni, jni->FindClass("[I")) {
implementation_name_ = GetImplementationName(jni);
initialized_ = false;
@ -137,8 +136,7 @@ int32_t VideoEncoderWrapper::Encode(
frame_extra_infos_.push_back(info);
jobject ret = Java_VideoEncoder_encode(
jni, *encoder_, video_frame_factory_.ToJavaFrame(jni, frame),
encode_info);
jni, *encoder_, NativeToJavaFrame(jni, frame), encode_info);
return HandleReturnCode(jni, ret);
}


@ -21,7 +21,7 @@
#include "modules/video_coding/codecs/vp9/include/vp9_globals.h"
#include "rtc_base/task_queue.h"
#include "sdk/android/src/jni/jni_helpers.h"
#include "sdk/android/src/jni/native_handle_impl.h"
#include "sdk/android/src/jni/videoframe.h"
namespace webrtc {
namespace jni {
@ -95,7 +95,6 @@ class VideoEncoderWrapper : public VideoEncoder {
std::string implementation_name_;
rtc::TaskQueue* encoder_queue_;
JavaVideoFrameFactory video_frame_factory_;
std::deque<FrameExtraInfo> frame_extra_infos_;
EncodedImageCallback* callback_;
bool initialized_;


@ -8,17 +8,19 @@
* be found in the AUTHORS file in the root of the source tree.
*/
#include "sdk/android/src/jni/native_handle_impl.h"
#include "sdk/android/src/jni/videoframe.h"
#include <memory>
#include "common_video/include/video_frame_buffer.h"
#include "libyuv/scale.h"
#include "rtc_base/bind.h"
#include "rtc_base/checks.h"
#include "rtc_base/keep_ref_until_done.h"
#include "rtc_base/logging.h"
#include "rtc_base/scoped_ref_ptr.h"
#include "rtc_base/timeutils.h"
#include "sdk/android/generated_video_jni/jni/VideoFrame_jni.h"
#include "sdk/android/src/jni/classreferenceholder.h"
#include "sdk/android/src/jni/jni_helpers.h"
#include "sdk/android/src/jni/wrapped_native_i420_buffer.h"
@ -31,26 +33,16 @@ namespace {
class AndroidVideoI420Buffer : public I420BufferInterface {
public:
// Wraps an existing reference to a Java VideoBuffer. Retain will not be
// called but release will be called when the C++ object is destroyed.
static rtc::scoped_refptr<AndroidVideoI420Buffer> WrapReference(
JNIEnv* jni,
jmethodID j_release_id,
int width,
int height,
jobject j_video_frame_buffer);
// Adopts and takes ownership of the Java VideoFrame.Buffer. I.e. retain()
// will not be called, but release() will be called when the returned
// AndroidVideoBuffer is destroyed.
static rtc::scoped_refptr<AndroidVideoI420Buffer>
Adopt(JNIEnv* jni, int width, int height, jobject j_video_frame_buffer);
protected:
// Should not be called directly. Adopts the buffer. Use Adopt() instead for
// clarity.
AndroidVideoI420Buffer(JNIEnv* jni,
jmethodID j_retain_id,
jmethodID j_release_id,
int width,
int height,
jobject j_video_frame_buffer);
// Should not be called directly. Wraps a reference. Use
// AndroidVideoI420Buffer::WrapReference instead for clarity.
AndroidVideoI420Buffer(JNIEnv* jni,
jmethodID j_release_id,
int width,
int height,
jobject j_video_frame_buffer);
@ -68,7 +60,6 @@ class AndroidVideoI420Buffer : public I420BufferInterface {
int width() const override { return width_; }
int height() const override { return height_; }
const jmethodID j_release_id_;
const int width_;
const int height_;
// Holds a VideoFrame.I420Buffer.
@ -82,73 +73,38 @@ class AndroidVideoI420Buffer : public I420BufferInterface {
int stride_v_;
};
rtc::scoped_refptr<AndroidVideoI420Buffer>
AndroidVideoI420Buffer::WrapReference(JNIEnv* jni,
jmethodID j_release_id,
int width,
int height,
jobject j_video_frame_buffer) {
rtc::scoped_refptr<AndroidVideoI420Buffer> AndroidVideoI420Buffer::Adopt(
JNIEnv* jni,
int width,
int height,
jobject j_video_frame_buffer) {
return new rtc::RefCountedObject<AndroidVideoI420Buffer>(
jni, j_release_id, width, height, j_video_frame_buffer);
jni, width, height, j_video_frame_buffer);
}
AndroidVideoI420Buffer::AndroidVideoI420Buffer(JNIEnv* jni,
jmethodID j_retain_id,
jmethodID j_release_id,
int width,
int height,
jobject j_video_frame_buffer)
: AndroidVideoI420Buffer(jni,
j_release_id,
width,
height,
j_video_frame_buffer) {
jni->CallVoidMethod(j_video_frame_buffer, j_retain_id);
}
AndroidVideoI420Buffer::AndroidVideoI420Buffer(JNIEnv* jni,
jmethodID j_release_id,
int width,
int height,
jobject j_video_frame_buffer)
: j_release_id_(j_release_id),
width_(width),
: width_(width),
height_(height),
j_video_frame_buffer_(jni, j_video_frame_buffer) {
jclass j_video_frame_i420_buffer_class =
FindClass(jni, "org/webrtc/VideoFrame$I420Buffer");
jmethodID j_get_data_y_id = jni->GetMethodID(
j_video_frame_i420_buffer_class, "getDataY", "()Ljava/nio/ByteBuffer;");
jmethodID j_get_data_u_id = jni->GetMethodID(
j_video_frame_i420_buffer_class, "getDataU", "()Ljava/nio/ByteBuffer;");
jmethodID j_get_data_v_id = jni->GetMethodID(
j_video_frame_i420_buffer_class, "getDataV", "()Ljava/nio/ByteBuffer;");
jmethodID j_get_stride_y_id =
jni->GetMethodID(j_video_frame_i420_buffer_class, "getStrideY", "()I");
jmethodID j_get_stride_u_id =
jni->GetMethodID(j_video_frame_i420_buffer_class, "getStrideU", "()I");
jmethodID j_get_stride_v_id =
jni->GetMethodID(j_video_frame_i420_buffer_class, "getStrideV", "()I");
jobject j_data_y =
jni->CallObjectMethod(j_video_frame_buffer, j_get_data_y_id);
jobject j_data_u =
jni->CallObjectMethod(j_video_frame_buffer, j_get_data_u_id);
jobject j_data_v =
jni->CallObjectMethod(j_video_frame_buffer, j_get_data_v_id);
jobject j_data_y = Java_I420Buffer_getDataY(jni, j_video_frame_buffer);
jobject j_data_u = Java_I420Buffer_getDataU(jni, j_video_frame_buffer);
jobject j_data_v = Java_I420Buffer_getDataV(jni, j_video_frame_buffer);
data_y_ = static_cast<const uint8_t*>(jni->GetDirectBufferAddress(j_data_y));
data_u_ = static_cast<const uint8_t*>(jni->GetDirectBufferAddress(j_data_u));
data_v_ = static_cast<const uint8_t*>(jni->GetDirectBufferAddress(j_data_v));
stride_y_ = jni->CallIntMethod(j_video_frame_buffer, j_get_stride_y_id);
stride_u_ = jni->CallIntMethod(j_video_frame_buffer, j_get_stride_u_id);
stride_v_ = jni->CallIntMethod(j_video_frame_buffer, j_get_stride_v_id);
stride_y_ = Java_I420Buffer_getStrideY(jni, j_video_frame_buffer);
stride_u_ = Java_I420Buffer_getStrideU(jni, j_video_frame_buffer);
stride_v_ = Java_I420Buffer_getStrideV(jni, j_video_frame_buffer);
}
AndroidVideoI420Buffer::~AndroidVideoI420Buffer() {
JNIEnv* jni = AttachCurrentThreadIfNeeded();
jni->CallVoidMethod(*j_video_frame_buffer_, j_release_id_);
Java_Buffer_release(jni, *j_video_frame_buffer_);
}
} // namespace
@ -297,7 +253,7 @@ rtc::scoped_refptr<I420BufferInterface> AndroidTextureBuffer::ToI420() {
// See YuvConverter.java for the required layout.
uint8_t* y_data = yuv_data.get();
uint8_t* u_data = y_data + height() * stride;
uint8_t* v_data = u_data + stride/2;
uint8_t* v_data = u_data + stride / 2;
rtc::scoped_refptr<I420BufferInterface> copy = webrtc::WrapI420Buffer(
width(), height(), y_data, stride, u_data, stride, v_data, stride,
@ -326,43 +282,29 @@ rtc::scoped_refptr<I420BufferInterface> AndroidTextureBuffer::ToI420() {
return copy;
}
rtc::scoped_refptr<AndroidVideoBuffer> AndroidVideoBuffer::WrapReference(
rtc::scoped_refptr<AndroidVideoBuffer> AndroidVideoBuffer::Adopt(
JNIEnv* jni,
jmethodID j_release_id,
int width,
int height,
jobject j_video_frame_buffer) {
return new rtc::RefCountedObject<AndroidVideoBuffer>(
jni, j_release_id, width, height, j_video_frame_buffer);
return new rtc::RefCountedObject<AndroidVideoBuffer>(jni,
j_video_frame_buffer);
}
rtc::scoped_refptr<AndroidVideoBuffer> AndroidVideoBuffer::Create(
JNIEnv* jni,
jobject j_video_frame_buffer) {
Java_Buffer_retain(jni, j_video_frame_buffer);
return Adopt(jni, j_video_frame_buffer);
}
AndroidVideoBuffer::AndroidVideoBuffer(JNIEnv* jni,
jmethodID j_retain_id,
jmethodID j_release_id,
int width,
int height,
jobject j_video_frame_buffer)
: AndroidVideoBuffer(jni,
j_release_id,
width,
height,
j_video_frame_buffer) {
jni->CallVoidMethod(j_video_frame_buffer, j_retain_id);
}
AndroidVideoBuffer::AndroidVideoBuffer(JNIEnv* jni,
jmethodID j_release_id,
int width,
int height,
jobject j_video_frame_buffer)
: j_release_id_(j_release_id),
width_(width),
height_(height),
: width_(Java_Buffer_getWidth(jni, j_video_frame_buffer)),
height_(Java_Buffer_getHeight(jni, j_video_frame_buffer)),
j_video_frame_buffer_(jni, j_video_frame_buffer) {}
AndroidVideoBuffer::~AndroidVideoBuffer() {
JNIEnv* jni = AttachCurrentThreadIfNeeded();
jni->CallVoidMethod(*j_video_frame_buffer_, j_release_id_);
Java_Buffer_release(jni, *j_video_frame_buffer_);
}
jobject AndroidVideoBuffer::video_frame_buffer() const {
@ -384,20 +326,11 @@ int AndroidVideoBuffer::height() const {
rtc::scoped_refptr<I420BufferInterface> AndroidVideoBuffer::ToI420() {
JNIEnv* jni = AttachCurrentThreadIfNeeded();
ScopedLocalRefFrame local_ref_frame(jni);
jclass j_video_frame_buffer_class =
FindClass(jni, "org/webrtc/VideoFrame$Buffer");
jmethodID j_to_i420_id =
jni->GetMethodID(j_video_frame_buffer_class, "toI420",
"()Lorg/webrtc/VideoFrame$I420Buffer;");
jobject j_i420_buffer =
jni->CallObjectMethod(*j_video_frame_buffer_, j_to_i420_id);
jobject j_i420_buffer = Java_Buffer_toI420(jni, *j_video_frame_buffer_);
// We don't need to retain the buffer because toI420 returns a new object that
// we are assumed to take the ownership of.
return AndroidVideoI420Buffer::WrapReference(jni, j_release_id_, width_,
height_, j_i420_buffer);
return AndroidVideoI420Buffer::Adopt(jni, width_, height_, j_i420_buffer);
}
jobject AndroidVideoBuffer::ToJavaI420Frame(JNIEnv* jni, int rotation) {
@ -415,69 +348,19 @@ jobject AndroidVideoBuffer::ToJavaI420Frame(JNIEnv* jni, int rotation) {
*j_video_frame_buffer_, jlongFromPointer(native_frame));
}
AndroidVideoBufferFactory::AndroidVideoBufferFactory(JNIEnv* jni)
: j_video_frame_class_(jni, FindClass(jni, "org/webrtc/VideoFrame")),
j_get_buffer_id_(GetMethodID(jni,
*j_video_frame_class_,
"getBuffer",
"()Lorg/webrtc/VideoFrame$Buffer;")),
j_get_rotation_id_(
GetMethodID(jni, *j_video_frame_class_, "getRotation", "()I")),
j_get_timestamp_ns_id_(
GetMethodID(jni, *j_video_frame_class_, "getTimestampNs", "()J")),
j_video_frame_buffer_class_(
jni,
FindClass(jni, "org/webrtc/VideoFrame$Buffer")),
j_retain_id_(
GetMethodID(jni, *j_video_frame_buffer_class_, "retain", "()V")),
j_release_id_(
GetMethodID(jni, *j_video_frame_buffer_class_, "release", "()V")),
j_get_width_id_(
GetMethodID(jni, *j_video_frame_buffer_class_, "getWidth", "()I")),
j_get_height_id_(
GetMethodID(jni, *j_video_frame_buffer_class_, "getHeight", "()I")) {}
VideoFrame AndroidVideoBufferFactory::CreateFrame(
JNIEnv* jni,
jobject j_video_frame,
uint32_t timestamp_rtp) const {
jobject j_video_frame_buffer =
jni->CallObjectMethod(j_video_frame, j_get_buffer_id_);
int rotation = jni->CallIntMethod(j_video_frame, j_get_rotation_id_);
uint32_t timestamp_ns =
jni->CallLongMethod(j_video_frame, j_get_timestamp_ns_id_);
VideoFrame JavaToNativeFrame(JNIEnv* jni,
jobject j_video_frame,
uint32_t timestamp_rtp) {
jobject j_video_frame_buffer = Java_VideoFrame_getBuffer(jni, j_video_frame);
int rotation = Java_VideoFrame_getRotation(jni, j_video_frame);
uint32_t timestamp_ns = Java_VideoFrame_getTimestampNs(jni, j_video_frame);
rtc::scoped_refptr<AndroidVideoBuffer> buffer =
CreateBuffer(jni, j_video_frame_buffer);
AndroidVideoBuffer::Create(jni, j_video_frame_buffer);
return VideoFrame(buffer, timestamp_rtp,
timestamp_ns / rtc::kNumNanosecsPerMillisec,
static_cast<VideoRotation>(rotation));
}
rtc::scoped_refptr<AndroidVideoBuffer> AndroidVideoBufferFactory::WrapBuffer(
JNIEnv* jni,
jobject j_video_frame_buffer) const {
int width = jni->CallIntMethod(j_video_frame_buffer, j_get_width_id_);
int height = jni->CallIntMethod(j_video_frame_buffer, j_get_height_id_);
return AndroidVideoBuffer::WrapReference(jni, j_release_id_, width, height,
j_video_frame_buffer);
}
rtc::scoped_refptr<AndroidVideoBuffer> AndroidVideoBufferFactory::CreateBuffer(
JNIEnv* jni,
jobject j_video_frame_buffer) const {
int width = jni->CallIntMethod(j_video_frame_buffer, j_get_width_id_);
int height = jni->CallIntMethod(j_video_frame_buffer, j_get_height_id_);
return new rtc::RefCountedObject<AndroidVideoBuffer>(
jni, j_retain_id_, j_release_id_, width, height, j_video_frame_buffer);
}
JavaVideoFrameFactory::JavaVideoFrameFactory(JNIEnv* jni)
: j_video_frame_class_(jni, FindClass(jni, "org/webrtc/VideoFrame")) {
j_video_frame_constructor_id_ =
GetMethodID(jni, *j_video_frame_class_, "<init>",
"(Lorg/webrtc/VideoFrame$Buffer;IJ)V");
}
static bool IsJavaVideoBuffer(rtc::scoped_refptr<VideoFrameBuffer> buffer) {
if (buffer->type() != VideoFrameBuffer::Type::kNative) {
return false;
@ -488,8 +371,7 @@ static bool IsJavaVideoBuffer(rtc::scoped_refptr<VideoFrameBuffer> buffer) {
AndroidVideoFrameBuffer::AndroidType::kJavaBuffer;
}
jobject JavaVideoFrameFactory::ToJavaFrame(JNIEnv* jni,
const VideoFrame& frame) const {
jobject NativeToJavaFrame(JNIEnv* jni, const VideoFrame& frame) {
rtc::scoped_refptr<VideoFrameBuffer> buffer = frame.video_frame_buffer();
jobject j_buffer;
if (IsJavaVideoBuffer(buffer)) {
@ -504,11 +386,53 @@ jobject JavaVideoFrameFactory::ToJavaFrame(JNIEnv* jni,
} else {
j_buffer = WrapI420Buffer(jni, buffer->ToI420());
}
return jni->NewObject(
*j_video_frame_class_, j_video_frame_constructor_id_, j_buffer,
static_cast<jint>(frame.rotation()),
return Java_VideoFrame_create(
jni, j_buffer, static_cast<jint>(frame.rotation()),
static_cast<jlong>(frame.timestamp_us() * rtc::kNumNanosecsPerMicrosec));
}
extern "C" JNIEXPORT void JNICALL
Java_org_webrtc_VideoFrame_cropAndScaleI420Native(JNIEnv* jni,
jclass,
jobject j_src_y,
jint src_stride_y,
jobject j_src_u,
jint src_stride_u,
jobject j_src_v,
jint src_stride_v,
jint crop_x,
jint crop_y,
jint crop_width,
jint crop_height,
jobject j_dst_y,
jint dst_stride_y,
jobject j_dst_u,
jint dst_stride_u,
jobject j_dst_v,
jint dst_stride_v,
jint scale_width,
jint scale_height) {
uint8_t const* src_y =
static_cast<uint8_t*>(jni->GetDirectBufferAddress(j_src_y));
uint8_t const* src_u =
static_cast<uint8_t*>(jni->GetDirectBufferAddress(j_src_u));
uint8_t const* src_v =
static_cast<uint8_t*>(jni->GetDirectBufferAddress(j_src_v));
uint8_t* dst_y = static_cast<uint8_t*>(jni->GetDirectBufferAddress(j_dst_y));
uint8_t* dst_u = static_cast<uint8_t*>(jni->GetDirectBufferAddress(j_dst_u));
uint8_t* dst_v = static_cast<uint8_t*>(jni->GetDirectBufferAddress(j_dst_v));
// Perform cropping using pointer arithmetic.
src_y += crop_x + crop_y * src_stride_y;
src_u += crop_x / 2 + crop_y / 2 * src_stride_u;
src_v += crop_x / 2 + crop_y / 2 * src_stride_v;
bool ret = libyuv::I420Scale(
src_y, src_stride_y, src_u, src_stride_u, src_v, src_stride_v, crop_width,
crop_height, dst_y, dst_stride_y, dst_u, dst_stride_u, dst_v,
dst_stride_v, scale_width, scale_height, libyuv::kFilterBox);
RTC_DCHECK_EQ(ret, 0) << "I420Scale failed";
}
} // namespace jni
} // namespace webrtc


@ -8,8 +8,8 @@
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef SDK_ANDROID_SRC_JNI_NATIVE_HANDLE_IMPL_H_
#define SDK_ANDROID_SRC_JNI_NATIVE_HANDLE_IMPL_H_
#ifndef SDK_ANDROID_SRC_JNI_VIDEOFRAME_H_
#define SDK_ANDROID_SRC_JNI_VIDEOFRAME_H_
#include <jni.h>
@ -104,28 +104,18 @@ class AndroidTextureBuffer : public AndroidVideoFrameBuffer {
class AndroidVideoBuffer : public AndroidVideoFrameBuffer {
public:
// Wraps an existing reference to a Java VideoBuffer. Retain will not be
// called but release will be called when the C++ object is destroyed.
static rtc::scoped_refptr<AndroidVideoBuffer> WrapReference(
// Creates a native VideoFrameBuffer from a Java VideoFrame.Buffer.
static rtc::scoped_refptr<AndroidVideoBuffer> Create(
JNIEnv* jni,
jobject j_video_frame_buffer);
// Similar to the Create() above, but adopts and takes ownership of the Java
// VideoFrame.Buffer. I.e. retain() will not be called, but release() will be
// called when the returned AndroidVideoBuffer is destroyed.
static rtc::scoped_refptr<AndroidVideoBuffer> Adopt(
JNIEnv* jni,
jmethodID j_release_id,
int width,
int height,
jobject j_video_frame_buffer);
AndroidVideoBuffer(JNIEnv* jni,
jmethodID j_retain_id,
jmethodID j_release_id,
int width,
int height,
jobject j_video_frame_buffer);
// Should not be called directly. Wraps a reference. Use
// AndroidVideoBuffer::WrapReference instead for clarity.
AndroidVideoBuffer(JNIEnv* jni,
jmethodID j_release_id,
int width,
int height,
jobject j_video_frame_buffer);
~AndroidVideoBuffer() override;
jobject video_frame_buffer() const;
@ -133,6 +123,11 @@ class AndroidVideoBuffer : public AndroidVideoFrameBuffer {
// Returns an instance of VideoRenderer.I420Frame (deprecated)
jobject ToJavaI420Frame(JNIEnv* jni, int rotation);
protected:
// Should not be called directly. Adopts the Java VideoFrame.Buffer. Use
// Create() or Adopt() instead for clarity.
AndroidVideoBuffer(JNIEnv* jni, jobject j_video_frame_buffer);
private:
Type type() const override;
int width() const override;
@ -142,56 +137,19 @@ class AndroidVideoBuffer : public AndroidVideoFrameBuffer {
AndroidType android_type() override { return AndroidType::kJavaBuffer; }
const jmethodID j_release_id_;
const int width_;
const int height_;
// Holds a VideoFrame.Buffer.
const ScopedGlobalRef<jobject> j_video_frame_buffer_;
};
class AndroidVideoBufferFactory {
public:
explicit AndroidVideoBufferFactory(JNIEnv* jni);
VideoFrame JavaToNativeFrame(JNIEnv* jni,
jobject j_video_frame,
uint32_t timestamp_rtp);
VideoFrame CreateFrame(JNIEnv* jni,
jobject j_video_frame,
uint32_t timestamp_rtp) const;
// Wraps a buffer to AndroidVideoBuffer without incrementing the reference
// count.
rtc::scoped_refptr<AndroidVideoBuffer> WrapBuffer(
JNIEnv* jni,
jobject j_video_frame_buffer) const;
rtc::scoped_refptr<AndroidVideoBuffer> CreateBuffer(
JNIEnv* jni,
jobject j_video_frame_buffer) const;
private:
ScopedGlobalRef<jclass> j_video_frame_class_;
jmethodID j_get_buffer_id_;
jmethodID j_get_rotation_id_;
jmethodID j_get_timestamp_ns_id_;
ScopedGlobalRef<jclass> j_video_frame_buffer_class_;
jmethodID j_retain_id_;
jmethodID j_release_id_;
jmethodID j_get_width_id_;
jmethodID j_get_height_id_;
};
class JavaVideoFrameFactory {
public:
JavaVideoFrameFactory(JNIEnv* jni);
jobject ToJavaFrame(JNIEnv* jni, const VideoFrame& frame) const;
private:
ScopedGlobalRef<jclass> j_video_frame_class_;
jmethodID j_video_frame_constructor_id_;
};
jobject NativeToJavaFrame(JNIEnv* jni, const VideoFrame& frame);
} // namespace jni
} // namespace webrtc
#endif // SDK_ANDROID_SRC_JNI_NATIVE_HANDLE_IMPL_H_
#endif // SDK_ANDROID_SRC_JNI_VIDEOFRAME_H_


@ -1,64 +0,0 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include <jni.h>
#include "libyuv/scale.h"
#include "rtc_base/checks.h"
namespace webrtc {
namespace jni {
extern "C" JNIEXPORT void JNICALL
Java_org_webrtc_VideoFrame_nativeCropAndScaleI420(JNIEnv* jni,
jclass,
jobject j_src_y,
jint src_stride_y,
jobject j_src_u,
jint src_stride_u,
jobject j_src_v,
jint src_stride_v,
jint crop_x,
jint crop_y,
jint crop_width,
jint crop_height,
jobject j_dst_y,
jint dst_stride_y,
jobject j_dst_u,
jint dst_stride_u,
jobject j_dst_v,
jint dst_stride_v,
jint scale_width,
jint scale_height) {
uint8_t const* src_y =
static_cast<uint8_t*>(jni->GetDirectBufferAddress(j_src_y));
uint8_t const* src_u =
static_cast<uint8_t*>(jni->GetDirectBufferAddress(j_src_u));
uint8_t const* src_v =
static_cast<uint8_t*>(jni->GetDirectBufferAddress(j_src_v));
uint8_t* dst_y = static_cast<uint8_t*>(jni->GetDirectBufferAddress(j_dst_y));
uint8_t* dst_u = static_cast<uint8_t*>(jni->GetDirectBufferAddress(j_dst_u));
uint8_t* dst_v = static_cast<uint8_t*>(jni->GetDirectBufferAddress(j_dst_v));
// Perform cropping using pointer arithmetic.
src_y += crop_x + crop_y * src_stride_y;
src_u += crop_x / 2 + crop_y / 2 * src_stride_u;
src_v += crop_x / 2 + crop_y / 2 * src_stride_v;
bool ret = libyuv::I420Scale(
src_y, src_stride_y, src_u, src_stride_u, src_v, src_stride_v, crop_width,
crop_height, dst_y, dst_stride_y, dst_u, dst_stride_u, dst_v,
dst_stride_v, scale_width, scale_height, libyuv::kFilterBox);
RTC_DCHECK_EQ(ret, 0) << "I420Scale failed";
}
} // namespace jni
} // namespace webrtc


@ -15,7 +15,7 @@
#include "sdk/android/generated_video_jni/jni/VideoSink_jni.h"
#include "sdk/android/src/jni/classreferenceholder.h"
#include "sdk/android/src/jni/jni_helpers.h"
#include "sdk/android/src/jni/native_handle_impl.h"
#include "sdk/android/src/jni/videoframe.h"
namespace webrtc {
namespace jni {
@ -30,18 +30,16 @@ class VideoSinkWrapper : public rtc::VideoSinkInterface<VideoFrame> {
private:
void OnFrame(const VideoFrame& frame) override;
const JavaVideoFrameFactory java_video_frame_factory_;
const ScopedGlobalRef<jobject> j_sink_;
};
VideoSinkWrapper::VideoSinkWrapper(JNIEnv* jni, jobject j_sink)
: java_video_frame_factory_(jni), j_sink_(jni, j_sink) {}
: j_sink_(jni, j_sink) {}
void VideoSinkWrapper::OnFrame(const VideoFrame& frame) {
JNIEnv* jni = AttachCurrentThreadIfNeeded();
ScopedLocalRefFrame local_ref_frame(jni);
Java_VideoSink_onFrame(jni, *j_sink_,
java_video_frame_factory_.ToJavaFrame(jni, frame));
Java_VideoSink_onFrame(jni, *j_sink_, NativeToJavaFrame(jni, frame));
}
} // namespace