Android: Clean up JNI generated code
It's now possible to generate JNI code for constructors and enums correctly. This CL cleans that up.

Bug: webrtc:8278,webrtc:8551,webrtc:8556
Change-Id: I2284a30139cbb186c80713eb6113eda5659c16ad
Reviewed-on: https://webrtc-review.googlesource.com/25622
Reviewed-by: Sami Kalliomäki <sakal@webrtc.org>
Commit-Queue: Magnus Jedvert <magjed@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#20860}
Committed by: Commit Bot
Parent: aea1d1ad3f
Commit: 1f2a3e7058
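For context, a minimal sketch of the pattern this CL moves to. The class below is hypothetical ("Widget" is not part of WebRTC or of this CL); it only illustrates the idea that @CalledByNative now goes on the constructor itself, so the JNI generator can emit a Java_<Class>_Constructor helper and the hand-written static create() factories can be deleted.

package org.webrtc;

// Hypothetical example class, for illustration only.
public class Widget {
  public final int size;

  // Old pattern (the kind of glue removed throughout this CL): a static factory
  // annotated with @CalledByNative so the generator produced a create() helper
  // for native code.
  //
  //   @CalledByNative
  //   static Widget create(int size) {
  //     return new Widget(size);
  //   }

  // New pattern: annotate the constructor directly. With @Target extended to
  // include ElementType.CONSTRUCTOR, the generator can produce a constructor
  // helper (named along the lines of Java_Widget_Constructor) instead.
  @CalledByNative
  public Widget(int size) {
    this.size = size;
  }
}

On the native side, call sites correspondingly switch from the old factory helpers to the generated constructor helpers, which is what the Java_*_Constructor renames in the C++ hunks below reflect.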
@@ -12,8 +12,6 @@ package org.webrtc;
 import java.nio.ByteBuffer;
 import java.util.concurrent.TimeUnit;
-// TODO(bugs.webrtc.org/8556): Remove unnecessary import.
-import org.webrtc.EncodedImage;
 
 /**
  * An encoded frame from a video stream. Used as an input for decoders and as an output for
@@ -36,9 +34,8 @@ public class EncodedImage {
       return nativeIndex;
     }
 
-    // TODO(bugs.webrtc.org/8556): Remove unnecessary 'EncodedImage.'.
     @CalledByNative("FrameType")
-    static EncodedImage.FrameType fromNativeIndex(int nativeIndex) {
+    static FrameType fromNativeIndex(int nativeIndex) {
       for (FrameType type : FrameType.values()) {
         if (type.getNative() == nativeIndex) {
           return type;
@@ -58,6 +55,7 @@ public class EncodedImage {
   public final boolean completeFrame;
   public final Integer qp;
 
+  @CalledByNative
   private EncodedImage(ByteBuffer buffer, int encodedWidth, int encodedHeight, long captureTimeNs,
       FrameType frameType, int rotation, boolean completeFrame, Integer qp) {
     this.buffer = buffer;
@@ -138,13 +136,4 @@ public class EncodedImage {
           rotation, completeFrame, qp);
     }
   }
-
-  // TODO(bugs.webrtc.org/8551) Remove.
-  @CalledByNative
-  static EncodedImage create(ByteBuffer buffer, int encodedWidth, int encodedHeight,
-      long captureTimeNs, EncodedImage.FrameType frameType, int rotation, boolean completeFrame,
-      Integer qp) {
-    return new EncodedImage(
-        buffer, encodedWidth, encodedHeight, captureTimeNs, frameType, rotation, completeFrame, qp);
-  }
 }
@@ -38,6 +38,9 @@ public class Metrics {
   public final Map<String, HistogramInfo> map =
       new HashMap<String, HistogramInfo>(); // <name, HistogramInfo>
 
+  @CalledByNative
+  Metrics() {}
+
   /**
    * Class holding histogram information.
    */
@@ -48,6 +51,7 @@ public class Metrics {
     public final Map<Integer, Integer> samples =
         new HashMap<Integer, Integer>(); // <value, # of events>
 
+    @CalledByNative("HistogramInfo")
     public HistogramInfo(int min, int max, int bucketCount) {
       this.min = min;
       this.max = max;
@@ -76,18 +80,6 @@ public class Metrics {
     return getAndResetNative();
   }
 
-  // TODO(bugs.webrtc.org/8551) Remove.
-  @CalledByNative
-  static Metrics createMetrics() {
-    return new Metrics();
-  }
-
-  // TODO(bugs.webrtc.org/8551) Remove.
-  @CalledByNative
-  static HistogramInfo createHistogramInfo(int min, int max, int bucketCount) {
-    return new HistogramInfo(min, max, bucketCount);
-  }
-
   private static native void enableNative();
   private static native Metrics getAndResetNative();
 }
@@ -35,7 +35,6 @@ import java.net.SocketException;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;
-import org.webrtc.NetworkMonitorAutoDetect;
 
 /**
  * Borrowed from Chromium's
@@ -90,7 +89,7 @@ public class NetworkMonitorAutoDetect extends BroadcastReceiver {
     }
 
     @CalledByNative("NetworkInformation")
-    private NetworkMonitorAutoDetect.ConnectionType getConnectionType() {
+    private ConnectionType getConnectionType() {
       return type;
     }
 
@@ -21,6 +21,7 @@ public interface VideoDecoder {
     public final int width;
     public final int height;
 
+    @CalledByNative("Settings")
     public Settings(int numberOfCores, int width, int height) {
       this.numberOfCores = numberOfCores;
       this.width = width;
@@ -10,6 +10,8 @@
 
 package org.webrtc;
 
+import org.webrtc.EncodedImage;
+
 /**
  * Interface for a video encoder that can be used with WebRTC. All calls will be made on the
  * encoding thread. The encoder may be constructed on a different thread and changing thread after
@@ -25,6 +27,7 @@ public interface VideoEncoder {
     public final int maxFramerate;
     public final boolean automaticResizeOn;
 
+    @CalledByNative("Settings")
     public Settings(int numberOfCores, int width, int height, int startBitrate, int maxFramerate,
         boolean automaticResizeOn) {
       this.numberOfCores = numberOfCores;
@@ -40,6 +43,7 @@ public interface VideoEncoder {
   public class EncodeInfo {
     public final EncodedImage.FrameType[] frameTypes;
 
+    @CalledByNative("EncodeInfo")
     public EncodeInfo(EncodedImage.FrameType[] frameTypes) {
       this.frameTypes = frameTypes;
     }
@@ -67,6 +71,7 @@ public interface VideoEncoder {
     * Initializes the allocation with a two dimensional array of bitrates. The first index of the
     * array is the spatial layer and the second index in the temporal layer.
     */
+    @CalledByNative("BitrateAllocation")
     public BitrateAllocation(int[][] bitratesBbs) {
       this.bitratesBbs = bitratesBbs;
     }
@@ -121,6 +121,7 @@ public class VideoFrame {
   private final int rotation;
   private final long timestampNs;
 
+  @CalledByNative
   public VideoFrame(Buffer buffer, int rotation, long timestampNs) {
     if (buffer == null) {
       throw new IllegalArgumentException("buffer not allowed to be null");
@@ -206,12 +207,6 @@ public class VideoFrame {
     return newBuffer;
   }
 
-  // TODO(bugs.webrtc.org/8278): Add a way to generate JNI code for constructors directly.
-  @CalledByNative
-  static VideoFrame create(Buffer buffer, int rotation, long timestampNs) {
-    return new VideoFrame(buffer, rotation, timestampNs);
-  }
-
   private static native void cropAndScaleI420Native(ByteBuffer srcY, int srcStrideY,
       ByteBuffer srcU, int srcStrideU, ByteBuffer srcV, int srcStrideV, int cropX, int cropY,
       int cropWidth, int cropHeight, ByteBuffer dstY, int dstStrideY, ByteBuffer dstU,
@@ -19,7 +19,7 @@ import java.lang.annotation.Target;
  * @CalledByNative is used by the JNI generator to create the necessary JNI
  * bindings and expose this method to native code.
  */
-@Target(ElementType.METHOD)
+@Target({ElementType.CONSTRUCTOR, ElementType.METHOD})
 @Retention(RetentionPolicy.CLASS)
 @interface CalledByNative {
   /*
@@ -16,12 +16,6 @@ import org.webrtc.VideoDecoder;
  * This class contains the Java glue code for JNI generation of VideoDecoder.
  */
 class VideoDecoderWrapper {
-  // TODO(bugs.webrtc.org/8551) Remove.
-  @CalledByNative
-  static VideoDecoder.Settings createSettings(int numberOfCores, int width, int height) {
-    return new VideoDecoder.Settings(numberOfCores, width, height);
-  }
-
   @CalledByNative
   static VideoDecoder.Callback createDecoderCallback(final long nativeDecoder) {
     return (VideoFrame frame, Integer decodeTimeMs,
@@ -11,7 +11,6 @@
 package org.webrtc;
 
 // Explicit imports necessary for JNI generation.
-import org.webrtc.EncodedImage;
 import org.webrtc.VideoEncoder;
 import java.nio.ByteBuffer;
 
@@ -19,23 +18,6 @@ import java.nio.ByteBuffer;
  * This class contains the Java glue code for JNI generation of VideoEncoder.
 */
 class VideoEncoderWrapper {
-  @CalledByNative
-  static VideoEncoder.Settings createSettings(int numberOfCores, int width, int height,
-      int startBitrate, int maxFramerate, boolean automaticResizeOn) {
-    return new VideoEncoder.Settings(
-        numberOfCores, width, height, startBitrate, maxFramerate, automaticResizeOn);
-  }
-
-  @CalledByNative
-  static VideoEncoder.EncodeInfo createEncodeInfo(EncodedImage.FrameType[] frameTypes) {
-    return new VideoEncoder.EncodeInfo(frameTypes);
-  }
-
-  @CalledByNative
-  static VideoEncoder.BitrateAllocation createBitrateAllocation(int[][] bitratesBbs) {
-    return new VideoEncoder.BitrateAllocation(bitratesBbs);
-  }
-
   @CalledByNative
   static boolean getScalingSettingsOn(VideoEncoder.ScalingSettings scalingSettings) {
     return scalingSettings.on;
@@ -30,13 +30,13 @@ JNI_FUNCTION_DECLARATION(jobject,
                          Metrics_getAndResetNative,
                          JNIEnv* jni,
                          jclass) {
-  jobject j_metrics = Java_Metrics_createMetrics(jni);
+  jobject j_metrics = Java_Metrics_Constructor(jni);
 
   std::map<std::string, std::unique_ptr<metrics::SampleInfo>> histograms;
   metrics::GetAndReset(&histograms);
   for (const auto& kv : histograms) {
     // Create and add samples to |HistogramInfo|.
-    jobject j_info = Java_Metrics_createHistogramInfo(
+    jobject j_info = Java_HistogramInfo_Constructor(
         jni, kv.second->min, kv.second->max,
         static_cast<int>(kv.second->bucket_count));
     for (const auto& sample : kv.second->samples) {
@@ -25,7 +25,7 @@ jobject NativeToJavaEncodedImage(JNIEnv* jni, const EncodedImage& image) {
   jobject buffer = jni->NewDirectByteBuffer(image._buffer, image._length);
   jobject frame_type = NativeToJavaFrameType(jni, image._frameType);
   jobject qp = (image.qp_ == -1) ? nullptr : JavaIntegerFromInt(jni, image.qp_);
-  return Java_EncodedImage_create(
+  return Java_EncodedImage_Constructor(
       jni, buffer, image._encodedWidth, image._encodedHeight,
       image.capture_time_ms_ * rtc::kNumNanosecsPerMillisec, frame_type,
       static_cast<jint>(image.rotation_), image._completeFrame, qp);
@@ -57,7 +57,7 @@ int32_t VideoDecoderWrapper::InitDecode(const VideoCodec* codec_settings,
 }
 
 int32_t VideoDecoderWrapper::InitDecodeInternal(JNIEnv* jni) {
-  jobject settings = Java_VideoDecoderWrapper_createSettings(
+  jobject settings = Java_Settings_Constructor(
       jni, number_of_cores_, codec_settings_.width, codec_settings_.height);
 
   jobject callback = Java_VideoDecoderWrapper_createDecoderCallback(
@@ -74,7 +74,7 @@ int32_t VideoEncoderWrapper::InitEncodeInternal(JNIEnv* jni) {
     automatic_resize_on = true;
   }
 
-  jobject settings = Java_VideoEncoderWrapper_createSettings(
+  jobject settings = Java_Settings_Constructor(
       jni, number_of_cores_, codec_settings_.width, codec_settings_.height,
       codec_settings_.startBitrate, codec_settings_.maxFramerate,
       automatic_resize_on);
@@ -127,8 +127,7 @@ int32_t VideoEncoderWrapper::Encode(
     jobject j_frame_type = NativeToJavaFrameType(jni, (*frame_types)[i]);
     jni->SetObjectArrayElement(j_frame_types, i, j_frame_type);
   }
-  jobject encode_info =
-      Java_VideoEncoderWrapper_createEncodeInfo(jni, j_frame_types);
+  jobject encode_info = Java_EncodeInfo_Constructor(jni, j_frame_types);
 
   FrameExtraInfo info;
   info.capture_time_ns = frame.timestamp_us() * rtc::kNumNanosecsPerMicrosec;
@@ -385,8 +384,7 @@ jobject VideoEncoderWrapper::ToJavaBitrateAllocation(
     jni->SetObjectArrayElement(j_allocation_array, spatial_i,
                                j_array_spatial_layer);
   }
-  return Java_VideoEncoderWrapper_createBitrateAllocation(jni,
-                                                          j_allocation_array);
+  return Java_BitrateAllocation_Constructor(jni, j_allocation_array);
 }
 
 std::string VideoEncoderWrapper::GetImplementationName(JNIEnv* jni) const {
@@ -403,7 +403,7 @@ jobject NativeToJavaFrame(JNIEnv* jni, const VideoFrame& frame) {
   } else {
     j_buffer = WrapI420Buffer(jni, buffer->ToI420());
   }
-  return Java_VideoFrame_create(
+  return Java_VideoFrame_Constructor(
       jni, j_buffer, static_cast<jint>(frame.rotation()),
       static_cast<jlong>(frame.timestamp_us() * rtc::kNumNanosecsPerMicrosec));
 }