Android: Generate JNI code for MediaStream

Bug: webrtc:8278
Change-Id: I48d0615f3db3f22e7179a2d7c59b970a33678ada
Reviewed-on: https://webrtc-review.googlesource.com/25962
Commit-Queue: Magnus Jedvert <magjed@webrtc.org>
Reviewed-by: Sami Kalliomäki <sakal@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#20891}
Authored by Magnus Jedvert on 2017-11-27 11:23:42 +01:00
Committed by Commit Bot
parent b8ff8f7d40
commit 6a0345b3b0
6 changed files with 77 additions and 126 deletions
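
Adding MediaStream.java to the generate_jni() sources (first hunk below) makes Chromium's JNI generator emit a MediaStream_jni.h header, which is what replaces the hand-written FindClass/GetMethodID plumbing removed later in the CL. The generated header itself is not part of the diff; the sketch below only approximates its surface, with signatures inferred from the call sites in the PeerConnectionObserverJni changes, so treat the exact names and parameter lists as assumptions rather than generator output.

// Rough, hand-written approximation of the generated
// sdk/android/generated_peerconnection_jni/jni/MediaStream_jni.h surface.
// Signatures are inferred from how this CL calls it; they are assumptions.
#include <jni.h>

// Cached class object for org.webrtc.MediaStream (used with NewObjectArray).
jclass org_webrtc_MediaStream_clazz(JNIEnv* env);

// Invokes the @CalledByNative MediaStream(long) constructor.
jobject Java_MediaStream_Constructor(JNIEnv* env, jlong nativeStream);

// Invoke the @CalledByNative package-private helpers on an existing Java stream.
void Java_MediaStream_addNativeAudioTrack(JNIEnv* env, jobject stream, jlong nativeTrack);
void Java_MediaStream_addNativeVideoTrack(JNIEnv* env, jobject stream, jlong nativeTrack);
void Java_MediaStream_removeAudioTrack(JNIEnv* env, jobject stream, jlong nativeTrack);
void Java_MediaStream_removeVideoTrack(JNIEnv* env, jobject stream, jlong nativeTrack);

// Invokes the @CalledByNative dispose() method.
void Java_MediaStream_dispose(JNIEnv* env, jobject stream);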

View File

@@ -287,6 +287,7 @@ generate_jni("generated_peerconnection_jni") {
     "api/org/webrtc/DataChannel.java",
     "api/org/webrtc/IceCandidate.java",
     "api/org/webrtc/MediaConstraints.java",
+    "api/org/webrtc/MediaStream.java",
     "api/org/webrtc/NetworkMonitor.java",
     "api/org/webrtc/NetworkMonitorAutoDetect.java",
     "api/org/webrtc/RTCStats.java",

View File

@@ -12,21 +12,25 @@ package org.webrtc;
 import java.util.ArrayList;
 import java.util.List;
+import java.util.Iterator;
 
 /** Java wrapper for a C++ MediaStreamInterface. */
 public class MediaStream {
+  private static final String TAG = "MediaStream";
+
   public final List<AudioTrack> audioTracks = new ArrayList<>();
   public final List<VideoTrack> videoTracks = new ArrayList<>();
   public final List<VideoTrack> preservedVideoTracks = new ArrayList<>();
   // Package-protected for PeerConnection.
   final long nativeStream;
 
+  @CalledByNative
   public MediaStream(long nativeStream) {
     this.nativeStream = nativeStream;
   }
 
   public boolean addTrack(AudioTrack track) {
-    if (nativeAddAudioTrack(nativeStream, track.nativeTrack)) {
+    if (addAudioTrackToNativeStream(nativeStream, track.nativeTrack)) {
       audioTracks.add(track);
       return true;
     }
@@ -34,7 +38,7 @@ public class MediaStream {
   }
 
   public boolean addTrack(VideoTrack track) {
-    if (nativeAddVideoTrack(nativeStream, track.nativeTrack)) {
+    if (addVideoTrackToNativeStream(nativeStream, track.nativeTrack)) {
       videoTracks.add(track);
       return true;
     }
@@ -45,7 +49,7 @@ public class MediaStream {
   // is called. If video track need to be preserved after MediaStream is destroyed it
   // should be added to MediaStream using addPreservedTrack() call.
   public boolean addPreservedTrack(VideoTrack track) {
-    if (nativeAddVideoTrack(nativeStream, track.nativeTrack)) {
+    if (addVideoTrackToNativeStream(nativeStream, track.nativeTrack)) {
       preservedVideoTracks.add(track);
       return true;
     }
@@ -54,15 +58,16 @@ public class MediaStream {
   public boolean removeTrack(AudioTrack track) {
     audioTracks.remove(track);
-    return nativeRemoveAudioTrack(nativeStream, track.nativeTrack);
+    return removeNativeAudioTrack(nativeStream, track.nativeTrack);
   }
 
   public boolean removeTrack(VideoTrack track) {
     videoTracks.remove(track);
     preservedVideoTracks.remove(track);
-    return nativeRemoveVideoTrack(nativeStream, track.nativeTrack);
+    return removeNativeVideoTrack(nativeStream, track.nativeTrack);
   }
 
+  @CalledByNative
   public void dispose() {
     // Remove and release previously added audio and video tracks.
     while (!audioTracks.isEmpty()) {
@@ -83,7 +88,7 @@ public class MediaStream {
   }
 
   public String label() {
-    return nativeLabel(nativeStream);
+    return getNativeLabel(nativeStream);
   }
 
   @Override
@@ -91,15 +96,51 @@ public class MediaStream {
     return "[" + label() + ":A=" + audioTracks.size() + ":V=" + videoTracks.size() + "]";
   }
 
-  private static native boolean nativeAddAudioTrack(long nativeStream, long nativeAudioTrack);
-  private static native boolean nativeAddVideoTrack(long nativeStream, long nativeVideoTrack);
-  private static native boolean nativeRemoveAudioTrack(long nativeStream, long nativeAudioTrack);
-  private static native boolean nativeRemoveVideoTrack(long nativeStream, long nativeVideoTrack);
-  private static native String nativeLabel(long nativeStream);
+  @CalledByNative
+  void addNativeAudioTrack(long nativeTrack) {
+    audioTracks.add(new AudioTrack(nativeTrack));
+  }
+
+  @CalledByNative
+  void addNativeVideoTrack(long nativeTrack) {
+    videoTracks.add(new VideoTrack(nativeTrack));
+  }
+
+  @CalledByNative
+  void removeAudioTrack(long nativeTrack) {
+    removeMediaStreamTrack(audioTracks, nativeTrack);
+  }
+
+  @CalledByNative
+  void removeVideoTrack(long nativeTrack) {
+    removeMediaStreamTrack(videoTracks, nativeTrack);
+  }
+
+  private static void removeMediaStreamTrack(
+      List<? extends MediaStreamTrack> tracks, long nativeTrack) {
+    final Iterator<? extends MediaStreamTrack> it = tracks.iterator();
+    while (it.hasNext()) {
+      MediaStreamTrack track = it.next();
+      if (track.nativeTrack == nativeTrack) {
+        track.dispose();
+        it.remove();
+        return;
+      }
+    }
+    Logging.e(TAG, "Couldn't not find track");
+  }
+
+  private static native boolean addAudioTrackToNativeStream(
+      long nativeStream, long nativeAudioTrack);
+  private static native boolean addVideoTrackToNativeStream(
+      long nativeStream, long nativeVideoTrack);
+  private static native boolean removeNativeAudioTrack(long nativeStream, long nativeAudioTrack);
+  private static native boolean removeNativeVideoTrack(long nativeStream, long nativeVideoTrack);
+  private static native String getNativeLabel(long nativeStream);
   private static native void free(long nativeStream);
 }

View File

@@ -59,7 +59,6 @@ ClassReferenceHolder::ClassReferenceHolder(JNIEnv* jni) {
   LoadClass(jni, "java/nio/ByteBuffer");
   LoadClass(jni, "java/util/ArrayList");
   LoadClass(jni, "java/util/LinkedHashMap");
-  LoadClass(jni, "org/webrtc/AudioTrack");
   LoadClass(jni, "org/webrtc/Camera1Enumerator");
   LoadClass(jni, "org/webrtc/Camera2Enumerator");
   LoadClass(jni, "org/webrtc/CameraEnumerationAndroid");
@@ -76,7 +75,6 @@ ClassReferenceHolder::ClassReferenceHolder(JNIEnv* jni) {
   LoadClass(jni, "org/webrtc/MediaCodecVideoEncoder$OutputBufferInfo");
   LoadClass(jni, "org/webrtc/MediaCodecVideoEncoder$VideoCodecType");
   LoadClass(jni, "org/webrtc/MediaSource$State");
-  LoadClass(jni, "org/webrtc/MediaStream");
   LoadClass(jni, "org/webrtc/MediaStreamTrack");
   LoadClass(jni, "org/webrtc/MediaStreamTrack$MediaType");
   LoadClass(jni, "org/webrtc/MediaStreamTrack$State");
@@ -114,7 +112,6 @@ ClassReferenceHolder::ClassReferenceHolder(JNIEnv* jni) {
   LoadClass(jni, "org/webrtc/VideoFrame$TextureBuffer");
   LoadClass(jni, "org/webrtc/VideoRenderer$I420Frame");
   LoadClass(jni, "org/webrtc/VideoSink");
-  LoadClass(jni, "org/webrtc/VideoTrack");
   LoadClass(jni, "org/webrtc/WrappedNativeI420Buffer");
 }

View File

@@ -15,7 +15,7 @@ namespace webrtc {
 namespace jni {
 
 JNI_FUNCTION_DECLARATION(jboolean,
-                         MediaStream_nativeAddAudioTrack,
+                         MediaStream_addAudioTrackToNativeStream,
                          JNIEnv* jni,
                          jclass,
                          jlong pointer,
@@ -25,7 +25,7 @@ JNI_FUNCTION_DECLARATION(jboolean,
 }
 
 JNI_FUNCTION_DECLARATION(jboolean,
-                         MediaStream_nativeAddVideoTrack,
+                         MediaStream_addVideoTrackToNativeStream,
                          JNIEnv* jni,
                          jclass,
                          jlong pointer,
@@ -35,7 +35,7 @@ JNI_FUNCTION_DECLARATION(jboolean,
 }
 
 JNI_FUNCTION_DECLARATION(jboolean,
-                         MediaStream_nativeRemoveAudioTrack,
+                         MediaStream_removeNativeAudioTrack,
                          JNIEnv* jni,
                          jclass,
                          jlong pointer,
@@ -45,7 +45,7 @@ JNI_FUNCTION_DECLARATION(jboolean,
 }
 
 JNI_FUNCTION_DECLARATION(jboolean,
-                         MediaStream_nativeRemoveVideoTrack,
+                         MediaStream_removeNativeVideoTrack,
                          JNIEnv* jni,
                          jclass,
                          jlong pointer,
@@ -55,7 +55,7 @@ JNI_FUNCTION_DECLARATION(jboolean,
 }
 
 JNI_FUNCTION_DECLARATION(jstring,
-                         MediaStream_nativeLabel,
+                         MediaStream_getNativeLabel,
                          JNIEnv* jni,
                          jclass,
                          jlong j_p) {
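
The hunks above only rename the hand-written JNI entry points so they match the new Java native-method names; their bodies are untouched and therefore elided by the diff. For orientation, here is a sketch of what one of them plausibly looks like — the second parameter name and the cast-and-forward body are assumptions based on the usual shape of these wrappers, not text from the CL.

// Sketch of the entry point bound to Java's addAudioTrackToNativeStream().
// The body is assumed (it is not shown in the diff): cast the two jlong
// handles back to their native types and forward to MediaStreamInterface.
JNI_FUNCTION_DECLARATION(jboolean,
                         MediaStream_addAudioTrackToNativeStream,
                         JNIEnv* jni,
                         jclass,
                         jlong pointer,
                         jlong j_audio_track_pointer) {
  return reinterpret_cast<MediaStreamInterface*>(pointer)->AddTrack(
      reinterpret_cast<AudioTrackInterface*>(j_audio_track_pointer));
}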

View File

@@ -14,6 +14,7 @@
 #include <string>
 
 #include "rtc_base/ptr_util.h"
+#include "sdk/android/generated_peerconnection_jni/jni/MediaStream_jni.h"
 #include "sdk/android/src/jni/classreferenceholder.h"
 #include "sdk/android/src/jni/pc/datachannel.h"
 #include "sdk/android/src/jni/pc/java_native_conversion.h"
@@ -31,22 +32,6 @@ PeerConnectionObserverJni::PeerConnectionObserverJni(JNIEnv* jni,
                                                      jobject j_observer)
     : j_observer_global_(jni, j_observer),
       j_observer_class_(jni, GetObjectClass(jni, *j_observer_global_)),
-      j_media_stream_class_(jni, FindClass(jni, "org/webrtc/MediaStream")),
-      j_media_stream_ctor_(
-          GetMethodID(jni, *j_media_stream_class_, "<init>", "(J)V")),
-      j_media_stream_track_class_(
-          jni,
-          FindClass(jni, "org/webrtc/MediaStreamTrack")),
-      j_track_dispose_id_(
-          GetMethodID(jni, *j_media_stream_track_class_, "dispose", "()V")),
-      j_native_track_id_(
-          GetFieldID(jni, *j_media_stream_track_class_, "nativeTrack", "J")),
-      j_audio_track_class_(jni, FindClass(jni, "org/webrtc/AudioTrack")),
-      j_audio_track_ctor_(
-          GetMethodID(jni, *j_audio_track_class_, "<init>", "(J)V")),
-      j_video_track_class_(jni, FindClass(jni, "org/webrtc/VideoTrack")),
-      j_video_track_ctor_(
-          GetMethodID(jni, *j_video_track_class_, "<init>", "(J)V")),
       j_rtp_receiver_class_(jni, FindClass(jni, "org/webrtc/RtpReceiver")),
       j_rtp_receiver_ctor_(
           GetMethodID(jni, *j_rtp_receiver_class_, "<init>", "(J)V")) {}
@@ -159,66 +144,17 @@ void PeerConnectionObserverJni::OnAddStream(
 void PeerConnectionObserverJni::AddNativeAudioTrackToJavaStream(
     rtc::scoped_refptr<AudioTrackInterface> track,
     jobject j_stream) {
-  jstring id = JavaStringFromStdString(jni(), track->id());
-  // Java AudioTrack holds one reference. Corresponding Release() is in
-  // MediaStreamTrack_free, triggered by AudioTrack.dispose().
-  track->AddRef();
-  jobject j_track = jni()->NewObject(*j_audio_track_class_, j_audio_track_ctor_,
-                                     reinterpret_cast<jlong>(track.get()), id);
-  CHECK_EXCEPTION(jni()) << "error during NewObject";
-  // Now add to the audioTracks linked list.
-  jfieldID audio_tracks_id = GetFieldID(jni(), *j_media_stream_class_,
-                                        "audioTracks", "Ljava/util/List;");
-  jobject audio_tracks = GetObjectField(jni(), j_stream, audio_tracks_id);
-  jmethodID add = GetMethodID(jni(), GetObjectClass(jni(), audio_tracks), "add",
-                              "(Ljava/lang/Object;)Z");
-  jboolean added = jni()->CallBooleanMethod(audio_tracks, add, j_track);
-  CHECK_EXCEPTION(jni()) << "error during CallBooleanMethod";
-  RTC_CHECK(added);
+  JNIEnv* env = AttachCurrentThreadIfNeeded();
+  Java_MediaStream_addNativeAudioTrack(env, j_stream,
+                                       jlongFromPointer(track.release()));
 }
 
 void PeerConnectionObserverJni::AddNativeVideoTrackToJavaStream(
     rtc::scoped_refptr<VideoTrackInterface> track,
     jobject j_stream) {
-  jstring id = JavaStringFromStdString(jni(), track->id());
-  // Java VideoTrack holds one reference. Corresponding Release() is in
-  // MediaStreamTrack_free, triggered by VideoTrack.dispose().
-  track->AddRef();
-  jobject j_track = jni()->NewObject(*j_video_track_class_, j_video_track_ctor_,
-                                     reinterpret_cast<jlong>(track.get()), id);
-  CHECK_EXCEPTION(jni()) << "error during NewObject";
-  // Now add to the videoTracks linked list.
-  jfieldID video_tracks_id = GetFieldID(jni(), *j_media_stream_class_,
-                                        "videoTracks", "Ljava/util/List;");
-  jobject video_tracks = GetObjectField(jni(), j_stream, video_tracks_id);
-  jmethodID add = GetMethodID(jni(), GetObjectClass(jni(), video_tracks), "add",
-                              "(Ljava/lang/Object;)Z");
-  jboolean added = jni()->CallBooleanMethod(video_tracks, add, j_track);
-  CHECK_EXCEPTION(jni()) << "error during CallBooleanMethod";
-  RTC_CHECK(added);
-}
-
-void PeerConnectionObserverJni::RemoveAndDisposeNativeTrackFromJavaTrackList(
-    MediaStreamTrackInterface* track,
-    jobject j_tracks) {
-  Iterable iterable_tracks(jni(), j_tracks);
-  for (auto it = iterable_tracks.begin(); it != iterable_tracks.end(); ++it) {
-    MediaStreamTrackInterface* native_track =
-        reinterpret_cast<MediaStreamTrackInterface*>(
-            jni()->GetLongField(*it, j_native_track_id_));
-    CHECK_EXCEPTION(jni()) << "error during GetLongField";
-    if (native_track == track) {
-      jni()->CallVoidMethod(*it, j_track_dispose_id_);
-      it.Remove();
-      return;
-    }
-  }
-  // If we reached this point, we didn't find the track, which means we're
-  // getting a "track removed" callback but the Java stream doesn't have a
-  // corresponding track, which indicates a bug somewhere.
-  RTC_NOTREACHED();
+  JNIEnv* env = AttachCurrentThreadIfNeeded();
+  Java_MediaStream_addNativeVideoTrack(env, j_stream,
+                                       jlongFromPointer(track.release()));
 }
 
 void PeerConnectionObserverJni::OnAudioTrackAddedToStream(
@@ -240,23 +176,19 @@ void PeerConnectionObserverJni::OnVideoTrackAddedToStream(
 void PeerConnectionObserverJni::OnAudioTrackRemovedFromStream(
     AudioTrackInterface* track,
     MediaStreamInterface* stream) {
-  ScopedLocalRefFrame local_ref_frame(jni());
+  JNIEnv* env = AttachCurrentThreadIfNeeded();
+  ScopedLocalRefFrame local_ref_frame(env);
   jobject j_stream = GetOrCreateJavaStream(stream);
-  jfieldID audio_tracks_id = GetFieldID(jni(), *j_media_stream_class_,
-                                        "audioTracks", "Ljava/util/List;");
-  jobject audio_tracks = GetObjectField(jni(), j_stream, audio_tracks_id);
-  RemoveAndDisposeNativeTrackFromJavaTrackList(track, audio_tracks);
+  Java_MediaStream_removeAudioTrack(env, j_stream, jlongFromPointer(track));
 }
 
 void PeerConnectionObserverJni::OnVideoTrackRemovedFromStream(
     VideoTrackInterface* track,
     MediaStreamInterface* stream) {
-  ScopedLocalRefFrame local_ref_frame(jni());
+  JNIEnv* env = AttachCurrentThreadIfNeeded();
+  ScopedLocalRefFrame local_ref_frame(env);
   jobject j_stream = GetOrCreateJavaStream(stream);
-  jfieldID video_tracks_id = GetFieldID(jni(), *j_media_stream_class_,
-                                        "videoTracks", "Ljava/util/List;");
-  jobject video_tracks = GetObjectField(jni(), j_stream, video_tracks_id);
-  RemoveAndDisposeNativeTrackFromJavaTrackList(track, video_tracks);
+  Java_MediaStream_removeVideoTrack(env, j_stream, jlongFromPointer(track));
 }
 
 void PeerConnectionObserverJni::OnRemoveStream(
@@ -336,10 +268,9 @@ void PeerConnectionObserverJni::DisposeRemoteStream(
                          stream_observers_.end());
   remote_streams_.erase(it);
 
-  jni()->CallVoidMethod(
-      j_stream, GetMethodID(jni(), *j_media_stream_class_, "dispose", "()V"));
-  CHECK_EXCEPTION(jni()) << "error during MediaStream.dispose()";
-  DeleteGlobalRef(jni(), j_stream);
+  JNIEnv* env = AttachCurrentThreadIfNeeded();
+  Java_MediaStream_dispose(env, j_stream);
+  DeleteGlobalRef(env, j_stream);
 }
 
 void PeerConnectionObserverJni::DisposeRtpReceiver(
@@ -365,9 +296,7 @@ jobject PeerConnectionObserverJni::GetOrCreateJavaStream(
   // MediaStream_free, triggered by MediaStream.dispose().
   stream->AddRef();
   jobject j_stream =
-      jni()->NewObject(*j_media_stream_class_, j_media_stream_ctor_,
-                       reinterpret_cast<jlong>(stream.get()));
-  CHECK_EXCEPTION(jni()) << "error during NewObject";
+      Java_MediaStream_Constructor(jni(), jlongFromPointer(stream.get()));
   remote_streams_[stream] = NewGlobalRef(jni(), j_stream);
 
   return j_stream;
@@ -376,8 +305,8 @@ jobject PeerConnectionObserverJni::GetOrCreateJavaStream(
 jobjectArray PeerConnectionObserverJni::NativeToJavaMediaStreamArray(
     JNIEnv* jni,
     const std::vector<rtc::scoped_refptr<MediaStreamInterface>>& streams) {
-  jobjectArray java_streams =
-      jni->NewObjectArray(streams.size(), *j_media_stream_class_, nullptr);
+  jobjectArray java_streams = jni->NewObjectArray(
+      streams.size(), org_webrtc_MediaStream_clazz(jni), nullptr);
   CHECK_EXCEPTION(jni) << "error during NewObjectArray";
   for (size_t i = 0; i < streams.size(); ++i) {
     jobject j_stream = GetOrCreateJavaStream(streams[i]);
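
Taken together, the GetOrCreateJavaStream() and DisposeRemoteStream() hunks above define the ownership handshake for remote streams: one native reference is handed to the Java wrapper at creation and released again through dispose()/free(). The condensed sketch below restates that lifetime in one place; the two helper names are hypothetical, but every call inside mirrors code in this CL.

// Hypothetical helpers condensing the remote-stream lifetime from the hunks above.
jobject CreateAndPinJavaStream(JNIEnv* env, MediaStreamInterface* stream) {
  // The Java MediaStream holds one reference; the matching Release() happens
  // in the native free(), which MediaStream.dispose() ends up calling.
  stream->AddRef();
  jobject j_stream = Java_MediaStream_Constructor(env, jlongFromPointer(stream));
  return NewGlobalRef(env, j_stream);  // Pinned in remote_streams_ until disposal.
}

void UnpinAndDisposeJavaStream(JNIEnv* env, jobject j_stream) {
  Java_MediaStream_dispose(env, j_stream);  // Disposes tracks and frees the native stream.
  DeleteGlobalRef(env, j_stream);
}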

View File

@@ -87,14 +87,6 @@ class PeerConnectionObserverJni : public PeerConnectionObserver,
   void AddNativeVideoTrackToJavaStream(
       rtc::scoped_refptr<VideoTrackInterface> track,
       jobject j_stream);
-  // Remove and dispose the Java MediaStreamTrack object that wraps |track|,
-  // given |j_tracks| which is a linked list of tracks (either the videoTracks
-  // or audioTracks member of MediaStream).
-  //
-  // DCHECKs if the track isn't found.
-  void RemoveAndDisposeNativeTrackFromJavaTrackList(
-      MediaStreamTrackInterface* track,
-      jobject j_tracks);
 
   // Callbacks invoked when a native stream changes, and the Java stream needs
   // to be updated; MediaStreamObserver is used to make this simpler.
@@ -109,15 +101,6 @@ class PeerConnectionObserverJni : public PeerConnectionObserver,
   const ScopedGlobalRef<jobject> j_observer_global_;
   const ScopedGlobalRef<jclass> j_observer_class_;
-  const ScopedGlobalRef<jclass> j_media_stream_class_;
-  const jmethodID j_media_stream_ctor_;
-  const ScopedGlobalRef<jclass> j_media_stream_track_class_;
-  const jmethodID j_track_dispose_id_;
-  const jfieldID j_native_track_id_;
-  const ScopedGlobalRef<jclass> j_audio_track_class_;
-  const jmethodID j_audio_track_ctor_;
-  const ScopedGlobalRef<jclass> j_video_track_class_;
-  const jmethodID j_video_track_ctor_;
   const ScopedGlobalRef<jclass> j_rtp_receiver_class_;
   const jmethodID j_rtp_receiver_ctor_;