Android: One JNI file per Java file

This CL does the following:
 * Split out MediaStream JNI code from peerconnection.cc to mediastream.h/mediastream.cc.
 * Split out RtpSender JNI code from peerconnection.cc to rtpsender.h/rtpsender.cc.
 * Split out TurnCustomizer JNI code from peerconnection.cc to turncustomizer.h/turncustomizer.cc.
 * Add a missing isInstanceOf() function to WrappedNativeVideoDecoder.java.
 * Move some PeerConnectionFactory JNI declarations from pc/video.cc to peerconnectionfactory.cc.
 * Add declarations to video.h for the JNI functions that depend on EglBase14_jni.h.
 * Store the global Java MediaStream objects in a scoped object that also calls dispose() on destruction (sketched below).
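
The last item follows the usual RAII pattern for JNI global references. A minimal sketch of the idea (illustrative names only, not the actual WebRTC helpers; the real wrapper is JavaMediaStream in mediastream.h below):

#include <jni.h>

// Sketch: owns a JNI global reference and releases it when the C++ object
// dies. The destructor re-attaches the current thread because a JNIEnv*
// must never be cached across threads (the real code uses
// AttachCurrentThreadIfNeeded for the same reason).
class ScopedDisposingGlobalRef {
 public:
  ScopedDisposingGlobalRef(JNIEnv* env, jobject obj)
      : obj_(env->NewGlobalRef(obj)) {
    env->GetJavaVM(&jvm_);
  }
  ~ScopedDisposingGlobalRef() {
    JNIEnv* env = nullptr;
    jvm_->AttachCurrentThread(reinterpret_cast<void**>(&env), nullptr);
    // A Java-side dispose() call would go here, as in ~JavaMediaStream().
    env->DeleteGlobalRef(obj_);
  }
  ScopedDisposingGlobalRef(const ScopedDisposingGlobalRef&) = delete;
  ScopedDisposingGlobalRef& operator=(const ScopedDisposingGlobalRef&) = delete;
  jobject obj() const { return obj_; }

 private:
  JavaVM* jvm_ = nullptr;
  jobject obj_;
};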

Bug: webrtc:8278
Change-Id: I3c56a599b8bcbc8f34e5c5a7b9c9fe1d192ff3f3
Reviewed-on: https://webrtc-review.googlesource.com/34645
Commit-Queue: Magnus Jedvert <magjed@webrtc.org>
Reviewed-by: Sami Kalliomäki <sakal@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#21380}
Author: Magnus Jedvert
Date: 2017-12-20 12:00:50 +01:00
Committed by: Commit Bot
parent 1ece1edddc, commit 1212f1e227
22 changed files with 395 additions and 308 deletions


@@ -40,4 +40,9 @@ abstract class WrappedNativeVideoDecoder implements VideoDecoder {
public String getImplementationName() {
throw new UnsupportedOperationException("Not implemented.");
}
@CalledByNative
static boolean isInstanceOf(VideoDecoder decoder) {
return decoder instanceof WrappedNativeVideoDecoder;
}
}
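
The new @CalledByNative helper lets native code test the decoder type through generated bindings. For comparison, the hand-rolled check it replaces (removed in the last file of this CL) amounts to:

#include <jni.h>

// Old pattern, as removed at the end of this CL: look the class up by name
// at runtime and test with IsInstanceOf. The generated
// Java_WrappedNativeVideoDecoder_isInstanceOf call does the same with one
// call and no string lookup.
bool IsWrappedNativeVideoDecoder(JNIEnv* jni, jobject j_decoder) {
  jclass cls = jni->FindClass("org/webrtc/WrappedNativeVideoDecoder");
  return jni->IsInstanceOf(j_decoder, cls) == JNI_TRUE;
}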


@@ -8,12 +8,97 @@
* be found in the AUTHORS file in the root of the source tree.
*/
#include "api/mediastreaminterface.h"
#include "sdk/android/src/jni/pc/mediastream.h"
#include "rtc_base/ptr_util.h"
#include "sdk/android/generated_peerconnection_jni/jni/MediaStream_jni.h"
#include "sdk/android/src/jni/jni_helpers.h"
namespace webrtc {
namespace jni {
JavaMediaStream::JavaMediaStream(
JNIEnv* env,
rtc::scoped_refptr<MediaStreamInterface> media_stream)
: j_media_stream_(
env,
Java_MediaStream_Constructor(env,
jlongFromPointer(media_stream.get()))),
observer_(rtc::MakeUnique<MediaStreamObserver>(media_stream)) {
for (rtc::scoped_refptr<AudioTrackInterface> track :
media_stream->GetAudioTracks()) {
Java_MediaStream_addNativeAudioTrack(env, *j_media_stream_,
jlongFromPointer(track.release()));
}
for (rtc::scoped_refptr<VideoTrackInterface> track :
media_stream->GetVideoTracks()) {
Java_MediaStream_addNativeVideoTrack(env, *j_media_stream_,
jlongFromPointer(track.release()));
}
// Create an observer to update the Java stream when the native stream's set
// of tracks changes.
observer_->SignalAudioTrackRemoved.connect(
this, &JavaMediaStream::OnAudioTrackRemovedFromStream);
observer_->SignalVideoTrackRemoved.connect(
this, &JavaMediaStream::OnVideoTrackRemovedFromStream);
observer_->SignalAudioTrackAdded.connect(
this, &JavaMediaStream::OnAudioTrackAddedToStream);
observer_->SignalVideoTrackAdded.connect(
this, &JavaMediaStream::OnVideoTrackAddedToStream);
// |j_media_stream| holds one reference. Corresponding Release() is in
// MediaStream_free, triggered by MediaStream.dispose().
media_stream.release();
}
JavaMediaStream::~JavaMediaStream() {
JNIEnv* env = AttachCurrentThreadIfNeeded();
// Remove the observer first, so it doesn't react to events during deletion.
observer_ = nullptr;
Java_MediaStream_dispose(env, *j_media_stream_);
}
void JavaMediaStream::OnAudioTrackAddedToStream(AudioTrackInterface* track,
MediaStreamInterface* stream) {
JNIEnv* env = AttachCurrentThreadIfNeeded();
ScopedLocalRefFrame local_ref_frame(env);
track->AddRef();
Java_MediaStream_addNativeAudioTrack(env, *j_media_stream_,
jlongFromPointer(track));
}
void JavaMediaStream::OnVideoTrackAddedToStream(VideoTrackInterface* track,
MediaStreamInterface* stream) {
JNIEnv* env = AttachCurrentThreadIfNeeded();
ScopedLocalRefFrame local_ref_frame(env);
track->AddRef();
Java_MediaStream_addNativeVideoTrack(env, *j_media_stream_,
jlongFromPointer(track));
}
void JavaMediaStream::OnAudioTrackRemovedFromStream(
AudioTrackInterface* track,
MediaStreamInterface* stream) {
JNIEnv* env = AttachCurrentThreadIfNeeded();
ScopedLocalRefFrame local_ref_frame(env);
Java_MediaStream_removeAudioTrack(env, *j_media_stream_,
jlongFromPointer(track));
}
void JavaMediaStream::OnVideoTrackRemovedFromStream(
VideoTrackInterface* track,
MediaStreamInterface* stream) {
JNIEnv* env = AttachCurrentThreadIfNeeded();
ScopedLocalRefFrame local_ref_frame(env);
Java_MediaStream_removeVideoTrack(env, *j_media_stream_,
jlongFromPointer(track));
}
jclass GetMediaStreamClass(JNIEnv* env) {
return org_webrtc_MediaStream_clazz(env);
}
JNI_FUNCTION_DECLARATION(jboolean,
MediaStream_addAudioTrackToNativeStream,
JNIEnv* jni,
@@ -63,9 +148,5 @@ JNI_FUNCTION_DECLARATION(jstring,
jni, reinterpret_cast<MediaStreamInterface*>(j_p)->label());
}
JNI_FUNCTION_DECLARATION(void, MediaStream_free, JNIEnv*, jclass, jlong j_p) {
reinterpret_cast<MediaStreamInterface*>(j_p)->Release();
}
} // namespace jni
} // namespace webrtc
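
A hedged usage sketch of the new class (the wrapping function is hypothetical; only JavaMediaStream and its methods come from the diff above):

#include "sdk/android/src/jni/pc/mediastream.h"

namespace webrtc {
namespace jni {

// Hypothetical helper for illustration: wrap a native stream for the
// duration of a scope.
void WrapStreamForScope(JNIEnv* env,
                        rtc::scoped_refptr<MediaStreamInterface> stream) {
  // The constructor creates the Java MediaStream, populates its tracks,
  // and connects a MediaStreamObserver to mirror later track changes.
  JavaMediaStream java_stream(env, stream);
  jobject j_stream = java_stream.j_media_stream();
  (void)j_stream;  // would be handed to a Java callback here
  // On scope exit the destructor detaches the observer, then calls
  // MediaStream.dispose(), whose MediaStream_free releases the native ref.
}

}  // namespace jni
}  // namespace webrtc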


@@ -0,0 +1,52 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef SDK_ANDROID_SRC_JNI_PC_MEDIASTREAM_H_
#define SDK_ANDROID_SRC_JNI_PC_MEDIASTREAM_H_
#include <jni.h>
#include <memory>
#include "api/mediastreaminterface.h"
#include "pc/mediastreamobserver.h"
#include "sdk/android/src/jni/jni_helpers.h"
namespace webrtc {
namespace jni {
class JavaMediaStream : public sigslot::has_slots<> {
public:
explicit JavaMediaStream(
JNIEnv* env,
rtc::scoped_refptr<MediaStreamInterface> media_stream);
~JavaMediaStream();
jobject j_media_stream() { return *j_media_stream_; }
private:
void OnAudioTrackAddedToStream(AudioTrackInterface* track,
MediaStreamInterface* stream);
void OnVideoTrackAddedToStream(VideoTrackInterface* track,
MediaStreamInterface* stream);
void OnAudioTrackRemovedFromStream(AudioTrackInterface* track,
MediaStreamInterface* stream);
void OnVideoTrackRemovedFromStream(VideoTrackInterface* track,
MediaStreamInterface* stream);
ScopedGlobalRef<jobject> j_media_stream_;
std::unique_ptr<MediaStreamObserver> observer_;
};
jclass GetMediaStreamClass(JNIEnv* env);
} // namespace jni
} // namespace webrtc
#endif // SDK_ANDROID_SRC_JNI_PC_MEDIASTREAM_H_
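
JavaMediaStream derives from sigslot::has_slots<> so that every signal connection made in its constructor is broken automatically on destruction. A minimal standalone illustration of that behavior (assuming WebRTC's bundled sigslot; the listener type here is made up):

#include "rtc_base/sigslot.h"

// Made-up listener: its connections die with the object because it
// derives from sigslot::has_slots<>.
struct TrackListener : public sigslot::has_slots<> {
  void OnTrackAdded(int track_id) { last_track_id = track_id; }
  int last_track_id = 0;
};

int main() {
  sigslot::signal1<int> track_added;
  {
    TrackListener listener;
    track_added.connect(&listener, &TrackListener::OnTrackAdded);
    track_added(42);  // invokes listener.OnTrackAdded(42)
  }                   // ~has_slots() auto-disconnects here
  track_added(43);    // safe: no dangling slot is left behind
  return 0;
}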


@@ -23,6 +23,21 @@ VideoDecoderFactory* CreateVideoDecoderFactory(JNIEnv* jni,
return nullptr;
}
void SetEglContext(JNIEnv* env,
cricket::WebRtcVideoEncoderFactory* encoder_factory,
jobject egl_context) {}
void SetEglContext(JNIEnv* env,
cricket::WebRtcVideoDecoderFactory* decoder_factory,
jobject egl_context) {}
void* CreateVideoSource(JNIEnv* env,
rtc::Thread* signaling_thread,
rtc::Thread* worker_thread,
jobject j_surface_texture_helper,
jboolean is_screencast) {
return nullptr;
}
cricket::WebRtcVideoEncoderFactory* CreateLegacyVideoEncoderFactory() {
return nullptr;
}
@@ -41,10 +56,5 @@ VideoDecoderFactory* WrapLegacyVideoDecoderFactory(
return nullptr;
}
jobject GetJavaSurfaceTextureHelper(
const rtc::scoped_refptr<SurfaceTextureHelper>& surface_texture_helper) {
return nullptr;
}
} // namespace jni
} // namespace webrtc


@@ -39,18 +39,17 @@
#include "rtc_base/checks.h"
#include "rtc_base/logging.h"
#include "rtc_base/ptr_util.h"
#include "sdk/android/generated_peerconnection_jni/jni/MediaStream_jni.h"
#include "sdk/android/generated_peerconnection_jni/jni/PeerConnection_jni.h"
#include "sdk/android/generated_peerconnection_jni/jni/RtpSender_jni.h"
#include "sdk/android/generated_peerconnection_jni/jni/TurnCustomizer_jni.h"
#include "sdk/android/src/jni/jni_helpers.h"
#include "sdk/android/src/jni/pc/datachannel.h"
#include "sdk/android/src/jni/pc/icecandidate.h"
#include "sdk/android/src/jni/pc/mediaconstraints.h"
#include "sdk/android/src/jni/pc/rtcstatscollectorcallbackwrapper.h"
#include "sdk/android/src/jni/pc/rtpsender.h"
#include "sdk/android/src/jni/pc/sdpobserver.h"
#include "sdk/android/src/jni/pc/sessiondescription.h"
#include "sdk/android/src/jni/pc/statsobserver.h"
#include "sdk/android/src/jni/pc/turncustomizer.h"
namespace webrtc {
namespace jni {
@@ -62,15 +61,6 @@ PeerConnectionInterface* ExtractNativePC(JNIEnv* jni, jobject j_pc) {
Java_PeerConnection_getNativePeerConnection(jni, j_pc));
}
jobject NativeToJavaRtpSender(JNIEnv* env,
rtc::scoped_refptr<RtpSenderInterface> sender) {
if (!sender)
return nullptr;
// Sender is now owned by the Java object, and will be freed from
// RtpSender.dispose(), called by PeerConnection.dispose() or getSenders().
return Java_RtpSender_Constructor(env, jlongFromPointer(sender.release()));
}
PeerConnectionInterface::IceServers JavaToNativeIceServers(
JNIEnv* jni,
jobject j_ice_servers) {
@@ -169,10 +159,7 @@ void JavaToNativeRTCConfiguration(
rtc_config->ice_regather_interval_range.emplace(min, max);
}
if (!IsNull(jni, j_turn_customizer)) {
rtc_config->turn_customizer = reinterpret_cast<webrtc::TurnCustomizer*>(
Java_TurnCustomizer_getNativeTurnCustomizer(jni, j_turn_customizer));
}
rtc_config->turn_customizer = GetNativeTurnCustomizer(jni, j_turn_customizer);
rtc_config->disable_ipv6 =
Java_RTCConfiguration_getDisableIpv6(jni, j_rtc_config);
@@ -201,12 +188,7 @@ PeerConnectionObserverJni::PeerConnectionObserverJni(JNIEnv* jni,
jobject j_observer)
: j_observer_global_(jni, j_observer) {}
PeerConnectionObserverJni::~PeerConnectionObserverJni() {
JNIEnv* env = AttachCurrentThreadIfNeeded();
ScopedLocalRefFrame local_ref_frame(env);
while (!remote_streams_.empty())
DisposeRemoteStream(remote_streams_.begin());
}
PeerConnectionObserverJni::~PeerConnectionObserverJni() = default;
void PeerConnectionObserverJni::OnIceCandidate(
const IceCandidateInterface* candidate) {
@@ -262,83 +244,9 @@ void PeerConnectionObserverJni::OnAddStream(
rtc::scoped_refptr<MediaStreamInterface> stream) {
JNIEnv* env = AttachCurrentThreadIfNeeded();
ScopedLocalRefFrame local_ref_frame(env);
// The stream could be added into the remote_streams_ map when calling
// OnAddTrack.
jobject j_stream = GetOrCreateJavaStream(stream);
for (const auto& track : stream->GetAudioTracks()) {
AddNativeAudioTrackToJavaStream(track, j_stream);
}
for (const auto& track : stream->GetVideoTracks()) {
AddNativeVideoTrackToJavaStream(track, j_stream);
}
jobject j_stream = GetOrCreateJavaStream(env, stream).j_media_stream();
Java_Observer_onAddStream(env, *j_observer_global_, j_stream);
// Create an observer to update the Java stream when the native stream's set
// of tracks changes.
auto observer = rtc::MakeUnique<MediaStreamObserver>(stream);
observer->SignalAudioTrackRemoved.connect(
this, &PeerConnectionObserverJni::OnAudioTrackRemovedFromStream);
observer->SignalVideoTrackRemoved.connect(
this, &PeerConnectionObserverJni::OnVideoTrackRemovedFromStream);
observer->SignalAudioTrackAdded.connect(
this, &PeerConnectionObserverJni::OnAudioTrackAddedToStream);
observer->SignalVideoTrackAdded.connect(
this, &PeerConnectionObserverJni::OnVideoTrackAddedToStream);
stream_observers_.push_back(std::move(observer));
}
void PeerConnectionObserverJni::AddNativeAudioTrackToJavaStream(
rtc::scoped_refptr<AudioTrackInterface> track,
jobject j_stream) {
JNIEnv* env = AttachCurrentThreadIfNeeded();
Java_MediaStream_addNativeAudioTrack(env, j_stream,
jlongFromPointer(track.release()));
}
void PeerConnectionObserverJni::AddNativeVideoTrackToJavaStream(
rtc::scoped_refptr<VideoTrackInterface> track,
jobject j_stream) {
JNIEnv* env = AttachCurrentThreadIfNeeded();
Java_MediaStream_addNativeVideoTrack(env, j_stream,
jlongFromPointer(track.release()));
}
void PeerConnectionObserverJni::OnAudioTrackAddedToStream(
AudioTrackInterface* track,
MediaStreamInterface* stream) {
JNIEnv* env = AttachCurrentThreadIfNeeded();
ScopedLocalRefFrame local_ref_frame(env);
jobject j_stream = GetOrCreateJavaStream(stream);
AddNativeAudioTrackToJavaStream(track, j_stream);
}
void PeerConnectionObserverJni::OnVideoTrackAddedToStream(
VideoTrackInterface* track,
MediaStreamInterface* stream) {
JNIEnv* env = AttachCurrentThreadIfNeeded();
ScopedLocalRefFrame local_ref_frame(env);
jobject j_stream = GetOrCreateJavaStream(stream);
AddNativeVideoTrackToJavaStream(track, j_stream);
}
void PeerConnectionObserverJni::OnAudioTrackRemovedFromStream(
AudioTrackInterface* track,
MediaStreamInterface* stream) {
JNIEnv* env = AttachCurrentThreadIfNeeded();
ScopedLocalRefFrame local_ref_frame(env);
jobject j_stream = GetOrCreateJavaStream(stream);
Java_MediaStream_removeAudioTrack(env, j_stream, jlongFromPointer(track));
}
void PeerConnectionObserverJni::OnVideoTrackRemovedFromStream(
VideoTrackInterface* track,
MediaStreamInterface* stream) {
JNIEnv* env = AttachCurrentThreadIfNeeded();
ScopedLocalRefFrame local_ref_frame(env);
jobject j_stream = GetOrCreateJavaStream(stream);
Java_MediaStream_removeVideoTrack(env, j_stream, jlongFromPointer(track));
}
void PeerConnectionObserverJni::OnRemoveStream(
@@ -348,12 +256,9 @@ void PeerConnectionObserverJni::OnRemoveStream(
NativeToJavaStreamsMap::iterator it = remote_streams_.find(stream);
RTC_CHECK(it != remote_streams_.end())
<< "unexpected stream: " << std::hex << stream;
Java_Observer_onRemoveStream(env, *j_observer_global_, it->second);
// Release the refptr reference so that DisposeRemoteStream can assert
// it removes the final reference.
stream = nullptr;
DisposeRemoteStream(it);
Java_Observer_onRemoveStream(env, *j_observer_global_,
it->second.j_media_stream());
remote_streams_.erase(it);
}
void PeerConnectionObserverJni::OnDataChannel(
@@ -388,53 +293,30 @@ void PeerConnectionObserverJni::SetConstraints(
constraints_ = std::move(constraints);
}
void PeerConnectionObserverJni::DisposeRemoteStream(
const NativeToJavaStreamsMap::iterator& it) {
MediaStreamInterface* stream = it->first;
jobject j_stream = it->second;
// Remove the observer first, so it doesn't react to events during deletion.
stream_observers_.erase(
std::remove_if(
stream_observers_.begin(), stream_observers_.end(),
[stream](const std::unique_ptr<MediaStreamObserver>& observer) {
return observer->stream() == stream;
}),
stream_observers_.end());
remote_streams_.erase(it);
JNIEnv* env = AttachCurrentThreadIfNeeded();
Java_MediaStream_dispose(env, j_stream);
DeleteGlobalRef(env, j_stream);
}
// If the NativeToJavaStreamsMap contains the stream, return it.
// Otherwise, create a new Java MediaStream.
jobject PeerConnectionObserverJni::GetOrCreateJavaStream(
JavaMediaStream& PeerConnectionObserverJni::GetOrCreateJavaStream(
JNIEnv* env,
const rtc::scoped_refptr<MediaStreamInterface>& stream) {
NativeToJavaStreamsMap::iterator it = remote_streams_.find(stream);
if (it != remote_streams_.end()) {
return it->second;
if (it == remote_streams_.end()) {
it = remote_streams_
.emplace(std::piecewise_construct,
std::forward_as_tuple(stream.get()),
std::forward_as_tuple(env, stream))
.first;
}
// Java MediaStream holds one reference. Corresponding Release() is in
// MediaStream_free, triggered by MediaStream.dispose().
stream->AddRef();
JNIEnv* env = AttachCurrentThreadIfNeeded();
jobject j_stream =
Java_MediaStream_Constructor(env, jlongFromPointer(stream.get()));
remote_streams_[stream] = NewGlobalRef(env, j_stream);
return j_stream;
return it->second;
}
jobjectArray PeerConnectionObserverJni::NativeToJavaMediaStreamArray(
JNIEnv* jni,
const std::vector<rtc::scoped_refptr<MediaStreamInterface>>& streams) {
jobjectArray java_streams = jni->NewObjectArray(
streams.size(), org_webrtc_MediaStream_clazz(jni), nullptr);
jobjectArray java_streams =
jni->NewObjectArray(streams.size(), GetMediaStreamClass(jni), nullptr);
CHECK_EXCEPTION(jni) << "error during NewObjectArray";
for (size_t i = 0; i < streams.size(); ++i) {
jobject j_stream = GetOrCreateJavaStream(streams[i]);
jobject j_stream = GetOrCreateJavaStream(jni, streams[i]).j_media_stream();
jni->SetObjectArrayElement(java_streams, i, j_stream);
}
return java_streams;
@@ -464,7 +346,7 @@ JNI_FUNCTION_DECLARATION(jobject,
jobject j_pc) {
const SessionDescriptionInterface* sdp =
ExtractNativePC(jni, j_pc)->local_description();
return sdp ? NativeToJavaSessionDescription(jni, sdp) : NULL;
return sdp ? NativeToJavaSessionDescription(jni, sdp) : nullptr;
}
JNI_FUNCTION_DECLARATION(jobject,
@@ -473,7 +355,7 @@ JNI_FUNCTION_DECLARATION(jobject,
jobject j_pc) {
const SessionDescriptionInterface* sdp =
ExtractNativePC(jni, j_pc)->remote_description();
return sdp ? NativeToJavaSessionDescription(jni, sdp) : NULL;
return sdp ? NativeToJavaSessionDescription(jni, sdp) : nullptr;
}
JNI_FUNCTION_DECLARATION(jobject,
@@ -732,7 +614,6 @@ JNI_FUNCTION_DECLARATION(void,
JNIEnv* jni,
jobject j_pc) {
ExtractNativePC(jni, j_pc)->Close();
return;
}
} // namespace jni
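
GetOrCreateJavaStream above has to construct the JavaMediaStream in place, because a type holding a ScopedGlobalRef is not copyable. A self-contained illustration of the same std::map technique (the types here are stand-ins, not WebRTC code):

#include <map>
#include <tuple>
#include <utility>

// Stand-in for a non-copyable mapped type like JavaMediaStream.
struct Wrapper {
  Wrapper(int a, int b) : id(a + b) {}
  Wrapper(const Wrapper&) = delete;
  Wrapper& operator=(const Wrapper&) = delete;
  int id;
};

int main() {
  std::map<int, Wrapper> streams;
  // piecewise_construct builds the Wrapper directly inside the map node,
  // so no copy or move constructor is ever required.
  streams.emplace(std::piecewise_construct,
                  std::forward_as_tuple(1),        // key
                  std::forward_as_tuple(10, 20));  // Wrapper(10, 20)
  return streams.at(1).id == 30 ? 0 : 1;
}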


@@ -19,6 +19,7 @@
#include "api/peerconnectioninterface.h"
#include "sdk/android/src/jni/jni_helpers.h"
#include "sdk/android/src/jni/pc/mediaconstraints.h"
#include "sdk/android/src/jni/pc/mediastream.h"
#include "sdk/android/src/jni/pc/rtpreceiver.h"
namespace webrtc {
@@ -34,8 +35,7 @@ rtc::KeyType GetRtcConfigKeyType(JNIEnv* env, jobject j_rtc_config);
// Adapter between the C++ PeerConnectionObserver interface and the Java
// PeerConnection.Observer interface. Wraps an instance of the Java interface
// and dispatches C++ callbacks to Java.
class PeerConnectionObserverJni : public PeerConnectionObserver,
public sigslot::has_slots<> {
class PeerConnectionObserverJni : public PeerConnectionObserver {
public:
PeerConnectionObserverJni(JNIEnv* jni, jobject j_observer);
virtual ~PeerConnectionObserverJni();
@@ -64,17 +64,15 @@ class PeerConnectionObserverJni : public PeerConnectionObserver,
const MediaConstraintsInterface* constraints() { return constraints_.get(); }
private:
typedef std::map<MediaStreamInterface*, jobject> NativeToJavaStreamsMap;
typedef std::map<MediaStreamTrackInterface*, jobject>
NativeToJavaMediaTrackMap;
typedef std::map<MediaStreamInterface*, JavaMediaStream>
NativeToJavaStreamsMap;
typedef std::map<MediaStreamTrackInterface*, RtpReceiverInterface*>
NativeMediaStreamTrackToNativeRtpReceiver;
void DisposeRemoteStream(const NativeToJavaStreamsMap::iterator& it);
// If the NativeToJavaStreamsMap contains the stream, return it.
// Otherwise, create a new Java MediaStream.
jobject GetOrCreateJavaStream(
// Otherwise, create a new Java MediaStream. Returns a global jobject.
JavaMediaStream& GetOrCreateJavaStream(
JNIEnv* env,
const rtc::scoped_refptr<MediaStreamInterface>& stream);
// Converts array of streams, creating or re-using Java streams as necessary.
@@ -82,37 +80,12 @@ class PeerConnectionObserverJni : public PeerConnectionObserver,
JNIEnv* jni,
const std::vector<rtc::scoped_refptr<MediaStreamInterface>>& streams);
// The three methods below must be called from within a local ref
// frame (e.g., using ScopedLocalRefFrame), otherwise they will
// leak references.
//
// Create a Java track object to wrap |track|, and add it to |j_stream|.
void AddNativeAudioTrackToJavaStream(
rtc::scoped_refptr<AudioTrackInterface> track,
jobject j_stream);
void AddNativeVideoTrackToJavaStream(
rtc::scoped_refptr<VideoTrackInterface> track,
jobject j_stream);
// Callbacks invoked when a native stream changes, and the Java stream needs
// to be updated; MediaStreamObserver is used to make this simpler.
void OnAudioTrackAddedToStream(AudioTrackInterface* track,
MediaStreamInterface* stream);
void OnVideoTrackAddedToStream(VideoTrackInterface* track,
MediaStreamInterface* stream);
void OnAudioTrackRemovedFromStream(AudioTrackInterface* track,
MediaStreamInterface* stream);
void OnVideoTrackRemovedFromStream(VideoTrackInterface* track,
MediaStreamInterface* stream);
const ScopedGlobalRef<jobject> j_observer_global_;
// C++ -> Java remote streams. The stored jobjects are global refs and must be
// manually deleted upon removal. Use DisposeRemoteStream().
// C++ -> Java remote streams.
NativeToJavaStreamsMap remote_streams_;
std::vector<JavaRtpReceiverGlobalOwner> rtp_receivers_;
std::unique_ptr<MediaConstraintsInterface> constraints_;
std::vector<std::unique_ptr<webrtc::MediaStreamObserver>> stream_observers_;
};
} // namespace jni


@@ -462,7 +462,52 @@ JNI_FUNCTION_DECLARATION(jlong,
}
rtc::scoped_refptr<PeerConnectionInterface> pc(
f->CreatePeerConnection(rtc_config, nullptr, nullptr, observer));
return (jlong)pc.release();
return jlongFromPointer(pc.release());
}
JNI_FUNCTION_DECLARATION(jlong,
PeerConnectionFactory_createNativeVideoSource,
JNIEnv* jni,
jclass,
jlong native_factory,
jobject j_surface_texture_helper,
jboolean is_screencast) {
OwnedFactoryAndThreads* factory =
reinterpret_cast<OwnedFactoryAndThreads*>(native_factory);
return jlongFromPointer(CreateVideoSource(
jni, factory->signaling_thread(), factory->worker_thread(),
j_surface_texture_helper, is_screencast));
}
JNI_FUNCTION_DECLARATION(jlong,
PeerConnectionFactory_createNativeVideoTrack,
JNIEnv* jni,
jclass,
jlong native_factory,
jstring id,
jlong native_source) {
rtc::scoped_refptr<PeerConnectionFactoryInterface> factory(
factoryFromJava(native_factory));
rtc::scoped_refptr<VideoTrackInterface> track(factory->CreateVideoTrack(
JavaToStdString(jni, id),
reinterpret_cast<VideoTrackSourceInterface*>(native_source)));
return jlongFromPointer(track.release());
}
JNI_FUNCTION_DECLARATION(
void,
PeerConnectionFactory_setNativeVideoHwAccelerationOptions,
JNIEnv* jni,
jclass,
jlong native_factory,
jobject local_egl_context,
jobject remote_egl_context) {
OwnedFactoryAndThreads* owned_factory =
reinterpret_cast<OwnedFactoryAndThreads*>(native_factory);
SetEglContext(jni, owned_factory->legacy_encoder_factory(),
local_egl_context);
SetEglContext(jni, owned_factory->legacy_decoder_factory(),
remote_egl_context);
}
} // namespace jni
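
This file also swaps the (jlong)pc.release() C-style cast for jlongFromPointer() from jni_helpers.h. A plausible shape for such a helper, shown as an assumption for illustration rather than the actual WebRTC definition:

#include <jni.h>

#include <cstdint>

// Hypothetical sketch of a jlongFromPointer-style helper. Going through
// intptr_t with a static_assert catches platforms where a pointer would
// not fit in a jlong, which a bare (jlong) cast would silently hide.
inline jlong jlongFromPointerSketch(const void* ptr) {
  static_assert(sizeof(std::intptr_t) <= sizeof(jlong),
                "jlong must be able to hold a pointer");
  return static_cast<jlong>(reinterpret_cast<std::intptr_t>(ptr));
}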


@@ -8,13 +8,24 @@
* be found in the AUTHORS file in the root of the source tree.
*/
#include "api/rtpsenderinterface.h"
#include "sdk/android/src/jni/pc/rtpsender.h"
#include "sdk/android/generated_peerconnection_jni/jni/RtpSender_jni.h"
#include "sdk/android/src/jni/jni_helpers.h"
#include "sdk/android/src/jni/pc/rtpparameters.h"
namespace webrtc {
namespace jni {
jobject NativeToJavaRtpSender(JNIEnv* env,
rtc::scoped_refptr<RtpSenderInterface> sender) {
if (!sender)
return nullptr;
// Sender is now owned by the Java object, and will be freed from
// RtpSender.dispose(), called by PeerConnection.dispose() or getSenders().
return Java_RtpSender_Constructor(env, jlongFromPointer(sender.release()));
}
JNI_FUNCTION_DECLARATION(jboolean,
RtpSender_setNativeTrack,
JNIEnv* jni,


@@ -0,0 +1,27 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef SDK_ANDROID_SRC_JNI_PC_RTPSENDER_H_
#define SDK_ANDROID_SRC_JNI_PC_RTPSENDER_H_
#include <jni.h>
#include "api/rtpsenderinterface.h"
namespace webrtc {
namespace jni {
jobject NativeToJavaRtpSender(JNIEnv* env,
rtc::scoped_refptr<RtpSenderInterface> sender);
} // namespace jni
} // namespace webrtc
#endif // SDK_ANDROID_SRC_JNI_PC_RTPSENDER_H_


@@ -9,11 +9,20 @@
*/
#include "api/turncustomizer.h"
#include "sdk/android/generated_peerconnection_jni/jni/TurnCustomizer_jni.h"
#include "sdk/android/src/jni/jni_helpers.h"
namespace webrtc {
namespace jni {
TurnCustomizer* GetNativeTurnCustomizer(JNIEnv* env,
jobject j_turn_customizer) {
if (IsNull(env, j_turn_customizer))
return nullptr;
return reinterpret_cast<webrtc::TurnCustomizer*>(
Java_TurnCustomizer_getNativeTurnCustomizer(env, j_turn_customizer));
}
JNI_FUNCTION_DECLARATION(void,
TurnCustomizer_freeNativeTurnCustomizer,
JNIEnv* jni,


@@ -0,0 +1,24 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef SDK_ANDROID_SRC_JNI_PC_TURNCUSTOMIZER_H_
#define SDK_ANDROID_SRC_JNI_PC_TURNCUSTOMIZER_H_
#include "api/turncustomizer.h"
namespace webrtc {
namespace jni {
TurnCustomizer* GetNativeTurnCustomizer(JNIEnv* env, jobject j_turn_customizer);
} // namespace jni
} // namespace webrtc
#endif // SDK_ANDROID_SRC_JNI_PC_TURNCUSTOMIZER_H_


@@ -42,6 +42,44 @@ VideoDecoderFactory* CreateVideoDecoderFactory(JNIEnv* jni,
return new VideoDecoderFactoryWrapper(jni, j_decoder_factory);
}
void SetEglContext(JNIEnv* env,
cricket::WebRtcVideoEncoderFactory* encoder_factory,
jobject egl_context) {
if (encoder_factory) {
MediaCodecVideoEncoderFactory* media_codec_factory =
static_cast<MediaCodecVideoEncoderFactory*>(encoder_factory);
if (media_codec_factory && Java_Context_isEgl14Context(env, egl_context)) {
RTC_LOG(LS_INFO) << "Set EGL context for HW encoding.";
media_codec_factory->SetEGLContext(env, egl_context);
}
}
}
void SetEglContext(JNIEnv* env,
cricket::WebRtcVideoDecoderFactory* decoder_factory,
jobject egl_context) {
if (decoder_factory) {
MediaCodecVideoDecoderFactory* media_codec_factory =
static_cast<MediaCodecVideoDecoderFactory*>(decoder_factory);
if (media_codec_factory) {
RTC_LOG(LS_INFO) << "Set EGL context for HW decoding.";
media_codec_factory->SetEGLContext(env, egl_context);
}
}
}
void* CreateVideoSource(JNIEnv* env,
rtc::Thread* signaling_thread,
rtc::Thread* worker_thread,
jobject j_surface_texture_helper,
jboolean is_screencast) {
rtc::scoped_refptr<AndroidVideoTrackSource> source(
new rtc::RefCountedObject<AndroidVideoTrackSource>(
signaling_thread, env, j_surface_texture_helper, is_screencast));
return VideoTrackSourceProxy::Create(signaling_thread, worker_thread, source)
.release();
}
cricket::WebRtcVideoEncoderFactory* CreateLegacyVideoEncoderFactory() {
return new MediaCodecVideoEncoderFactory();
}
@@ -66,81 +104,5 @@ VideoDecoderFactory* WrapLegacyVideoDecoderFactory(
.release();
}
jobject GetJavaSurfaceTextureHelper(
const rtc::scoped_refptr<SurfaceTextureHelper>& surface_texture_helper) {
return surface_texture_helper
? surface_texture_helper->GetJavaSurfaceTextureHelper()
: nullptr;
}
JNI_FUNCTION_DECLARATION(jlong,
PeerConnectionFactory_createNativeVideoSource,
JNIEnv* jni,
jclass,
jlong native_factory,
jobject j_surface_texture_helper,
jboolean is_screencast) {
OwnedFactoryAndThreads* factory =
reinterpret_cast<OwnedFactoryAndThreads*>(native_factory);
rtc::scoped_refptr<AndroidVideoTrackSource> source(
new rtc::RefCountedObject<AndroidVideoTrackSource>(
factory->signaling_thread(), jni, j_surface_texture_helper,
is_screencast));
rtc::scoped_refptr<VideoTrackSourceProxy> proxy_source =
VideoTrackSourceProxy::Create(factory->signaling_thread(),
factory->worker_thread(), source);
return (jlong)proxy_source.release();
}
JNI_FUNCTION_DECLARATION(jlong,
PeerConnectionFactory_createNativeVideoTrack,
JNIEnv* jni,
jclass,
jlong native_factory,
jstring id,
jlong native_source) {
rtc::scoped_refptr<PeerConnectionFactoryInterface> factory(
factoryFromJava(native_factory));
rtc::scoped_refptr<VideoTrackInterface> track(factory->CreateVideoTrack(
JavaToStdString(jni, id),
reinterpret_cast<VideoTrackSourceInterface*>(native_source)));
return (jlong)track.release();
}
JNI_FUNCTION_DECLARATION(
void,
PeerConnectionFactory_setNativeVideoHwAccelerationOptions,
JNIEnv* jni,
jclass,
jlong native_factory,
jobject local_egl_context,
jobject remote_egl_context) {
OwnedFactoryAndThreads* owned_factory =
reinterpret_cast<OwnedFactoryAndThreads*>(native_factory);
if (owned_factory->legacy_encoder_factory()) {
MediaCodecVideoEncoderFactory* encoder_factory =
static_cast<MediaCodecVideoEncoderFactory*>(
owned_factory->legacy_encoder_factory());
if (encoder_factory &&
Java_Context_isEgl14Context(jni, local_egl_context)) {
RTC_LOG(LS_INFO) << "Set EGL context for HW encoding.";
encoder_factory->SetEGLContext(jni, local_egl_context);
}
}
if (owned_factory->legacy_decoder_factory()) {
MediaCodecVideoDecoderFactory* decoder_factory =
static_cast<MediaCodecVideoDecoderFactory*>(
owned_factory->legacy_decoder_factory());
if (decoder_factory) {
RTC_LOG(LS_INFO) << "Set EGL context for HW decoding.";
decoder_factory->SetEGLContext(jni, remote_egl_context);
}
}
}
} // namespace jni
} // namespace webrtc


@@ -14,6 +14,7 @@
#include <jni.h>
#include "rtc_base/scoped_ref_ptr.h"
#include "rtc_base/thread.h"
namespace cricket {
class WebRtcVideoEncoderFactory;
@@ -36,6 +37,19 @@ VideoEncoderFactory* CreateVideoEncoderFactory(JNIEnv* jni,
VideoDecoderFactory* CreateVideoDecoderFactory(JNIEnv* jni,
jobject j_decoder_factory);
void SetEglContext(JNIEnv* env,
cricket::WebRtcVideoEncoderFactory* encoder_factory,
jobject egl_context);
void SetEglContext(JNIEnv* env,
cricket::WebRtcVideoDecoderFactory* decoder_factory,
jobject egl_context);
void* CreateVideoSource(JNIEnv* env,
rtc::Thread* signaling_thread,
rtc::Thread* worker_thread,
jobject j_surface_texture_helper,
jboolean is_screencast);
cricket::WebRtcVideoEncoderFactory* CreateLegacyVideoEncoderFactory();
cricket::WebRtcVideoDecoderFactory* CreateLegacyVideoDecoderFactory();
@@ -44,9 +58,6 @@ VideoEncoderFactory* WrapLegacyVideoEncoderFactory(
VideoDecoderFactory* WrapLegacyVideoDecoderFactory(
cricket::WebRtcVideoDecoderFactory* legacy_decoder_factory);
jobject GetJavaSurfaceTextureHelper(
const rtc::scoped_refptr<SurfaceTextureHelper>& surface_texture_helper);
} // namespace jni
} // namespace webrtc


@@ -13,6 +13,7 @@
#include "rtc_base/bind.h"
#include "rtc_base/logging.h"
#include "sdk/android/generated_video_jni/jni/SurfaceTextureHelper_jni.h"
#include "sdk/android/src/jni/videoframe.h"
namespace webrtc {
namespace jni {
@@ -34,7 +35,7 @@ rtc::scoped_refptr<SurfaceTextureHelper> SurfaceTextureHelper::create(
const char* thread_name,
jobject j_egl_context) {
jobject j_surface_texture_helper = Java_SurfaceTextureHelper_create(
jni, jni->NewStringUTF(thread_name), j_egl_context);
jni, NativeToJavaString(jni, thread_name), j_egl_context);
CHECK_EXCEPTION(jni)
<< "error during initialization of Java SurfaceTextureHelper";
if (IsNull(jni, j_surface_texture_helper))
@@ -66,9 +67,8 @@ rtc::scoped_refptr<VideoFrameBuffer> SurfaceTextureHelper::CreateTextureFrame(
int width,
int height,
const NativeHandleImpl& native_handle) {
return new rtc::RefCountedObject<AndroidTextureBuffer>(
width, height, native_handle, *j_surface_texture_helper_,
rtc::Bind(&SurfaceTextureHelper::ReturnTextureFrame, this));
return new rtc::RefCountedObject<AndroidTextureBuffer>(width, height,
native_handle, this);
}
} // namespace jni
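
The raw jni->NewStringUTF(thread_name) call above becomes NativeToJavaString(jni, thread_name). The helper's exact definition lives in jni_helpers.h; a hedged sketch of what such a wrapper typically adds:

#include <jni.h>

// Hypothetical sketch of a NativeToJavaString-style wrapper: the same
// conversion, but with a defined result for null input. (NewStringUTF
// also expects modified UTF-8, so centralizing the call keeps that
// caveat in one place.)
inline jstring NativeToJavaStringSketch(JNIEnv* env, const char* str) {
  return str ? env->NewStringUTF(str) : nullptr;
}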


@@ -17,11 +17,12 @@
#include "rtc_base/refcount.h"
#include "rtc_base/scoped_ref_ptr.h"
#include "sdk/android/src/jni/jni_helpers.h"
#include "sdk/android/src/jni/videoframe.h"
namespace webrtc {
namespace jni {
struct NativeHandleImpl;
// Helper class to create and synchronize access to an Android SurfaceTexture.
// It is used for creating VideoFrameBuffers from a SurfaceTexture when
// the SurfaceTexture has been updated.


@@ -34,8 +34,6 @@ static const int kMaxJavaEncoderResets = 3;
VideoEncoderWrapper::VideoEncoderWrapper(JNIEnv* jni, jobject j_encoder)
: encoder_(jni, j_encoder),
frame_type_class_(jni,
GetClass(jni, "org/webrtc/EncodedImage$FrameType")),
int_array_class_(jni, jni->FindClass("[I")) {
implementation_name_ = GetImplementationName(jni);


@@ -89,7 +89,6 @@ class VideoEncoderWrapper : public VideoEncoder {
std::string GetImplementationName(JNIEnv* jni) const;
const ScopedGlobalRef<jobject> encoder_;
const ScopedGlobalRef<jclass> frame_type_class_;
const ScopedGlobalRef<jclass> int_array_class_;
std::string implementation_name_;


@@ -210,16 +210,14 @@ AndroidTextureBuffer::AndroidTextureBuffer(
int width,
int height,
const NativeHandleImpl& native_handle,
jobject surface_texture_helper,
const rtc::Callback0<void>& no_longer_used)
const rtc::scoped_refptr<SurfaceTextureHelper>& surface_texture_helper)
: width_(width),
height_(height),
native_handle_(native_handle),
surface_texture_helper_(surface_texture_helper),
no_longer_used_cb_(no_longer_used) {}
surface_texture_helper_(surface_texture_helper) {}
AndroidTextureBuffer::~AndroidTextureBuffer() {
no_longer_used_cb_();
surface_texture_helper_->ReturnTextureFrame();
}
VideoFrameBuffer::Type AndroidTextureBuffer::type() const {
@@ -269,9 +267,9 @@ rtc::scoped_refptr<I420BufferInterface> AndroidTextureBuffer::ToI420() {
// TODO(sakal): This call to a deprecated method will be removed when
// AndroidTextureBuffer is removed.
jobject byte_buffer = jni->NewDirectByteBuffer(y_data, size);
SurfaceTextureHelperTextureToYUV(jni, surface_texture_helper_, byte_buffer,
width(), height(), stride, native_handle_);
SurfaceTextureHelperTextureToYUV(
jni, surface_texture_helper_->GetJavaSurfaceTextureHelper(), byte_buffer,
width(), height(), stride, native_handle_);
return copy;
}
@@ -364,7 +362,7 @@ static bool IsJavaVideoBuffer(rtc::scoped_refptr<VideoFrameBuffer> buffer) {
jobject NativeToJavaFrame(JNIEnv* jni, const VideoFrame& frame) {
rtc::scoped_refptr<VideoFrameBuffer> buffer = frame.video_frame_buffer();
jobject j_buffer;
if (IsJavaVideoBuffer(buffer)) {
RTC_DCHECK(buffer->type() == VideoFrameBuffer::Type::kNative);
AndroidVideoFrameBuffer* android_buffer =
@@ -373,13 +371,19 @@ jobject NativeToJavaFrame(JNIEnv* jni, const VideoFrame& frame) {
AndroidVideoFrameBuffer::AndroidType::kJavaBuffer);
AndroidVideoBuffer* android_video_buffer =
static_cast<AndroidVideoBuffer*>(android_buffer);
j_buffer = android_video_buffer->video_frame_buffer();
return Java_VideoFrame_Constructor(
jni, android_video_buffer->video_frame_buffer(),
static_cast<jint>(frame.rotation()),
static_cast<jlong>(frame.timestamp_us() *
rtc::kNumNanosecsPerMicrosec));
} else {
j_buffer = WrapI420Buffer(jni, buffer->ToI420());
return Java_VideoFrame_Constructor(
jni, WrapI420Buffer(jni, buffer->ToI420()),
static_cast<jint>(frame.rotation()),
static_cast<jlong>(frame.timestamp_us() *
rtc::kNumNanosecsPerMicrosec));
}
return Java_VideoFrame_Constructor(
jni, j_buffer, static_cast<jint>(frame.rotation()),
static_cast<jlong>(frame.timestamp_us() * rtc::kNumNanosecsPerMicrosec));
}
extern "C" JNIEXPORT void JNICALL


@@ -22,6 +22,8 @@
namespace webrtc {
namespace jni {
class SurfaceTextureHelper;
// Open gl texture matrix, in column-major order. Operations are
// in-place.
class Matrix {
@@ -72,11 +74,11 @@ class AndroidVideoFrameBuffer : public VideoFrameBuffer {
class AndroidTextureBuffer : public AndroidVideoFrameBuffer {
public:
AndroidTextureBuffer(int width,
int height,
const NativeHandleImpl& native_handle,
jobject surface_texture_helper,
const rtc::Callback0<void>& no_longer_used);
AndroidTextureBuffer(
int width,
int height,
const NativeHandleImpl& native_handle,
const rtc::scoped_refptr<SurfaceTextureHelper>& surface_texture_helper);
~AndroidTextureBuffer();
NativeHandleImpl native_handle_impl() const;
@@ -93,13 +95,7 @@ class AndroidTextureBuffer : public AndroidVideoFrameBuffer {
const int width_;
const int height_;
NativeHandleImpl native_handle_;
// Raw object pointer, relying on the caller, i.e.,
// AndroidVideoCapturerJni or the C++ SurfaceTextureHelper, to keep
// a global reference. TODO(nisse): Make this a reference to the C++
// SurfaceTextureHelper instead, but that requires some refactoring
// of AndroidVideoCapturerJni.
jobject surface_texture_helper_;
rtc::Callback0<void> no_longer_used_cb_;
rtc::scoped_refptr<SurfaceTextureHelper> surface_texture_helper_;
};
class AndroidVideoBuffer : public AndroidVideoFrameBuffer {


@@ -22,11 +22,8 @@ namespace jni {
std::unique_ptr<VideoDecoder> JavaToNativeVideoDecoder(JNIEnv* jni,
jobject j_decoder) {
jclass wrapped_native_decoder_class =
GetClass(jni, "org/webrtc/WrappedNativeVideoDecoder");
VideoDecoder* decoder;
if (jni->IsInstanceOf(j_decoder, wrapped_native_decoder_class)) {
if (Java_WrappedNativeVideoDecoder_isInstanceOf(jni, j_decoder)) {
jlong native_decoder =
Java_WrappedNativeVideoDecoder_createNativeDecoder(jni, j_decoder);
decoder = reinterpret_cast<VideoDecoder*>(native_decoder);