Android: Modular WebRTC follow-up

This CL cleans up parts from https://codereview.webrtc.org/2939203002/.

Bug: webrtc:7613
Change-Id: I96d1a2cc91174f43d3cae2cb41b2e0fe7142e3e9
Reviewed-on: https://chromium-review.googlesource.com/539456
Reviewed-by: Sami Kalliomäki <sakal@webrtc.org>
Commit-Queue: Magnus Jedvert <magjed@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#18665}
Author: Magnus Jedvert
Date: 2017-06-19 17:09:55 +02:00
Committed by: Commit Bot
Parent: 42308f615c
Commit: 3352ce92f9
14 changed files with 297 additions and 237 deletions

View File

@@ -24,7 +24,7 @@ config("libjingle_peerconnection_jni_warnings_config") {
 rtc_source_set("base_jni") {
   sources = [
-    "src/jni/androidmediacodeccommon.h",
+    "src/jni/androidhistogram_jni.cc",
     "src/jni/audio_jni.h",
     "src/jni/classreferenceholder.cc",
     "src/jni/classreferenceholder.h",
@@ -40,6 +40,7 @@ rtc_source_set("base_jni") {
     "//webrtc/api:libjingle_peerconnection_api",
     "//webrtc/base:rtc_base",
     "//webrtc/base:rtc_base_approved",
+    "//webrtc/system_wrappers:metrics_api",
   ]

   if (is_clang) {
@@ -71,15 +72,12 @@ rtc_static_library("null_audio_jni") {
   deps = [
     ":base_jni",
-    "//webrtc/api:libjingle_peerconnection_api",
-    "//webrtc/base:rtc_base",
-    "//webrtc/base:rtc_base_approved",
   ]
 }

 rtc_static_library("video_jni") {
   sources = [
-    "src/jni/androidhistogram_jni.cc",
+    "src/jni/androidmediacodeccommon.h",
     "src/jni/androidmediadecoder_jni.cc",
     "src/jni/androidmediadecoder_jni.h",
     "src/jni/androidmediaencoder_jni.cc",
@@ -87,12 +85,15 @@ rtc_static_library("video_jni") {
     "src/jni/androidvideotracksource.cc",
     "src/jni/androidvideotracksource.h",
     "src/jni/androidvideotracksource_jni.cc",
+    "src/jni/filevideocapturer_jni.cc",
     "src/jni/native_handle_impl.cc",
     "src/jni/native_handle_impl.h",
     "src/jni/surfacetexturehelper_jni.cc",
     "src/jni/surfacetexturehelper_jni.h",
     "src/jni/video_jni.cc",
     "src/jni/video_renderer_jni.cc",
+    "src/jni/videofilerenderer_jni.cc",
+    "src/jni/videotrack_jni.cc",
     "src/jni/wrapped_native_i420_buffer.cc",
     "src/jni/wrapped_native_i420_buffer.h",
   ]
@@ -154,16 +155,7 @@ rtc_static_library("null_video_jni") {
   deps = [
     ":base_jni",
-    "//webrtc/base:rtc_base_approved",
   ]
-
-  if (is_clang) {
-    # Suppress warnings from the Chromium Clang plugin (bugs.webrtc.org/163).
-    suppressed_configs += [
-      "//build/config/clang:extra_warnings",
-      "//build/config/clang:find_bad_constructs",
-    ]
-  }
 }

 rtc_static_library("media_jni") {
@@ -173,9 +165,8 @@ rtc_static_library("media_jni") {
   deps = [
     ":base_jni",
-    "//webrtc/api:libjingle_peerconnection_api",
-    "//webrtc/api/audio_codecs:audio_codecs_api",
-    "//webrtc/base:rtc_base_approved",
+    "//webrtc/call:call_interfaces",
+    "//webrtc/logging:rtc_event_log_api",
     "//webrtc/media:rtc_audio_video",
   ]
@@ -195,20 +186,7 @@ rtc_static_library("null_media_jni") {
   deps = [
     ":base_jni",
-    "//webrtc/api:libjingle_peerconnection_api",
-    "//webrtc/base:rtc_base",
-    "//webrtc/base:rtc_base_approved",
-    "//webrtc/call:call_interfaces",
-    "//webrtc/logging:rtc_event_log_api",
   ]
-
-  if (is_clang) {
-    # Suppress warnings from the Chromium Clang plugin (bugs.webrtc.org/163).
-    suppressed_configs += [
-      "//build/config/clang:extra_warnings",
-      "//build/config/clang:find_bad_constructs",
-    ]
-  }
 }

 rtc_static_library("peerconnection_jni") {

View File

@@ -13,7 +13,6 @@
 #include "webrtc/sdk/android/src/jni/classreferenceholder.h"
 #include "webrtc/sdk/android/src/jni/jni_helpers.h"
-#include "webrtc/sdk/android/src/jni/native_handle_impl.h"
 #include "webrtc/system_wrappers/include/metrics.h"

 // Enables collection of native histograms and creating them.

View File

@@ -0,0 +1,60 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include <jni.h>
#include "third_party/libyuv/include/libyuv/convert_from.h"
#include "webrtc/base/checks.h"
#include "webrtc/base/logging.h"
namespace webrtc_jni {
extern "C" JNIEXPORT void JNICALL
Java_org_webrtc_FileVideoCapturer_nativeI420ToNV21(JNIEnv* jni,
jclass,
jbyteArray j_src_buffer,
jint width,
jint height,
jbyteArray j_dst_buffer) {
size_t src_size = jni->GetArrayLength(j_src_buffer);
size_t dst_size = jni->GetArrayLength(j_dst_buffer);
int src_stride = width;
int dst_stride = width;
RTC_CHECK_GE(src_size, src_stride * height * 3 / 2);
RTC_CHECK_GE(dst_size, dst_stride * height * 3 / 2);
jbyte* src_bytes = jni->GetByteArrayElements(j_src_buffer, 0);
uint8_t* src = reinterpret_cast<uint8_t*>(src_bytes);
jbyte* dst_bytes = jni->GetByteArrayElements(j_dst_buffer, 0);
uint8_t* dst = reinterpret_cast<uint8_t*>(dst_bytes);
uint8_t* src_y = src;
size_t src_stride_y = src_stride;
uint8_t* src_u = src + src_stride * height;
size_t src_stride_u = src_stride / 2;
uint8_t* src_v = src + src_stride * height * 5 / 4;
size_t src_stride_v = src_stride / 2;
uint8_t* dst_y = dst;
size_t dst_stride_y = dst_stride;
size_t dst_stride_uv = dst_stride;
uint8_t* dst_uv = dst + dst_stride * height;
int ret = libyuv::I420ToNV21(src_y, src_stride_y, src_u, src_stride_u, src_v,
src_stride_v, dst_y, dst_stride_y, dst_uv,
dst_stride_uv, width, height);
jni->ReleaseByteArrayElements(j_src_buffer, src_bytes, 0);
jni->ReleaseByteArrayElements(j_dst_buffer, dst_bytes, 0);
if (ret) {
LOG(LS_ERROR) << "Error converting I420 frame to NV21: " << ret;
}
}
} // namespace webrtc_jni
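
The stride arithmetic above is easier to follow with the plane offsets written out. The sketch below is illustrative only and not part of the CL; the helper names (I420UOffset and friends) are invented here, and it simply restates the offsets the conversion assumes for tightly packed buffers where stride == width.

// Illustrative sketch (not part of this CL): plane offsets assumed by
// nativeI420ToNV21 above for tightly packed buffers (stride == width).
#include <cstddef>

// I420: planar Y (w*h bytes), then U (w/2 x h/2), then V (w/2 x h/2).
size_t I420UOffset(size_t w, size_t h) { return w * h; }          // src_u
size_t I420VOffset(size_t w, size_t h) { return w * h * 5 / 4; }  // src_v

// NV21: planar Y (w*h bytes), then a single interleaved VU plane, which is
// why the conversion only needs one dst_uv pointer.
size_t Nv21VUOffset(size_t w, size_t h) { return w * h; }         // dst_uv

// Both layouts occupy w * h * 3 / 2 bytes, matching the RTC_CHECK_GE calls.
size_t I420OrNv21Size(size_t w, size_t h) { return w * h * 3 / 2; }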

View File

@@ -9,27 +9,32 @@
  */

 #include "webrtc/sdk/android/src/jni/media_jni.h"

-#include "webrtc/api/audio_codecs/audio_decoder_factory.h"
-#include "webrtc/api/audio_codecs/audio_encoder_factory.h"
-#include "webrtc/media/engine/webrtcvideodecoderfactory.h"
-#include "webrtc/media/engine/webrtcvideoencoderfactory.h"
+#include "webrtc/call/callfactoryinterface.h"
+#include "webrtc/logging/rtc_event_log/rtc_event_log_factory_interface.h"
+#include "webrtc/media/engine/webrtcmediaengine.h"

 namespace webrtc_jni {

-rtc::scoped_refptr<webrtc::PeerConnectionFactoryInterface>
-CreateNativePeerConnectionFactory(
-    rtc::Thread* network_thread,
-    rtc::Thread* worker_thread,
-    rtc::Thread* signaling_thread,
-    webrtc::AudioDeviceModule* default_adm,
-    rtc::scoped_refptr<webrtc::AudioEncoderFactory> audio_encoder_factory,
-    rtc::scoped_refptr<webrtc::AudioDecoderFactory> audio_decoder_factory,
+webrtc::CallFactoryInterface* CreateCallFactory() {
+  return webrtc::CreateCallFactory().release();
+}
+
+webrtc::RtcEventLogFactoryInterface* CreateRtcEventLogFactory() {
+  return webrtc::CreateRtcEventLogFactory().release();
+}
+
+cricket::MediaEngineInterface* CreateMediaEngine(
+    webrtc::AudioDeviceModule* adm,
+    const rtc::scoped_refptr<webrtc::AudioEncoderFactory>&
+        audio_encoder_factory,
+    const rtc::scoped_refptr<webrtc::AudioDecoderFactory>&
+        audio_decoder_factory,
     cricket::WebRtcVideoEncoderFactory* video_encoder_factory,
-    cricket::WebRtcVideoDecoderFactory* video_decoder_factory) {
-  return webrtc::CreatePeerConnectionFactory(
-      network_thread, worker_thread, signaling_thread, default_adm,
-      audio_encoder_factory, audio_decoder_factory, video_encoder_factory,
-      video_decoder_factory);
+    cricket::WebRtcVideoDecoderFactory* video_decoder_factory,
+    rtc::scoped_refptr<webrtc::AudioMixer> audio_mixer) {
+  return cricket::WebRtcMediaEngineFactory::Create(
+      adm, audio_encoder_factory, audio_decoder_factory, video_encoder_factory,
+      video_decoder_factory, audio_mixer);
 }

 } // namespace webrtc_jni

View File

@@ -11,22 +11,37 @@
 #ifndef WEBRTC_SDK_ANDROID_SRC_JNI_MEDIA_JNI_H_
 #define WEBRTC_SDK_ANDROID_SRC_JNI_MEDIA_JNI_H_

-#include "webrtc/api/peerconnectioninterface.h"
 #include "webrtc/base/scoped_ref_ptr.h"
-#include "webrtc/base/thread.h"
+
+namespace webrtc {
+class AudioDeviceModule;
+class CallFactoryInterface;
+class AudioEncoderFactory;
+class AudioDecoderFactory;
+class RtcEventLogFactoryInterface;
+class AudioMixer;
+} // namespace webrtc
+
+namespace cricket {
+class MediaEngineInterface;
+class WebRtcVideoEncoderFactory;
+class WebRtcVideoDecoderFactory;
+} // namespace cricket

 namespace webrtc_jni {

-rtc::scoped_refptr<webrtc::PeerConnectionFactoryInterface>
-CreateNativePeerConnectionFactory(
-    rtc::Thread* network_thread,
-    rtc::Thread* worker_thread,
-    rtc::Thread* signaling_thread,
-    webrtc::AudioDeviceModule* default_adm,
-    rtc::scoped_refptr<webrtc::AudioEncoderFactory> audio_encoder_factory,
-    rtc::scoped_refptr<webrtc::AudioDecoderFactory> audio_decoder_factory,
+webrtc::CallFactoryInterface* CreateCallFactory();
+webrtc::RtcEventLogFactoryInterface* CreateRtcEventLogFactory();
+
+cricket::MediaEngineInterface* CreateMediaEngine(
+    webrtc::AudioDeviceModule* adm,
+    const rtc::scoped_refptr<webrtc::AudioEncoderFactory>&
+        audio_encoder_factory,
+    const rtc::scoped_refptr<webrtc::AudioDecoderFactory>&
+        audio_decoder_factory,
     cricket::WebRtcVideoEncoderFactory* video_encoder_factory,
-    cricket::WebRtcVideoDecoderFactory* video_decoder_factory);
+    cricket::WebRtcVideoDecoderFactory* video_decoder_factory,
+    rtc::scoped_refptr<webrtc::AudioMixer> audio_mixer);

 } // namespace webrtc_jni
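
For orientation, the sketch below condenses how the three factory functions declared in this header are consumed when the peer connection factory is built. It is not part of the CL: the function name BuildModularFactorySketch is invented here, and the parameters are assumed to be set up by the caller exactly as in the peerconnection_jni.cc hunk further down, which is the real call site.

// Condensed sketch (not part of this CL) of wiring the media_jni.h factories
// into CreateModularPeerConnectionFactory(); see the peerconnection_jni.cc
// hunk below for the actual code.
#include <memory>
#include <utility>

#include "webrtc/api/peerconnectioninterface.h"
#include "webrtc/sdk/android/src/jni/media_jni.h"

namespace webrtc_jni {

rtc::scoped_refptr<webrtc::PeerConnectionFactoryInterface>
BuildModularFactorySketch(
    rtc::Thread* network_thread,
    rtc::Thread* worker_thread,
    rtc::Thread* signaling_thread,
    rtc::scoped_refptr<webrtc::AudioEncoderFactory> audio_encoder_factory,
    rtc::scoped_refptr<webrtc::AudioDecoderFactory> audio_decoder_factory,
    cricket::WebRtcVideoEncoderFactory* video_encoder_factory,
    cricket::WebRtcVideoDecoderFactory* video_decoder_factory) {
  webrtc::AudioDeviceModule* adm = nullptr;                       // Default ADM.
  rtc::scoped_refptr<webrtc::AudioMixer> audio_mixer = nullptr;   // Default mixer.

  // The null_*_jni.cc variants of these functions return nullptr; the real
  // variants return working implementations. Which one is linked in is what
  // makes the build modular.
  std::unique_ptr<cricket::MediaEngineInterface> media_engine(CreateMediaEngine(
      adm, audio_encoder_factory, audio_decoder_factory, video_encoder_factory,
      video_decoder_factory, audio_mixer));
  std::unique_ptr<webrtc::CallFactoryInterface> call_factory(CreateCallFactory());
  std::unique_ptr<webrtc::RtcEventLogFactoryInterface> event_log_factory(
      CreateRtcEventLogFactory());

  return webrtc::CreateModularPeerConnectionFactory(
      network_thread, worker_thread, signaling_thread, adm,
      audio_encoder_factory, audio_decoder_factory, video_encoder_factory,
      video_decoder_factory, audio_mixer, std::move(media_engine),
      std::move(call_factory), std::move(event_log_factory));
}

} // namespace webrtc_jni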

View File

@@ -12,11 +12,11 @@
 namespace webrtc_jni {

 rtc::scoped_refptr<webrtc::AudioDecoderFactory> CreateAudioDecoderFactory() {
-  return rtc::scoped_refptr<webrtc::AudioDecoderFactory>();
+  return nullptr;
 }

 rtc::scoped_refptr<webrtc::AudioEncoderFactory> CreateAudioEncoderFactory() {
-  return rtc::scoped_refptr<webrtc::AudioEncoderFactory>();
+  return nullptr;
 }

 } // namespace webrtc_jni

View File

@@ -10,34 +10,26 @@
 #include "webrtc/sdk/android/src/jni/media_jni.h"

-#include "webrtc/api/audio_codecs/audio_decoder_factory.h" // nogncheck
-#include "webrtc/api/audio_codecs/audio_encoder_factory.h" // nogncheck
-#include "webrtc/media/engine/webrtcvideodecoderfactory.h" // nogncheck
-#include "webrtc/media/engine/webrtcvideoencoderfactory.h" // nogncheck
+#include "webrtc/call/callfactoryinterface.h"
+#include "webrtc/logging/rtc_event_log/rtc_event_log_factory_interface.h"

 namespace webrtc_jni {

-// This implementation is used for building WebRTC without audio and video
-// support.
-rtc::scoped_refptr<webrtc::PeerConnectionFactoryInterface>
-CreateNativePeerConnectionFactory(
-    rtc::Thread* network_thread,
-    rtc::Thread* worker_thread,
-    rtc::Thread* signaling_thread,
-    webrtc::AudioDeviceModule* default_adm,
-    rtc::scoped_refptr<webrtc::AudioEncoderFactory> audio_encoder_factory,
-    rtc::scoped_refptr<webrtc::AudioDecoderFactory> audio_decoder_factory,
+webrtc::CallFactoryInterface* CreateCallFactory() {
+  return nullptr;
+}
+
+webrtc::RtcEventLogFactoryInterface* CreateRtcEventLogFactory() {
+  return nullptr;
+}
+
+cricket::MediaEngineInterface* CreateMediaEngine(
+    webrtc::AudioDeviceModule* adm,
+    const rtc::scoped_refptr<webrtc::AudioEncoderFactory>&
+        audio_encoder_factory,
+    const rtc::scoped_refptr<webrtc::AudioDecoderFactory>&
+        audio_decoder_factory,
     cricket::WebRtcVideoEncoderFactory* video_encoder_factory,
-    cricket::WebRtcVideoDecoderFactory* video_decoder_factory) {
-  return CreateModularPeerConnectionFactory(
-      network_thread, worker_thread, signaling_thread, default_adm,
-      audio_encoder_factory, audio_decoder_factory, video_encoder_factory,
-      video_decoder_factory, nullptr /*audio_mixer*/,
-      std::unique_ptr<cricket::MediaEngineInterface>(),
-      std::unique_ptr<webrtc::CallFactoryInterface>(),
-      std::unique_ptr<webrtc::RtcEventLogFactoryInterface>());
+    cricket::WebRtcVideoDecoderFactory* video_decoder_factory,
+    rtc::scoped_refptr<webrtc::AudioMixer> audio_mixer) {
+  return nullptr;
 }

 } // namespace webrtc_jni

View File

@@ -8,22 +8,10 @@
  * be found in the AUTHORS file in the root of the source tree.
  */

-#include <jni.h>
-#include "webrtc/base/scoped_ref_ptr.h"
-#include "webrtc/sdk/android/src/jni/classreferenceholder.h"
-
-namespace cricket {
-class WebRtcVideoEncoderFactory;
-class WebRtcVideoDecoderFactory;
-} // namespace cricket
+#include "webrtc/sdk/android/src/jni/video_jni.h"

 namespace webrtc_jni {

-class MediaCodecVideoEncoderFactory;
-class MediaCodecVideoDecoderFactory;
-class SurfaceTextureHelper;
-
 cricket::WebRtcVideoEncoderFactory* CreateVideoEncoderFactory() {
   return nullptr;
 }
@@ -33,7 +21,7 @@ cricket::WebRtcVideoDecoderFactory* CreateVideoDecoderFactory() {
 }

 jobject GetJavaSurfaceTextureHelper(
-    rtc::scoped_refptr<SurfaceTextureHelper> surface_texture_helper) {
+    const rtc::scoped_refptr<SurfaceTextureHelper>& surface_texture_helper) {
   return nullptr;
 }

View File

@@ -43,8 +43,6 @@
 #include <memory>
 #include <utility>

-#include "third_party/libyuv/include/libyuv/convert_from.h"
-#include "third_party/libyuv/include/libyuv/scale.h"
 #include "webrtc/api/mediaconstraintsinterface.h"
 #include "webrtc/api/peerconnectioninterface.h"
 #include "webrtc/api/rtpreceiverinterface.h"
@@ -60,6 +58,7 @@
 #include "webrtc/base/rtccertificategenerator.h"
 #include "webrtc/base/ssladapter.h"
 #include "webrtc/base/stringutils.h"
+#include "webrtc/media/base/mediaengine.h"
 #include "webrtc/media/base/videocapturer.h"
 #include "webrtc/modules/utility/include/jvm_android.h"
 #include "webrtc/pc/webrtcsdp.h"
@@ -1168,11 +1167,23 @@ JOW(jlong, PeerConnectionFactory_nativeCreatePeerConnectionFactory)(
     rtc::NetworkMonitorFactory::SetFactory(network_monitor_factory);
   }

+  webrtc::AudioDeviceModule* adm = nullptr;
+  rtc::scoped_refptr<webrtc::AudioMixer> audio_mixer = nullptr;
+  std::unique_ptr<webrtc::CallFactoryInterface> call_factory(
+      CreateCallFactory());
+  std::unique_ptr<webrtc::RtcEventLogFactoryInterface> rtc_event_log_factory(
+      CreateRtcEventLogFactory());
+  std::unique_ptr<cricket::MediaEngineInterface> media_engine(CreateMediaEngine(
+      adm, audio_encoder_factory, audio_decoder_factory, video_encoder_factory,
+      video_decoder_factory, audio_mixer));
+
   rtc::scoped_refptr<PeerConnectionFactoryInterface> factory(
-      CreateNativePeerConnectionFactory(
+      CreateModularPeerConnectionFactory(
           network_thread.get(), worker_thread.get(), signaling_thread.get(),
-          nullptr, audio_encoder_factory, audio_decoder_factory,
-          video_encoder_factory, video_decoder_factory));
+          adm, audio_encoder_factory, audio_decoder_factory,
+          video_encoder_factory, video_decoder_factory, audio_mixer,
+          std::move(media_engine), std::move(call_factory),
+          std::move(rtc_event_log_factory)));
   RTC_CHECK(factory) << "Failed to create the peer connection factory; "
                      << "WebRTC/libjingle init likely failed on this device";
   // TODO(honghaiz): Maybe put the options as the argument of
@@ -1908,97 +1919,6 @@ JOW(jobject, MediaSource_nativeState)(JNIEnv* jni, jclass, jlong j_p) {
   return JavaEnumFromIndex(jni, "MediaSource$State", p->state());
 }

-JOW(void, FileVideoCapturer_nativeI420ToNV21)(
-    JNIEnv *jni, jclass, jbyteArray j_src_buffer, jint width, jint height,
-    jbyteArray j_dst_buffer) {
-  size_t src_size = jni->GetArrayLength(j_src_buffer);
-  size_t dst_size = jni->GetArrayLength(j_dst_buffer);
-  int src_stride = width;
-  int dst_stride = width;
-  RTC_CHECK_GE(src_size, src_stride * height * 3 / 2);
-  RTC_CHECK_GE(dst_size, dst_stride * height * 3 / 2);
-  jbyte* src_bytes = jni->GetByteArrayElements(j_src_buffer, 0);
-  uint8_t* src = reinterpret_cast<uint8_t*>(src_bytes);
-  jbyte* dst_bytes = jni->GetByteArrayElements(j_dst_buffer, 0);
-  uint8_t* dst = reinterpret_cast<uint8_t*>(dst_bytes);
-  uint8_t* src_y = src;
-  size_t src_stride_y = src_stride;
-  uint8_t* src_u = src + src_stride * height;
-  size_t src_stride_u = src_stride / 2;
-  uint8_t* src_v = src + src_stride * height * 5 / 4;
-  size_t src_stride_v = src_stride / 2;
-  uint8_t* dst_y = dst;
-  size_t dst_stride_y = dst_stride;
-  size_t dst_stride_uv = dst_stride;
-  uint8_t* dst_uv = dst + dst_stride * height;
-  int ret = libyuv::I420ToNV21(src_y, src_stride_y, src_u, src_stride_u, src_v,
-                               src_stride_v, dst_y, dst_stride_y, dst_uv,
-                               dst_stride_uv, width, height);
-  jni->ReleaseByteArrayElements(j_src_buffer, src_bytes, 0);
-  jni->ReleaseByteArrayElements(j_dst_buffer, dst_bytes, 0);
-  if (ret) {
-    LOG(LS_ERROR) << "Error converting I420 frame to NV21: " << ret;
-  }
-}
-
-JOW(void, VideoFileRenderer_nativeI420Scale)(
-    JNIEnv *jni, jclass,
-    jobject j_src_buffer_y, jint j_src_stride_y,
-    jobject j_src_buffer_u, jint j_src_stride_u,
-    jobject j_src_buffer_v, jint j_src_stride_v,
-    jint width, jint height,
-    jbyteArray j_dst_buffer, jint dstWidth, jint dstHeight) {
-  size_t src_size_y = jni->GetDirectBufferCapacity(j_src_buffer_y);
-  size_t src_size_u = jni->GetDirectBufferCapacity(j_src_buffer_u);
-  size_t src_size_v = jni->GetDirectBufferCapacity(j_src_buffer_v);
-  size_t dst_size = jni->GetDirectBufferCapacity(j_dst_buffer);
-  int dst_stride = dstWidth;
-  RTC_CHECK_GE(src_size_y, j_src_stride_y * height);
-  RTC_CHECK_GE(src_size_u, j_src_stride_u * height / 4);
-  RTC_CHECK_GE(src_size_v, j_src_stride_v * height / 4);
-  RTC_CHECK_GE(dst_size, dst_stride * dstHeight * 3 / 2);
-  uint8_t* src_y =
-      reinterpret_cast<uint8_t*>(jni->GetDirectBufferAddress(j_src_buffer_y));
-  uint8_t* src_u =
-      reinterpret_cast<uint8_t*>(jni->GetDirectBufferAddress(j_src_buffer_u));
-  uint8_t* src_v =
-      reinterpret_cast<uint8_t*>(jni->GetDirectBufferAddress(j_src_buffer_v));
-  uint8_t* dst =
-      reinterpret_cast<uint8_t*>(jni->GetDirectBufferAddress(j_dst_buffer));
-  uint8_t* dst_y = dst;
-  size_t dst_stride_y = dst_stride;
-  uint8_t* dst_u = dst + dst_stride * dstHeight;
-  size_t dst_stride_u = dst_stride / 2;
-  uint8_t* dst_v = dst + dst_stride * dstHeight * 5 / 4;
-  size_t dst_stride_v = dst_stride / 2;
-  int ret = libyuv::I420Scale(
-      src_y, j_src_stride_y, src_u, j_src_stride_u, src_v, j_src_stride_v,
-      width, height, dst_y, dst_stride_y, dst_u, dst_stride_u, dst_v,
-      dst_stride_v, dstWidth, dstHeight, libyuv::kFilterBilinear);
-  if (ret) {
-    LOG(LS_ERROR) << "Error scaling I420 frame: " << ret;
-  }
-}
-
-JOW(jobject, VideoFileRenderer_nativeCreateNativeByteBuffer)
-(JNIEnv* jni, jclass, jint size) {
-  void* new_data = ::operator new(size);
-  jobject byte_buffer = jni->NewDirectByteBuffer(new_data, size);
-  return byte_buffer;
-}
-
-JOW(void, VideoFileRenderer_nativeFreeNativeByteBuffer)
-(JNIEnv* jni, jclass, jobject byte_buffer) {
-  void* data = jni->GetDirectBufferAddress(byte_buffer);
-  ::operator delete(data);
-}
-
 JOW(jstring, MediaStreamTrack_nativeId)(JNIEnv* jni, jclass, jlong j_p) {
   return JavaStringFromStdString(
       jni, reinterpret_cast<MediaStreamTrackInterface*>(j_p)->id());
@@ -2026,26 +1946,6 @@ JOW(jboolean, MediaStreamTrack_nativeSetEnabled)(
       ->set_enabled(enabled);
 }

-JOW(void, VideoTrack_nativeAddRenderer)(
-    JNIEnv* jni, jclass,
-    jlong j_video_track_pointer, jlong j_renderer_pointer) {
-  LOG(LS_INFO) << "VideoTrack::nativeAddRenderer";
-  reinterpret_cast<VideoTrackInterface*>(j_video_track_pointer)
-      ->AddOrUpdateSink(
-          reinterpret_cast<rtc::VideoSinkInterface<webrtc::VideoFrame>*>(
-              j_renderer_pointer),
-          rtc::VideoSinkWants());
-}
-
-JOW(void, VideoTrack_nativeRemoveRenderer)(
-    JNIEnv* jni, jclass,
-    jlong j_video_track_pointer, jlong j_renderer_pointer) {
-  reinterpret_cast<VideoTrackInterface*>(j_video_track_pointer)
-      ->RemoveSink(
-          reinterpret_cast<rtc::VideoSinkInterface<webrtc::VideoFrame>*>(
-              j_renderer_pointer));
-}
-
 JOW(jlong, CallSessionFileRotatingLogSink_nativeAddSink)(
     JNIEnv* jni, jclass,
     jstring j_dirPath, jint j_maxFileSize, jint j_severity) {

View File

@@ -21,25 +21,18 @@
 #include "webrtc/sdk/android/src/jni/ownedfactoryandthreads.h"
 #include "webrtc/sdk/android/src/jni/surfacetexturehelper_jni.h"

-using cricket::WebRtcVideoDecoderFactory;
-using cricket::WebRtcVideoEncoderFactory;
-using webrtc::AndroidVideoTrackSource;
-using webrtc::AudioSourceInterface;
-using webrtc::VideoTrackSourceInterface;
-using webrtc::VideoTrackInterface;
-
 namespace webrtc_jni {

-WebRtcVideoEncoderFactory* CreateVideoEncoderFactory() {
+cricket::WebRtcVideoEncoderFactory* CreateVideoEncoderFactory() {
   return new MediaCodecVideoEncoderFactory();
 }

-WebRtcVideoDecoderFactory* CreateVideoDecoderFactory() {
+cricket::WebRtcVideoDecoderFactory* CreateVideoDecoderFactory() {
   return new MediaCodecVideoDecoderFactory();
 }

 jobject GetJavaSurfaceTextureHelper(
-    rtc::scoped_refptr<SurfaceTextureHelper> surface_texture_helper) {
+    const rtc::scoped_refptr<SurfaceTextureHelper>& surface_texture_helper) {
   return surface_texture_helper
              ? surface_texture_helper->GetJavaSurfaceTextureHelper()
              : nullptr;
@@ -69,9 +62,10 @@ JOW(jlong, PeerConnectionFactory_nativeCreateVideoTrack)
 (JNIEnv* jni, jclass, jlong native_factory, jstring id, jlong native_source) {
   rtc::scoped_refptr<PeerConnectionFactoryInterface> factory(
       factoryFromJava(native_factory));
-  rtc::scoped_refptr<VideoTrackInterface> track(factory->CreateVideoTrack(
-      JavaToStdString(jni, id),
-      reinterpret_cast<VideoTrackSourceInterface*>(native_source)));
+  rtc::scoped_refptr<webrtc::VideoTrackInterface> track(
+      factory->CreateVideoTrack(
+          JavaToStdString(jni, id),
+          reinterpret_cast<webrtc::VideoTrackSourceInterface*>(native_source)));
   return (jlong)track.release();
 }

View File

@@ -14,18 +14,22 @@
 #include <jni.h>

 #include "webrtc/base/scoped_ref_ptr.h"
-// Adding 'nogncheck' to disable the gn include headers check.
-// We don't want this target depend on video related targets
-#include "webrtc/sdk/android/src/jni/surfacetexturehelper_jni.h" // nogncheck
+
+namespace cricket {
+class WebRtcVideoEncoderFactory;
+class WebRtcVideoDecoderFactory;
+} // namespace cricket

 namespace webrtc_jni {

-WebRtcVideoEncoderFactory* CreateVideoEncoderFactory();
-WebRtcVideoDecoderFactory* CreateVideoDecoderFactory();
+class SurfaceTextureHelper;
+
+cricket::WebRtcVideoEncoderFactory* CreateVideoEncoderFactory();
+cricket::WebRtcVideoDecoderFactory* CreateVideoDecoderFactory();

 jobject GetJavaSurfaceTextureHelper(
-    rtc::scoped_refptr<SurfaceTextureHelper> surface_texture_helper);
+    const rtc::scoped_refptr<SurfaceTextureHelper>& surface_texture_helper);

 } // namespace webrtc_jni

View File

@@ -9,8 +9,6 @@
  */

 #include <jni.h>
-#undef JNIEXPORT
-#define JNIEXPORT __attribute__((visibility("default")))

 #include "webrtc/api/video/video_frame.h"
 #include "webrtc/media/base/videosinkinterface.h"

View File

@@ -0,0 +1,85 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include <jni.h>
#include "third_party/libyuv/include/libyuv/scale.h"
#include "webrtc/base/checks.h"
#include "webrtc/base/logging.h"
namespace webrtc_jni {
extern "C" JNIEXPORT void JNICALL
Java_org_webrtc_VideoFileRenderer_nativeI420Scale(JNIEnv* jni,
jclass,
jobject j_src_buffer_y,
jint j_src_stride_y,
jobject j_src_buffer_u,
jint j_src_stride_u,
jobject j_src_buffer_v,
jint j_src_stride_v,
jint width,
jint height,
jbyteArray j_dst_buffer,
jint dstWidth,
jint dstHeight) {
size_t src_size_y = jni->GetDirectBufferCapacity(j_src_buffer_y);
size_t src_size_u = jni->GetDirectBufferCapacity(j_src_buffer_u);
size_t src_size_v = jni->GetDirectBufferCapacity(j_src_buffer_v);
size_t dst_size = jni->GetDirectBufferCapacity(j_dst_buffer);
int dst_stride = dstWidth;
RTC_CHECK_GE(src_size_y, j_src_stride_y * height);
RTC_CHECK_GE(src_size_u, j_src_stride_u * height / 4);
RTC_CHECK_GE(src_size_v, j_src_stride_v * height / 4);
RTC_CHECK_GE(dst_size, dst_stride * dstHeight * 3 / 2);
uint8_t* src_y =
reinterpret_cast<uint8_t*>(jni->GetDirectBufferAddress(j_src_buffer_y));
uint8_t* src_u =
reinterpret_cast<uint8_t*>(jni->GetDirectBufferAddress(j_src_buffer_u));
uint8_t* src_v =
reinterpret_cast<uint8_t*>(jni->GetDirectBufferAddress(j_src_buffer_v));
uint8_t* dst =
reinterpret_cast<uint8_t*>(jni->GetDirectBufferAddress(j_dst_buffer));
uint8_t* dst_y = dst;
size_t dst_stride_y = dst_stride;
uint8_t* dst_u = dst + dst_stride * dstHeight;
size_t dst_stride_u = dst_stride / 2;
uint8_t* dst_v = dst + dst_stride * dstHeight * 5 / 4;
size_t dst_stride_v = dst_stride / 2;
int ret = libyuv::I420Scale(
src_y, j_src_stride_y, src_u, j_src_stride_u, src_v, j_src_stride_v,
width, height, dst_y, dst_stride_y, dst_u, dst_stride_u, dst_v,
dst_stride_v, dstWidth, dstHeight, libyuv::kFilterBilinear);
if (ret) {
LOG(LS_ERROR) << "Error scaling I420 frame: " << ret;
}
}
extern "C" JNIEXPORT jobject JNICALL
Java_org_webrtc_VideoFileRenderer_nativeCreateNativeByteBuffer(JNIEnv* jni,
jclass,
jint size) {
void* new_data = ::operator new(size);
jobject byte_buffer = jni->NewDirectByteBuffer(new_data, size);
return byte_buffer;
}
extern "C" JNIEXPORT void JNICALL
Java_org_webrtc_VideoFileRenderer_nativeFreeNativeByteBuffer(
JNIEnv* jni,
jclass,
jobject byte_buffer) {
void* data = jni->GetDirectBufferAddress(byte_buffer);
::operator delete(data);
}
} // namespace webrtc_jni

View File

@@ -0,0 +1,42 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include <jni.h>
#include "webrtc/api/mediastreaminterface.h"
#include "webrtc/base/logging.h"
namespace webrtc_jni {
extern "C" JNIEXPORT void JNICALL
Java_org_webrtc_VideoTrack_nativeAddRenderer(JNIEnv* jni,
jclass,
jlong j_video_track_pointer,
jlong j_renderer_pointer) {
LOG(LS_INFO) << "VideoTrack::nativeAddRenderer";
reinterpret_cast<webrtc::VideoTrackInterface*>(j_video_track_pointer)
->AddOrUpdateSink(
reinterpret_cast<rtc::VideoSinkInterface<webrtc::VideoFrame>*>(
j_renderer_pointer),
rtc::VideoSinkWants());
}
extern "C" JNIEXPORT void JNICALL
Java_org_webrtc_VideoTrack_nativeRemoveRenderer(JNIEnv* jni,
jclass,
jlong j_video_track_pointer,
jlong j_renderer_pointer) {
reinterpret_cast<webrtc::VideoTrackInterface*>(j_video_track_pointer)
->RemoveSink(
reinterpret_cast<rtc::VideoSinkInterface<webrtc::VideoFrame>*>(
j_renderer_pointer));
}
} // namespace webrtc_jni