Add Android native API: CreateJavaVideoSource

Adds Android native API for creating VideoTrackSourceInterface objects
that can be fed frames using VideoCapturer.CapturerObserver.

NativeCapturerObserver is moved out of VideoSource because it will now
be used without a VideoSource. It now takes a pointer to
AndroidVideoTrackSource directly instead of VideoTrackSourceProxy.

VideoSource and NativeCapturerObserver JNI code is moved out of
androidvideotracksource.cc into their own files. This allows using
AndroidVideoTrackSource independently.
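
Roughly, the new API is meant to be used along these lines (a sketch only:
the helper name CreateTrackFedFromJava is illustrative, and the JNIEnv,
signaling thread and PeerConnectionFactory are assumed to be provided by the
embedding application):

#include <jni.h>

#include "api/mediastreaminterface.h"
#include "api/peerconnectioninterface.h"
#include "rtc_base/thread.h"
#include "sdk/android/native_api/video/videosource.h"

// Creates a Java-fed video source and wraps it in a track. The returned
// jobject implements VideoCapturer.CapturerObserver; hand it to Java code
// (e.g. a VideoCapturer) so that captured frames reach the native source.
rtc::scoped_refptr<webrtc::VideoTrackInterface> CreateTrackFedFromJava(
    JNIEnv* env,
    rtc::Thread* signaling_thread,
    webrtc::PeerConnectionFactoryInterface* factory,
    jobject* out_capturer_observer) {
  rtc::scoped_refptr<webrtc::JavaVideoTrackSourceInterface> source =
      webrtc::CreateJavaVideoSource(env, signaling_thread,
                                    false /* is_screencast */);
  // Keep a global ref; the local ref dies when the current JNI frame ends.
  *out_capturer_observer =
      env->NewGlobalRef(source->GetJavaVideoCapturerObserver(env).obj());
  // The source is a plain VideoTrackSourceInterface, so existing factory
  // APIs accept it unchanged.
  return factory->CreateVideoTrack("java_video", source);
}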

Bug: webrtc:8769
Change-Id: Ifb9e1eb27d4c8237597d19d932ca6e863abb4d27
Reviewed-on: https://webrtc-review.googlesource.com/76924
Reviewed-by: Paulina Hensman <phensman@webrtc.org>
Commit-Queue: Sami Kalliomäki <sakal@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#23269}
Author: Sami Kalliomäki
Date: 2018-05-16 12:49:47 +02:00
Committed by: Commit Bot
Parent: 8e7a62beb2
Commit: ff1de0af6b
11 changed files with 573 additions and 134 deletions


@ -286,6 +286,7 @@ generate_jni("generated_video_jni") {
"src/java/org/webrtc/EglBase14.java",
"src/java/org/webrtc/NV12Buffer.java",
"src/java/org/webrtc/NV21Buffer.java",
"src/java/org/webrtc/NativeCapturerObserver.java",
"src/java/org/webrtc/VideoDecoderWrapper.java",
"src/java/org/webrtc/VideoEncoderWrapper.java",
"src/java/org/webrtc/WrappedNativeI420Buffer.java",
@ -309,6 +310,8 @@ rtc_static_library("video_jni") {
"src/jni/encodedimage.cc",
"src/jni/encodedimage.h",
"src/jni/jni_generator_helper.h",
"src/jni/nativecapturerobserver.cc",
"src/jni/nativecapturerobserver.h",
"src/jni/nv12buffer.cc",
"src/jni/nv21buffer.cc",
"src/jni/pc/video.cc",
@ -331,6 +334,7 @@ rtc_static_library("video_jni") {
"src/jni/videoframe.h",
"src/jni/videosink.cc",
"src/jni/videosink.h",
"src/jni/videosource.cc",
"src/jni/videotrack.cc",
"src/jni/wrapped_native_i420_buffer.cc",
"src/jni/wrapped_native_i420_buffer.h",
@ -833,6 +837,7 @@ rtc_android_library("video_java") {
"api/org/webrtc/VideoDecoderFallback.java",
"api/org/webrtc/VideoEncoderFallback.java",
"api/org/webrtc/VideoFrameDrawer.java",
"src/java/org/webrtc/NativeCapturerObserver.java",
"src/java/org/webrtc/NV21Buffer.java",
"src/java/org/webrtc/VideoDecoderWrapper.java",
"src/java/org/webrtc/VideoEncoderWrapper.java",
@ -1002,7 +1007,7 @@ rtc_android_library("peerconnection_java") {
":audio_api_java",
":base_java",
":video_api_java",
":video_java", # TODO(sakal): Remove dependency.
":video_java",
"//modules/audio_device:audio_device_java",
"//rtc_base:base_java",
]
@ -1222,6 +1227,8 @@ rtc_static_library("native_api_video") {
"software_video_codecs", # TODO(bugs.webrtc.org/7925): Remove.
]
sources = [
"native_api/video/videosource.cc",
"native_api/video/videosource.h",
"native_api/video/wrapper.cc",
"native_api/video/wrapper.h",
]
@ -1241,6 +1248,7 @@ generate_jni("generated_native_unittests_jni") {
"native_unittests/org/webrtc/ApplicationContextProvider.java",
"native_unittests/org/webrtc/BuildInfo.java",
"native_unittests/org/webrtc/JavaTypesTestHelper.java",
"native_unittests/org/webrtc/JavaVideoSourceTestHelper.java",
"native_unittests/org/webrtc/PeerConnectionFactoryInitializationHelper.java",
]
jni_package = ""
@ -1254,6 +1262,7 @@ rtc_android_library("native_unittests_java") {
"native_unittests/org/webrtc/ApplicationContextProvider.java",
"native_unittests/org/webrtc/BuildInfo.java",
"native_unittests/org/webrtc/JavaTypesTestHelper.java",
"native_unittests/org/webrtc/JavaVideoSourceTestHelper.java",
"native_unittests/org/webrtc/PeerConnectionFactoryInitializationHelper.java",
]
@ -1271,6 +1280,7 @@ rtc_source_set("native_unittests") {
"native_unittests/java_types_unittest.cc",
"native_unittests/peerconnection/peerconnectionfactory_unittest.cc",
"native_unittests/test_jni_onload.cc",
"native_unittests/video/videosource_unittest.cc",
]
data = [
@ -1296,12 +1306,14 @@ rtc_source_set("native_unittests") {
":native_api_base",
":native_api_jni",
":native_api_peerconnection",
":native_api_video",
":native_unittests_java",
":opensles_audio_device_module",
":video_jni",
"../../system_wrappers:system_wrappers",
"//api/audio_codecs:builtin_audio_decoder_factory",
"//api/audio_codecs:builtin_audio_encoder_factory",
"//api/video:video_frame",
"//media:rtc_audio_video",
"//media:rtc_internal_video_codecs",
"//media:rtc_media_base",


@ -17,83 +17,18 @@ import javax.annotation.Nullable;
*/
@JNINamespace("webrtc::jni")
public class VideoSource extends MediaSource {
private static class NativeCapturerObserver implements VideoCapturer.CapturerObserver {
private final long nativeSource;
// TODO(bugs.webrtc.org/9181): Remove.
@Nullable private final SurfaceTextureHelper surfaceTextureHelper;
public NativeCapturerObserver(long nativeSource) {
this.nativeSource = nativeSource;
this.surfaceTextureHelper = null;
}
// TODO(bugs.webrtc.org/9181): Remove.
public NativeCapturerObserver(long nativeSource, SurfaceTextureHelper surfaceTextureHelper) {
this.nativeSource = nativeSource;
this.surfaceTextureHelper = surfaceTextureHelper;
}
@Override
public void onCapturerStarted(boolean success) {
nativeCapturerStarted(nativeSource, success);
}
@Override
public void onCapturerStopped() {
nativeCapturerStopped(nativeSource);
}
// TODO(bugs.webrtc.org/9181): Remove.
@Override
@SuppressWarnings("deprecation")
public void onByteBufferFrameCaptured(
byte[] data, int width, int height, int rotation, long timestampNs) {
// This NV21Buffer is not possible to retain. This is safe only because the native code will
// always call cropAndScale() and directly make a deep copy of the buffer.
final VideoFrame.Buffer nv21Buffer =
new NV21Buffer(data, width, height, null /* releaseCallback */);
final VideoFrame frame = new VideoFrame(nv21Buffer, rotation, timestampNs);
onFrameCaptured(frame);
frame.release();
}
// TODO(bugs.webrtc.org/9181): Remove.
@Override
@SuppressWarnings("deprecation")
public void onTextureFrameCaptured(int width, int height, int oesTextureId,
float[] transformMatrix, int rotation, long timestampNs) {
final VideoFrame.Buffer buffer = surfaceTextureHelper.createTextureBuffer(
width, height, RendererCommon.convertMatrixToAndroidGraphicsMatrix(transformMatrix));
final VideoFrame frame = new VideoFrame(buffer, rotation, timestampNs);
onFrameCaptured(frame);
frame.release();
}
@Override
public void onFrameCaptured(VideoFrame frame) {
nativeOnFrameCaptured(nativeSource, frame.getBuffer().getWidth(),
frame.getBuffer().getHeight(), frame.getRotation(), frame.getTimestampNs(),
frame.getBuffer());
}
public void dispose() {
if (surfaceTextureHelper != null) {
surfaceTextureHelper.dispose();
}
}
}
private final NativeCapturerObserver capturerObserver;
public VideoSource(long nativeSource) {
super(nativeSource);
this.capturerObserver = new NativeCapturerObserver(nativeSource);
this.capturerObserver = new NativeCapturerObserver(nativeGetInternalSource(nativeSource));
}
// TODO(bugs.webrtc.org/9181): Remove.
VideoSource(long nativeSource, SurfaceTextureHelper surfaceTextureHelper) {
super(nativeSource);
this.capturerObserver = new NativeCapturerObserver(nativeSource, surfaceTextureHelper);
this.capturerObserver =
new NativeCapturerObserver(nativeGetInternalSource(nativeSource), surfaceTextureHelper);
}
/**
@ -116,9 +51,7 @@ public class VideoSource extends MediaSource {
super.dispose();
}
// Returns source->internal() from webrtc::VideoTrackSourceProxy.
private static native long nativeGetInternalSource(long source);
private static native void nativeAdaptOutputFormat(long source, int width, int height, int fps);
private static native void nativeCapturerStarted(long source, boolean success);
private static native void nativeCapturerStopped(long source);
private static native void nativeOnFrameCaptured(
long source, int width, int height, int rotation, long timestampNs, VideoFrame.Buffer frame);
}


@ -0,0 +1,104 @@
/*
* Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "sdk/android/native_api/video/videosource.h"
#include "sdk/android/src/jni/androidvideotracksource.h"
#include "sdk/android/src/jni/nativecapturerobserver.h"
namespace webrtc {
namespace {
// Hides full jni::AndroidVideoTrackSource interface and provides an instance of
// NativeCapturerObserver associated with the video source. Does not extend
// AndroidVideoTrackSource to avoid diamond inheritance on
// VideoTrackSourceInterface.
class JavaVideoTrackSourceImpl : public JavaVideoTrackSourceInterface {
public:
JavaVideoTrackSourceImpl(JNIEnv* env,
rtc::Thread* signaling_thread,
bool is_screencast)
: android_video_track_source_(
new rtc::RefCountedObject<jni::AndroidVideoTrackSource>(
signaling_thread,
env,
is_screencast)),
native_capturer_observer_(jni::CreateJavaNativeCapturerObserver(
env,
android_video_track_source_)) {}
ScopedJavaLocalRef<jobject> GetJavaVideoCapturerObserver(
JNIEnv* env) override {
return ScopedJavaLocalRef<jobject>(env, native_capturer_observer_);
}
// Delegate VideoTrackSourceInterface methods to android_video_track_source_.
void RegisterObserver(ObserverInterface* observer) override {
android_video_track_source_->RegisterObserver(observer);
}
void UnregisterObserver(ObserverInterface* observer) override {
android_video_track_source_->UnregisterObserver(observer);
}
SourceState state() const override {
return android_video_track_source_->state();
}
bool remote() const override { return android_video_track_source_->remote(); }
void AddOrUpdateSink(rtc::VideoSinkInterface<VideoFrame>* sink,
const rtc::VideoSinkWants& wants) override {
// The method is defined private in the implementation so we have to access
// it through the interface...
static_cast<VideoTrackSourceInterface*>(android_video_track_source_.get())
->AddOrUpdateSink(sink, wants);
}
void RemoveSink(rtc::VideoSinkInterface<VideoFrame>* sink) override {
// The method is defined private in the implementation so we have to access
// it through the interface...
static_cast<VideoTrackSourceInterface*>(android_video_track_source_.get())
->RemoveSink(sink);
}
bool is_screencast() const override {
return android_video_track_source_->is_screencast();
}
rtc::Optional<bool> needs_denoising() const override {
return android_video_track_source_->needs_denoising();
}
bool GetStats(Stats* stats) override {
// The method is defined private in the implementation so we have to access
// it through the interface...
return static_cast<VideoTrackSourceInterface*>(
android_video_track_source_.get())
->GetStats(stats);
}
private:
rtc::scoped_refptr<jni::AndroidVideoTrackSource> android_video_track_source_;
ScopedJavaGlobalRef<jobject> native_capturer_observer_;
};
} // namespace
rtc::scoped_refptr<JavaVideoTrackSourceInterface> CreateJavaVideoSource(
JNIEnv* jni,
rtc::Thread* signaling_thread,
bool is_screencast) {
return new rtc::RefCountedObject<JavaVideoTrackSourceImpl>(
jni, signaling_thread, is_screencast);
}
} // namespace webrtc


@ -0,0 +1,39 @@
/*
* Copyright 2018 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef SDK_ANDROID_NATIVE_API_VIDEO_VIDEOSOURCE_H_
#define SDK_ANDROID_NATIVE_API_VIDEO_VIDEOSOURCE_H_
#include <jni.h>
#include "api/mediastreaminterface.h"
#include "sdk/android/native_api/jni/scoped_java_ref.h"
namespace webrtc {
// Interface for class that implements VideoTrackSourceInterface and provides a
// Java object that can be used to feed frames to the source.
class JavaVideoTrackSourceInterface : public VideoTrackSourceInterface {
public:
// Returns VideoCapturer.CapturerObserver object that can be used to feed
// frames to the video source.
virtual ScopedJavaLocalRef<jobject> GetJavaVideoCapturerObserver(
JNIEnv* env) = 0;
};
// Creates an instance of JavaVideoTrackSourceInterface.
rtc::scoped_refptr<JavaVideoTrackSourceInterface> CreateJavaVideoSource(
JNIEnv* env,
rtc::Thread* signaling_thread,
bool is_screencast);
} // namespace webrtc
#endif // SDK_ANDROID_NATIVE_API_VIDEO_VIDEOSOURCE_H_
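
On the native side, the returned object behaves like any other
VideoTrackSourceInterface: frames fed from Java come out through registered
sinks, as the unit test later in this CL also exercises. A minimal sketch
(LoggingSink and AttachSink are illustrative names, not part of this API):

#include "api/video/video_frame.h"
#include "api/video/video_sink_interface.h"
#include "rtc_base/logging.h"
#include "sdk/android/native_api/video/videosource.h"

// Frames pushed through the Java CapturerObserver returned by
// GetJavaVideoCapturerObserver() are delivered to sinks like this one.
class LoggingSink : public rtc::VideoSinkInterface<webrtc::VideoFrame> {
 public:
  void OnFrame(const webrtc::VideoFrame& frame) override {
    RTC_LOG(LS_INFO) << "Frame " << frame.width() << "x" << frame.height()
                     << " rotation " << static_cast<int>(frame.rotation());
  }
};

void AttachSink(webrtc::JavaVideoTrackSourceInterface* source,
                LoggingSink* sink) {
  // Default VideoSinkWants: no resolution or frame-rate constraints.
  source->AddOrUpdateSink(sink, rtc::VideoSinkWants());
}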


@ -0,0 +1,35 @@
/*
* Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import org.webrtc.VideoCapturer;
public class JavaVideoSourceTestHelper {
@CalledByNative
public static void startCapture(VideoCapturer.CapturerObserver observer, boolean success) {
observer.onCapturerStarted(success);
}
@CalledByNative
public static void stopCapture(VideoCapturer.CapturerObserver observer) {
observer.onCapturerStopped();
}
@CalledByNative
public static void deliverFrame(VideoCapturer.CapturerObserver observer) {
final int FRAME_WIDTH = 2;
final int FRAME_HEIGHT = 3;
final int FRAME_ROTATION = 180;
observer.onFrameCaptured(new VideoFrame(
JavaI420Buffer.allocate(FRAME_WIDTH, FRAME_HEIGHT), FRAME_ROTATION, 0 /* timestampNs */));
}
}


@ -0,0 +1,136 @@
/*
* Copyright 2018 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include <vector>
#include "api/video/video_sink_interface.h"
#include "sdk/android/generated_native_unittests_jni/jni/JavaVideoSourceTestHelper_jni.h"
#include "sdk/android/native_api/video/videosource.h"
#include "test/gtest.h"
namespace webrtc {
namespace test {
namespace {
class TestVideoSink : public rtc::VideoSinkInterface<VideoFrame> {
public:
void OnFrame(const VideoFrame& frame) { frames_.push_back(frame); }
std::vector<VideoFrame> GetFrames() {
std::vector<VideoFrame> temp = frames_;
frames_.clear();
return temp;
}
private:
std::vector<VideoFrame> frames_;
};
} // namespace
TEST(JavaVideoSourceTest, CreateJavaVideoSource) {
JNIEnv* env = AttachCurrentThreadIfNeeded();
// Wrap test thread so it can be used as the signaling thread.
rtc::ThreadManager::Instance()->WrapCurrentThread();
rtc::scoped_refptr<JavaVideoTrackSourceInterface> video_track_source =
CreateJavaVideoSource(env,
rtc::ThreadManager::Instance()->CurrentThread(),
false /* is_screencast */);
ASSERT_NE(nullptr, video_track_source);
EXPECT_NE(nullptr,
video_track_source->GetJavaVideoCapturerObserver(env).obj());
}
TEST(JavaVideoSourceTest, OnFrameCapturedFrameIsDeliveredToSink) {
TestVideoSink test_video_sink;
JNIEnv* env = AttachCurrentThreadIfNeeded();
// Wrap test thread so it can be used as the signaling thread.
rtc::ThreadManager::Instance()->WrapCurrentThread();
rtc::scoped_refptr<JavaVideoTrackSourceInterface> video_track_source =
CreateJavaVideoSource(env,
rtc::ThreadManager::Instance()->CurrentThread(),
false /* is_screencast */);
video_track_source->AddOrUpdateSink(&test_video_sink, rtc::VideoSinkWants());
Java_JavaVideoSourceTestHelper_startCapture(
env, video_track_source->GetJavaVideoCapturerObserver(env),
true /* success */);
Java_JavaVideoSourceTestHelper_deliverFrame(
env, video_track_source->GetJavaVideoCapturerObserver(env));
std::vector<VideoFrame> frames = test_video_sink.GetFrames();
ASSERT_EQ(1u, frames.size());
webrtc::VideoFrame frame = frames[0];
EXPECT_EQ(2, frame.width());
EXPECT_EQ(3, frame.height());
EXPECT_EQ(180, frame.rotation());
}
TEST(JavaVideoSourceTest, CapturerStartedSuccessStateBecomesLive) {
JNIEnv* env = AttachCurrentThreadIfNeeded();
// Wrap test thread so it can be used as the signaling thread.
rtc::ThreadManager::Instance()->WrapCurrentThread();
rtc::scoped_refptr<JavaVideoTrackSourceInterface> video_track_source =
CreateJavaVideoSource(env,
rtc::ThreadManager::Instance()->CurrentThread(),
false /* is_screencast */);
Java_JavaVideoSourceTestHelper_startCapture(
env, video_track_source->GetJavaVideoCapturerObserver(env),
true /* success */);
EXPECT_EQ(VideoTrackSourceInterface::SourceState::kLive,
video_track_source->state());
}
TEST(JavaVideoSourceTest, CapturerStartedFailureStateBecomesEnded) {
JNIEnv* env = AttachCurrentThreadIfNeeded();
// Wrap test thread so it can be used as the signaling thread.
rtc::ThreadManager::Instance()->WrapCurrentThread();
rtc::scoped_refptr<JavaVideoTrackSourceInterface> video_track_source =
CreateJavaVideoSource(env,
rtc::ThreadManager::Instance()->CurrentThread(),
false /* is_screencast */);
Java_JavaVideoSourceTestHelper_startCapture(
env, video_track_source->GetJavaVideoCapturerObserver(env),
false /* success */);
EXPECT_EQ(VideoTrackSourceInterface::SourceState::kEnded,
video_track_source->state());
}
TEST(JavaVideoSourceTest, CapturerStoppedStateBecomesEnded) {
JNIEnv* env = AttachCurrentThreadIfNeeded();
// Wrap test thread so it can be used as the signaling thread.
rtc::ThreadManager::Instance()->WrapCurrentThread();
rtc::scoped_refptr<JavaVideoTrackSourceInterface> video_track_source =
CreateJavaVideoSource(env,
rtc::ThreadManager::Instance()->CurrentThread(),
false /* is_screencast */);
Java_JavaVideoSourceTestHelper_startCapture(
env, video_track_source->GetJavaVideoCapturerObserver(env),
true /* success */);
Java_JavaVideoSourceTestHelper_stopCapture(
env, video_track_source->GetJavaVideoCapturerObserver(env));
EXPECT_EQ(VideoTrackSourceInterface::SourceState::kEnded,
video_track_source->state());
}
} // namespace test
} // namespace webrtc


@ -0,0 +1,90 @@
/*
* Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import javax.annotation.Nullable;
/**
* Implements VideoCapturer.CapturerObserver and feeds frames to
* webrtc::jni::AndroidVideoTrackSource.
*/
@JNINamespace("webrtc::jni")
class NativeCapturerObserver implements VideoCapturer.CapturerObserver {
// Pointer to webrtc::jni::AndroidVideoTrackSource.
private final long nativeSource;
// TODO(bugs.webrtc.org/9181): Remove.
@Nullable private final SurfaceTextureHelper surfaceTextureHelper;
@CalledByNative
public NativeCapturerObserver(long nativeSource) {
this.nativeSource = nativeSource;
this.surfaceTextureHelper = null;
}
// TODO(bugs.webrtc.org/9181): Remove.
public NativeCapturerObserver(long nativeSource, SurfaceTextureHelper surfaceTextureHelper) {
this.nativeSource = nativeSource;
this.surfaceTextureHelper = surfaceTextureHelper;
}
@Override
public void onCapturerStarted(boolean success) {
nativeCapturerStarted(nativeSource, success);
}
@Override
public void onCapturerStopped() {
nativeCapturerStopped(nativeSource);
}
// TODO(bugs.webrtc.org/9181): Remove.
@Override
@SuppressWarnings("deprecation")
public void onByteBufferFrameCaptured(
byte[] data, int width, int height, int rotation, long timestampNs) {
// This NV21Buffer is not possible to retain. This is safe only because the native code will
// always call cropAndScale() and directly make a deep copy of the buffer.
final VideoFrame.Buffer nv21Buffer =
new NV21Buffer(data, width, height, null /* releaseCallback */);
final VideoFrame frame = new VideoFrame(nv21Buffer, rotation, timestampNs);
onFrameCaptured(frame);
frame.release();
}
// TODO(bugs.webrtc.org/9181): Remove.
@Override
@SuppressWarnings("deprecation")
public void onTextureFrameCaptured(int width, int height, int oesTextureId,
float[] transformMatrix, int rotation, long timestampNs) {
final VideoFrame.Buffer buffer = surfaceTextureHelper.createTextureBuffer(
width, height, RendererCommon.convertMatrixToAndroidGraphicsMatrix(transformMatrix));
final VideoFrame frame = new VideoFrame(buffer, rotation, timestampNs);
onFrameCaptured(frame);
frame.release();
}
@Override
public void onFrameCaptured(VideoFrame frame) {
nativeOnFrameCaptured(nativeSource, frame.getBuffer().getWidth(), frame.getBuffer().getHeight(),
frame.getRotation(), frame.getTimestampNs(), frame.getBuffer());
}
public void dispose() {
if (surfaceTextureHelper != null) {
surfaceTextureHelper.dispose();
}
}
private static native void nativeCapturerStarted(long source, boolean success);
private static native void nativeCapturerStopped(long source);
private static native void nativeOnFrameCaptured(
long source, int width, int height, int rotation, long timestampNs, VideoFrame.Buffer frame);
}


@ -14,7 +14,6 @@
#include "api/videosourceproxy.h"
#include "rtc_base/logging.h"
#include "sdk/android/generated_video_jni/jni/VideoSource_jni.h"
namespace webrtc {
namespace jni {
@ -22,18 +21,6 @@ namespace jni {
namespace {
// MediaCodec wants resolution to be divisible by 2.
const int kRequiredResolutionAlignment = 2;
VideoRotation jintToVideoRotation(jint rotation) {
RTC_DCHECK(rotation == 0 || rotation == 90 || rotation == 180 ||
rotation == 270);
return static_cast<VideoRotation>(rotation);
}
AndroidVideoTrackSource* AndroidVideoTrackSourceFromJavaProxy(jlong j_proxy) {
auto* proxy_source = reinterpret_cast<VideoTrackSourceProxy*>(j_proxy);
return reinterpret_cast<AndroidVideoTrackSource*>(proxy_source->internal());
}
} // namespace
AndroidVideoTrackSource::AndroidVideoTrackSource(
@ -125,53 +112,5 @@ void AndroidVideoTrackSource::OnOutputFormatRequest(int width,
video_adapter()->OnOutputFormatRequest(format);
}
static void JNI_VideoSource_OnFrameCaptured(
JNIEnv* jni,
const JavaParamRef<jclass>&,
jlong j_source,
jint j_width,
jint j_height,
jint j_rotation,
jlong j_timestamp_ns,
const JavaParamRef<jobject>& j_video_frame_buffer) {
AndroidVideoTrackSource* source =
AndroidVideoTrackSourceFromJavaProxy(j_source);
source->OnFrameCaptured(jni, j_width, j_height, j_timestamp_ns,
jintToVideoRotation(j_rotation),
j_video_frame_buffer);
}
static void JNI_VideoSource_CapturerStarted(JNIEnv* jni,
const JavaParamRef<jclass>&,
jlong j_source,
jboolean j_success) {
RTC_LOG(LS_INFO) << "AndroidVideoTrackSourceObserve_nativeCapturerStarted";
AndroidVideoTrackSource* source =
AndroidVideoTrackSourceFromJavaProxy(j_source);
source->SetState(j_success ? AndroidVideoTrackSource::SourceState::kLive
: AndroidVideoTrackSource::SourceState::kEnded);
}
static void JNI_VideoSource_CapturerStopped(JNIEnv* jni,
const JavaParamRef<jclass>&,
jlong j_source) {
RTC_LOG(LS_INFO) << "AndroidVideoTrackSourceObserve_nativeCapturerStopped";
AndroidVideoTrackSource* source =
AndroidVideoTrackSourceFromJavaProxy(j_source);
source->SetState(AndroidVideoTrackSource::SourceState::kEnded);
}
static void JNI_VideoSource_AdaptOutputFormat(JNIEnv* jni,
const JavaParamRef<jclass>&,
jlong j_source,
jint j_width,
jint j_height,
jint j_fps) {
RTC_LOG(LS_INFO) << "VideoSource_nativeAdaptOutputFormat";
AndroidVideoTrackSource* source =
AndroidVideoTrackSourceFromJavaProxy(j_source);
source->OnOutputFormatRequest(j_width, j_height, j_fps);
}
} // namespace jni
} // namespace webrtc


@ -0,0 +1,76 @@
/*
* Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "sdk/android/src/jni/nativecapturerobserver.h"
#include "api/videosourceproxy.h"
#include "rtc_base/logging.h"
#include "sdk/android/generated_video_jni/jni/NativeCapturerObserver_jni.h"
#include "sdk/android/native_api/jni/java_types.h"
#include "sdk/android/src/jni/androidvideotracksource.h"
namespace webrtc {
namespace jni {
namespace {
VideoRotation jintToVideoRotation(jint rotation) {
RTC_DCHECK(rotation == 0 || rotation == 90 || rotation == 180 ||
rotation == 270);
return static_cast<VideoRotation>(rotation);
}
} // namespace
ScopedJavaLocalRef<jobject> CreateJavaNativeCapturerObserver(
JNIEnv* env,
rtc::scoped_refptr<AndroidVideoTrackSource> native_source) {
return Java_NativeCapturerObserver_Constructor(
env, NativeToJavaPointer(native_source.release()));
}
static void JNI_NativeCapturerObserver_OnFrameCaptured(
JNIEnv* jni,
const JavaParamRef<jclass>&,
jlong j_source,
jint j_width,
jint j_height,
jint j_rotation,
jlong j_timestamp_ns,
const JavaParamRef<jobject>& j_video_frame_buffer) {
AndroidVideoTrackSource* source =
reinterpret_cast<AndroidVideoTrackSource*>(j_source);
source->OnFrameCaptured(jni, j_width, j_height, j_timestamp_ns,
jintToVideoRotation(j_rotation),
j_video_frame_buffer);
}
static void JNI_NativeCapturerObserver_CapturerStarted(
JNIEnv* jni,
const JavaParamRef<jclass>&,
jlong j_source,
jboolean j_success) {
RTC_LOG(LS_INFO) << "NativeCapturerObserver_nativeCapturerStarted";
AndroidVideoTrackSource* source =
reinterpret_cast<AndroidVideoTrackSource*>(j_source);
source->SetState(j_success ? AndroidVideoTrackSource::SourceState::kLive
: AndroidVideoTrackSource::SourceState::kEnded);
}
static void JNI_NativeCapturerObserver_CapturerStopped(
JNIEnv* jni,
const JavaParamRef<jclass>&,
jlong j_source) {
RTC_LOG(LS_INFO) << "NativeCapturerObserver_nativeCapturerStopped";
AndroidVideoTrackSource* source =
reinterpret_cast<AndroidVideoTrackSource*>(j_source);
source->SetState(AndroidVideoTrackSource::SourceState::kEnded);
}
} // namespace jni
} // namespace webrtc


@ -0,0 +1,29 @@
/*
* Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef SDK_ANDROID_SRC_JNI_NATIVECAPTUREROBSERVER_H_
#define SDK_ANDROID_SRC_JNI_NATIVECAPTUREROBSERVER_H_
#include <jni.h>
#include "sdk/android/native_api/jni/scoped_java_ref.h"
#include "sdk/android/src/jni/androidvideotracksource.h"
namespace webrtc {
namespace jni {
ScopedJavaLocalRef<jobject> CreateJavaNativeCapturerObserver(
JNIEnv* env,
rtc::scoped_refptr<AndroidVideoTrackSource> native_source);
} // namespace jni
} // namespace webrtc
#endif // SDK_ANDROID_SRC_JNI_NATIVECAPTUREROBSERVER_H_


@ -0,0 +1,46 @@
/*
* Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "api/videosourceproxy.h"
#include "rtc_base/logging.h"
#include "sdk/android/generated_video_jni/jni/VideoSource_jni.h"
#include "sdk/android/native_api/jni/java_types.h"
#include "sdk/android/src/jni/androidvideotracksource.h"
namespace webrtc {
namespace jni {
namespace {
AndroidVideoTrackSource* AndroidVideoTrackSourceFromJavaProxy(jlong j_proxy) {
auto* proxy_source = reinterpret_cast<VideoTrackSourceProxy*>(j_proxy);
return reinterpret_cast<AndroidVideoTrackSource*>(proxy_source->internal());
}
} // namespace
static jlong JNI_VideoSource_GetInternalSource(JNIEnv* jni,
const JavaParamRef<jclass>&,
jlong j_source) {
return NativeToJavaPointer(AndroidVideoTrackSourceFromJavaProxy(j_source));
}
static void JNI_VideoSource_AdaptOutputFormat(JNIEnv* jni,
const JavaParamRef<jclass>&,
jlong j_source,
jint j_width,
jint j_height,
jint j_fps) {
RTC_LOG(LS_INFO) << "VideoSource_nativeAdaptOutputFormat";
AndroidVideoTrackSource* source =
AndroidVideoTrackSourceFromJavaProxy(j_source);
source->OnOutputFormatRequest(j_width, j_height, j_fps);
}
} // namespace jni
} // namespace webrtc