Introduce class that handles native wrapping of AndroidVideoTrackSource
This CL separates concerns by introducing a simple class that only handles the JNI wrapping of a C++ AndroidVideoTrackSource. This layer can easily be mocked out in Java unit tests.

Bug: webrtc:10247
Change-Id: Idbdbfde6d3e00b64f3f310f76505801fa496580d
Reviewed-on: https://webrtc-review.googlesource.com/c/121562
Commit-Queue: Magnus Jedvert <magjed@webrtc.org>
Reviewed-by: Sami Kalliomäki <sakal@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#26556}
Committed by Commit Bot. Commit 99b275d126, parent b3032b6e33.
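
The stated motivation is testability: with every JNI call funneled through NativeAndroidVideoTrackSource, Java-level logic can be exercised against a mock of that class without loading any native library. Below is a minimal sketch, not part of this CL, assuming JUnit and Mockito on the test classpath; the test class name and the hand-wired CapturerObserver are illustrative (VideoSource constructs its NativeAndroidVideoTrackSource internally, so the sketch reproduces the delegation by hand rather than going through VideoSource).

package org.webrtc;

import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;

import org.junit.Test;

// Hypothetical test class, not part of this CL.
public class NativeAndroidVideoTrackSourceMockTest {
  @Test
  public void capturerStartAndStopAreForwardedAsSourceState() {
    // Mock out the JNI layer; no native code runs in this test.
    final NativeAndroidVideoTrackSource nativeSource = mock(NativeAndroidVideoTrackSource.class);

    // An observer wired the same way VideoSource wires its internal CapturerObserver.
    final CapturerObserver observer = new CapturerObserver() {
      @Override
      public void onCapturerStarted(boolean success) {
        nativeSource.setState(success);
      }

      @Override
      public void onCapturerStopped() {
        nativeSource.setState(/* isLive= */ false);
      }

      @Override
      public void onFrameCaptured(VideoFrame frame) {
        nativeSource.onFrameCaptured(frame);
      }
    };

    observer.onCapturerStarted(true);
    observer.onCapturerStopped();

    verify(nativeSource).setState(true);
    verify(nativeSource).setState(false);
  }
}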

sdk/android/BUILD.gn

@@ -327,6 +327,7 @@ if (is_android) {
       "api/org/webrtc/VideoTrack.java",
       "src/java/org/webrtc/NativeLibrary.java",
       "src/java/org/webrtc/NativeCapturerObserver.java",
+      "src/java/org/webrtc/NativeAndroidVideoTrackSource.java",
     ]
 
     deps = [
@@ -662,7 +663,6 @@ if (is_android) {
       "src/jni/video_encoder_wrapper.h",
       "src/jni/video_sink.cc",
       "src/jni/video_sink.h",
-      "src/jni/video_source.cc",
       "src/jni/video_track.cc",
       "src/jni/yuv_helper.cc",
     ]
@@ -1256,12 +1256,12 @@ if (is_android) {
       "api/org/webrtc/VideoEncoderFallback.java",
       "api/org/webrtc/VideoFrame.java",
       "api/org/webrtc/VideoSink.java",
-      "api/org/webrtc/VideoSource.java",
       "api/org/webrtc/VideoTrack.java",
       "api/org/webrtc/YuvHelper.java",
       "src/java/org/webrtc/H264Utils.java",
       "src/java/org/webrtc/NV12Buffer.java",
       "src/java/org/webrtc/NV21Buffer.java",
+      "src/java/org/webrtc/NativeAndroidVideoTrackSource.java",
       "src/java/org/webrtc/NativeCapturerObserver.java",
       "src/java/org/webrtc/VideoDecoderWrapper.java",
       "src/java/org/webrtc/VideoEncoderWrapper.java",

sdk/android/api/org/webrtc/VideoSource.java

@@ -16,11 +16,40 @@ import android.support.annotation.Nullable;
  * Java wrapper of native AndroidVideoTrackSource.
  */
 public class VideoSource extends MediaSource {
-  private final NativeCapturerObserver capturerObserver;
+  /** Simple aspect ratio clas for use in constraining output format. */
+  public static class AspectRatio {
+    public static final AspectRatio UNDEFINED = new AspectRatio(/* width= */ 0, /* height= */ 0);
+
+    public final int width;
+    public final int height;
+
+    public AspectRatio(int width, int height) {
+      this.width = width;
+      this.height = height;
+    }
+  }
+
+  private final NativeAndroidVideoTrackSource nativeAndroidVideoTrackSource;
+  private final CapturerObserver capturerObserver = new CapturerObserver() {
+    @Override
+    public void onCapturerStarted(boolean success) {
+      nativeAndroidVideoTrackSource.setState(success);
+    }
+
+    @Override
+    public void onCapturerStopped() {
+      nativeAndroidVideoTrackSource.setState(/* isLive= */ false);
+    }
+
+    @Override
+    public void onFrameCaptured(VideoFrame frame) {
+      nativeAndroidVideoTrackSource.onFrameCaptured(frame);
+    }
+  };
 
   public VideoSource(long nativeSource) {
     super(nativeSource);
-    this.capturerObserver = new NativeCapturerObserver(nativeSource);
+    this.nativeAndroidVideoTrackSource = new NativeAndroidVideoTrackSource(nativeSource);
   }
 
   /**
@@ -42,8 +71,18 @@ public class VideoSource extends MediaSource {
    */
   public void adaptOutputFormat(
       int landscapeWidth, int landscapeHeight, int portraitWidth, int portraitHeight, int fps) {
-    nativeAdaptOutputFormat(getNativeVideoTrackSource(), landscapeWidth, landscapeHeight,
-        portraitWidth, portraitHeight, fps);
+    adaptOutputFormat(new AspectRatio(landscapeWidth, landscapeHeight),
+        /* maxLandscapePixelCount= */ landscapeWidth * landscapeHeight,
+        new AspectRatio(portraitWidth, portraitHeight),
+        /* maxPortraitPixelCount= */ portraitWidth * portraitHeight, fps);
+  }
+
+  /** Same as above, with even more control as each constraint is optional. */
+  public void adaptOutputFormat(AspectRatio targetLandscapeAspectRatio,
+      @Nullable Integer maxLandscapePixelCount, AspectRatio targetPortraitAspectRatio,
+      @Nullable Integer maxPortraitPixelCount, @Nullable Integer maxFps) {
+    nativeAndroidVideoTrackSource.adaptOutputFormat(targetLandscapeAspectRatio,
+        maxLandscapePixelCount, targetPortraitAspectRatio, maxPortraitPixelCount, maxFps);
   }
 
   public CapturerObserver getCapturerObserver() {
@@ -54,7 +93,4 @@ public class VideoSource extends MediaSource {
   long getNativeVideoTrackSource() {
     return getNativeMediaSource();
   }
-
-  private static native void nativeAdaptOutputFormat(long source, int landscapeWidth,
-      int landscapeHeight, int portraitWidth, int portraitHeight, int fps);
 }
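
As context for the VideoSource.java change above, a usage sketch of the new adaptOutputFormat overload; the videoSource instance and the chosen values are illustrative and not taken from this CL.

// Constrain output to roughly 16:9 in landscape and 9:16 in portrait, cap the pixel count at
// about 720p worth of pixels, and limit the frame rate to 30 fps. Passing null for any of the
// @Nullable constraints leaves that dimension unconstrained.
VideoSource.AspectRatio landscape = new VideoSource.AspectRatio(/* width= */ 16, /* height= */ 9);
VideoSource.AspectRatio portrait = new VideoSource.AspectRatio(/* width= */ 9, /* height= */ 16);
videoSource.adaptOutputFormat(landscape, /* maxLandscapePixelCount= */ 1280 * 720,
    portrait, /* maxPortraitPixelCount= */ 720 * 1280, /* maxFps= */ 30);

// The existing convenience overload keeps working and now forwards to the call above:
videoSource.adaptOutputFormat(1280, 720, 720, 1280, 30);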

sdk/android/src/java/org/webrtc/NativeAndroidVideoTrackSource.java (new file)

@@ -0,0 +1,64 @@
+/*
+ * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.support.annotation.Nullable;
+import org.webrtc.VideoFrame;
+
+/**
+ * This class is meant to be a simple layer that only handles the JNI wrapping of a C++
+ * AndroidVideoTrackSource, that can easily be mocked out in Java unit tests. Refrain from adding
+ * any unnecessary logic to this class.
+ */
+class NativeAndroidVideoTrackSource {
+  // Pointer to webrtc::jni::AndroidVideoTrackSource.
+  private final long nativeAndroidVideoTrackSource;
+
+  public NativeAndroidVideoTrackSource(long nativeAndroidVideoTrackSource) {
+    this.nativeAndroidVideoTrackSource = nativeAndroidVideoTrackSource;
+  }
+
+  /**
+   * Set the state for the native MediaSourceInterface. Maps boolean to either
+   * SourceState::kLive or SourceState::kEnded.
+   */
+  public void setState(boolean isLive) {
+    nativeSetState(nativeAndroidVideoTrackSource, isLive);
+  }
+
+  /** Pass a frame to the native AndroidVideoTrackSource. */
+  public void onFrameCaptured(VideoFrame frame) {
+    nativeOnFrameCaptured(nativeAndroidVideoTrackSource, frame.getBuffer().getWidth(),
+        frame.getBuffer().getHeight(), frame.getRotation(), frame.getTimestampNs(),
+        frame.getBuffer());
+  }
+
+  /**
+   * Calling this function will cause frames to be scaled down to the requested resolution. Also,
+   * frames will be cropped to match the requested aspect ratio, and frames will be dropped to match
+   * the requested fps.
+   */
+  public void adaptOutputFormat(VideoSource.AspectRatio targetLandscapeAspectRatio,
+      @Nullable Integer maxLandscapePixelCount, VideoSource.AspectRatio targetPortraitAspectRatio,
+      @Nullable Integer maxPortraitPixelCount, @Nullable Integer maxFps) {
+    nativeAdaptOutputFormat(nativeAndroidVideoTrackSource, targetLandscapeAspectRatio.width,
+        targetLandscapeAspectRatio.height, maxLandscapePixelCount, targetPortraitAspectRatio.width,
+        targetPortraitAspectRatio.height, maxPortraitPixelCount, maxFps);
+  }
+
+  private static native void nativeSetState(long nativeAndroidVideoTrackSource, boolean isLive);
+  private static native void nativeAdaptOutputFormat(long nativeAndroidVideoTrackSource,
+      int landscapeWidth, int landscapeHeight, @Nullable Integer maxLandscapePixelCount,
+      int portraitWidth, int portraitHeight, @Nullable Integer maxPortraitPixelCount,
+      @Nullable Integer maxFps);
+  private static native void nativeOnFrameCaptured(long nativeAndroidVideoTrackSource, int width,
+      int height, int rotation, long timestampNs, VideoFrame.Buffer buffer);
+}

sdk/android/src/java/org/webrtc/NativeCapturerObserver.java

@@ -10,40 +10,32 @@
 
 package org.webrtc;
 
-import android.support.annotation.Nullable;
 import org.webrtc.VideoFrame;
 
 /**
- * Implements VideoCapturer.CapturerObserver and feeds frames to
- * webrtc::jni::AndroidVideoTrackSource.
+ * Used from native api and implements a simple VideoCapturer.CapturerObserver that feeds frames to
+ * a webrtc::jni::AndroidVideoTrackSource.
  */
 class NativeCapturerObserver implements CapturerObserver {
-  // Pointer to webrtc::jni::AndroidVideoTrackSource.
-  private final long nativeSource;
+  private final NativeAndroidVideoTrackSource nativeAndroidVideoTrackSource;
 
   @CalledByNative
   public NativeCapturerObserver(long nativeSource) {
-    this.nativeSource = nativeSource;
+    this.nativeAndroidVideoTrackSource = new NativeAndroidVideoTrackSource(nativeSource);
   }
 
   @Override
   public void onCapturerStarted(boolean success) {
-    nativeCapturerStarted(nativeSource, success);
+    nativeAndroidVideoTrackSource.setState(success);
   }
 
   @Override
   public void onCapturerStopped() {
-    nativeCapturerStopped(nativeSource);
+    nativeAndroidVideoTrackSource.setState(/* isLive= */ false);
   }
 
   @Override
   public void onFrameCaptured(VideoFrame frame) {
-    nativeOnFrameCaptured(nativeSource, frame.getBuffer().getWidth(), frame.getBuffer().getHeight(),
-        frame.getRotation(), frame.getTimestampNs(), frame.getBuffer());
+    nativeAndroidVideoTrackSource.onFrameCaptured(frame);
   }
-
-  private static native void nativeCapturerStarted(long source, boolean success);
-  private static native void nativeCapturerStopped(long source);
-  private static native void nativeOnFrameCaptured(
-      long source, int width, int height, int rotation, long timestampNs, VideoFrame.Buffer frame);
 }
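
For reference, the application-side wiring around getCapturerObserver() is unchanged by this CL; both the anonymous observer inside VideoSource and the NativeCapturerObserver above now delegate to NativeAndroidVideoTrackSource. A hedged sketch follows; the factory, capturer and SurfaceTextureHelper instances are assumed to be created elsewhere, and factory method signatures may differ between WebRTC releases.

// The capturer pushes frames into the source's CapturerObserver, which forwards them over JNI
// via NativeAndroidVideoTrackSource to webrtc::jni::AndroidVideoTrackSource.
VideoSource videoSource = peerConnectionFactory.createVideoSource(/* isScreencast= */ false);
videoCapturer.initialize(
    surfaceTextureHelper, applicationContext, videoSource.getCapturerObserver());
videoCapturer.startCapture(/* width= */ 1280, /* height= */ 720, /* framerate= */ 30);
VideoTrack videoTrack = peerConnectionFactory.createVideoTrack("video0", videoSource);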

sdk/android/src/jni/android_video_track_source.cc

@@ -10,6 +10,8 @@
 #include "sdk/android/src/jni/android_video_track_source.h"
 
+#include "sdk/android/generated_video_jni/jni/NativeAndroidVideoTrackSource_jni.h"
+
 #include <utility>
 
 #include "rtc_base/logging.h"
@@ -20,6 +22,20 @@ namespace jni {
 namespace {
 // MediaCodec wants resolution to be divisible by 2.
 const int kRequiredResolutionAlignment = 2;
+
+VideoRotation jintToVideoRotation(jint rotation) {
+  RTC_DCHECK(rotation == 0 || rotation == 90 || rotation == 180 ||
+             rotation == 270);
+  return static_cast<VideoRotation>(rotation);
+}
+
+absl::optional<std::pair<int, int>> OptionalAspectRatio(jint j_width,
+                                                        jint j_height) {
+  if (j_width > 0 && j_height > 0)
+    return std::pair<int, int>(j_width, j_height);
+  return absl::nullopt;
+}
+
 }  // namespace
 
 AndroidVideoTrackSource::AndroidVideoTrackSource(rtc::Thread* signaling_thread,
@@ -42,11 +58,17 @@ absl::optional<bool> AndroidVideoTrackSource::needs_denoising() const {
   return false;
 }
 
-void AndroidVideoTrackSource::SetState(SourceState state) {
+void AndroidVideoTrackSource::SetState(JNIEnv* env,
+                                       const JavaRef<jobject>& j_caller,
+                                       jboolean j_is_live) {
+  InternalSetState(j_is_live ? kLive : kEnded);
+}
+
+void AndroidVideoTrackSource::InternalSetState(SourceState state) {
   if (rtc::Thread::Current() != signaling_thread_) {
     invoker_.AsyncInvoke<void>(
         RTC_FROM_HERE, signaling_thread_,
-        rtc::Bind(&AndroidVideoTrackSource::SetState, this, state));
+        rtc::Bind(&AndroidVideoTrackSource::InternalSetState, this, state));
     return;
   }
 
@@ -65,13 +87,16 @@ bool AndroidVideoTrackSource::remote() const {
 }
 
 void AndroidVideoTrackSource::OnFrameCaptured(
-    JNIEnv* jni,
-    int width,
-    int height,
-    int64_t timestamp_ns,
-    VideoRotation rotation,
+    JNIEnv* env,
+    const JavaRef<jobject>& j_caller,
+    jint j_width,
+    jint j_height,
+    jint j_rotation,
+    jlong j_timestamp_ns,
     const JavaRef<jobject>& j_video_frame_buffer) {
-  int64_t camera_time_us = timestamp_ns / rtc::kNumNanosecsPerMicrosec;
+  const VideoRotation rotation = jintToVideoRotation(j_rotation);
+
+  int64_t camera_time_us = j_timestamp_ns / rtc::kNumNanosecsPerMicrosec;
   int64_t translated_camera_time_us =
       align_timestamps_ ? timestamp_aligner_.TranslateTimestamp(
                               camera_time_us, rtc::TimeMicros())
@@ -85,14 +110,14 @@ void AndroidVideoTrackSource::OnFrameCaptured(
   int crop_y;
 
   if (rotation % 180 == 0) {
-    if (!AdaptFrame(width, height, camera_time_us, &adapted_width,
+    if (!AdaptFrame(j_width, j_height, camera_time_us, &adapted_width,
                     &adapted_height, &crop_width, &crop_height, &crop_x,
                     &crop_y)) {
       return;
     }
   } else {
     // Swap all width/height and x/y.
-    if (!AdaptFrame(height, width, camera_time_us, &adapted_height,
+    if (!AdaptFrame(j_height, j_width, camera_time_us, &adapted_height,
                     &adapted_width, &crop_height, &crop_width, &crop_y,
                     &crop_x)) {
       return;
@@ -100,8 +125,8 @@ void AndroidVideoTrackSource::OnFrameCaptured(
   }
 
   rtc::scoped_refptr<VideoFrameBuffer> buffer =
-      AndroidVideoBuffer::Create(jni, j_video_frame_buffer)
-          ->CropAndScale(jni, crop_x, crop_y, crop_width, crop_height,
+      AndroidVideoBuffer::Create(env, j_video_frame_buffer)
+          ->CropAndScale(env, crop_x, crop_y, crop_width, crop_height,
                          adapted_width, adapted_height);
 
   // AdaptedVideoTrackSource handles applying rotation for I420 frames.
@@ -116,16 +141,22 @@ void AndroidVideoTrackSource::OnFrameCaptured(
           .build());
 }
 
-void AndroidVideoTrackSource::OnOutputFormatRequest(int landscape_width,
-                                                    int landscape_height,
-                                                    int portrait_width,
-                                                    int portrait_height,
-                                                    int fps) {
+void AndroidVideoTrackSource::AdaptOutputFormat(
+    JNIEnv* env,
+    const JavaRef<jobject>& j_caller,
+    jint j_landscape_width,
+    jint j_landscape_height,
+    const JavaRef<jobject>& j_max_landscape_pixel_count,
+    jint j_portrait_width,
+    jint j_portrait_height,
+    const JavaRef<jobject>& j_max_portrait_pixel_count,
+    const JavaRef<jobject>& j_max_fps) {
   video_adapter()->OnOutputFormatRequest(
-      std::make_pair(landscape_width, landscape_height),
-      landscape_width * landscape_height,
-      std::make_pair(portrait_width, portrait_height),
-      portrait_width * portrait_height, fps);
+      OptionalAspectRatio(j_landscape_width, j_landscape_height),
+      JavaToNativeOptionalInt(env, j_max_landscape_pixel_count),
+      OptionalAspectRatio(j_portrait_width, j_portrait_height),
+      JavaToNativeOptionalInt(env, j_max_portrait_pixel_count),
+      JavaToNativeOptionalInt(env, j_max_fps));
 }
 
 }  // namespace jni

sdk/android/src/jni/android_video_track_source.h

@@ -39,27 +39,37 @@ class AndroidVideoTrackSource : public rtc::AdaptedVideoTrackSource {
   // depending on video codec.
   absl::optional<bool> needs_denoising() const override;
 
-  // Called by the native capture observer
   void SetState(SourceState state);
 
   SourceState state() const override;
 
   bool remote() const override;
 
-  void OnFrameCaptured(JNIEnv* jni,
-                       int width,
-                       int height,
-                       int64_t timestamp_ns,
-                       VideoRotation rotation,
+  void OnFrameCaptured(JNIEnv* env,
+                       const JavaRef<jobject>& j_caller,
+                       jint j_width,
+                       jint j_height,
+                       jint j_rotation,
+                       jlong j_timestamp_ns,
                        const JavaRef<jobject>& j_video_frame_buffer);
 
-  void OnOutputFormatRequest(int landscape_width,
-                             int landscape_height,
-                             int portrait_width,
-                             int portrait_height,
-                             int fps);
+  void SetState(JNIEnv* env,
+                const JavaRef<jobject>& j_caller,
+                jboolean j_is_live);
+
+  void AdaptOutputFormat(JNIEnv* env,
+                         const JavaRef<jobject>& j_caller,
+                         jint j_landscape_width,
+                         jint j_landscape_height,
+                         const JavaRef<jobject>& j_max_landscape_pixel_count,
+                         jint j_portrait_width,
+                         jint j_portrait_height,
+                         const JavaRef<jobject>& j_max_portrait_pixel_count,
+                         const JavaRef<jobject>& j_max_fps);
 
  private:
+  void InternalSetState(SourceState state);
+
   rtc::Thread* signaling_thread_;
   rtc::AsyncInvoker invoker_;
   SourceState state_;

sdk/android/src/jni/native_capturer_observer.cc

@@ -18,14 +18,6 @@
 namespace webrtc {
 namespace jni {
 
-namespace {
-VideoRotation jintToVideoRotation(jint rotation) {
-  RTC_DCHECK(rotation == 0 || rotation == 90 || rotation == 180 ||
-             rotation == 270);
-  return static_cast<VideoRotation>(rotation);
-}
-}  // namespace
-
 ScopedJavaLocalRef<jobject> CreateJavaNativeCapturerObserver(
     JNIEnv* env,
     rtc::scoped_refptr<AndroidVideoTrackSource> native_source) {
@@ -33,40 +25,5 @@ ScopedJavaLocalRef<jobject> CreateJavaNativeCapturerObserver(
       env, NativeToJavaPointer(native_source.release()));
 }
 
-static void JNI_NativeCapturerObserver_OnFrameCaptured(
-    JNIEnv* jni,
-    jlong j_source,
-    jint j_width,
-    jint j_height,
-    jint j_rotation,
-    jlong j_timestamp_ns,
-    const JavaParamRef<jobject>& j_video_frame_buffer) {
-  AndroidVideoTrackSource* source =
-      reinterpret_cast<AndroidVideoTrackSource*>(j_source);
-  source->OnFrameCaptured(jni, j_width, j_height, j_timestamp_ns,
-                          jintToVideoRotation(j_rotation),
-                          j_video_frame_buffer);
-}
-
-static void JNI_NativeCapturerObserver_CapturerStarted(
-    JNIEnv* jni,
-    jlong j_source,
-    jboolean j_success) {
-  RTC_LOG(LS_INFO) << "NativeCapturerObserver_nativeCapturerStarted";
-  AndroidVideoTrackSource* source =
-      reinterpret_cast<AndroidVideoTrackSource*>(j_source);
-  source->SetState(j_success ? AndroidVideoTrackSource::SourceState::kLive
-                             : AndroidVideoTrackSource::SourceState::kEnded);
-}
-
-static void JNI_NativeCapturerObserver_CapturerStopped(
-    JNIEnv* jni,
-    jlong j_source) {
-  RTC_LOG(LS_INFO) << "NativeCapturerObserver_nativeCapturerStopped";
-  AndroidVideoTrackSource* source =
-      reinterpret_cast<AndroidVideoTrackSource*>(j_source);
-  source->SetState(AndroidVideoTrackSource::SourceState::kEnded);
-}
-
 }  // namespace jni
 }  // namespace webrtc

sdk/android/src/jni/video_source.cc (deleted)

@@ -1,33 +0,0 @@
-/*
- * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "rtc_base/logging.h"
-#include "sdk/android/generated_video_jni/jni/VideoSource_jni.h"
-#include "sdk/android/native_api/jni/java_types.h"
-#include "sdk/android/src/jni/android_video_track_source.h"
-
-namespace webrtc {
-namespace jni {
-
-static void JNI_VideoSource_AdaptOutputFormat(JNIEnv* jni,
-                                              jlong j_source,
-                                              jint j_landscape_width,
-                                              jint j_landscape_height,
-                                              jint j_portrait_width,
-                                              jint j_portrait_height,
-                                              jint j_fps) {
-  RTC_LOG(LS_INFO) << "VideoSource_nativeAdaptOutputFormat";
-  reinterpret_cast<AndroidVideoTrackSource*>(j_source)->OnOutputFormatRequest(
-      j_landscape_width, j_landscape_height, j_portrait_width,
-      j_portrait_height, j_fps);
-}
-
-}  // namespace jni
-}  // namespace webrtc