Add support for adding VideoSinks to VideoTracks.

VideoSinks receive the new-style VideoFrames and will eventually replace
VideoRenderers. Converting old-style texture frames to VideoFrames
involves a costly conversion to I420, so using VideoSinks is not
recommended until all sources produce VideoFrames.

BUG=webrtc:7749, webrtc:7760

Review-Url: https://codereview.webrtc.org/3002553002
Cr-Commit-Position: refs/heads/master@{#19335}
Author: sakal
Date: 2017-08-14 05:17:49 -07:00
Committed by: Commit Bot
Commit: 0ba43b5a20 (parent dc5fc82c62)
9 changed files with 157 additions and 30 deletions
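
For context, a minimal sketch of how application code might migrate from addRenderer() to the new addSink() API. The package, class, and field names below are hypothetical and not part of this CL; only VideoTrack.addSink()/removeSink() and VideoSink.onFrame() come from the change itself.

package org.example.app; // hypothetical package, not part of this CL

import org.webrtc.VideoFrame;
import org.webrtc.VideoSink;
import org.webrtc.VideoTrack;

/** Sketch of moving a frame consumer from the VideoRenderer API to VideoSink. */
class SinkMigrationExample {
  private int frameCount;

  private final VideoSink frameCounter = new VideoSink() {
    @Override
    public void onFrame(VideoFrame frame) {
      // No retain() needed: the frame is not used after this method returns.
      frameCount++;
    }
  };

  void attach(VideoTrack track) {
    // Previously this would have gone through track.addRenderer(...) with a VideoRenderer.
    track.addSink(frameCounter);
  }

  void detach(VideoTrack track) {
    // Must pass the same instance that was given to addSink(); otherwise this is a no-op.
    track.removeSink(frameCounter);
  }
}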


@@ -417,6 +417,7 @@ android_library("libjingle_peerconnection_java") {
"api/org/webrtc/VideoFileRenderer.java",
"api/org/webrtc/VideoFrame.java",
"api/org/webrtc/VideoRenderer.java",
"api/org/webrtc/VideoSink.java",
"api/org/webrtc/VideoSource.java",
"api/org/webrtc/VideoTrack.java",
"src/java/org/webrtc/AndroidVideoTrackSourceObserver.java",


@@ -0,0 +1,23 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
/**
* Java version of rtc::VideoSinkInterface.
*/
public interface VideoSink {
/**
* Implementations should call frame.retain() if they need to hold a reference to the frame after
* this function returns. Each call to retain() should be followed by a call to frame.release()
* when the reference is no longer needed.
*/
void onFrame(VideoFrame frame);
}
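
To illustrate the retain()/release() contract described in the Javadoc above, here is a hedged sketch of a sink that hands frames off to another thread. The package, class name, executor, and process() method are hypothetical; retain() and release() are the VideoFrame methods referenced in the comment above.

package org.example.app; // hypothetical package, for illustration only

import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import org.webrtc.VideoFrame;
import org.webrtc.VideoSink;

/** Sink that processes frames off the delivery thread while honoring retain()/release(). */
class AsyncProcessingSink implements VideoSink {
  private final ExecutorService executor = Executors.newSingleThreadExecutor();

  @Override
  public void onFrame(VideoFrame frame) {
    // The frame outlives this call, so take a reference before returning.
    frame.retain();
    executor.execute(() -> {
      try {
        process(frame); // hypothetical downstream work
      } finally {
        // Every retain() must be balanced by a release().
        frame.release();
      }
    });
  }

  private void process(VideoFrame frame) {
    // Placeholder for e.g. analysis or encoding of the retained frame.
  }
}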


@@ -10,26 +10,55 @@
package org.webrtc;
import java.util.IdentityHashMap;
import java.util.LinkedList;
/** Java version of VideoTrackInterface. */
public class VideoTrack extends MediaStreamTrack {
private final LinkedList<VideoRenderer> renderers = new LinkedList<VideoRenderer>();
private final IdentityHashMap<VideoSink, Long> sinks = new IdentityHashMap<VideoSink, Long>();
public VideoTrack(long nativeTrack) {
super(nativeTrack);
}
/**
* Adds a VideoSink to the track.
*
* A track can have any number of VideoSinks. VideoSinks will eventually replace
* renderers. However, converting old-style texture frames involves a costly
* conversion to I420, so it is not recommended to upgrade until all of your
* sources produce VideoFrames.
*/
public void addSink(VideoSink sink) {
final long nativeSink = nativeWrapSink(sink);
sinks.put(sink, nativeSink);
nativeAddSink(nativeTrack, nativeSink);
}
/**
* Removes a VideoSink from the track.
*
* If the VideoSink was not attached to the track, this is a no-op.
*/
public void removeSink(VideoSink sink) {
final Long nativeSink = sinks.remove(sink);
if (nativeSink != null) {
nativeRemoveSink(nativeTrack, nativeSink);
nativeFreeSink(nativeSink);
}
}
public void addRenderer(VideoRenderer renderer) {
renderers.add(renderer);
- nativeAddRenderer(nativeTrack, renderer.nativeVideoRenderer);
nativeAddSink(nativeTrack, renderer.nativeVideoRenderer);
}
public void removeRenderer(VideoRenderer renderer) {
if (!renderers.remove(renderer)) {
return;
}
- nativeRemoveRenderer(nativeTrack, renderer.nativeVideoRenderer);
nativeRemoveSink(nativeTrack, renderer.nativeVideoRenderer);
renderer.dispose();
}
@@ -37,10 +66,17 @@ public class VideoTrack extends MediaStreamTrack {
while (!renderers.isEmpty()) {
removeRenderer(renderers.getFirst());
}
for (long nativeSink : sinks.values()) {
nativeRemoveSink(nativeTrack, nativeSink);
nativeFreeSink(nativeSink);
}
sinks.clear();
super.dispose();
}
- private static native void nativeAddRenderer(long nativeTrack, long nativeRenderer);
private static native void nativeAddSink(long nativeTrack, long nativeSink);
private static native void nativeRemoveSink(long nativeTrack, long nativeSink);
- private static native void nativeRemoveRenderer(long nativeTrack, long nativeRenderer);
private static native long nativeWrapSink(VideoSink sink);
private static native void nativeFreeSink(long nativeSink);
}
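
One consequence of the IdentityHashMap bookkeeping above: removeSink() only matches the exact instance previously passed to addSink(). A sketch of what that means for callers, using hypothetical app-side class and field names:

package org.example.app; // hypothetical package

import org.webrtc.VideoSink;
import org.webrtc.VideoTrack;

/** Keeps the attached sink instance so it can later be removed and its native wrapper freed. */
class SinkLifecycle {
  private VideoSink attachedSink; // hypothetical field holding the attached instance

  void start(VideoTrack track, VideoSink sink) {
    attachedSink = sink;
    track.addSink(attachedSink);
  }

  void stop(VideoTrack track) {
    if (attachedSink != null) {
      // Same instance => found in the IdentityHashMap, removed, and the native sink freed.
      track.removeSink(attachedSink);
      attachedSink = null;
    }
  }
}

Holding on to the attached instance is what lets removeSink() find and free the wrapper created by nativeWrapSink().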


@@ -37,6 +37,8 @@ class WrappedNativeI420Buffer implements VideoFrame.I420Buffer {
this.dataV = dataV;
this.strideV = strideV;
this.nativeBuffer = nativeBuffer;
retain();
}
@Override


@@ -113,6 +113,7 @@ ClassReferenceHolder::ClassReferenceHolder(JNIEnv* jni) {
LoadClass(jni, "org/webrtc/VideoFrame$I420Buffer");
LoadClass(jni, "org/webrtc/VideoFrame$TextureBuffer");
LoadClass(jni, "org/webrtc/VideoRenderer$I420Frame");
LoadClass(jni, "org/webrtc/VideoSink");
LoadClass(jni, "org/webrtc/VideoTrack");
LoadClass(jni, "org/webrtc/WrappedNativeI420Buffer");
}


@@ -21,6 +21,7 @@
#include "webrtc/rtc_base/timeutils.h"
#include "webrtc/sdk/android/src/jni/classreferenceholder.h"
#include "webrtc/sdk/android/src/jni/jni_helpers.h"
#include "webrtc/sdk/android/src/jni/wrapped_native_i420_buffer.h"
#include "webrtc/system_wrappers/include/aligned_malloc.h"
namespace webrtc_jni {
@@ -474,20 +475,37 @@ JavaVideoFrameFactory::JavaVideoFrameFactory(JNIEnv* jni)
"(Lorg/webrtc/VideoFrame$Buffer;IJ)V");
}
static bool IsJavaVideoBuffer(
rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer) {
if (buffer->type() != webrtc::VideoFrameBuffer::Type::kNative) {
return false;
}
AndroidVideoFrameBuffer* android_buffer =
static_cast<AndroidVideoFrameBuffer*>(buffer.get());
return android_buffer->android_type() ==
AndroidVideoFrameBuffer::AndroidType::kJavaBuffer;
}
jobject JavaVideoFrameFactory::ToJavaFrame(
JNIEnv* jni,
const webrtc::VideoFrame& frame) const {
- RTC_DCHECK(frame.video_frame_buffer()->type() ==
- webrtc::VideoFrameBuffer::Type::kNative);
rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer =
frame.video_frame_buffer();
jobject j_buffer;
if (IsJavaVideoBuffer(buffer)) {
RTC_DCHECK(buffer->type() == webrtc::VideoFrameBuffer::Type::kNative);
AndroidVideoFrameBuffer* android_buffer =
- static_cast<AndroidVideoFrameBuffer*>(frame.video_frame_buffer().get());
static_cast<AndroidVideoFrameBuffer*>(buffer.get());
RTC_DCHECK(android_buffer->android_type() ==
AndroidVideoFrameBuffer::AndroidType::kJavaBuffer);
AndroidVideoBuffer* android_video_buffer =
static_cast<AndroidVideoBuffer*>(android_buffer);
- jobject buffer = android_video_buffer->video_frame_buffer();
j_buffer = android_video_buffer->video_frame_buffer();
} else {
j_buffer = WrapI420Buffer(jni, buffer->ToI420());
}
return jni->NewObject(
- *j_video_frame_class_, j_video_frame_constructor_id_, buffer,
*j_video_frame_class_, j_video_frame_constructor_id_, j_buffer,
static_cast<jint>(frame.rotation()),
static_cast<jlong>(frame.timestamp_us() * rtc::kNumNanosecsPerMicrosec));
}


@@ -12,31 +12,78 @@
#include "webrtc/api/mediastreaminterface.h"
#include "webrtc/rtc_base/logging.h"
#include "webrtc/sdk/android/src/jni/classreferenceholder.h"
#include "webrtc/sdk/android/src/jni/jni_helpers.h"
#include "webrtc/sdk/android/src/jni/native_handle_impl.h"
namespace webrtc_jni {
namespace {
class VideoSinkWrapper : public rtc::VideoSinkInterface<webrtc::VideoFrame> {
public:
VideoSinkWrapper(JNIEnv* jni, jobject j_sink);
~VideoSinkWrapper() override {}
private:
void OnFrame(const webrtc::VideoFrame& frame) override;
jmethodID j_on_frame_method_;
const JavaVideoFrameFactory java_video_frame_factory_;
const ScopedGlobalRef<jobject> j_sink_;
};
VideoSinkWrapper::VideoSinkWrapper(JNIEnv* jni, jobject j_sink)
: java_video_frame_factory_(jni), j_sink_(jni, j_sink) {
jclass j_video_sink_class = FindClass(jni, "org/webrtc/VideoSink");
j_on_frame_method_ = jni->GetMethodID(j_video_sink_class, "onFrame",
"(Lorg/webrtc/VideoFrame;)V");
}
void VideoSinkWrapper::OnFrame(const webrtc::VideoFrame& frame) {
JNIEnv* jni = AttachCurrentThreadIfNeeded();
ScopedLocalRefFrame local_ref_frame(jni);
jni->CallVoidMethod(*j_sink_, j_on_frame_method_,
java_video_frame_factory_.ToJavaFrame(jni, frame));
}
} // namespace
extern "C" JNIEXPORT void JNICALL
- Java_org_webrtc_VideoTrack_nativeAddRenderer(JNIEnv* jni,
Java_org_webrtc_VideoTrack_nativeAddSink(JNIEnv* jni,
jclass,
- jlong j_video_track_pointer,
- jlong j_renderer_pointer) {
- LOG(LS_INFO) << "VideoTrack::nativeAddRenderer";
- reinterpret_cast<webrtc::VideoTrackInterface*>(j_video_track_pointer)
jlong j_native_track,
jlong j_native_sink) {
reinterpret_cast<webrtc::VideoTrackInterface*>(j_native_track)
->AddOrUpdateSink(
reinterpret_cast<rtc::VideoSinkInterface<webrtc::VideoFrame>*>(
- j_renderer_pointer),
j_native_sink),
rtc::VideoSinkWants());
}
extern "C" JNIEXPORT void JNICALL
- Java_org_webrtc_VideoTrack_nativeRemoveRenderer(JNIEnv* jni,
Java_org_webrtc_VideoTrack_nativeRemoveSink(JNIEnv* jni,
jclass,
- jlong j_video_track_pointer,
- jlong j_renderer_pointer) {
- reinterpret_cast<webrtc::VideoTrackInterface*>(j_video_track_pointer)
jlong j_native_track,
jlong j_native_sink) {
reinterpret_cast<webrtc::VideoTrackInterface*>(j_native_track)
->RemoveSink(
reinterpret_cast<rtc::VideoSinkInterface<webrtc::VideoFrame>*>(
- j_renderer_pointer));
j_native_sink));
}
extern "C" JNIEXPORT jlong JNICALL
Java_org_webrtc_VideoTrack_nativeWrapSink(JNIEnv* jni, jclass, jobject sink) {
return jlongFromPointer(new VideoSinkWrapper(jni, sink));
}
extern "C" JNIEXPORT void JNICALL
Java_org_webrtc_VideoTrack_nativeFreeSink(JNIEnv* jni,
jclass,
jlong j_native_sink) {
delete reinterpret_cast<rtc::VideoSinkInterface<webrtc::VideoFrame>*>(
j_native_sink);
}
} // namespace webrtc_jni


@@ -17,10 +17,8 @@ namespace webrtc_jni {
// TODO(magjed): Write a test for this function.
jobject WrapI420Buffer(
JNIEnv* jni,
const rtc::scoped_refptr<webrtc::I420BufferInterface>& i420_buffer) {
- JNIEnv* jni = AttachCurrentThreadIfNeeded();
- ScopedLocalRefFrame local_ref_frame(jni);
jclass j_wrapped_native_i420_buffer_class =
FindClass(jni, "org/webrtc/WrappedNativeI420Buffer");
jmethodID j_wrapped_native_i420_buffer_ctor_id =


@@ -20,6 +20,7 @@ namespace webrtc_jni {
// This function wraps the C++ I420 buffer and returns a Java
// VideoFrame.I420Buffer as a jobject.
jobject WrapI420Buffer(
JNIEnv* jni,
const rtc::scoped_refptr<webrtc::I420BufferInterface>& i420_buffer);
} // namespace webrtc_jni