Remove proxy layer from AndroidVideoTrackSource

This layer is not needed since the methods are thread safe, and the
classes those methods touch (VideoBroadcaster, cricket::VideoAdapter)
are thread safe.

Bug: webrtc:10247
Change-Id: Id4e309de4ac1b9669052aaa60d3bd1ed965aaa29
Reviewed-on: https://webrtc-review.googlesource.com/c/120801
Reviewed-by: Niels Moller <nisse@webrtc.org>
Reviewed-by: Sami Kalliomäki <sakal@webrtc.org>
Commit-Queue: Magnus Jedvert <magjed@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#26543}
This commit is contained in:
Magnus Jedvert
2019-01-31 13:23:46 +01:00
committed by Commit Bot
parent 69b761e52b
commit 167316b833
9 changed files with 10 additions and 50 deletions

View File

@ -16,7 +16,6 @@
#include "api/audio_codecs/builtin_audio_decoder_factory.h"
#include "api/audio_codecs/builtin_audio_encoder_factory.h"
#include "api/create_peerconnection_factory.h"
#include "api/video_track_source_proxy.h"
#include "media/engine/internal_decoder_factory.h"
#include "media/engine/internal_encoder_factory.h"
#include "media/engine/multiplex_codec_factory.h"
@ -447,9 +446,6 @@ void SimplePeerConnection::AddStreams(bool audio_only) {
new rtc::RefCountedObject<webrtc::jni::AndroidVideoTrackSource>(
g_signaling_thread.get(), env, /* is_screencast= */ false,
/* align_timestamps= */ true));
rtc::scoped_refptr<webrtc::VideoTrackSourceProxy> proxy_source =
webrtc::VideoTrackSourceProxy::Create(g_signaling_thread.get(),
g_worker_thread.get(), source);
// link with VideoCapturer (Camera);
jmethodID link_camera_method = webrtc::GetStaticMethodID(
@ -457,13 +453,13 @@ void SimplePeerConnection::AddStreams(bool audio_only) {
"(JLorg/webrtc/SurfaceTextureHelper;)Lorg/webrtc/VideoCapturer;");
jobject camera_tmp =
env->CallStaticObjectMethod(pc_factory_class, link_camera_method,
(jlong)proxy_source.get(), texture_helper);
(jlong)source.get(), texture_helper);
CHECK_EXCEPTION(env);
g_camera = (jobject)env->NewGlobalRef(camera_tmp);
rtc::scoped_refptr<webrtc::VideoTrackInterface> video_track(
g_peer_connection_factory->CreateVideoTrack(kVideoLabel,
proxy_source.release()));
source.release()));
stream->AddTrack(video_track);
#else
rtc::scoped_refptr<CapturerTrackSource> video_device =

View File

@ -19,14 +19,11 @@
namespace rtc {
AdaptedVideoTrackSource::AdaptedVideoTrackSource() {
thread_checker_.DetachFromThread();
}
AdaptedVideoTrackSource::AdaptedVideoTrackSource() = default;
AdaptedVideoTrackSource::AdaptedVideoTrackSource(int required_alignment)
: video_adapter_(required_alignment) {
thread_checker_.DetachFromThread();
}
: video_adapter_(required_alignment) {}
AdaptedVideoTrackSource::~AdaptedVideoTrackSource() = default;
bool AdaptedVideoTrackSource::GetStats(Stats* stats) {
@ -71,16 +68,12 @@ void AdaptedVideoTrackSource::OnFrame(const webrtc::VideoFrame& frame) {
// Adds |sink| to the broadcaster (or updates its preferences if already
// registered), then recomputes the aggregate sink wants and forwards them
// to the video adapter via OnSinkWantsChanged.
// NOTE(review): this is the pre-change version from the diff — the
// RTC_DCHECK below is the line removed by this commit.
void AdaptedVideoTrackSource::AddOrUpdateSink(
rtc::VideoSinkInterface<webrtc::VideoFrame>* sink,
const rtc::VideoSinkWants& wants) {
RTC_DCHECK(thread_checker_.CalledOnValidThread());
broadcaster_.AddOrUpdateSink(sink, wants);
OnSinkWantsChanged(broadcaster_.wants());
}
// Removes |sink| from the broadcaster and re-derives the remaining sinks'
// aggregate wants so the adapter no longer accounts for the removed sink.
// NOTE(review): pre-change version — the RTC_DCHECK is removed by this commit.
void AdaptedVideoTrackSource::RemoveSink(
rtc::VideoSinkInterface<webrtc::VideoFrame>* sink) {
RTC_DCHECK(thread_checker_.CalledOnValidThread());
broadcaster_.RemoveSink(sink);
OnSinkWantsChanged(broadcaster_.wants());
}
@ -91,7 +84,6 @@ bool AdaptedVideoTrackSource::apply_rotation() {
// Pushes the aggregated sink preferences (target/max pixel count and max
// framerate) into the VideoAdapter, which constrains future frame output.
// NOTE(review): pre-change version — the RTC_DCHECK is removed by this commit
// on the grounds that VideoAdapter is itself thread safe.
void AdaptedVideoTrackSource::OnSinkWantsChanged(
const rtc::VideoSinkWants& wants) {
RTC_DCHECK(thread_checker_.CalledOnValidThread());
video_adapter_.OnResolutionFramerateRequest(
wants.target_pixel_count, wants.max_pixel_count, wants.max_framerate_fps);
}

View File

@ -23,7 +23,6 @@
#include "media/base/video_broadcaster.h"
#include "rtc_base/critical_section.h"
#include "rtc_base/thread_annotations.h"
#include "rtc_base/thread_checker.h"
namespace rtc {
@ -79,8 +78,6 @@ class AdaptedVideoTrackSource
void OnSinkWantsChanged(const rtc::VideoSinkWants& wants);
rtc::ThreadChecker thread_checker_;
cricket::VideoAdapter video_adapter_;
rtc::CriticalSection stats_crit_;

View File

@ -20,7 +20,7 @@ public class VideoSource extends MediaSource {
public VideoSource(long nativeSource) {
super(nativeSource);
this.capturerObserver = new NativeCapturerObserver(nativeGetInternalSource(nativeSource));
this.capturerObserver = new NativeCapturerObserver(nativeSource);
}
/**
@ -55,8 +55,6 @@ public class VideoSource extends MediaSource {
return getNativeMediaSource();
}
// Returns source->internal() from webrtc::VideoTrackSourceProxy.
private static native long nativeGetInternalSource(long source);
private static native void nativeAdaptOutputFormat(long source, int landscapeWidth,
int landscapeHeight, int portraitWidth, int portraitHeight, int fps);
}

View File

@ -12,7 +12,6 @@
#include <utility>
#include "api/video_track_source_proxy.h"
#include "rtc_base/logging.h"
namespace webrtc {
@ -32,7 +31,6 @@ AndroidVideoTrackSource::AndroidVideoTrackSource(rtc::Thread* signaling_thread,
is_screencast_(is_screencast),
align_timestamps_(align_timestamps) {
RTC_LOG(LS_INFO) << "AndroidVideoTrackSource ctor";
camera_thread_checker_.DetachFromThread();
}
AndroidVideoTrackSource::~AndroidVideoTrackSource() = default;
@ -73,8 +71,6 @@ void AndroidVideoTrackSource::OnFrameCaptured(
int64_t timestamp_ns,
VideoRotation rotation,
const JavaRef<jobject>& j_video_frame_buffer) {
RTC_DCHECK(camera_thread_checker_.CalledOnValidThread());
int64_t camera_time_us = timestamp_ns / rtc::kNumNanosecsPerMicrosec;
int64_t translated_camera_time_us =
align_timestamps_ ? timestamp_aligner_.TranslateTimestamp(

View File

@ -18,7 +18,6 @@
#include "media/base/adapted_video_track_source.h"
#include "rtc_base/async_invoker.h"
#include "rtc_base/checks.h"
#include "rtc_base/thread_checker.h"
#include "rtc_base/timestamp_aligner.h"
#include "sdk/android/src/jni/video_frame.h"
@ -63,7 +62,6 @@ class AndroidVideoTrackSource : public rtc::AdaptedVideoTrackSource {
private:
rtc::Thread* signaling_thread_;
rtc::AsyncInvoker invoker_;
rtc::ThreadChecker camera_thread_checker_;
SourceState state_;
const bool is_screencast_;
rtc::TimestampAligner timestamp_aligner_;

View File

@ -10,7 +10,6 @@
#include "sdk/android/src/jni/native_capturer_observer.h"
#include "api/video_track_source_proxy.h"
#include "rtc_base/logging.h"
#include "sdk/android/generated_video_jni/jni/NativeCapturerObserver_jni.h"
#include "sdk/android/native_api/jni/java_types.h"

View File

@ -15,7 +15,6 @@
#include "api/video_codecs/video_decoder_factory.h"
#include "api/video_codecs/video_encoder_factory.h"
#include "api/video_track_source_proxy.h"
#include "rtc_base/logging.h"
#include "sdk/android/native_api/jni/java_types.h"
#include "sdk/android/src/jni/android_video_track_source.h"
@ -49,8 +48,7 @@ void* CreateVideoSource(JNIEnv* env,
rtc::scoped_refptr<AndroidVideoTrackSource> source(
new rtc::RefCountedObject<AndroidVideoTrackSource>(
signaling_thread, env, is_screencast, align_timestamps));
return VideoTrackSourceProxy::Create(signaling_thread, worker_thread, source)
.release();
return source.release();
}
} // namespace jni

View File

@ -8,7 +8,6 @@
* be found in the AUTHORS file in the root of the source tree.
*/
#include "api/video_track_source_proxy.h"
#include "rtc_base/logging.h"
#include "sdk/android/generated_video_jni/jni/VideoSource_jni.h"
#include "sdk/android/native_api/jni/java_types.h"
@ -17,18 +16,6 @@
namespace webrtc {
namespace jni {
namespace {
// Unwraps a jlong handle (a VideoTrackSourceProxy pointer passed over JNI)
// to the underlying AndroidVideoTrackSource via proxy->internal().
// NOTE(review): this helper is DELETED by this commit — after it, the jlong
// holds the AndroidVideoTrackSource directly, with no proxy layer.
AndroidVideoTrackSource* AndroidVideoTrackSourceFromJavaProxy(jlong j_proxy) {
auto* proxy_source = reinterpret_cast<VideoTrackSourceProxy*>(j_proxy);
return reinterpret_cast<AndroidVideoTrackSource*>(proxy_source->internal());
}
} // namespace
// JNI entry point backing VideoSource.nativeGetInternalSource(): returns the
// unwrapped source pointer as a Java long.
// NOTE(review): this native method is DELETED by this commit along with its
// Java declaration, since the proxy layer it unwrapped no longer exists.
static jlong JNI_VideoSource_GetInternalSource(JNIEnv* jni,
jlong j_source) {
return NativeToJavaPointer(AndroidVideoTrackSourceFromJavaProxy(j_source));
}
static void JNI_VideoSource_AdaptOutputFormat(JNIEnv* jni,
jlong j_source,
jint j_landscape_width,
@ -37,10 +24,9 @@ static void JNI_VideoSource_AdaptOutputFormat(JNIEnv* jni,
jint j_portrait_height,
jint j_fps) {
RTC_LOG(LS_INFO) << "VideoSource_nativeAdaptOutputFormat";
AndroidVideoTrackSource* source =
AndroidVideoTrackSourceFromJavaProxy(j_source);
source->OnOutputFormatRequest(j_landscape_width, j_landscape_height,
j_portrait_width, j_portrait_height, j_fps);
reinterpret_cast<AndroidVideoTrackSource*>(j_source)->OnOutputFormatRequest(
j_landscape_width, j_landscape_height, j_portrait_width,
j_portrait_height, j_fps);
}
} // namespace jni