Moving src/webrtc into src/.
In order to eliminate the WebRTC Subtree mirror in Chromium,
WebRTC is moving the content of the src/webrtc directory up
to the src/ directory.

NOPRESUBMIT=true
NOTREECHECKS=true
NOTRY=true
TBR=tommi@webrtc.org

Bug: chromium:611808
Change-Id: Iac59c5b51b950f174119565bac87955a7994bc38
Reviewed-on: https://webrtc-review.googlesource.com/1560
Commit-Queue: Mirko Bonadei <mbonadei@webrtc.org>
Reviewed-by: Henrik Kjellander <kjellander@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#19845}
Commit bb547203bf (parent 6674846b4a), committed by Commit Bot.
sdk/android/AndroidManifest.xml  Normal file  (14 lines added)
@@ -0,0 +1,14 @@
<?xml version="1.0" encoding="utf-8"?>
<!--
 * Copyright 2017 The WebRTC Project Authors. All rights reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
-->
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
    package="org.webrtc">
  <uses-sdk android:minSdkVersion="16" android:targetSdkVersion="23" />
</manifest>
sdk/android/BUILD.gn  Normal file  (524 lines added)
@@ -0,0 +1,524 @@
# Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license
# that can be found in the LICENSE file in the root of the source
# tree. An additional intellectual property rights grant can be found
# in the file PATENTS. All contributing project authors may
# be found in the AUTHORS file in the root of the source tree.

import("../../webrtc.gni")
import("//build/config/android/config.gni")
import("//build/config/android/rules.gni")

group("android") {
  if (!build_with_chromium && is_android) {
    public_deps = [
      ":libjingle_peerconnection_datachannelonly_so",
      ":libjingle_peerconnection_jni",
      ":libjingle_peerconnection_so",
      ":libwebrtc",
    ]
  }
}

config("libjingle_peerconnection_jni_warnings_config") {
  # The warnings below are enabled by default. Since GN orders compiler flags
  # for a target before flags from configs, the only way to disable such
  # warnings is by having them in a separate config, loaded from the target.
  if (!is_win) {
    cflags = [
      "-Wno-sign-compare",
      "-Wno-unused-variable",
    ]
  }
}

rtc_source_set("base_jni") {
  sources = [
    "src/jni/androidhistogram_jni.cc",
    "src/jni/classreferenceholder.cc",
    "src/jni/classreferenceholder.h",
    "src/jni/jni_common.cc",
    "src/jni/jni_helpers.cc",
    "src/jni/jni_helpers.h",
    "src/jni/pc/audio_jni.h",
    "src/jni/pc/media_jni.h",
    "src/jni/pc/video_jni.h",
  ]

  deps = [
    "../../api:libjingle_peerconnection_api",
    "../../rtc_base:rtc_base",
    "../../rtc_base:rtc_base_approved",
    "../../system_wrappers:metrics_api",
  ]

  if (is_clang) {
    # Suppress warnings from the Chromium Clang plugin (bugs.webrtc.org/163).
    suppressed_configs += [
      "//build/config/clang:extra_warnings",
      "//build/config/clang:find_bad_constructs",
    ]
  }
}

rtc_static_library("audio_jni") {
  sources = [
    "src/jni/pc/audio_jni.cc",
  ]

  deps = [
    ":base_jni",
    "../../api/audio_codecs:builtin_audio_decoder_factory",
    "../../api/audio_codecs:builtin_audio_encoder_factory",
    "../../voice_engine:voice_engine",
  ]
}

rtc_static_library("null_audio_jni") {
  sources = [
    "src/jni/pc/null_audio_jni.cc",
  ]

  deps = [
    ":base_jni",
  ]
}

rtc_static_library("video_jni") {
  sources = [
    "src/jni/androidmediacodeccommon.h",
    "src/jni/androidmediadecoder_jni.cc",
    "src/jni/androidmediadecoder_jni.h",
    "src/jni/androidmediaencoder_jni.cc",
    "src/jni/androidmediaencoder_jni.h",
    "src/jni/androidvideotracksource.cc",
    "src/jni/androidvideotracksource.h",
    "src/jni/androidvideotracksource_jni.cc",
    "src/jni/filevideocapturer_jni.cc",
    "src/jni/native_handle_impl.cc",
    "src/jni/native_handle_impl.h",
    "src/jni/nv12buffer_jni.cc",
    "src/jni/nv21buffer_jni.cc",
    "src/jni/pc/video_jni.cc",
    "src/jni/surfacetexturehelper_jni.cc",
    "src/jni/surfacetexturehelper_jni.h",
    "src/jni/video_renderer_jni.cc",
    "src/jni/videodecoderfactorywrapper.cc",
    "src/jni/videodecoderfactorywrapper.h",
    "src/jni/videodecoderwrapper.cc",
    "src/jni/videodecoderwrapper.h",
    "src/jni/videoencoderfactorywrapper.cc",
    "src/jni/videoencoderfactorywrapper.h",
    "src/jni/videoencoderwrapper.cc",
    "src/jni/videoencoderwrapper.h",
    "src/jni/videofilerenderer_jni.cc",
    "src/jni/videoframe_jni.cc",
    "src/jni/videotrack_jni.cc",
    "src/jni/wrapped_native_i420_buffer.cc",
    "src/jni/wrapped_native_i420_buffer.h",
  ]

  configs += [ ":libjingle_peerconnection_jni_warnings_config" ]

  if (is_clang) {
    # Suppress warnings from the Chromium Clang plugin (bugs.webrtc.org/163).
    suppressed_configs += [
      "//build/config/clang:extra_warnings",
      "//build/config/clang:find_bad_constructs",
    ]
  }

  # TODO(jschuh): Bug 1348: fix this warning.
  configs += [ "//build/config/compiler:no_size_t_to_int_warning" ]

  if (is_win) {
    cflags += [
      "/wd4245", # conversion from "int" to "size_t", signed/unsigned mismatch.
      "/wd4389", # signed/unsigned mismatch.
    ]
  }

  deps = [
    ":base_jni",
    ":peerconnection_jni",
    "../..:webrtc_common",
    "../../api:libjingle_peerconnection_api",
    "../../api:video_frame_api",
    "../../api/video_codecs:video_codecs_api",
    "../../common_video:common_video",
    "../../media:rtc_audio_video",
    "../../media:rtc_media_base",
    "../../modules:module_api",
    "../../modules/utility:utility",
    "../../modules/video_coding:video_coding_utility",
    "../../rtc_base:rtc_base",
    "../../rtc_base:rtc_base_approved",
    "../../rtc_base:rtc_task_queue",
    "../../rtc_base:sequenced_task_checker",
    "../../rtc_base:weak_ptr",
    "../../system_wrappers:system_wrappers",
  ]

  if (rtc_build_libyuv) {
    deps += [ "$rtc_libyuv_dir" ]
    public_deps = [
      "$rtc_libyuv_dir",
    ]
  } else {
    # Need to add a directory normally exported by libyuv.
    include_dirs = [ "$rtc_libyuv_dir/include" ]
  }
}

rtc_static_library("null_video_jni") {
  sources = [
    "src/jni/pc/null_video_jni.cc",
  ]

  deps = [
    ":base_jni",
  ]
}

rtc_static_library("media_jni") {
  sources = [
    "src/jni/pc/media_jni.cc",
  ]

  deps = [
    ":base_jni",
    "../../call:call_interfaces",
    "../../logging:rtc_event_log_api",
    "../../media:rtc_audio_video",
    "../../modules/audio_processing:audio_processing",
  ]

  if (is_clang) {
    # Suppress warnings from the Chromium Clang plugin (bugs.webrtc.org/163).
    suppressed_configs += [
      "//build/config/clang:extra_warnings",
      "//build/config/clang:find_bad_constructs",
    ]
  }
}

rtc_static_library("null_media_jni") {
  sources = [
    "src/jni/pc/null_media_jni.cc",
  ]

  deps = [
    ":base_jni",
  ]
}

rtc_static_library("peerconnection_jni") {
  sources = [
    "src/jni/androidnetworkmonitor_jni.h",
    "src/jni/pc/androidnetworkmonitor_jni.cc",
    "src/jni/pc/androidnetworkmonitor_jni.h",
    "src/jni/pc/audiotrack_jni.cc",
    "src/jni/pc/callsessionfilerotatinglogsink_jni.cc",
    "src/jni/pc/datachannel_jni.cc",
    "src/jni/pc/datachannelobserver_jni.cc",
    "src/jni/pc/datachannelobserver_jni.h",
    "src/jni/pc/dtmfsender_jni.cc",
    "src/jni/pc/java_native_conversion.cc",
    "src/jni/pc/java_native_conversion.h",
    "src/jni/pc/logging_jni.cc",
    "src/jni/pc/mediaconstraints_jni.cc",
    "src/jni/pc/mediaconstraints_jni.h",
    "src/jni/pc/mediasource_jni.cc",
    "src/jni/pc/mediastream_jni.cc",
    "src/jni/pc/mediastreamtrack_jni.cc",
    "src/jni/pc/ownedfactoryandthreads.cc",
    "src/jni/pc/ownedfactoryandthreads.h",
    "src/jni/pc/peerconnection_jni.cc",
    "src/jni/pc/peerconnectionfactory_jni.cc",
    "src/jni/pc/peerconnectionobserver_jni.cc",
    "src/jni/pc/peerconnectionobserver_jni.h",
    "src/jni/pc/rtcstatscollectorcallbackwrapper.cc",
    "src/jni/pc/rtcstatscollectorcallbackwrapper.h",
    "src/jni/pc/rtpreceiver_jni.cc",
    "src/jni/pc/rtpreceiverobserver_jni.cc",
    "src/jni/pc/rtpreceiverobserver_jni.h",
    "src/jni/pc/rtpsender_jni.cc",
    "src/jni/pc/sdpobserver_jni.h",
    "src/jni/pc/statsobserver_jni.cc",
    "src/jni/pc/statsobserver_jni.h",
  ]

  configs += [ ":libjingle_peerconnection_jni_warnings_config" ]

  if (is_clang) {
    # Suppress warnings from the Chromium Clang plugin (bugs.webrtc.org/163).
    suppressed_configs += [
      "//build/config/clang:extra_warnings",
      "//build/config/clang:find_bad_constructs",
    ]
  }

  # TODO(jschuh): Bug 1348: fix this warning.
  configs += [ "//build/config/compiler:no_size_t_to_int_warning" ]

  if (is_win) {
    cflags += [
      "/wd4245", # conversion from "int" to "size_t", signed/unsigned mismatch.
      "/wd4389", # signed/unsigned mismatch.
    ]
  }

  deps = [
    ":base_jni",
    "../..:webrtc_common",
    "../../media:rtc_data",
    "../../media:rtc_media_base",
    "../../modules/utility:utility",
    "../../pc:peerconnection",
    "../../rtc_base:rtc_base",
    "../../rtc_base:rtc_base_approved",
    "../../rtc_base:rtc_task_queue",
    "../../system_wrappers:system_wrappers",
  ]
}

rtc_static_library("libjingle_peerconnection_jni") {
  public_deps = [
    ":audio_jni",
    ":base_jni",
    ":media_jni",
    ":peerconnection_jni",
    ":video_jni",
    "../../pc:create_pc_factory",
  ]
}

rtc_static_library("libjingle_peerconnection_metrics_default_jni") {
  sources = [
    "src/jni/androidmetrics_jni.cc",
  ]

  configs += [ ":libjingle_peerconnection_jni_warnings_config" ]

  deps = [
    ":base_jni",
    ":peerconnection_jni",
    "../../pc:peerconnection",
    "../../system_wrappers",
    "../../system_wrappers:field_trial_default",
    "../../system_wrappers:metrics_default",
  ]
}

# The modular build targets can be used to build WebRTC with different
# functionalities. The users can choose either the real implemenation or the
# null implementation of the audio/video modules based on their requirments.
rtc_shared_library("libjingle_peerconnection_datachannelonly_so") {
  sources = [
    "src/jni/jni_onload.cc",
  ]

  suppressed_configs += [ "//build/config/android:hide_all_but_jni_onload" ]
  configs += [ "//build/config/android:hide_all_but_jni" ]

  deps = [
    ":base_jni",
    ":libjingle_peerconnection_metrics_default_jni",
    ":null_audio_jni",
    ":null_media_jni",
    ":null_video_jni",
    ":peerconnection_jni",
    "../../pc:peerconnection",
    "../../rtc_base:rtc_base",
    "../../rtc_base:rtc_base_approved",
  ]
  output_extension = "so"
}

rtc_shared_library("libjingle_peerconnection_so") {
  sources = [
    "src/jni/jni_onload.cc",
  ]

  suppressed_configs += [ "//build/config/android:hide_all_but_jni_onload" ]
  configs += [ "//build/config/android:hide_all_but_jni" ]

  deps = [
    ":libjingle_peerconnection_jni",
    ":libjingle_peerconnection_metrics_default_jni",
    "../../pc:libjingle_peerconnection",
    "../../rtc_base:rtc_base",
  ]
  output_extension = "so"
}

dist_jar("libwebrtc") {
  _target_dir_name = get_label_info(":$target_name", "dir")
  output = "${root_out_dir}/lib.java${_target_dir_name}/${target_name}.jar"
  direct_deps_only = true
  use_interface_jars = false
  deps = [
    ":libjingle_peerconnection_java",
    ":libjingle_peerconnection_metrics_default_java",
    "../../modules/audio_device:audio_device_java",
    "../../rtc_base:base_java",
  ]
}

android_library("libjingle_peerconnection_java") {
  java_files = [
    "api/org/webrtc/AudioSource.java",
    "api/org/webrtc/AudioTrack.java",
    "api/org/webrtc/CallSessionFileRotatingLogSink.java",
    "api/org/webrtc/Camera1Capturer.java",
    "api/org/webrtc/Camera1Enumerator.java",
    "api/org/webrtc/Camera2Capturer.java",
    "api/org/webrtc/Camera2Enumerator.java",
    "api/org/webrtc/CameraEnumerationAndroid.java",
    "api/org/webrtc/CameraEnumerator.java",
    "api/org/webrtc/CameraVideoCapturer.java",
    "api/org/webrtc/DataChannel.java",
    "api/org/webrtc/DtmfSender.java",
    "api/org/webrtc/EglBase.java",
    "api/org/webrtc/EglRenderer.java",
    "api/org/webrtc/EncodedImage.java",
    "api/org/webrtc/FileVideoCapturer.java",
    "api/org/webrtc/GlRectDrawer.java",
    "api/org/webrtc/GlShader.java",
    "api/org/webrtc/GlTextureFrameBuffer.java",
    "api/org/webrtc/GlUtil.java",
    "api/org/webrtc/HardwareVideoDecoderFactory.java",
    "api/org/webrtc/HardwareVideoEncoderFactory.java",
    "api/org/webrtc/IceCandidate.java",
    "api/org/webrtc/MediaCodecVideoDecoder.java",
    "api/org/webrtc/MediaCodecVideoEncoder.java",
    "api/org/webrtc/MediaConstraints.java",
    "api/org/webrtc/MediaSource.java",
    "api/org/webrtc/MediaStream.java",
    "api/org/webrtc/MediaStreamTrack.java",
    "api/org/webrtc/NetworkMonitor.java",
    "api/org/webrtc/NetworkMonitorAutoDetect.java",
    "api/org/webrtc/PeerConnection.java",
    "api/org/webrtc/PeerConnectionFactory.java",
    "api/org/webrtc/RendererCommon.java",
    "api/org/webrtc/RTCStats.java",
    "api/org/webrtc/RTCStatsCollectorCallback.java",
    "api/org/webrtc/RTCStatsReport.java",
    "api/org/webrtc/RtpParameters.java",
    "api/org/webrtc/RtpReceiver.java",
    "api/org/webrtc/RtpSender.java",
    "api/org/webrtc/ScreenCapturerAndroid.java",
    "api/org/webrtc/SdpObserver.java",
    "api/org/webrtc/SessionDescription.java",
    "api/org/webrtc/StatsObserver.java",
    "api/org/webrtc/StatsReport.java",
    "api/org/webrtc/SurfaceTextureHelper.java",
    "api/org/webrtc/SurfaceViewRenderer.java",
    "api/org/webrtc/VideoCapturer.java",
    "api/org/webrtc/VideoCodecInfo.java",
    "api/org/webrtc/VideoCodecStatus.java",
    "api/org/webrtc/VideoDecoder.java",
    "api/org/webrtc/VideoDecoderFactory.java",
    "api/org/webrtc/VideoEncoder.java",
    "api/org/webrtc/VideoEncoderFactory.java",
    "api/org/webrtc/VideoFileRenderer.java",
    "api/org/webrtc/VideoFrame.java",
    "api/org/webrtc/VideoFrameDrawer.java",
    "api/org/webrtc/VideoRenderer.java",
    "api/org/webrtc/VideoSink.java",
    "api/org/webrtc/VideoSource.java",
    "api/org/webrtc/VideoTrack.java",
    "src/java/org/webrtc/AndroidVideoTrackSourceObserver.java",
    "src/java/org/webrtc/BaseBitrateAdjuster.java",
    "src/java/org/webrtc/BitrateAdjuster.java",
    "src/java/org/webrtc/Camera1Session.java",
    "src/java/org/webrtc/Camera2Session.java",
    "src/java/org/webrtc/CameraCapturer.java",
    "src/java/org/webrtc/CameraSession.java",
    "src/java/org/webrtc/DynamicBitrateAdjuster.java",
    "src/java/org/webrtc/EglBase10.java",
    "src/java/org/webrtc/EglBase14.java",
    "src/java/org/webrtc/FramerateBitrateAdjuster.java",
    "src/java/org/webrtc/HardwareVideoDecoder.java",
    "src/java/org/webrtc/HardwareVideoEncoder.java",
    "src/java/org/webrtc/Histogram.java",
    "src/java/org/webrtc/I420BufferImpl.java",
    "src/java/org/webrtc/JniCommon.java",
    "src/java/org/webrtc/MediaCodecUtils.java",
    "src/java/org/webrtc/NV12Buffer.java",
    "src/java/org/webrtc/NV21Buffer.java",
    "src/java/org/webrtc/TextureBufferImpl.java",
    "src/java/org/webrtc/VideoCodecType.java",
    "src/java/org/webrtc/VideoDecoderWrapperCallback.java",
    "src/java/org/webrtc/VideoEncoderWrapperCallback.java",
    "src/java/org/webrtc/WrappedNativeI420Buffer.java",
    "src/java/org/webrtc/YuvConverter.java",
  ]

  deps = [
    "../../modules/audio_device:audio_device_java",
    "../../rtc_base:base_java",
  ]

  # TODO(sakal): Fix build hooks crbug.com/webrtc/8148
  no_build_hooks = true
}

android_library("libjingle_peerconnection_metrics_default_java") {
  java_files = [ "api/org/webrtc/Metrics.java" ]

  deps = [
    "../../rtc_base:base_java",
  ]

  # TODO(sakal): Fix build hooks crbug.com/webrtc/8148
  no_build_hooks = true
}

if (rtc_include_tests) {
  instrumentation_test_apk("libjingle_peerconnection_android_unittest") {
    apk_name = "libjingle_peerconnection_android_unittest"
    android_manifest = "instrumentationtests/AndroidManifest.xml"

    java_files = [
      "instrumentationtests/src/org/webrtc/Camera1CapturerUsingByteBufferTest.java",
      "instrumentationtests/src/org/webrtc/Camera1CapturerUsingTextureTest.java",
      "instrumentationtests/src/org/webrtc/Camera2CapturerTest.java",
      "instrumentationtests/src/org/webrtc/CameraVideoCapturerTestFixtures.java",
      "instrumentationtests/src/org/webrtc/EglRendererTest.java",
      "instrumentationtests/src/org/webrtc/FileVideoCapturerTest.java",
      "instrumentationtests/src/org/webrtc/GlRectDrawerTest.java",
      "instrumentationtests/src/org/webrtc/HardwareVideoEncoderTest.java",
      "instrumentationtests/src/org/webrtc/HardwareVideoDecoderTest.java",
      "instrumentationtests/src/org/webrtc/MediaCodecVideoEncoderTest.java",
      "instrumentationtests/src/org/webrtc/NetworkMonitorTest.java",
      "instrumentationtests/src/org/webrtc/PeerConnectionTest.java",
      "instrumentationtests/src/org/webrtc/RendererCommonTest.java",
      "instrumentationtests/src/org/webrtc/SurfaceTextureHelperTest.java",
      "instrumentationtests/src/org/webrtc/SurfaceViewRendererOnMeasureTest.java",
      "instrumentationtests/src/org/webrtc/VideoFileRendererTest.java",
      "instrumentationtests/src/org/webrtc/WebRtcJniBootTest.java",
    ]

    data = [
      "../../sdk/android/instrumentationtests/src/org/webrtc/capturetestvideo.y4m",
    ]

    deps = [
      "../../rtc_base:base_java",
      "../../sdk/android:libjingle_peerconnection_java",
      "../../sdk/android:libjingle_peerconnection_metrics_default_java",
      "//base:base_java",
      "//base:base_java_test_support",
      "//third_party/android_support_test_runner:rules_java",
      "//third_party/android_support_test_runner:runner_java",
      "//third_party/junit",
    ]

    shared_libraries = [ "../../sdk/android:libjingle_peerconnection_so" ]

    # TODO(sakal): Fix build hooks crbug.com/webrtc/8148
    no_build_hooks = true
  }
}
sdk/android/OWNERS  Normal file  (3 lines added)
@@ -0,0 +1,3 @@
glaznev@webrtc.org
magjed@webrtc.org
sakal@webrtc.org
sdk/android/PRESUBMIT.py  Normal file  (28 lines added)
@@ -0,0 +1,28 @@
# Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license
# that can be found in the LICENSE file in the root of the source
# tree. An additional intellectual property rights grant can be found
# in the file PATENTS. All contributing project authors may
# be found in the AUTHORS file in the root of the source tree.

def CheckChangeOnUpload(input_api, output_api):
  results = []
  results.extend(CheckPatchFormatted(input_api, output_api))
  return results

def CheckPatchFormatted(input_api, output_api):
  import git_cl
  cmd = ['cl', 'format', '--dry-run', input_api.PresubmitLocalPath()]
  code, _ = git_cl.RunGitWithCode(cmd, suppress_stderr=True)
  if code == 2:
    short_path = input_api.basename(input_api.PresubmitLocalPath())
    full_path = input_api.os_path.relpath(input_api.PresubmitLocalPath(),
                                          input_api.change.RepositoryRoot())
    return [output_api.PresubmitPromptWarning(
        'The %s directory requires source formatting. '
        'Please run git cl format %s' %
        (short_path, full_path))]
  # As this is just a warning, ignore all other errors if the user
  # happens to have a broken clang-format, doesn't use git, etc etc.
  return []
sdk/android/README  Normal file  (11 lines added)
@@ -0,0 +1,11 @@
This directory holds a Java implementation of the webrtc::PeerConnection API, as
well as the JNI glue C++ code that lets the Java implementation reuse the C++
implementation of the same API.

To build the Java API and related tests, generate GN projects with:
  --args='target_os="android"'

To use the Java API, start by looking at the public interface of
org.webrtc.PeerConnection{,Factory} and the org.webrtc.PeerConnectionTest.

To understand the implementation of the API, see the native code in jni/.
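As a hedged orientation sketch (not part of this change), application code typically loads the native library produced by the libjingle_peerconnection_so target before touching the classes mentioned above; the library name below matches the System.loadLibrary() call that appears later in this change, while the wrapper class itself is hypothetical:

// Hypothetical helper; "jingle_peerconnection_so" is the output of the
// libjingle_peerconnection_so target defined in sdk/android/BUILD.gn above.
public final class WebRtcLoader {
  static {
    System.loadLibrary("jingle_peerconnection_so");
  }
  private WebRtcLoader() {}
}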
sdk/android/api/org/webrtc/AudioSource.java  Normal file  (21 lines added)
@@ -0,0 +1,21 @@
/*
 * Copyright 2013 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

package org.webrtc;

/**
 * Java wrapper for a C++ AudioSourceInterface. Used as the source for one or
 * more {@code AudioTrack} objects.
 */
public class AudioSource extends MediaSource {
  public AudioSource(long nativeSource) {
    super(nativeSource);
  }
}
sdk/android/api/org/webrtc/AudioTrack.java  Normal file  (27 lines added)
@@ -0,0 +1,27 @@
/*
 * Copyright 2013 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

package org.webrtc;

/** Java wrapper for a C++ AudioTrackInterface */
public class AudioTrack extends MediaStreamTrack {
  public AudioTrack(long nativeTrack) {
    super(nativeTrack);
  }

  /** Sets the volume for the underlying MediaSource. Volume is a gain value in the range
   * 0 to 10.
   */
  public void setVolume(double volume) {
    nativeSetVolume(super.nativeTrack, volume);
  }

  private static native void nativeSetVolume(long nativeTrack, double volume);
}
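A minimal, hypothetical usage sketch for the setVolume() call above; the "stream" variable and its audioTracks list are assumptions (a MediaStream obtained elsewhere, e.g. from a PeerConnection callback), not part of this change:

// Hypothetical snippet: adjust the gain of the first remote audio track.
if (!stream.audioTracks.isEmpty()) {
  AudioTrack remoteAudioTrack = stream.audioTracks.get(0);
  remoteAudioTrack.setVolume(2.0);  // gain in the 0..10 range documented above; 0 mutes
}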
sdk/android/api/org/webrtc/CallSessionFileRotatingLogSink.java  Normal file  (39 lines added)
@@ -0,0 +1,39 @@
/*
 * Copyright 2015 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

package org.webrtc;

public class CallSessionFileRotatingLogSink {
  static {
    System.loadLibrary("jingle_peerconnection_so");
  }

  private long nativeSink;

  public static byte[] getLogData(String dirPath) {
    return nativeGetLogData(dirPath);
  }

  public CallSessionFileRotatingLogSink(
      String dirPath, int maxFileSize, Logging.Severity severity) {
    nativeSink = nativeAddSink(dirPath, maxFileSize, severity.ordinal());
  }

  public void dispose() {
    if (nativeSink != 0) {
      nativeDeleteSink(nativeSink);
      nativeSink = 0;
    }
  }

  private static native long nativeAddSink(String dirPath, int maxFileSize, int severity);
  private static native void nativeDeleteSink(long nativeSink);
  private static native byte[] nativeGetLogData(String dirPath);
}
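A short, hedged usage sketch of the sink defined above; the directory, size limit, and severity are placeholder values, and "context" is assumed to be an Android Context owned by the caller:

// Hypothetical usage: write rotated native logs under an app-private directory.
String logDir = context.getDir("webrtc_logs", Context.MODE_PRIVATE).getAbsolutePath();
CallSessionFileRotatingLogSink sink =
    new CallSessionFileRotatingLogSink(logDir, 10 * 1024 * 1024, Logging.Severity.LS_INFO);
// ... run the call ...
byte[] logData = CallSessionFileRotatingLogSink.getLogData(logDir);
sink.dispose();  // releases the native sink; subsequent calls are no-ops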
sdk/android/api/org/webrtc/Camera1Capturer.java  Normal file  (35 lines added)
@@ -0,0 +1,35 @@
/*
 * Copyright 2016 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

package org.webrtc;

import android.content.Context;
import android.media.MediaRecorder;

public class Camera1Capturer extends CameraCapturer {
  private final boolean captureToTexture;

  public Camera1Capturer(
      String cameraName, CameraEventsHandler eventsHandler, boolean captureToTexture) {
    super(cameraName, eventsHandler, new Camera1Enumerator(captureToTexture));

    this.captureToTexture = captureToTexture;
  }

  @Override
  protected void createCameraSession(CameraSession.CreateSessionCallback createSessionCallback,
      CameraSession.Events events, Context applicationContext,
      SurfaceTextureHelper surfaceTextureHelper, MediaRecorder mediaRecorder, String cameraName,
      int width, int height, int framerate) {
    Camera1Session.create(createSessionCallback, events,
        captureToTexture || (mediaRecorder != null), applicationContext, surfaceTextureHelper,
        mediaRecorder, Camera1Enumerator.getCameraIndex(cameraName), width, height, framerate);
  }
}
sdk/android/api/org/webrtc/Camera1Enumerator.java  Normal file  (184 lines added)
@@ -0,0 +1,184 @@
/*
 * Copyright 2015 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

package org.webrtc;

import android.os.SystemClock;
import java.util.ArrayList;
import java.util.List;
import org.webrtc.CameraEnumerationAndroid.CaptureFormat;

@SuppressWarnings("deprecation")
public class Camera1Enumerator implements CameraEnumerator {
  private final static String TAG = "Camera1Enumerator";
  // Each entry contains the supported formats for corresponding camera index. The formats for all
  // cameras are enumerated on the first call to getSupportedFormats(), and cached for future
  // reference.
  private static List<List<CaptureFormat>> cachedSupportedFormats;

  private final boolean captureToTexture;

  public Camera1Enumerator() {
    this(true /* captureToTexture */);
  }

  public Camera1Enumerator(boolean captureToTexture) {
    this.captureToTexture = captureToTexture;
  }

  // Returns device names that can be used to create a new VideoCapturerAndroid.
  @Override
  public String[] getDeviceNames() {
    ArrayList<String> namesList = new ArrayList<>();
    for (int i = 0; i < android.hardware.Camera.getNumberOfCameras(); ++i) {
      String name = getDeviceName(i);
      if (name != null) {
        namesList.add(name);
        Logging.d(TAG, "Index: " + i + ". " + name);
      } else {
        Logging.e(TAG, "Index: " + i + ". Failed to query camera name.");
      }
    }
    String[] namesArray = new String[namesList.size()];
    return namesList.toArray(namesArray);
  }

  @Override
  public boolean isFrontFacing(String deviceName) {
    android.hardware.Camera.CameraInfo info = getCameraInfo(getCameraIndex(deviceName));
    return info != null && info.facing == android.hardware.Camera.CameraInfo.CAMERA_FACING_FRONT;
  }

  @Override
  public boolean isBackFacing(String deviceName) {
    android.hardware.Camera.CameraInfo info = getCameraInfo(getCameraIndex(deviceName));
    return info != null && info.facing == android.hardware.Camera.CameraInfo.CAMERA_FACING_BACK;
  }

  @Override
  public List<CaptureFormat> getSupportedFormats(String deviceName) {
    return getSupportedFormats(getCameraIndex(deviceName));
  }

  @Override
  public CameraVideoCapturer createCapturer(
      String deviceName, CameraVideoCapturer.CameraEventsHandler eventsHandler) {
    return new Camera1Capturer(deviceName, eventsHandler, captureToTexture);
  }

  private static android.hardware.Camera.CameraInfo getCameraInfo(int index) {
    android.hardware.Camera.CameraInfo info = new android.hardware.Camera.CameraInfo();
    try {
      android.hardware.Camera.getCameraInfo(index, info);
    } catch (Exception e) {
      Logging.e(TAG, "getCameraInfo failed on index " + index, e);
      return null;
    }
    return info;
  }

  static synchronized List<CaptureFormat> getSupportedFormats(int cameraId) {
    if (cachedSupportedFormats == null) {
      cachedSupportedFormats = new ArrayList<List<CaptureFormat>>();
      for (int i = 0; i < android.hardware.Camera.getNumberOfCameras(); ++i) {
        cachedSupportedFormats.add(enumerateFormats(i));
      }
    }
    return cachedSupportedFormats.get(cameraId);
  }

  private static List<CaptureFormat> enumerateFormats(int cameraId) {
    Logging.d(TAG, "Get supported formats for camera index " + cameraId + ".");
    final long startTimeMs = SystemClock.elapsedRealtime();
    final android.hardware.Camera.Parameters parameters;
    android.hardware.Camera camera = null;
    try {
      Logging.d(TAG, "Opening camera with index " + cameraId);
      camera = android.hardware.Camera.open(cameraId);
      parameters = camera.getParameters();
    } catch (RuntimeException e) {
      Logging.e(TAG, "Open camera failed on camera index " + cameraId, e);
      return new ArrayList<CaptureFormat>();
    } finally {
      if (camera != null) {
        camera.release();
      }
    }

    final List<CaptureFormat> formatList = new ArrayList<CaptureFormat>();
    try {
      int minFps = 0;
      int maxFps = 0;
      final List<int[]> listFpsRange = parameters.getSupportedPreviewFpsRange();
      if (listFpsRange != null) {
        // getSupportedPreviewFpsRange() returns a sorted list. Take the fps range
        // corresponding to the highest fps.
        final int[] range = listFpsRange.get(listFpsRange.size() - 1);
        minFps = range[android.hardware.Camera.Parameters.PREVIEW_FPS_MIN_INDEX];
        maxFps = range[android.hardware.Camera.Parameters.PREVIEW_FPS_MAX_INDEX];
      }
      for (android.hardware.Camera.Size size : parameters.getSupportedPreviewSizes()) {
        formatList.add(new CaptureFormat(size.width, size.height, minFps, maxFps));
      }
    } catch (Exception e) {
      Logging.e(TAG, "getSupportedFormats() failed on camera index " + cameraId, e);
    }

    final long endTimeMs = SystemClock.elapsedRealtime();
    Logging.d(TAG, "Get supported formats for camera index " + cameraId + " done."
        + " Time spent: " + (endTimeMs - startTimeMs) + " ms.");
    return formatList;
  }

  // Convert from android.hardware.Camera.Size to Size.
  static List<Size> convertSizes(List<android.hardware.Camera.Size> cameraSizes) {
    final List<Size> sizes = new ArrayList<Size>();
    for (android.hardware.Camera.Size size : cameraSizes) {
      sizes.add(new Size(size.width, size.height));
    }
    return sizes;
  }

  // Convert from int[2] to CaptureFormat.FramerateRange.
  static List<CaptureFormat.FramerateRange> convertFramerates(List<int[]> arrayRanges) {
    final List<CaptureFormat.FramerateRange> ranges = new ArrayList<CaptureFormat.FramerateRange>();
    for (int[] range : arrayRanges) {
      ranges.add(new CaptureFormat.FramerateRange(
          range[android.hardware.Camera.Parameters.PREVIEW_FPS_MIN_INDEX],
          range[android.hardware.Camera.Parameters.PREVIEW_FPS_MAX_INDEX]));
    }
    return ranges;
  }

  // Returns the camera index for camera with name |deviceName|, or throws IllegalArgumentException
  // if no such camera can be found.
  static int getCameraIndex(String deviceName) {
    Logging.d(TAG, "getCameraIndex: " + deviceName);
    for (int i = 0; i < android.hardware.Camera.getNumberOfCameras(); ++i) {
      if (deviceName.equals(getDeviceName(i))) {
        return i;
      }
    }
    throw new IllegalArgumentException("No such camera: " + deviceName);
  }

  // Returns the name of the camera with camera index. Returns null if the
  // camera can not be used.
  static String getDeviceName(int index) {
    android.hardware.Camera.CameraInfo info = getCameraInfo(index);
    if (info == null) {
      return null;
    }

    String facing =
        (info.facing == android.hardware.Camera.CameraInfo.CAMERA_FACING_FRONT) ? "front" : "back";
    return "Camera " + index + ", Facing " + facing + ", Orientation " + info.orientation;
  }
}
sdk/android/api/org/webrtc/Camera2Capturer.java  Normal file  (38 lines added)
@@ -0,0 +1,38 @@
/*
 * Copyright 2016 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

package org.webrtc;

import android.annotation.TargetApi;
import android.content.Context;
import android.hardware.camera2.CameraManager;
import android.media.MediaRecorder;

@TargetApi(21)
public class Camera2Capturer extends CameraCapturer {
  private final Context context;
  private final CameraManager cameraManager;

  public Camera2Capturer(Context context, String cameraName, CameraEventsHandler eventsHandler) {
    super(cameraName, eventsHandler, new Camera2Enumerator(context));

    this.context = context;
    cameraManager = (CameraManager) context.getSystemService(Context.CAMERA_SERVICE);
  }

  @Override
  protected void createCameraSession(CameraSession.CreateSessionCallback createSessionCallback,
      CameraSession.Events events, Context applicationContext,
      SurfaceTextureHelper surfaceTextureHelper, MediaRecorder mediaRecoder, String cameraName,
      int width, int height, int framerate) {
    Camera2Session.create(createSessionCallback, events, applicationContext, cameraManager,
        surfaceTextureHelper, mediaRecoder, cameraName, width, height, framerate);
  }
}
sdk/android/api/org/webrtc/Camera2Enumerator.java  Normal file  (248 lines added)
@@ -0,0 +1,248 @@
/*
 * Copyright 2015 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

package org.webrtc;

import org.webrtc.CameraEnumerationAndroid.CaptureFormat;

import android.annotation.TargetApi;
import android.content.Context;
import android.graphics.Rect;
import android.graphics.SurfaceTexture;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraManager;
import android.hardware.camera2.CameraMetadata;
import android.hardware.camera2.params.StreamConfigurationMap;
import android.os.Build;
import android.os.SystemClock;
import android.util.AndroidException;
import android.util.Range;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

@TargetApi(21)
public class Camera2Enumerator implements CameraEnumerator {
  private final static String TAG = "Camera2Enumerator";
  private final static double NANO_SECONDS_PER_SECOND = 1.0e9;

  // Each entry contains the supported formats for a given camera index. The formats are enumerated
  // lazily in getSupportedFormats(), and cached for future reference.
  private static final Map<String, List<CaptureFormat>> cachedSupportedFormats =
      new HashMap<String, List<CaptureFormat>>();

  final Context context;
  final CameraManager cameraManager;

  public Camera2Enumerator(Context context) {
    this.context = context;
    this.cameraManager = (CameraManager) context.getSystemService(Context.CAMERA_SERVICE);
  }

  @Override
  public String[] getDeviceNames() {
    try {
      return cameraManager.getCameraIdList();
      // On Android OS pre 4.4.2, a class will not load because of VerifyError if it contains a
      // catch statement with an Exception from a newer API, even if the code is never executed.
      // https://code.google.com/p/android/issues/detail?id=209129
    } catch (/* CameraAccessException */ AndroidException e) {
      Logging.e(TAG, "Camera access exception: " + e);
      return new String[] {};
    }
  }

  @Override
  public boolean isFrontFacing(String deviceName) {
    CameraCharacteristics characteristics = getCameraCharacteristics(deviceName);

    return characteristics != null
        && characteristics.get(CameraCharacteristics.LENS_FACING)
        == CameraMetadata.LENS_FACING_FRONT;
  }

  @Override
  public boolean isBackFacing(String deviceName) {
    CameraCharacteristics characteristics = getCameraCharacteristics(deviceName);

    return characteristics != null
        && characteristics.get(CameraCharacteristics.LENS_FACING)
        == CameraMetadata.LENS_FACING_BACK;
  }

  @Override
  public List<CaptureFormat> getSupportedFormats(String deviceName) {
    return getSupportedFormats(context, deviceName);
  }

  @Override
  public CameraVideoCapturer createCapturer(
      String deviceName, CameraVideoCapturer.CameraEventsHandler eventsHandler) {
    return new Camera2Capturer(context, deviceName, eventsHandler);
  }

  private CameraCharacteristics getCameraCharacteristics(String deviceName) {
    try {
      return cameraManager.getCameraCharacteristics(deviceName);
      // On Android OS pre 4.4.2, a class will not load because of VerifyError if it contains a
      // catch statement with an Exception from a newer API, even if the code is never executed.
      // https://code.google.com/p/android/issues/detail?id=209129
    } catch (/* CameraAccessException */ AndroidException e) {
      Logging.e(TAG, "Camera access exception: " + e);
      return null;
    }
  }

  /**
   * Checks if API is supported and all cameras have better than legacy support.
   */
  public static boolean isSupported(Context context) {
    if (Build.VERSION.SDK_INT < Build.VERSION_CODES.LOLLIPOP) {
      return false;
    }

    CameraManager cameraManager = (CameraManager) context.getSystemService(Context.CAMERA_SERVICE);
    try {
      String[] cameraIds = cameraManager.getCameraIdList();
      for (String id : cameraIds) {
        CameraCharacteristics characteristics = cameraManager.getCameraCharacteristics(id);
        if (characteristics.get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL)
            == CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY) {
          return false;
        }
      }
      // On Android OS pre 4.4.2, a class will not load because of VerifyError if it contains a
      // catch statement with an Exception from a newer API, even if the code is never executed.
      // https://code.google.com/p/android/issues/detail?id=209129
    } catch (/* CameraAccessException */ AndroidException e) {
      Logging.e(TAG, "Camera access exception: " + e);
      return false;
    }
    return true;
  }

  static int getFpsUnitFactor(Range<Integer>[] fpsRanges) {
    if (fpsRanges.length == 0) {
      return 1000;
    }
    return fpsRanges[0].getUpper() < 1000 ? 1000 : 1;
  }

  static List<Size> getSupportedSizes(CameraCharacteristics cameraCharacteristics) {
    final StreamConfigurationMap streamMap =
        cameraCharacteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
    final int supportLevel =
        cameraCharacteristics.get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL);

    final android.util.Size[] nativeSizes = streamMap.getOutputSizes(SurfaceTexture.class);
    final List<Size> sizes = convertSizes(nativeSizes);

    // Video may be stretched pre LMR1 on legacy implementations.
    // Filter out formats that have different aspect ratio than the sensor array.
    if (Build.VERSION.SDK_INT < Build.VERSION_CODES.LOLLIPOP_MR1
        && supportLevel == CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY) {
      final Rect activeArraySize =
          cameraCharacteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
      final ArrayList<Size> filteredSizes = new ArrayList<Size>();

      for (Size size : sizes) {
        if (activeArraySize.width() * size.height == activeArraySize.height() * size.width) {
          filteredSizes.add(size);
        }
      }

      return filteredSizes;
    } else {
      return sizes;
    }
  }

  static List<CaptureFormat> getSupportedFormats(Context context, String cameraId) {
    return getSupportedFormats(
        (CameraManager) context.getSystemService(Context.CAMERA_SERVICE), cameraId);
  }

  static List<CaptureFormat> getSupportedFormats(CameraManager cameraManager, String cameraId) {
    synchronized (cachedSupportedFormats) {
      if (cachedSupportedFormats.containsKey(cameraId)) {
        return cachedSupportedFormats.get(cameraId);
      }

      Logging.d(TAG, "Get supported formats for camera index " + cameraId + ".");
      final long startTimeMs = SystemClock.elapsedRealtime();

      final CameraCharacteristics cameraCharacteristics;
      try {
        cameraCharacteristics = cameraManager.getCameraCharacteristics(cameraId);
      } catch (Exception ex) {
        Logging.e(TAG, "getCameraCharacteristics(): " + ex);
        return new ArrayList<CaptureFormat>();
      }

      final StreamConfigurationMap streamMap =
          cameraCharacteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);

      Range<Integer>[] fpsRanges =
          cameraCharacteristics.get(CameraCharacteristics.CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES);
      List<CaptureFormat.FramerateRange> framerateRanges =
          convertFramerates(fpsRanges, getFpsUnitFactor(fpsRanges));
      List<Size> sizes = getSupportedSizes(cameraCharacteristics);

      int defaultMaxFps = 0;
      for (CaptureFormat.FramerateRange framerateRange : framerateRanges) {
        defaultMaxFps = Math.max(defaultMaxFps, framerateRange.max);
      }

      final List<CaptureFormat> formatList = new ArrayList<CaptureFormat>();
      for (Size size : sizes) {
        long minFrameDurationNs = 0;
        try {
          minFrameDurationNs = streamMap.getOutputMinFrameDuration(
              SurfaceTexture.class, new android.util.Size(size.width, size.height));
        } catch (Exception e) {
          // getOutputMinFrameDuration() is not supported on all devices. Ignore silently.
        }
        final int maxFps = (minFrameDurationNs == 0)
            ? defaultMaxFps
            : (int) Math.round(NANO_SECONDS_PER_SECOND / minFrameDurationNs) * 1000;
        formatList.add(new CaptureFormat(size.width, size.height, 0, maxFps));
        Logging.d(TAG, "Format: " + size.width + "x" + size.height + "@" + maxFps);
      }

      cachedSupportedFormats.put(cameraId, formatList);
      final long endTimeMs = SystemClock.elapsedRealtime();
      Logging.d(TAG, "Get supported formats for camera index " + cameraId + " done."
          + " Time spent: " + (endTimeMs - startTimeMs) + " ms.");
      return formatList;
    }
  }

  // Convert from android.util.Size to Size.
  private static List<Size> convertSizes(android.util.Size[] cameraSizes) {
    final List<Size> sizes = new ArrayList<Size>();
    for (android.util.Size size : cameraSizes) {
      sizes.add(new Size(size.getWidth(), size.getHeight()));
    }
    return sizes;
  }

  // Convert from android.util.Range<Integer> to CaptureFormat.FramerateRange.
  static List<CaptureFormat.FramerateRange> convertFramerates(
      Range<Integer>[] arrayRanges, int unitFactor) {
    final List<CaptureFormat.FramerateRange> ranges = new ArrayList<CaptureFormat.FramerateRange>();
    for (Range<Integer> range : arrayRanges) {
      ranges.add(new CaptureFormat.FramerateRange(
          range.getLower() * unitFactor, range.getUpper() * unitFactor));
    }
    return ranges;
  }
}
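Taken together with Camera1Enumerator above, a typical selection pattern looks like the hedged sketch below; the applicationContext and eventsHandler variables are assumed to exist in the caller and are not part of this change:

// Hypothetical sketch: prefer the Camera2 path only when every device has
// better-than-legacy support, which is exactly what isSupported() above checks.
CameraEnumerator enumerator = Camera2Enumerator.isSupported(applicationContext)
    ? new Camera2Enumerator(applicationContext)
    : new Camera1Enumerator(true /* captureToTexture */);

CameraVideoCapturer capturer = null;
for (String deviceName : enumerator.getDeviceNames()) {
  if (enumerator.isFrontFacing(deviceName)) {
    capturer = enumerator.createCapturer(deviceName, eventsHandler);
    break;
  }
}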
206
sdk/android/api/org/webrtc/CameraEnumerationAndroid.java
Normal file
206
sdk/android/api/org/webrtc/CameraEnumerationAndroid.java
Normal file
@ -0,0 +1,206 @@
|
||||
/*
|
||||
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
package org.webrtc;
|
||||
|
||||
import static java.lang.Math.abs;
|
||||
|
||||
import android.graphics.ImageFormat;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collections;
|
||||
import java.util.Comparator;
|
||||
import java.util.List;
|
||||
|
||||
@SuppressWarnings("deprecation")
|
||||
public class CameraEnumerationAndroid {
|
||||
private final static String TAG = "CameraEnumerationAndroid";
|
||||
|
||||
static final ArrayList<Size> COMMON_RESOLUTIONS = new ArrayList<Size>(Arrays.asList(
|
||||
// 0, Unknown resolution
|
||||
new Size(160, 120), // 1, QQVGA
|
||||
new Size(240, 160), // 2, HQVGA
|
||||
new Size(320, 240), // 3, QVGA
|
||||
new Size(400, 240), // 4, WQVGA
|
||||
new Size(480, 320), // 5, HVGA
|
||||
new Size(640, 360), // 6, nHD
|
||||
new Size(640, 480), // 7, VGA
|
||||
new Size(768, 480), // 8, WVGA
|
||||
new Size(854, 480), // 9, FWVGA
|
||||
new Size(800, 600), // 10, SVGA
|
||||
new Size(960, 540), // 11, qHD
|
||||
new Size(960, 640), // 12, DVGA
|
||||
new Size(1024, 576), // 13, WSVGA
|
||||
new Size(1024, 600), // 14, WVSGA
|
||||
new Size(1280, 720), // 15, HD
|
||||
new Size(1280, 1024), // 16, SXGA
|
||||
new Size(1920, 1080), // 17, Full HD
|
||||
new Size(1920, 1440), // 18, Full HD 4:3
|
||||
new Size(2560, 1440), // 19, QHD
|
||||
new Size(3840, 2160) // 20, UHD
|
||||
));
|
||||
|
||||
public static class CaptureFormat {
|
||||
// Class to represent a framerate range. The framerate varies because of lightning conditions.
|
||||
// The values are multiplied by 1000, so 1000 represents one frame per second.
|
||||
public static class FramerateRange {
|
||||
public int min;
|
||||
public int max;
|
||||
|
||||
public FramerateRange(int min, int max) {
|
||||
this.min = min;
|
||||
this.max = max;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "[" + (min / 1000.0f) + ":" + (max / 1000.0f) + "]";
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object other) {
|
||||
if (!(other instanceof FramerateRange)) {
|
||||
return false;
|
||||
}
|
||||
final FramerateRange otherFramerate = (FramerateRange) other;
|
||||
return min == otherFramerate.min && max == otherFramerate.max;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
// Use prime close to 2^16 to avoid collisions for normal values less than 2^16.
|
||||
return 1 + 65537 * min + max;
|
||||
}
|
||||
}
|
||||
|
||||
public final int width;
|
||||
public final int height;
|
||||
public final FramerateRange framerate;
|
||||
|
||||
// TODO(hbos): If VideoCapturer.startCapture is updated to support other image formats then this
|
||||
// needs to be updated and VideoCapturer.getSupportedFormats need to return CaptureFormats of
|
||||
// all imageFormats.
|
||||
public final int imageFormat = ImageFormat.NV21;
|
||||
|
||||
public CaptureFormat(int width, int height, int minFramerate, int maxFramerate) {
|
||||
this.width = width;
|
||||
this.height = height;
|
||||
this.framerate = new FramerateRange(minFramerate, maxFramerate);
|
||||
}
|
||||
|
||||
public CaptureFormat(int width, int height, FramerateRange framerate) {
|
||||
this.width = width;
|
||||
this.height = height;
|
||||
this.framerate = framerate;
|
||||
}
|
||||
|
||||
// Calculates the frame size of this capture format.
|
||||
public int frameSize() {
|
||||
return frameSize(width, height, imageFormat);
|
||||
}
|
||||
|
||||
// Calculates the frame size of the specified image format. Currently only
|
||||
// supporting ImageFormat.NV21.
|
||||
// The size is width * height * number of bytes per pixel.
|
||||
// http://developer.android.com/reference/android/hardware/Camera.html#addCallbackBuffer(byte[])
|
||||
public static int frameSize(int width, int height, int imageFormat) {
|
||||
if (imageFormat != ImageFormat.NV21) {
|
||||
throw new UnsupportedOperationException("Don't know how to calculate "
|
||||
+ "the frame size of non-NV21 image formats.");
|
||||
}
|
||||
return (width * height * ImageFormat.getBitsPerPixel(imageFormat)) / 8;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return width + "x" + height + "@" + framerate;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object other) {
|
||||
if (!(other instanceof CaptureFormat)) {
|
||||
return false;
|
||||
}
|
||||
final CaptureFormat otherFormat = (CaptureFormat) other;
|
||||
return width == otherFormat.width && height == otherFormat.height
|
||||
&& framerate.equals(otherFormat.framerate);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return 1 + (width * 65497 + height) * 251 + framerate.hashCode();
|
||||
}
|
||||
}
|
||||
|
||||
// Helper class for finding the closest supported format for the two functions below. It creates a
|
||||
// comparator based on the difference to some requested parameters, where the element with the
|
||||
// minimum difference is the element that is closest to the requested parameters.
|
||||
private static abstract class ClosestComparator<T> implements Comparator<T> {
|
||||
// Difference between supported and requested parameter.
|
||||
abstract int diff(T supportedParameter);
|
||||
|
||||
@Override
|
||||
public int compare(T t1, T t2) {
|
||||
return diff(t1) - diff(t2);
|
||||
}
|
||||
}
|
||||
|
||||
// Prefer a fps range with an upper bound close to |framerate|. Also prefer a fps range with a low
|
||||
// lower bound, to allow the framerate to fluctuate based on lightning conditions.
|
||||
public static CaptureFormat.FramerateRange getClosestSupportedFramerateRange(
|
||||
List<CaptureFormat.FramerateRange> supportedFramerates, final int requestedFps) {
|
||||
return Collections.min(
|
||||
supportedFramerates, new ClosestComparator<CaptureFormat.FramerateRange>() {
|
||||
// Progressive penalty if the upper bound is further away than |MAX_FPS_DIFF_THRESHOLD|
|
||||
// from requested.
|
||||
private static final int MAX_FPS_DIFF_THRESHOLD = 5000;
|
||||
private static final int MAX_FPS_LOW_DIFF_WEIGHT = 1;
|
||||
private static final int MAX_FPS_HIGH_DIFF_WEIGHT = 3;
|
||||
|
||||
// Progressive penalty if the lower bound is bigger than |MIN_FPS_THRESHOLD|.
|
||||
private static final int MIN_FPS_THRESHOLD = 8000;
|
||||
private static final int MIN_FPS_LOW_VALUE_WEIGHT = 1;
|
||||
private static final int MIN_FPS_HIGH_VALUE_WEIGHT = 4;
|
||||
|
||||
// Use one weight for small |value| less than |threshold|, and another weight above.
|
||||
private int progressivePenalty(int value, int threshold, int lowWeight, int highWeight) {
|
||||
return (value < threshold) ? value * lowWeight
|
||||
: threshold * lowWeight + (value - threshold) * highWeight;
|
||||
}
|
||||
|
||||
@Override
|
||||
int diff(CaptureFormat.FramerateRange range) {
|
||||
final int minFpsError = progressivePenalty(
|
||||
range.min, MIN_FPS_THRESHOLD, MIN_FPS_LOW_VALUE_WEIGHT, MIN_FPS_HIGH_VALUE_WEIGHT);
|
||||
final int maxFpsError = progressivePenalty(Math.abs(requestedFps * 1000 - range.max),
|
||||
MAX_FPS_DIFF_THRESHOLD, MAX_FPS_LOW_DIFF_WEIGHT, MAX_FPS_HIGH_DIFF_WEIGHT);
|
||||
return minFpsError + maxFpsError;
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
  public static Size getClosestSupportedSize(
      List<Size> supportedSizes, final int requestedWidth, final int requestedHeight) {
    return Collections.min(supportedSizes, new ClosestComparator<Size>() {
      @Override
      int diff(Size size) {
        return abs(requestedWidth - size.width) + abs(requestedHeight - size.height);
      }
    });
  }

  // Helper method for camera classes.
  static void reportCameraResolution(Histogram histogram, Size resolution) {
    int index = COMMON_RESOLUTIONS.indexOf(resolution);
    // 0 is reserved for unknown resolution, so add 1.
    // indexOf returns -1 for unknown resolutions so it becomes 0 automatically.
    histogram.addSample(index + 1);
  }
}
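A minimal usage sketch of the two selection helpers above (not part of the original file; it assumes the public FramerateRange(min, max) and Size(width, height) constructors defined elsewhere in this SDK). Framerate ranges are expressed in units of 1/1000 fps, which is why requestedFps is multiplied by 1000 and the thresholds are 5000 and 8000:

import java.util.Arrays;
import java.util.List;

import org.webrtc.CameraEnumerationAndroid;
import org.webrtc.CameraEnumerationAndroid.CaptureFormat;
import org.webrtc.Size;

class ClosestFormatSketch {
  static void pickFormat() {
    List<CaptureFormat.FramerateRange> ranges = Arrays.asList(
        new CaptureFormat.FramerateRange(15000, 15000),  // fixed 15 fps
        new CaptureFormat.FramerateRange(7000, 30000),   // variable 7-30 fps
        new CaptureFormat.FramerateRange(30000, 30000)); // fixed 30 fps
    // For a 30 fps request, (7000, 30000) wins: its upper bound matches exactly (penalty 0) and
    // its low lower bound costs only 7000 * MIN_FPS_LOW_VALUE_WEIGHT, while the fixed 30 fps
    // range pays 8000 + 22000 * MIN_FPS_HIGH_VALUE_WEIGHT for its high lower bound.
    CaptureFormat.FramerateRange best =
        CameraEnumerationAndroid.getClosestSupportedFramerateRange(ranges, 30);

    List<Size> sizes =
        Arrays.asList(new Size(640, 480), new Size(1280, 720), new Size(1920, 1080));
    // Plain absolute-difference metric: 1280x720 is closest to a 1000x700 request.
    Size bestSize = CameraEnumerationAndroid.getClosestSupportedSize(sizes, 1000, 700);
  }
}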
25 sdk/android/api/org/webrtc/CameraEnumerator.java Normal file
@@ -0,0 +1,25 @@
/*
 * Copyright 2016 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

package org.webrtc;

import org.webrtc.CameraEnumerationAndroid.CaptureFormat;

import java.util.List;

public interface CameraEnumerator {
  public String[] getDeviceNames();
  public boolean isFrontFacing(String deviceName);
  public boolean isBackFacing(String deviceName);
  public List<CaptureFormat> getSupportedFormats(String deviceName);

  public CameraVideoCapturer createCapturer(
      String deviceName, CameraVideoCapturer.CameraEventsHandler eventsHandler);
}
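A short sketch of driving this interface to pick a front-facing device; Camera1Enumerator is assumed to be the concrete implementation that ships alongside it in this SDK:

  static CameraVideoCapturer createFrontFacingCapturer() {
    CameraEnumerator enumerator = new Camera1Enumerator(false /* captureToTexture */);
    for (String deviceName : enumerator.getDeviceNames()) {
      if (enumerator.isFrontFacing(deviceName)) {
        // No CameraEventsHandler in this sketch; pass a real handler to receive error callbacks.
        return enumerator.createCapturer(deviceName, null /* eventsHandler */);
      }
    }
    return null; // No front-facing camera available.
  }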
158 sdk/android/api/org/webrtc/CameraVideoCapturer.java Normal file
@@ -0,0 +1,158 @@
/*
|
||||
* Copyright 2016 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
package org.webrtc;
|
||||
|
||||
import android.media.MediaRecorder;
|
||||
|
||||
/**
|
||||
* Base interface for camera1 and camera2 implementations. Extends VideoCapturer with a
|
||||
* switchCamera() function. Also provides subinterfaces for handling camera events, and a helper
|
||||
* class for detecting camera freezes.
|
||||
*/
|
||||
public interface CameraVideoCapturer extends VideoCapturer {
|
||||
/**
|
||||
* Camera events handler - can be used to be notifed about camera events. The callbacks are
|
||||
* executed from an arbitrary thread.
|
||||
*/
|
||||
public interface CameraEventsHandler {
|
||||
// Camera error handler - invoked when camera can not be opened
|
||||
// or any camera exception happens on camera thread.
|
||||
void onCameraError(String errorDescription);
|
||||
|
||||
// Called when camera is disconnected.
|
||||
void onCameraDisconnected();
|
||||
|
||||
// Invoked when camera stops receiving frames.
|
||||
void onCameraFreezed(String errorDescription);
|
||||
|
||||
// Callback invoked when camera is opening.
|
||||
void onCameraOpening(String cameraName);
|
||||
|
||||
// Callback invoked when first camera frame is available after camera is started.
|
||||
void onFirstFrameAvailable();
|
||||
|
||||
// Callback invoked when camera is closed.
|
||||
void onCameraClosed();
|
||||
}
|
||||
|
||||
/**
|
||||
* Camera switch handler - one of these functions are invoked with the result of switchCamera().
|
||||
* The callback may be called on an arbitrary thread.
|
||||
*/
|
||||
public interface CameraSwitchHandler {
|
||||
// Invoked on success. |isFrontCamera| is true if the new camera is front facing.
|
||||
void onCameraSwitchDone(boolean isFrontCamera);
|
||||
|
||||
// Invoked on failure, e.g. camera is stopped or only one camera available.
|
||||
void onCameraSwitchError(String errorDescription);
|
||||
}
|
||||
|
||||
/**
|
||||
* Switch camera to the next valid camera id. This can only be called while the camera is running.
|
||||
* This function can be called from any thread.
|
||||
*/
|
||||
void switchCamera(CameraSwitchHandler switchEventsHandler);
|
||||
|
||||
/**
|
||||
* MediaRecorder add/remove handler - one of these functions are invoked with the result of
|
||||
* addMediaRecorderToCamera() or removeMediaRecorderFromCamera calls.
|
||||
* The callback may be called on an arbitrary thread.
|
||||
*/
|
||||
public interface MediaRecorderHandler {
|
||||
// Invoked on success.
|
||||
void onMediaRecorderSuccess();
|
||||
|
||||
// Invoked on failure, e.g. camera is stopped or any exception happens.
|
||||
void onMediaRecorderError(String errorDescription);
|
||||
}
|
||||
|
||||
/**
|
||||
* Add MediaRecorder to camera pipeline. This can only be called while the camera is running.
|
||||
* Once MediaRecorder is added to camera pipeline camera switch is not allowed.
|
||||
* This function can be called from any thread.
|
||||
*/
|
||||
void addMediaRecorderToCamera(MediaRecorder mediaRecorder, MediaRecorderHandler resultHandler);
|
||||
|
||||
/**
|
||||
* Remove MediaRecorder from camera pipeline. This can only be called while the camera is running.
|
||||
* This function can be called from any thread.
|
||||
*/
|
||||
void removeMediaRecorderFromCamera(MediaRecorderHandler resultHandler);
|
||||
|
||||
/**
|
||||
* Helper class to log framerate and detect if the camera freezes. It will run periodic callbacks
|
||||
* on the SurfaceTextureHelper thread passed in the ctor, and should only be operated from that
|
||||
* thread.
|
||||
*/
|
||||
public static class CameraStatistics {
|
||||
private final static String TAG = "CameraStatistics";
|
||||
private final static int CAMERA_OBSERVER_PERIOD_MS = 2000;
|
||||
private final static int CAMERA_FREEZE_REPORT_TIMOUT_MS = 4000;
|
||||
|
||||
private final SurfaceTextureHelper surfaceTextureHelper;
|
||||
private final CameraEventsHandler eventsHandler;
|
||||
private int frameCount;
|
||||
private int freezePeriodCount;
|
||||
// Camera observer - monitors camera framerate. Observer is executed on camera thread.
|
||||
private final Runnable cameraObserver = new Runnable() {
|
||||
@Override
|
||||
public void run() {
|
||||
final int cameraFps = Math.round(frameCount * 1000.0f / CAMERA_OBSERVER_PERIOD_MS);
|
||||
Logging.d(TAG, "Camera fps: " + cameraFps + ".");
|
||||
if (frameCount == 0) {
|
||||
++freezePeriodCount;
|
||||
if (CAMERA_OBSERVER_PERIOD_MS * freezePeriodCount >= CAMERA_FREEZE_REPORT_TIMOUT_MS
|
||||
&& eventsHandler != null) {
|
||||
Logging.e(TAG, "Camera freezed.");
|
||||
if (surfaceTextureHelper.isTextureInUse()) {
|
||||
// This can only happen if we are capturing to textures.
|
||||
eventsHandler.onCameraFreezed("Camera failure. Client must return video buffers.");
|
||||
} else {
|
||||
eventsHandler.onCameraFreezed("Camera failure.");
|
||||
}
|
||||
return;
|
||||
}
|
||||
} else {
|
||||
freezePeriodCount = 0;
|
||||
}
|
||||
frameCount = 0;
|
||||
surfaceTextureHelper.getHandler().postDelayed(this, CAMERA_OBSERVER_PERIOD_MS);
|
||||
}
|
||||
};
|
||||
|
||||
public CameraStatistics(
|
||||
SurfaceTextureHelper surfaceTextureHelper, CameraEventsHandler eventsHandler) {
|
||||
if (surfaceTextureHelper == null) {
|
||||
throw new IllegalArgumentException("SurfaceTextureHelper is null");
|
||||
}
|
||||
this.surfaceTextureHelper = surfaceTextureHelper;
|
||||
this.eventsHandler = eventsHandler;
|
||||
this.frameCount = 0;
|
||||
this.freezePeriodCount = 0;
|
||||
surfaceTextureHelper.getHandler().postDelayed(cameraObserver, CAMERA_OBSERVER_PERIOD_MS);
|
||||
}
|
||||
|
||||
private void checkThread() {
|
||||
if (Thread.currentThread() != surfaceTextureHelper.getHandler().getLooper().getThread()) {
|
||||
throw new IllegalStateException("Wrong thread");
|
||||
}
|
||||
}
|
||||
|
||||
public void addFrame() {
|
||||
checkThread();
|
||||
++frameCount;
|
||||
}
|
||||
|
||||
public void release() {
|
||||
surfaceTextureHelper.getHandler().removeCallbacks(cameraObserver);
|
||||
}
|
||||
}
|
||||
}
|
||||
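A hedged sketch of the switchCamera() contract defined above; the capturer is assumed to come from CameraEnumerator.createCapturer() and to be running:

  static void toggleCamera(CameraVideoCapturer capturer) {
    capturer.switchCamera(new CameraVideoCapturer.CameraSwitchHandler() {
      @Override
      public void onCameraSwitchDone(boolean isFrontCamera) {
        // Runs on an arbitrary thread; post to the UI thread before touching views.
        Logging.d("CameraSwitchSketch",
            "Switched to " + (isFrontCamera ? "front" : "back") + " camera");
      }

      @Override
      public void onCameraSwitchError(String errorDescription) {
        Logging.e("CameraSwitchSketch", "Switch failed: " + errorDescription);
      }
    });
  }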
127 sdk/android/api/org/webrtc/DataChannel.java Normal file
@@ -0,0 +1,127 @@
/*
|
||||
* Copyright 2013 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
package org.webrtc;
|
||||
|
||||
import java.nio.ByteBuffer;
|
||||
|
||||
/** Java wrapper for a C++ DataChannelInterface. */
|
||||
public class DataChannel {
|
||||
/** Java wrapper for WebIDL RTCDataChannel. */
|
||||
public static class Init {
|
||||
public boolean ordered = true;
|
||||
// Optional unsigned short in WebIDL, -1 means unspecified.
|
||||
public int maxRetransmitTimeMs = -1;
|
||||
// Optional unsigned short in WebIDL, -1 means unspecified.
|
||||
public int maxRetransmits = -1;
|
||||
public String protocol = "";
|
||||
public boolean negotiated = false;
|
||||
// Optional unsigned short in WebIDL, -1 means unspecified.
|
||||
public int id = -1;
|
||||
|
||||
public Init() {}
|
||||
|
||||
// Called only by native code.
|
||||
private Init(boolean ordered, int maxRetransmitTimeMs, int maxRetransmits, String protocol,
|
||||
boolean negotiated, int id) {
|
||||
this.ordered = ordered;
|
||||
this.maxRetransmitTimeMs = maxRetransmitTimeMs;
|
||||
this.maxRetransmits = maxRetransmits;
|
||||
this.protocol = protocol;
|
||||
this.negotiated = negotiated;
|
||||
this.id = id;
|
||||
}
|
||||
}
|
||||
|
||||
/** Java version of C++ DataBuffer. The atom of data in a DataChannel. */
|
||||
public static class Buffer {
|
||||
/** The underlying data. */
|
||||
public final ByteBuffer data;
|
||||
|
||||
/**
|
||||
* Indicates whether |data| contains UTF-8 text or "binary data"
|
||||
* (i.e. anything else).
|
||||
*/
|
||||
public final boolean binary;
|
||||
|
||||
public Buffer(ByteBuffer data, boolean binary) {
|
||||
this.data = data;
|
||||
this.binary = binary;
|
||||
}
|
||||
}
|
||||
|
||||
/** Java version of C++ DataChannelObserver. */
|
||||
public interface Observer {
|
||||
/** The data channel's bufferedAmount has changed. */
|
||||
public void onBufferedAmountChange(long previousAmount);
|
||||
/** The data channel state has changed. */
|
||||
public void onStateChange();
|
||||
/**
|
||||
* A data buffer was successfully received. NOTE: |buffer.data| will be
|
||||
* freed once this function returns so callers who want to use the data
|
||||
* asynchronously must make sure to copy it first.
|
||||
*/
|
||||
public void onMessage(Buffer buffer);
|
||||
}
|
||||
|
||||
/** Keep in sync with DataChannelInterface::DataState. */
|
||||
public enum State { CONNECTING, OPEN, CLOSING, CLOSED }
|
||||
|
||||
private final long nativeDataChannel;
|
||||
private long nativeObserver;
|
||||
|
||||
public DataChannel(long nativeDataChannel) {
|
||||
this.nativeDataChannel = nativeDataChannel;
|
||||
}
|
||||
|
||||
/** Register |observer|, replacing any previously-registered observer. */
|
||||
public void registerObserver(Observer observer) {
|
||||
if (nativeObserver != 0) {
|
||||
unregisterObserverNative(nativeObserver);
|
||||
}
|
||||
nativeObserver = registerObserverNative(observer);
|
||||
}
|
||||
private native long registerObserverNative(Observer observer);
|
||||
|
||||
/** Unregister the (only) observer. */
|
||||
public void unregisterObserver() {
|
||||
unregisterObserverNative(nativeObserver);
|
||||
}
|
||||
private native void unregisterObserverNative(long nativeObserver);
|
||||
|
||||
public native String label();
|
||||
|
||||
public native int id();
|
||||
|
||||
public native State state();
|
||||
|
||||
/**
|
||||
* Return the number of bytes of application data (UTF-8 text and binary data)
|
||||
* that have been queued using SendBuffer but have not yet been transmitted
|
||||
* to the network.
|
||||
*/
|
||||
public native long bufferedAmount();
|
||||
|
||||
/** Close the channel. */
|
||||
public native void close();
|
||||
|
||||
/** Send |data| to the remote peer; return success. */
|
||||
public boolean send(Buffer buffer) {
|
||||
// TODO(fischman): this could be cleverer about avoiding copies if the
|
||||
// ByteBuffer is direct and/or is backed by an array.
|
||||
byte[] data = new byte[buffer.data.remaining()];
|
||||
buffer.data.get(data);
|
||||
return sendNative(data, buffer.binary);
|
||||
}
|
||||
private native boolean sendNative(byte[] data, boolean binary);
|
||||
|
||||
/** Dispose of native resources attached to this channel. */
|
||||
public native void dispose();
|
||||
};
|
||||
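A minimal sketch of the Observer and send() flow above; the DataChannel instance is assumed to come from PeerConnection.createDataChannel():

  static void attachAndSend(final DataChannel channel) {
    channel.registerObserver(new DataChannel.Observer() {
      @Override
      public void onBufferedAmountChange(long previousAmount) {}

      @Override
      public void onStateChange() {
        Logging.d("DataChannelSketch", "State: " + channel.state());
      }

      @Override
      public void onMessage(DataChannel.Buffer buffer) {
        // |buffer.data| is freed when this callback returns, so copy it before using it later.
        byte[] copy = new byte[buffer.data.remaining()];
        buffer.data.get(copy);
      }
    });
    java.nio.ByteBuffer payload =
        java.nio.ByteBuffer.wrap("hello".getBytes(java.nio.charset.Charset.forName("UTF-8")));
    channel.send(new DataChannel.Buffer(payload, false /* binary */));
  }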
83 sdk/android/api/org/webrtc/DtmfSender.java Normal file
@@ -0,0 +1,83 @@
/*
|
||||
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
package org.webrtc;
|
||||
|
||||
/** Java wrapper for a C++ DtmfSenderInterface. */
|
||||
public class DtmfSender {
|
||||
final long nativeDtmfSender;
|
||||
|
||||
public DtmfSender(long nativeDtmfSender) {
|
||||
this.nativeDtmfSender = nativeDtmfSender;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return true if this DtmfSender is capable of sending DTMF. Otherwise false.
|
||||
*/
|
||||
public boolean canInsertDtmf() {
|
||||
return nativeCanInsertDtmf(nativeDtmfSender);
|
||||
}
|
||||
|
||||
/**
|
||||
* Queues a task that sends the provided DTMF tones.
|
||||
* <p>
|
||||
* If insertDtmf is called on the same object while an existing task for this
|
||||
* object to generate DTMF is still running, the previous task is canceled.
|
||||
*
|
||||
* @param tones This parameter is treated as a series of characters. The characters 0
|
||||
* through 9, A through D, #, and * generate the associated DTMF tones. The
|
||||
* characters a to d are equivalent to A to D. The character ',' indicates a
|
||||
* delay of 2 seconds before processing the next character in the tones
|
||||
* parameter. Unrecognized characters are ignored.
|
||||
* @param duration Indicates the duration in ms to use for each character passed in the tones
|
||||
* parameter. The duration cannot be more than 6000 or less than 70.
|
||||
* @param interToneGap Indicates the gap between tones in ms. Must be at least 50 ms but should be
|
||||
* as short as possible.
|
||||
* @return true on success and false on failure.
|
||||
*/
|
||||
public boolean insertDtmf(String tones, int duration, int interToneGap) {
|
||||
return nativeInsertDtmf(nativeDtmfSender, tones, duration, interToneGap);
|
||||
}
|
||||
|
||||
/**
|
||||
* @return The tones remaining to be played out
|
||||
*/
|
||||
public String tones() {
|
||||
return nativeTones(nativeDtmfSender);
|
||||
}
|
||||
|
||||
/**
|
||||
* @return The current tone duration value in ms. This value will be the value last set via the
|
||||
* insertDtmf() method, or the default value of 100 ms if insertDtmf() was never called.
|
||||
*/
|
||||
public int duration() {
|
||||
return nativeDuration(nativeDtmfSender);
|
||||
}
|
||||
|
||||
/**
|
||||
* @return The current value of the between-tone gap in ms. This value will be the value last set
|
||||
* via the insertDtmf() method, or the default value of 50 ms if insertDtmf() was never
|
||||
* called.
|
||||
*/
|
||||
public int interToneGap() {
|
||||
return nativeInterToneGap(nativeDtmfSender);
|
||||
}
|
||||
|
||||
public void dispose() {
|
||||
JniCommon.nativeReleaseRef(nativeDtmfSender);
|
||||
}
|
||||
|
||||
private static native boolean nativeCanInsertDtmf(long nativeDtmfSender);
|
||||
private static native boolean nativeInsertDtmf(
|
||||
long nativeDtmfSender, String tones, int duration, int interToneGap);
|
||||
private static native String nativeTones(long nativeDtmfSender);
|
||||
private static native int nativeDuration(long nativeDtmfSender);
|
||||
private static native int nativeInterToneGap(long nativeDtmfSender);
|
||||
};
|
||||
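A usage sketch for the wrapper above; the DtmfSender is assumed to be obtained from an audio RtpSender via its dtmf() accessor:

  static void sendDigits(DtmfSender dtmf) {
    if (!dtmf.canInsertDtmf()) {
      return; // The associated sender cannot send DTMF (e.g. no audio track attached).
    }
    // "1,2" plays tone 1, pauses 2 seconds for the comma, then plays tone 2. Per the contract
    // documented above, duration must be in [70, 6000] ms and the inter-tone gap at least 50 ms.
    dtmf.insertDtmf("1,2", 100 /* duration */, 70 /* interToneGap */);
  }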
173 sdk/android/api/org/webrtc/EglBase.java Normal file
@@ -0,0 +1,173 @@
/*
|
||||
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
package org.webrtc;
|
||||
|
||||
import android.graphics.SurfaceTexture;
|
||||
import android.view.Surface;
|
||||
|
||||
import javax.microedition.khronos.egl.EGL10;
|
||||
|
||||
/**
|
||||
* Holds EGL state and utility methods for handling an egl 1.0 EGLContext, an EGLDisplay,
|
||||
* and an EGLSurface.
|
||||
*/
|
||||
public abstract class EglBase {
|
||||
// EGL wrapper for an actual EGLContext.
|
||||
public static class Context {}
|
||||
|
||||
// According to the documentation, EGL can be used from multiple threads at the same time if each
|
||||
// thread has its own EGLContext, but in practice it deadlocks on some devices when doing this.
|
||||
// Therefore, synchronize on this global lock before calling dangerous EGL functions that might
|
||||
// deadlock. See https://bugs.chromium.org/p/webrtc/issues/detail?id=5702 for more info.
|
||||
public static final Object lock = new Object();
|
||||
|
||||
// These constants are taken from EGL14.EGL_OPENGL_ES2_BIT and EGL14.EGL_CONTEXT_CLIENT_VERSION.
|
||||
// https://android.googlesource.com/platform/frameworks/base/+/master/opengl/java/android/opengl/EGL14.java
|
||||
// This is similar to how GlSurfaceView does:
|
||||
// http://grepcode.com/file/repository.grepcode.com/java/ext/com.google.android/android/5.1.1_r1/android/opengl/GLSurfaceView.java#760
|
||||
public static final int EGL_OPENGL_ES2_BIT = 4;
|
||||
// Android-specific extension.
|
||||
public static final int EGL_RECORDABLE_ANDROID = 0x3142;
|
||||
|
||||
// clang-format off
|
||||
public static final int[] CONFIG_PLAIN = {
|
||||
EGL10.EGL_RED_SIZE, 8,
|
||||
EGL10.EGL_GREEN_SIZE, 8,
|
||||
EGL10.EGL_BLUE_SIZE, 8,
|
||||
EGL10.EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
|
||||
EGL10.EGL_NONE
|
||||
};
|
||||
public static final int[] CONFIG_RGBA = {
|
||||
EGL10.EGL_RED_SIZE, 8,
|
||||
EGL10.EGL_GREEN_SIZE, 8,
|
||||
EGL10.EGL_BLUE_SIZE, 8,
|
||||
EGL10.EGL_ALPHA_SIZE, 8,
|
||||
EGL10.EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
|
||||
EGL10.EGL_NONE
|
||||
};
|
||||
public static final int[] CONFIG_PIXEL_BUFFER = {
|
||||
EGL10.EGL_RED_SIZE, 8,
|
||||
EGL10.EGL_GREEN_SIZE, 8,
|
||||
EGL10.EGL_BLUE_SIZE, 8,
|
||||
EGL10.EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
|
||||
EGL10.EGL_SURFACE_TYPE, EGL10.EGL_PBUFFER_BIT,
|
||||
EGL10.EGL_NONE
|
||||
};
|
||||
public static final int[] CONFIG_PIXEL_RGBA_BUFFER = {
|
||||
EGL10.EGL_RED_SIZE, 8,
|
||||
EGL10.EGL_GREEN_SIZE, 8,
|
||||
EGL10.EGL_BLUE_SIZE, 8,
|
||||
EGL10.EGL_ALPHA_SIZE, 8,
|
||||
EGL10.EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
|
||||
EGL10.EGL_SURFACE_TYPE, EGL10.EGL_PBUFFER_BIT,
|
||||
EGL10.EGL_NONE
|
||||
};
|
||||
public static final int[] CONFIG_RECORDABLE = {
|
||||
EGL10.EGL_RED_SIZE, 8,
|
||||
EGL10.EGL_GREEN_SIZE, 8,
|
||||
EGL10.EGL_BLUE_SIZE, 8,
|
||||
EGL10.EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
|
||||
EGL_RECORDABLE_ANDROID, 1,
|
||||
EGL10.EGL_NONE
|
||||
};
|
||||
// clang-format on
|
||||
|
||||
/**
|
||||
* Create a new context with the specified config attributes, sharing data with |sharedContext|.
|
||||
* If |sharedContext| is null, a root context is created. This function will try to create an EGL
|
||||
* 1.4 context if possible, and an EGL 1.0 context otherwise.
|
||||
*/
|
||||
public static EglBase create(Context sharedContext, int[] configAttributes) {
|
||||
return (EglBase14.isEGL14Supported()
|
||||
&& (sharedContext == null || sharedContext instanceof EglBase14.Context))
|
||||
? new EglBase14((EglBase14.Context) sharedContext, configAttributes)
|
||||
: new EglBase10((EglBase10.Context) sharedContext, configAttributes);
|
||||
}
|
||||
|
||||
/**
|
||||
* Helper function for creating a plain root context. This function will try to create an EGL 1.4
|
||||
* context if possible, and an EGL 1.0 context otherwise.
|
||||
*/
|
||||
public static EglBase create() {
|
||||
return create(null /* shaderContext */, CONFIG_PLAIN);
|
||||
}
|
||||
|
||||
/**
|
||||
* Helper function for creating a plain context, sharing data with |sharedContext|. This function
|
||||
* will try to create an EGL 1.4 context if possible, and an EGL 1.0 context otherwise.
|
||||
*/
|
||||
public static EglBase create(Context sharedContext) {
|
||||
return create(sharedContext, CONFIG_PLAIN);
|
||||
}
|
||||
|
||||
/**
|
||||
* Explicitly create a root EGl 1.0 context with the specified config attributes.
|
||||
*/
|
||||
public static EglBase createEgl10(int[] configAttributes) {
|
||||
return new EglBase10(null /* shaderContext */, configAttributes);
|
||||
}
|
||||
|
||||
/**
|
||||
* Explicitly create a root EGl 1.0 context with the specified config attributes
|
||||
* and shared context.
|
||||
*/
|
||||
public static EglBase createEgl10(
|
||||
javax.microedition.khronos.egl.EGLContext sharedContext, int[] configAttributes) {
|
||||
return new EglBase10(new EglBase10.Context(sharedContext), configAttributes);
|
||||
}
|
||||
|
||||
/**
|
||||
* Explicitly create a root EGl 1.4 context with the specified config attributes.
|
||||
*/
|
||||
public static EglBase createEgl14(int[] configAttributes) {
|
||||
return new EglBase14(null /* shaderContext */, configAttributes);
|
||||
}
|
||||
|
||||
/**
|
||||
* Explicitly create a root EGl 1.4 context with the specified config attributes
|
||||
* and shared context.
|
||||
*/
|
||||
public static EglBase createEgl14(
|
||||
android.opengl.EGLContext sharedContext, int[] configAttributes) {
|
||||
return new EglBase14(new EglBase14.Context(sharedContext), configAttributes);
|
||||
}
|
||||
|
||||
public abstract void createSurface(Surface surface);
|
||||
|
||||
// Create EGLSurface from the Android SurfaceTexture.
|
||||
public abstract void createSurface(SurfaceTexture surfaceTexture);
|
||||
|
||||
// Create dummy 1x1 pixel buffer surface so the context can be made current.
|
||||
public abstract void createDummyPbufferSurface();
|
||||
|
||||
public abstract void createPbufferSurface(int width, int height);
|
||||
|
||||
public abstract Context getEglBaseContext();
|
||||
|
||||
public abstract boolean hasSurface();
|
||||
|
||||
public abstract int surfaceWidth();
|
||||
|
||||
public abstract int surfaceHeight();
|
||||
|
||||
public abstract void releaseSurface();
|
||||
|
||||
public abstract void release();
|
||||
|
||||
public abstract void makeCurrent();
|
||||
|
||||
// Detach the current EGL context, so that it can be made current on another thread.
|
||||
public abstract void detachCurrent();
|
||||
|
||||
public abstract void swapBuffers();
|
||||
|
||||
public abstract void swapBuffers(long presentationTimeStampNs);
|
||||
}
|
||||
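A minimal lifecycle sketch for the factory methods above, creating a root context that other components can share:

  static void runOffscreen() {
    EglBase eglBase = EglBase.create(); // Root EGL 1.4 context if supported, EGL 1.0 otherwise.
    eglBase.createDummyPbufferSurface(); // 1x1 pbuffer so the context can be made current.
    eglBase.makeCurrent();
    // ... issue GL calls here, or hand eglBase.getEglBaseContext() to renderers/decoders ...
    eglBase.detachCurrent();
    eglBase.release();
  }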
685 sdk/android/api/org/webrtc/EglRenderer.java Normal file
@@ -0,0 +1,685 @@
/*
|
||||
* Copyright 2016 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
package org.webrtc;
|
||||
|
||||
import android.graphics.Bitmap;
|
||||
import android.graphics.Matrix;
|
||||
import android.graphics.SurfaceTexture;
|
||||
import android.opengl.GLES20;
|
||||
import android.os.Handler;
|
||||
import android.os.HandlerThread;
|
||||
import android.os.Looper;
|
||||
import android.view.Surface;
|
||||
import java.nio.ByteBuffer;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Iterator;
|
||||
import java.util.Locale;
|
||||
import java.util.concurrent.CountDownLatch;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
|
||||
/**
|
||||
* Implements org.webrtc.VideoRenderer.Callbacks by displaying the video stream on an EGL Surface.
|
||||
* This class is intended to be used as a helper class for rendering on SurfaceViews and
|
||||
* TextureViews.
|
||||
*/
|
||||
public class EglRenderer implements VideoRenderer.Callbacks, VideoSink {
|
||||
private static final String TAG = "EglRenderer";
|
||||
private static final long LOG_INTERVAL_SEC = 4;
|
||||
private static final int MAX_SURFACE_CLEAR_COUNT = 3;
|
||||
|
||||
public interface FrameListener { void onFrame(Bitmap frame); }
|
||||
|
||||
private static class FrameListenerAndParams {
|
||||
public final FrameListener listener;
|
||||
public final float scale;
|
||||
public final RendererCommon.GlDrawer drawer;
|
||||
public final boolean applyFpsReduction;
|
||||
|
||||
public FrameListenerAndParams(FrameListener listener, float scale,
|
||||
RendererCommon.GlDrawer drawer, boolean applyFpsReduction) {
|
||||
this.listener = listener;
|
||||
this.scale = scale;
|
||||
this.drawer = drawer;
|
||||
this.applyFpsReduction = applyFpsReduction;
|
||||
}
|
||||
}
|
||||
|
||||
private class EglSurfaceCreation implements Runnable {
|
||||
private Object surface;
|
||||
|
||||
public synchronized void setSurface(Object surface) {
|
||||
this.surface = surface;
|
||||
}
|
||||
|
||||
@Override
|
||||
public synchronized void run() {
|
||||
if (surface != null && eglBase != null && !eglBase.hasSurface()) {
|
||||
if (surface instanceof Surface) {
|
||||
eglBase.createSurface((Surface) surface);
|
||||
} else if (surface instanceof SurfaceTexture) {
|
||||
eglBase.createSurface((SurfaceTexture) surface);
|
||||
} else {
|
||||
throw new IllegalStateException("Invalid surface: " + surface);
|
||||
}
|
||||
eglBase.makeCurrent();
|
||||
// Necessary for YUV frames with odd width.
|
||||
GLES20.glPixelStorei(GLES20.GL_UNPACK_ALIGNMENT, 1);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private final String name;
|
||||
|
||||
// |renderThreadHandler| is a handler for communicating with |renderThread|, and is synchronized
|
||||
// on |handlerLock|.
|
||||
private final Object handlerLock = new Object();
|
||||
private Handler renderThreadHandler;
|
||||
|
||||
private final ArrayList<FrameListenerAndParams> frameListeners = new ArrayList<>();
|
||||
|
||||
// Variables for fps reduction.
|
||||
private final Object fpsReductionLock = new Object();
|
||||
// Time for when next frame should be rendered.
|
||||
private long nextFrameTimeNs;
|
||||
// Minimum duration between frames when fps reduction is active, or -1 if video is completely
|
||||
// paused.
|
||||
private long minRenderPeriodNs;
|
||||
|
||||
// EGL and GL resources for drawing YUV/OES textures. After initilization, these are only accessed
|
||||
// from the render thread.
|
||||
private EglBase eglBase;
|
||||
private final VideoFrameDrawer frameDrawer = new VideoFrameDrawer();
|
||||
private RendererCommon.GlDrawer drawer;
|
||||
private final Matrix drawMatrix = new Matrix();
|
||||
|
||||
// Pending frame to render. Serves as a queue with size 1. Synchronized on |frameLock|.
|
||||
private final Object frameLock = new Object();
|
||||
private VideoFrame pendingFrame;
|
||||
|
||||
// These variables are synchronized on |layoutLock|.
|
||||
private final Object layoutLock = new Object();
|
||||
private float layoutAspectRatio;
|
||||
// If true, mirrors the video stream horizontally.
|
||||
private boolean mirror;
|
||||
|
||||
// These variables are synchronized on |statisticsLock|.
|
||||
private final Object statisticsLock = new Object();
|
||||
// Total number of video frames received in renderFrame() call.
|
||||
private int framesReceived;
|
||||
// Number of video frames dropped by renderFrame() because previous frame has not been rendered
|
||||
// yet.
|
||||
private int framesDropped;
|
||||
// Number of rendered video frames.
|
||||
private int framesRendered;
|
||||
// Start time for counting these statistics, or 0 if we haven't started measuring yet.
|
||||
private long statisticsStartTimeNs;
|
||||
// Time in ns spent in renderFrameOnRenderThread() function.
|
||||
private long renderTimeNs;
|
||||
// Time in ns spent by the render thread in the swapBuffers() function.
|
||||
private long renderSwapBufferTimeNs;
|
||||
|
||||
// Used for bitmap capturing.
|
||||
private GlTextureFrameBuffer bitmapTextureFramebuffer;
|
||||
|
||||
private final Runnable logStatisticsRunnable = new Runnable() {
|
||||
@Override
|
||||
public void run() {
|
||||
logStatistics();
|
||||
synchronized (handlerLock) {
|
||||
if (renderThreadHandler != null) {
|
||||
renderThreadHandler.removeCallbacks(logStatisticsRunnable);
|
||||
renderThreadHandler.postDelayed(
|
||||
logStatisticsRunnable, TimeUnit.SECONDS.toMillis(LOG_INTERVAL_SEC));
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
private final EglSurfaceCreation eglSurfaceCreationRunnable = new EglSurfaceCreation();
|
||||
|
||||
/**
|
||||
* Standard constructor. The name will be used for the render thread name and included when
|
||||
* logging. In order to render something, you must first call init() and createEglSurface.
|
||||
*/
|
||||
public EglRenderer(String name) {
|
||||
this.name = name;
|
||||
}
|
||||
|
||||
/**
|
||||
* Initialize this class, sharing resources with |sharedContext|. The custom |drawer| will be used
|
||||
* for drawing frames on the EGLSurface. This class is responsible for calling release() on
|
||||
* |drawer|. It is allowed to call init() to reinitialize the renderer after a previous
|
||||
* init()/release() cycle.
|
||||
*/
|
||||
public void init(final EglBase.Context sharedContext, final int[] configAttributes,
|
||||
RendererCommon.GlDrawer drawer) {
|
||||
synchronized (handlerLock) {
|
||||
if (renderThreadHandler != null) {
|
||||
throw new IllegalStateException(name + "Already initialized");
|
||||
}
|
||||
logD("Initializing EglRenderer");
|
||||
this.drawer = drawer;
|
||||
|
||||
final HandlerThread renderThread = new HandlerThread(name + "EglRenderer");
|
||||
renderThread.start();
|
||||
renderThreadHandler = new Handler(renderThread.getLooper());
|
||||
// Create EGL context on the newly created render thread. It should be possibly to create the
|
||||
// context on this thread and make it current on the render thread, but this causes failure on
|
||||
// some Marvel based JB devices. https://bugs.chromium.org/p/webrtc/issues/detail?id=6350.
|
||||
ThreadUtils.invokeAtFrontUninterruptibly(renderThreadHandler, () -> {
|
||||
// If sharedContext is null, then texture frames are disabled. This is typically for old
|
||||
// devices that might not be fully spec compliant, so force EGL 1.0 since EGL 1.4 has
|
||||
// caused trouble on some weird devices.
|
||||
if (sharedContext == null) {
|
||||
logD("EglBase10.create context");
|
||||
eglBase = EglBase.createEgl10(configAttributes);
|
||||
} else {
|
||||
logD("EglBase.create shared context");
|
||||
eglBase = EglBase.create(sharedContext, configAttributes);
|
||||
}
|
||||
});
|
||||
renderThreadHandler.post(eglSurfaceCreationRunnable);
|
||||
final long currentTimeNs = System.nanoTime();
|
||||
resetStatistics(currentTimeNs);
|
||||
renderThreadHandler.postDelayed(
|
||||
logStatisticsRunnable, TimeUnit.SECONDS.toMillis(LOG_INTERVAL_SEC));
|
||||
}
|
||||
}
|
||||
|
||||
public void createEglSurface(Surface surface) {
|
||||
createEglSurfaceInternal(surface);
|
||||
}
|
||||
|
||||
public void createEglSurface(SurfaceTexture surfaceTexture) {
|
||||
createEglSurfaceInternal(surfaceTexture);
|
||||
}
|
||||
|
||||
private void createEglSurfaceInternal(Object surface) {
|
||||
eglSurfaceCreationRunnable.setSurface(surface);
|
||||
postToRenderThread(eglSurfaceCreationRunnable);
|
||||
}
|
||||
|
||||
/**
|
||||
* Block until any pending frame is returned and all GL resources released, even if an interrupt
|
||||
* occurs. If an interrupt occurs during release(), the interrupt flag will be set. This function
|
||||
* should be called before the Activity is destroyed and the EGLContext is still valid. If you
|
||||
* don't call this function, the GL resources might leak.
|
||||
*/
|
||||
public void release() {
|
||||
logD("Releasing.");
|
||||
final CountDownLatch eglCleanupBarrier = new CountDownLatch(1);
|
||||
synchronized (handlerLock) {
|
||||
if (renderThreadHandler == null) {
|
||||
logD("Already released");
|
||||
return;
|
||||
}
|
||||
renderThreadHandler.removeCallbacks(logStatisticsRunnable);
|
||||
// Release EGL and GL resources on render thread.
|
||||
renderThreadHandler.postAtFrontOfQueue(() -> {
|
||||
if (drawer != null) {
|
||||
drawer.release();
|
||||
drawer = null;
|
||||
}
|
||||
frameDrawer.release();
|
||||
if (bitmapTextureFramebuffer != null) {
|
||||
bitmapTextureFramebuffer.release();
|
||||
bitmapTextureFramebuffer = null;
|
||||
}
|
||||
if (eglBase != null) {
|
||||
logD("eglBase detach and release.");
|
||||
eglBase.detachCurrent();
|
||||
eglBase.release();
|
||||
eglBase = null;
|
||||
}
|
||||
eglCleanupBarrier.countDown();
|
||||
});
|
||||
final Looper renderLooper = renderThreadHandler.getLooper();
|
||||
// TODO(magjed): Replace this post() with renderLooper.quitSafely() when API support >= 18.
|
||||
renderThreadHandler.post(() -> {
|
||||
logD("Quitting render thread.");
|
||||
renderLooper.quit();
|
||||
});
|
||||
// Don't accept any more frames or messages to the render thread.
|
||||
renderThreadHandler = null;
|
||||
}
|
||||
// Make sure the EGL/GL cleanup posted above is executed.
|
||||
ThreadUtils.awaitUninterruptibly(eglCleanupBarrier);
|
||||
synchronized (frameLock) {
|
||||
if (pendingFrame != null) {
|
||||
pendingFrame.release();
|
||||
pendingFrame = null;
|
||||
}
|
||||
}
|
||||
logD("Releasing done.");
|
||||
}
|
||||
|
||||
/**
|
||||
* Reset the statistics logged in logStatistics().
|
||||
*/
|
||||
private void resetStatistics(long currentTimeNs) {
|
||||
synchronized (statisticsLock) {
|
||||
statisticsStartTimeNs = currentTimeNs;
|
||||
framesReceived = 0;
|
||||
framesDropped = 0;
|
||||
framesRendered = 0;
|
||||
renderTimeNs = 0;
|
||||
renderSwapBufferTimeNs = 0;
|
||||
}
|
||||
}
|
||||
|
||||
public void printStackTrace() {
|
||||
synchronized (handlerLock) {
|
||||
final Thread renderThread =
|
||||
(renderThreadHandler == null) ? null : renderThreadHandler.getLooper().getThread();
|
||||
if (renderThread != null) {
|
||||
final StackTraceElement[] renderStackTrace = renderThread.getStackTrace();
|
||||
if (renderStackTrace.length > 0) {
|
||||
logD("EglRenderer stack trace:");
|
||||
for (StackTraceElement traceElem : renderStackTrace) {
|
||||
logD(traceElem.toString());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Set if the video stream should be mirrored or not.
|
||||
*/
|
||||
public void setMirror(final boolean mirror) {
|
||||
logD("setMirror: " + mirror);
|
||||
synchronized (layoutLock) {
|
||||
this.mirror = mirror;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Set layout aspect ratio. This is used to crop frames when rendering to avoid stretched video.
|
||||
* Set this to 0 to disable cropping.
|
||||
*/
|
||||
public void setLayoutAspectRatio(float layoutAspectRatio) {
|
||||
logD("setLayoutAspectRatio: " + layoutAspectRatio);
|
||||
synchronized (layoutLock) {
|
||||
this.layoutAspectRatio = layoutAspectRatio;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Limit render framerate.
|
||||
*
|
||||
* @param fps Limit render framerate to this value, or use Float.POSITIVE_INFINITY to disable fps
|
||||
* reduction.
|
||||
*/
|
||||
public void setFpsReduction(float fps) {
|
||||
logD("setFpsReduction: " + fps);
|
||||
synchronized (fpsReductionLock) {
|
||||
final long previousRenderPeriodNs = minRenderPeriodNs;
|
||||
if (fps <= 0) {
|
||||
minRenderPeriodNs = Long.MAX_VALUE;
|
||||
} else {
|
||||
minRenderPeriodNs = (long) (TimeUnit.SECONDS.toNanos(1) / fps);
|
||||
}
|
||||
if (minRenderPeriodNs != previousRenderPeriodNs) {
|
||||
// Fps reduction changed - reset frame time.
|
||||
nextFrameTimeNs = System.nanoTime();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public void disableFpsReduction() {
|
||||
setFpsReduction(Float.POSITIVE_INFINITY /* fps */);
|
||||
}
|
||||
|
||||
public void pauseVideo() {
|
||||
setFpsReduction(0 /* fps */);
|
||||
}
|
||||
|
||||
/**
|
||||
* Register a callback to be invoked when a new video frame has been received. This version uses
|
||||
* the drawer of the EglRenderer that was passed in init.
|
||||
*
|
||||
* @param listener The callback to be invoked. The callback will be invoked on the render thread.
|
||||
* It should be lightweight and must not call removeFrameListener.
|
||||
* @param scale The scale of the Bitmap passed to the callback, or 0 if no Bitmap is
|
||||
* required.
|
||||
*/
|
||||
public void addFrameListener(final FrameListener listener, final float scale) {
|
||||
addFrameListener(listener, scale, null, false /* applyFpsReduction */);
|
||||
}
|
||||
|
||||
/**
|
||||
* Register a callback to be invoked when a new video frame has been received.
|
||||
*
|
||||
* @param listener The callback to be invoked. The callback will be invoked on the render thread.
|
||||
* It should be lightweight and must not call removeFrameListener.
|
||||
* @param scale The scale of the Bitmap passed to the callback, or 0 if no Bitmap is
|
||||
* required.
|
||||
* @param drawer Custom drawer to use for this frame listener or null to use the default one.
|
||||
*/
|
||||
public void addFrameListener(
|
||||
final FrameListener listener, final float scale, final RendererCommon.GlDrawer drawerParam) {
|
||||
addFrameListener(listener, scale, drawerParam, false /* applyFpsReduction */);
|
||||
}
|
||||
|
||||
/**
|
||||
* Register a callback to be invoked when a new video frame has been received.
|
||||
*
|
||||
* @param listener The callback to be invoked. The callback will be invoked on the render thread.
|
||||
* It should be lightweight and must not call removeFrameListener.
|
||||
* @param scale The scale of the Bitmap passed to the callback, or 0 if no Bitmap is
|
||||
* required.
|
||||
* @param drawer Custom drawer to use for this frame listener or null to use the default one.
|
||||
* @param applyFpsReduction This callback will not be called for frames that have been dropped by
|
||||
* FPS reduction.
|
||||
*/
|
||||
public void addFrameListener(final FrameListener listener, final float scale,
|
||||
final RendererCommon.GlDrawer drawerParam, final boolean applyFpsReduction) {
|
||||
postToRenderThread(() -> {
|
||||
final RendererCommon.GlDrawer listenerDrawer = drawerParam == null ? drawer : drawerParam;
|
||||
frameListeners.add(
|
||||
new FrameListenerAndParams(listener, scale, listenerDrawer, applyFpsReduction));
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Remove any pending callback that was added with addFrameListener. If the callback is not in
|
||||
* the queue, nothing happens. It is ensured that callback won't be called after this method
|
||||
* returns.
|
||||
*
|
||||
* @param runnable The callback to remove.
|
||||
*/
|
||||
public void removeFrameListener(final FrameListener listener) {
|
||||
if (Thread.currentThread() == renderThreadHandler.getLooper().getThread()) {
|
||||
throw new RuntimeException("removeFrameListener must not be called on the render thread.");
|
||||
}
|
||||
final CountDownLatch latch = new CountDownLatch(1);
|
||||
postToRenderThread(() -> {
|
||||
latch.countDown();
|
||||
final Iterator<FrameListenerAndParams> iter = frameListeners.iterator();
|
||||
while (iter.hasNext()) {
|
||||
if (iter.next().listener == listener) {
|
||||
iter.remove();
|
||||
}
|
||||
}
|
||||
});
|
||||
ThreadUtils.awaitUninterruptibly(latch);
|
||||
}
|
||||
|
||||
// VideoRenderer.Callbacks interface.
|
||||
@Override
|
||||
public void renderFrame(VideoRenderer.I420Frame frame) {
|
||||
VideoFrame videoFrame = frame.toVideoFrame();
|
||||
onFrame(videoFrame);
|
||||
videoFrame.release();
|
||||
}
|
||||
|
||||
// VideoSink interface.
|
||||
@Override
|
||||
public void onFrame(VideoFrame frame) {
|
||||
synchronized (statisticsLock) {
|
||||
++framesReceived;
|
||||
}
|
||||
final boolean dropOldFrame;
|
||||
synchronized (handlerLock) {
|
||||
if (renderThreadHandler == null) {
|
||||
logD("Dropping frame - Not initialized or already released.");
|
||||
return;
|
||||
}
|
||||
synchronized (frameLock) {
|
||||
dropOldFrame = (pendingFrame != null);
|
||||
if (dropOldFrame) {
|
||||
pendingFrame.release();
|
||||
}
|
||||
pendingFrame = frame;
|
||||
pendingFrame.retain();
|
||||
renderThreadHandler.post(this ::renderFrameOnRenderThread);
|
||||
}
|
||||
}
|
||||
if (dropOldFrame) {
|
||||
synchronized (statisticsLock) {
|
||||
++framesDropped;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Release EGL surface. This function will block until the EGL surface is released.
|
||||
*/
|
||||
public void releaseEglSurface(final Runnable completionCallback) {
|
||||
// Ensure that the render thread is no longer touching the Surface before returning from this
|
||||
// function.
|
||||
eglSurfaceCreationRunnable.setSurface(null /* surface */);
|
||||
synchronized (handlerLock) {
|
||||
if (renderThreadHandler != null) {
|
||||
renderThreadHandler.removeCallbacks(eglSurfaceCreationRunnable);
|
||||
renderThreadHandler.postAtFrontOfQueue(() -> {
|
||||
if (eglBase != null) {
|
||||
eglBase.detachCurrent();
|
||||
eglBase.releaseSurface();
|
||||
}
|
||||
completionCallback.run();
|
||||
});
|
||||
return;
|
||||
}
|
||||
}
|
||||
completionCallback.run();
|
||||
}
|
||||
|
||||
/**
|
||||
* Private helper function to post tasks safely.
|
||||
*/
|
||||
private void postToRenderThread(Runnable runnable) {
|
||||
synchronized (handlerLock) {
|
||||
if (renderThreadHandler != null) {
|
||||
renderThreadHandler.post(runnable);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private void clearSurfaceOnRenderThread(float r, float g, float b, float a) {
|
||||
if (eglBase != null && eglBase.hasSurface()) {
|
||||
logD("clearSurface");
|
||||
GLES20.glClearColor(r, g, b, a);
|
||||
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
|
||||
eglBase.swapBuffers();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Post a task to clear the surface to a transparent uniform color.
|
||||
*/
|
||||
public void clearImage() {
|
||||
clearImage(0 /* red */, 0 /* green */, 0 /* blue */, 0 /* alpha */);
|
||||
}
|
||||
|
||||
/**
|
||||
* Post a task to clear the surface to a specific color.
|
||||
*/
|
||||
public void clearImage(final float r, final float g, final float b, final float a) {
|
||||
synchronized (handlerLock) {
|
||||
if (renderThreadHandler == null) {
|
||||
return;
|
||||
}
|
||||
renderThreadHandler.postAtFrontOfQueue(() -> clearSurfaceOnRenderThread(r, g, b, a));
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Renders and releases |pendingFrame|.
|
||||
*/
|
||||
private void renderFrameOnRenderThread() {
|
||||
// Fetch and render |pendingFrame|.
|
||||
final VideoFrame frame;
|
||||
synchronized (frameLock) {
|
||||
if (pendingFrame == null) {
|
||||
return;
|
||||
}
|
||||
frame = pendingFrame;
|
||||
pendingFrame = null;
|
||||
}
|
||||
if (eglBase == null || !eglBase.hasSurface()) {
|
||||
logD("Dropping frame - No surface");
|
||||
frame.release();
|
||||
return;
|
||||
}
|
||||
// Check if fps reduction is active.
|
||||
final boolean shouldRenderFrame;
|
||||
synchronized (fpsReductionLock) {
|
||||
if (minRenderPeriodNs == Long.MAX_VALUE) {
|
||||
// Rendering is paused.
|
||||
shouldRenderFrame = false;
|
||||
} else if (minRenderPeriodNs <= 0) {
|
||||
// FPS reduction is disabled.
|
||||
shouldRenderFrame = true;
|
||||
} else {
|
||||
final long currentTimeNs = System.nanoTime();
|
||||
if (currentTimeNs < nextFrameTimeNs) {
|
||||
logD("Skipping frame rendering - fps reduction is active.");
|
||||
shouldRenderFrame = false;
|
||||
} else {
|
||||
nextFrameTimeNs += minRenderPeriodNs;
|
||||
// The time for the next frame should always be in the future.
|
||||
nextFrameTimeNs = Math.max(nextFrameTimeNs, currentTimeNs);
|
||||
shouldRenderFrame = true;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
final long startTimeNs = System.nanoTime();
|
||||
|
||||
final float frameAspectRatio = frame.getRotatedWidth() / (float) frame.getRotatedHeight();
|
||||
final float drawnAspectRatio;
|
||||
synchronized (layoutLock) {
|
||||
drawnAspectRatio = layoutAspectRatio != 0f ? layoutAspectRatio : frameAspectRatio;
|
||||
}
|
||||
|
||||
final float scaleX;
|
||||
final float scaleY;
|
||||
|
||||
if (frameAspectRatio > drawnAspectRatio) {
|
||||
scaleX = drawnAspectRatio / frameAspectRatio;
|
||||
scaleY = 1f;
|
||||
} else {
|
||||
scaleX = 1f;
|
||||
scaleY = frameAspectRatio / drawnAspectRatio;
|
||||
}
|
||||
|
||||
drawMatrix.reset();
|
||||
drawMatrix.preTranslate(0.5f, 0.5f);
|
||||
if (mirror)
|
||||
drawMatrix.preScale(-1f, 1f);
|
||||
drawMatrix.preScale(scaleX, scaleY);
|
||||
drawMatrix.preTranslate(-0.5f, -0.5f);
|
||||
|
||||
if (shouldRenderFrame) {
|
||||
GLES20.glClearColor(0 /* red */, 0 /* green */, 0 /* blue */, 0 /* alpha */);
|
||||
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
|
||||
frameDrawer.drawFrame(frame, drawer, drawMatrix, 0 /* viewportX */, 0 /* viewportY */,
|
||||
eglBase.surfaceWidth(), eglBase.surfaceHeight());
|
||||
|
||||
final long swapBuffersStartTimeNs = System.nanoTime();
|
||||
eglBase.swapBuffers();
|
||||
|
||||
final long currentTimeNs = System.nanoTime();
|
||||
synchronized (statisticsLock) {
|
||||
++framesRendered;
|
||||
renderTimeNs += (currentTimeNs - startTimeNs);
|
||||
renderSwapBufferTimeNs += (currentTimeNs - swapBuffersStartTimeNs);
|
||||
}
|
||||
}
|
||||
|
||||
notifyCallbacks(frame, shouldRenderFrame);
|
||||
frame.release();
|
||||
}
|
||||
|
||||
private void notifyCallbacks(VideoFrame frame, boolean wasRendered) {
|
||||
if (frameListeners.isEmpty())
|
||||
return;
|
||||
|
||||
drawMatrix.reset();
|
||||
drawMatrix.preTranslate(0.5f, 0.5f);
|
||||
if (mirror)
|
||||
drawMatrix.preScale(-1f, 1f);
|
||||
drawMatrix.preScale(1f, -1f); // We want the output to be upside down for Bitmap.
|
||||
drawMatrix.preTranslate(-0.5f, -0.5f);
|
||||
|
||||
Iterator<FrameListenerAndParams> it = frameListeners.iterator();
|
||||
while (it.hasNext()) {
|
||||
FrameListenerAndParams listenerAndParams = it.next();
|
||||
if (!wasRendered && listenerAndParams.applyFpsReduction) {
|
||||
continue;
|
||||
}
|
||||
it.remove();
|
||||
|
||||
final int scaledWidth = (int) (listenerAndParams.scale * frame.getRotatedWidth());
|
||||
final int scaledHeight = (int) (listenerAndParams.scale * frame.getRotatedHeight());
|
||||
|
||||
if (scaledWidth == 0 || scaledHeight == 0) {
|
||||
listenerAndParams.listener.onFrame(null);
|
||||
continue;
|
||||
}
|
||||
|
||||
if (bitmapTextureFramebuffer == null) {
|
||||
bitmapTextureFramebuffer = new GlTextureFrameBuffer(GLES20.GL_RGBA);
|
||||
}
|
||||
bitmapTextureFramebuffer.setSize(scaledWidth, scaledHeight);
|
||||
|
||||
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, bitmapTextureFramebuffer.getFrameBufferId());
|
||||
GLES20.glFramebufferTexture2D(GLES20.GL_FRAMEBUFFER, GLES20.GL_COLOR_ATTACHMENT0,
|
||||
GLES20.GL_TEXTURE_2D, bitmapTextureFramebuffer.getTextureId(), 0);
|
||||
|
||||
GLES20.glClearColor(0 /* red */, 0 /* green */, 0 /* blue */, 0 /* alpha */);
|
||||
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
|
||||
frameDrawer.drawFrame(frame, listenerAndParams.drawer, drawMatrix, 0 /* viewportX */,
|
||||
0 /* viewportY */, scaledWidth, scaledHeight);
|
||||
|
||||
final ByteBuffer bitmapBuffer = ByteBuffer.allocateDirect(scaledWidth * scaledHeight * 4);
|
||||
GLES20.glViewport(0, 0, scaledWidth, scaledHeight);
|
||||
GLES20.glReadPixels(
|
||||
0, 0, scaledWidth, scaledHeight, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, bitmapBuffer);
|
||||
|
||||
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
|
||||
GlUtil.checkNoGLES2Error("EglRenderer.notifyCallbacks");
|
||||
|
||||
final Bitmap bitmap = Bitmap.createBitmap(scaledWidth, scaledHeight, Bitmap.Config.ARGB_8888);
|
||||
bitmap.copyPixelsFromBuffer(bitmapBuffer);
|
||||
listenerAndParams.listener.onFrame(bitmap);
|
||||
}
|
||||
}
|
||||
|
||||
private String averageTimeAsString(long sumTimeNs, int count) {
|
||||
return (count <= 0) ? "NA" : TimeUnit.NANOSECONDS.toMicros(sumTimeNs / count) + " μs";
|
||||
}
|
||||
|
||||
private void logStatistics() {
|
||||
final long currentTimeNs = System.nanoTime();
|
||||
synchronized (statisticsLock) {
|
||||
final long elapsedTimeNs = currentTimeNs - statisticsStartTimeNs;
|
||||
if (elapsedTimeNs <= 0) {
|
||||
return;
|
||||
}
|
||||
final float renderFps = framesRendered * TimeUnit.SECONDS.toNanos(1) / (float) elapsedTimeNs;
|
||||
logD("Duration: " + TimeUnit.NANOSECONDS.toMillis(elapsedTimeNs) + " ms."
|
||||
+ " Frames received: " + framesReceived + "."
|
||||
+ " Dropped: " + framesDropped + "."
|
||||
+ " Rendered: " + framesRendered + "."
|
||||
+ " Render fps: " + String.format(Locale.US, "%.1f", renderFps) + "."
|
||||
+ " Average render time: " + averageTimeAsString(renderTimeNs, framesRendered) + "."
|
||||
+ " Average swapBuffer time: "
|
||||
+ averageTimeAsString(renderSwapBufferTimeNs, framesRendered) + ".");
|
||||
resetStatistics(currentTimeNs);
|
||||
}
|
||||
}
|
||||
|
||||
private void logD(String string) {
|
||||
Logging.d(TAG, name + string);
|
||||
}
|
||||
}
|
||||
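A hedged sketch of wiring the renderer above to an output Surface and grabbing a scaled Bitmap; GlRectDrawer is assumed to be the default drawer available in this SDK:

  static void startRendering(EglBase.Context sharedContext, android.view.Surface outputSurface) {
    final EglRenderer renderer = new EglRenderer("sketch");
    renderer.init(sharedContext, EglBase.CONFIG_PLAIN, new GlRectDrawer());
    renderer.createEglSurface(outputSurface);
    renderer.setMirror(true);
    renderer.setFpsReduction(15 /* fps */);
    // Listeners are removed once a frame has been delivered (see notifyCallbacks above), so
    // re-add the listener if periodic snapshots are needed.
    renderer.addFrameListener(
        bitmap
        -> Logging.d("SnapshotSketch", "Bitmap " + bitmap.getWidth() + "x" + bitmap.getHeight()),
        0.5f /* scale */);
  }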
137 sdk/android/api/org/webrtc/EncodedImage.java Normal file
@@ -0,0 +1,137 @@
/*
|
||||
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
package org.webrtc;
|
||||
|
||||
import java.nio.ByteBuffer;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
|
||||
/**
|
||||
* An encoded frame from a video stream. Used as an input for decoders and as an output for
|
||||
* encoders.
|
||||
*/
|
||||
public class EncodedImage {
|
||||
// Must be kept in sync with common_types.h FrameType.
|
||||
public enum FrameType {
|
||||
EmptyFrame(0),
|
||||
VideoFrameKey(3),
|
||||
VideoFrameDelta(4);
|
||||
|
||||
private final int nativeIndex;
|
||||
|
||||
private FrameType(int nativeIndex) {
|
||||
this.nativeIndex = nativeIndex;
|
||||
}
|
||||
|
||||
public int getNative() {
|
||||
return nativeIndex;
|
||||
}
|
||||
|
||||
public static FrameType fromNative(int nativeIndex) {
|
||||
for (FrameType type : FrameType.values()) {
|
||||
if (type.nativeIndex == nativeIndex) {
|
||||
return type;
|
||||
}
|
||||
}
|
||||
throw new IllegalArgumentException("Unknown native frame type: " + nativeIndex);
|
||||
}
|
||||
}
|
||||
|
||||
public final ByteBuffer buffer;
|
||||
public final int encodedWidth;
|
||||
public final int encodedHeight;
|
||||
public final long captureTimeMs; // Deprecated
|
||||
public final long captureTimeNs;
|
||||
public final FrameType frameType;
|
||||
public final int rotation;
|
||||
public final boolean completeFrame;
|
||||
public final Integer qp;
|
||||
|
||||
private EncodedImage(ByteBuffer buffer, int encodedWidth, int encodedHeight, long captureTimeNs,
|
||||
FrameType frameType, int rotation, boolean completeFrame, Integer qp) {
|
||||
this.buffer = buffer;
|
||||
this.encodedWidth = encodedWidth;
|
||||
this.encodedHeight = encodedHeight;
|
||||
this.captureTimeMs = TimeUnit.NANOSECONDS.toMillis(captureTimeNs);
|
||||
this.captureTimeNs = captureTimeNs;
|
||||
this.frameType = frameType;
|
||||
this.rotation = rotation;
|
||||
this.completeFrame = completeFrame;
|
||||
this.qp = qp;
|
||||
}
|
||||
|
||||
public static Builder builder() {
|
||||
return new Builder();
|
||||
}
|
||||
|
||||
public static class Builder {
|
||||
private ByteBuffer buffer;
|
||||
private int encodedWidth;
|
||||
private int encodedHeight;
|
||||
private long captureTimeNs;
|
||||
private EncodedImage.FrameType frameType;
|
||||
private int rotation;
|
||||
private boolean completeFrame;
|
||||
private Integer qp;
|
||||
|
||||
private Builder() {}
|
||||
|
||||
public Builder setBuffer(ByteBuffer buffer) {
|
||||
this.buffer = buffer;
|
||||
return this;
|
||||
}
|
||||
|
||||
public Builder setEncodedWidth(int encodedWidth) {
|
||||
this.encodedWidth = encodedWidth;
|
||||
return this;
|
||||
}
|
||||
|
||||
public Builder setEncodedHeight(int encodedHeight) {
|
||||
this.encodedHeight = encodedHeight;
|
||||
return this;
|
||||
}
|
||||
|
||||
@Deprecated
|
||||
public Builder setCaptureTimeMs(long captureTimeMs) {
|
||||
this.captureTimeNs = TimeUnit.MILLISECONDS.toNanos(captureTimeMs);
|
||||
return this;
|
||||
}
|
||||
|
||||
public Builder setCaptureTimeNs(long captureTimeNs) {
|
||||
this.captureTimeNs = captureTimeNs;
|
||||
return this;
|
||||
}
|
||||
|
||||
public Builder setFrameType(EncodedImage.FrameType frameType) {
|
||||
this.frameType = frameType;
|
||||
return this;
|
||||
}
|
||||
|
||||
public Builder setRotation(int rotation) {
|
||||
this.rotation = rotation;
|
||||
return this;
|
||||
}
|
||||
|
||||
public Builder setCompleteFrame(boolean completeFrame) {
|
||||
this.completeFrame = completeFrame;
|
||||
return this;
|
||||
}
|
||||
|
||||
public Builder setQp(Integer qp) {
|
||||
this.qp = qp;
|
||||
return this;
|
||||
}
|
||||
|
||||
public EncodedImage createEncodedImage() {
|
||||
return new EncodedImage(buffer, encodedWidth, encodedHeight, captureTimeNs, frameType,
|
||||
rotation, completeFrame, qp);
|
||||
}
|
||||
}
|
||||
}
|
||||
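A short sketch of the builder above, as an encoder callback might use it; encodedBytes is a hypothetical payload buffer:

  static EncodedImage wrapKeyFrame(byte[] encodedBytes, int width, int height) {
    return EncodedImage.builder()
        .setBuffer(java.nio.ByteBuffer.wrap(encodedBytes))
        .setEncodedWidth(width)
        .setEncodedHeight(height)
        .setCaptureTimeNs(System.nanoTime())
        .setFrameType(EncodedImage.FrameType.VideoFrameKey)
        .setRotation(0)
        .setCompleteFrame(true)
        .setQp(null) // QP unknown in this sketch.
        .createEncodedImage();
  }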
215 sdk/android/api/org/webrtc/FileVideoCapturer.java Normal file
@@ -0,0 +1,215 @@
/*
|
||||
* Copyright 2016 The WebRTC Project Authors. All rights reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
package org.webrtc;
|
||||
|
||||
import android.content.Context;
|
||||
import android.os.SystemClock;
|
||||
|
||||
import java.util.concurrent.TimeUnit;
|
||||
import java.util.Timer;
|
||||
import java.util.TimerTask;
|
||||
import java.io.RandomAccessFile;
|
||||
import java.io.IOException;
|
||||
|
||||
public class FileVideoCapturer implements VideoCapturer {
|
||||
static {
|
||||
System.loadLibrary("jingle_peerconnection_so");
|
||||
}
|
||||
|
||||
private interface VideoReader {
|
||||
int getFrameWidth();
|
||||
int getFrameHeight();
|
||||
byte[] getNextFrame();
|
||||
void close();
|
||||
}
|
||||
|
||||
/**
|
||||
* Read video data from file for the .y4m container.
|
||||
*/
|
||||
private static class VideoReaderY4M implements VideoReader {
|
||||
private final static String TAG = "VideoReaderY4M";
|
||||
private final int frameWidth;
|
||||
private final int frameHeight;
|
||||
private final int frameSize;
|
||||
|
||||
// First char after header
|
||||
private final long videoStart;
|
||||
|
||||
private static final String Y4M_FRAME_DELIMETER = "FRAME";
|
||||
|
||||
private final RandomAccessFile mediaFileStream;
|
||||
|
||||
public int getFrameWidth() {
|
||||
return frameWidth;
|
||||
}
|
||||
|
||||
public int getFrameHeight() {
|
||||
return frameHeight;
|
||||
}
|
||||
|
||||
public VideoReaderY4M(String file) throws IOException {
|
||||
mediaFileStream = new RandomAccessFile(file, "r");
|
||||
StringBuilder builder = new StringBuilder();
|
||||
for (;;) {
|
||||
int c = mediaFileStream.read();
|
||||
if (c == -1) {
|
||||
// End of file reached.
|
||||
throw new RuntimeException("Found end of file before end of header for file: " + file);
|
||||
}
|
||||
if (c == '\n') {
|
||||
// End of header found.
|
||||
break;
|
||||
}
|
||||
builder.append((char) c);
|
||||
}
|
||||
videoStart = mediaFileStream.getFilePointer();
|
||||
String header = builder.toString();
|
||||
String[] headerTokens = header.split("[ ]");
|
||||
int w = 0;
|
||||
int h = 0;
|
||||
String colorSpace = "";
|
||||
for (String tok : headerTokens) {
|
||||
char c = tok.charAt(0);
|
||||
switch (c) {
|
||||
case 'W':
|
||||
w = Integer.parseInt(tok.substring(1));
|
||||
break;
|
||||
case 'H':
|
||||
h = Integer.parseInt(tok.substring(1));
|
||||
break;
|
||||
case 'C':
|
||||
colorSpace = tok.substring(1);
|
||||
break;
|
||||
}
|
||||
}
|
||||
Logging.d(TAG, "Color space: " + colorSpace);
|
||||
if (!colorSpace.equals("420") && !colorSpace.equals("420mpeg2")) {
|
||||
throw new IllegalArgumentException(
|
||||
"Does not support any other color space than I420 or I420mpeg2");
|
||||
}
|
||||
if ((w % 2) == 1 || (h % 2) == 1) {
|
||||
throw new IllegalArgumentException("Does not support odd width or height");
|
||||
}
|
||||
frameWidth = w;
|
||||
frameHeight = h;
|
||||
frameSize = w * h * 3 / 2;
|
||||
Logging.d(TAG, "frame dim: (" + w + ", " + h + ") frameSize: " + frameSize);
|
||||
}
|
||||
|
||||
public byte[] getNextFrame() {
|
||||
byte[] frame = new byte[frameSize];
|
||||
try {
|
||||
byte[] frameDelim = new byte[Y4M_FRAME_DELIMETER.length() + 1];
|
||||
if (mediaFileStream.read(frameDelim) < frameDelim.length) {
|
||||
// We reach end of file, loop
|
||||
mediaFileStream.seek(videoStart);
|
||||
if (mediaFileStream.read(frameDelim) < frameDelim.length) {
|
||||
throw new RuntimeException("Error looping video");
|
||||
}
|
||||
}
|
||||
String frameDelimStr = new String(frameDelim);
|
||||
if (!frameDelimStr.equals(Y4M_FRAME_DELIMETER + "\n")) {
|
||||
throw new RuntimeException(
|
||||
"Frames should be delimited by FRAME plus newline, found delimter was: '"
|
||||
+ frameDelimStr + "'");
|
||||
}
|
||||
mediaFileStream.readFully(frame);
|
||||
byte[] nv21Frame = new byte[frameSize];
|
||||
nativeI420ToNV21(frame, frameWidth, frameHeight, nv21Frame);
|
||||
return nv21Frame;
|
||||
} catch (IOException e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
}
|
||||
|
||||
public void close() {
|
||||
try {
|
||||
mediaFileStream.close();
|
||||
} catch (IOException e) {
|
||||
Logging.e(TAG, "Problem closing file", e);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private final static String TAG = "FileVideoCapturer";
|
||||
private final VideoReader videoReader;
|
||||
private CapturerObserver capturerObserver;
|
||||
private final Timer timer = new Timer();
|
||||
|
||||
private final TimerTask tickTask = new TimerTask() {
|
||||
@Override
|
||||
public void run() {
|
||||
tick();
|
||||
}
|
||||
};
|
||||
|
||||
private int getFrameWidth() {
|
||||
return videoReader.getFrameWidth();
|
||||
}
|
||||
|
||||
private int getFrameHeight() {
|
||||
return videoReader.getFrameHeight();
|
||||
}
|
||||
|
||||
public FileVideoCapturer(String inputFile) throws IOException {
|
||||
try {
|
||||
videoReader = new VideoReaderY4M(inputFile);
|
||||
} catch (IOException e) {
|
||||
Logging.d(TAG, "Could not open video file: " + inputFile);
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
private byte[] getNextFrame() {
|
||||
return videoReader.getNextFrame();
|
||||
}
|
||||
|
||||
public void tick() {
|
||||
final long captureTimeNs = TimeUnit.MILLISECONDS.toNanos(SystemClock.elapsedRealtime());
|
||||
|
||||
byte[] frameData = getNextFrame();
|
||||
capturerObserver.onByteBufferFrameCaptured(
|
||||
frameData, getFrameWidth(), getFrameHeight(), 0, captureTimeNs);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void initialize(SurfaceTextureHelper surfaceTextureHelper, Context applicationContext,
|
||||
CapturerObserver capturerObserver) {
|
||||
this.capturerObserver = capturerObserver;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void startCapture(int width, int height, int framerate) {
|
||||
timer.schedule(tickTask, 0, 1000 / framerate);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void stopCapture() throws InterruptedException {
|
||||
timer.cancel();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void changeCaptureFormat(int width, int height, int framerate) {
|
||||
// Empty on purpose
|
||||
}
|
||||
|
||||
@Override
|
||||
public void dispose() {
|
||||
videoReader.close();
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isScreencast() {
|
||||
return false;
|
||||
}
|
||||
|
||||
public static native void nativeI420ToNV21(byte[] src, int width, int height, byte[] dst);
|
||||
}
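For reference, wiring the capturer above into a capture session follows the usual VideoCapturer life cycle. This is a sketch only: the file path is hypothetical, and the SurfaceTextureHelper, Context, and CapturerObserver instances are assumed to come from the surrounding application.

FileVideoCapturer capturer = new FileVideoCapturer("/sdcard/sample.y4m"); // hypothetical path
capturer.initialize(surfaceTextureHelper, applicationContext, capturerObserver);
// Width and height are ignored by this capturer; frames are paced every 1000 / framerate ms.
capturer.startCapture(640, 480, 30);
// ...
capturer.stopCapture();
capturer.dispose();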
210
sdk/android/api/org/webrtc/GlRectDrawer.java
Normal file
@ -0,0 +1,210 @@
/*
|
||||
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
package org.webrtc;
|
||||
|
||||
import android.opengl.GLES11Ext;
|
||||
import android.opengl.GLES20;
|
||||
import java.nio.FloatBuffer;
|
||||
import java.util.IdentityHashMap;
|
||||
import java.util.Map;
|
||||
|
||||
/**
|
||||
* Helper class to draw an opaque quad on the target viewport location. Rotation, mirror, and
|
||||
* cropping is specified using a 4x4 texture coordinate transform matrix. The frame input can either
|
||||
* be an OES texture or YUV textures in I420 format. The GL state must be preserved between draw
|
||||
* calls, this is intentional to maximize performance. The function release() must be called
|
||||
* manually to free the resources held by this object.
|
||||
*/
|
||||
public class GlRectDrawer implements RendererCommon.GlDrawer {
|
||||
// clang-format off
|
||||
// Simple vertex shader, used for both YUV and OES.
|
||||
private static final String VERTEX_SHADER_STRING =
|
||||
"varying vec2 interp_tc;\n"
|
||||
+ "attribute vec4 in_pos;\n"
|
||||
+ "attribute vec4 in_tc;\n"
|
||||
+ "\n"
|
||||
+ "uniform mat4 texMatrix;\n"
|
||||
+ "\n"
|
||||
+ "void main() {\n"
|
||||
+ " gl_Position = in_pos;\n"
|
||||
+ " interp_tc = (texMatrix * in_tc).xy;\n"
|
||||
+ "}\n";
|
||||
|
||||
private static final String YUV_FRAGMENT_SHADER_STRING =
|
||||
"precision mediump float;\n"
|
||||
+ "varying vec2 interp_tc;\n"
|
||||
+ "\n"
|
||||
+ "uniform sampler2D y_tex;\n"
|
||||
+ "uniform sampler2D u_tex;\n"
|
||||
+ "uniform sampler2D v_tex;\n"
|
||||
+ "\n"
|
||||
+ "void main() {\n"
|
||||
// CSC according to http://www.fourcc.org/fccyvrgb.php
|
||||
+ " float y = texture2D(y_tex, interp_tc).r;\n"
|
||||
+ " float u = texture2D(u_tex, interp_tc).r - 0.5;\n"
|
||||
+ " float v = texture2D(v_tex, interp_tc).r - 0.5;\n"
|
||||
+ " gl_FragColor = vec4(y + 1.403 * v, "
|
||||
+ " y - 0.344 * u - 0.714 * v, "
|
||||
+ " y + 1.77 * u, 1);\n"
|
||||
+ "}\n";
|
||||
|
||||
private static final String RGB_FRAGMENT_SHADER_STRING =
|
||||
"precision mediump float;\n"
|
||||
+ "varying vec2 interp_tc;\n"
|
||||
+ "\n"
|
||||
+ "uniform sampler2D rgb_tex;\n"
|
||||
+ "\n"
|
||||
+ "void main() {\n"
|
||||
+ " gl_FragColor = texture2D(rgb_tex, interp_tc);\n"
|
||||
+ "}\n";
|
||||
|
||||
private static final String OES_FRAGMENT_SHADER_STRING =
|
||||
"#extension GL_OES_EGL_image_external : require\n"
|
||||
+ "precision mediump float;\n"
|
||||
+ "varying vec2 interp_tc;\n"
|
||||
+ "\n"
|
||||
+ "uniform samplerExternalOES oes_tex;\n"
|
||||
+ "\n"
|
||||
+ "void main() {\n"
|
||||
+ " gl_FragColor = texture2D(oes_tex, interp_tc);\n"
|
||||
+ "}\n";
|
||||
// clang-format on
|
||||
|
||||
// Vertex coordinates in Normalized Device Coordinates, i.e. (-1, -1) is bottom-left and (1, 1) is
|
||||
// top-right.
|
||||
private static final FloatBuffer FULL_RECTANGLE_BUF = GlUtil.createFloatBuffer(new float[] {
|
||||
-1.0f, -1.0f, // Bottom left.
|
||||
1.0f, -1.0f, // Bottom right.
|
||||
-1.0f, 1.0f, // Top left.
|
||||
1.0f, 1.0f, // Top right.
|
||||
});
|
||||
|
||||
// Texture coordinates - (0, 0) is bottom-left and (1, 1) is top-right.
|
||||
private static final FloatBuffer FULL_RECTANGLE_TEX_BUF = GlUtil.createFloatBuffer(new float[] {
|
||||
0.0f, 0.0f, // Bottom left.
|
||||
1.0f, 0.0f, // Bottom right.
|
||||
0.0f, 1.0f, // Top left.
|
||||
1.0f, 1.0f // Top right.
|
||||
});
|
||||
|
||||
private static class Shader {
|
||||
public final GlShader glShader;
|
||||
public final int texMatrixLocation;
|
||||
|
||||
public Shader(String fragmentShader) {
|
||||
this.glShader = new GlShader(VERTEX_SHADER_STRING, fragmentShader);
|
||||
this.texMatrixLocation = glShader.getUniformLocation("texMatrix");
|
||||
}
|
||||
}
|
||||
|
||||
// The keys are one of the fragments shaders above.
|
||||
private final Map<String, Shader> shaders = new IdentityHashMap<String, Shader>();
|
||||
|
||||
/**
|
||||
* Draw an OES texture frame with specified texture transformation matrix. Required resources are
|
||||
* allocated at the first call to this function.
|
||||
*/
|
||||
@Override
|
||||
public void drawOes(int oesTextureId, float[] texMatrix, int frameWidth, int frameHeight,
|
||||
int viewportX, int viewportY, int viewportWidth, int viewportHeight) {
|
||||
prepareShader(OES_FRAGMENT_SHADER_STRING, texMatrix);
|
||||
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
|
||||
// updateTexImage() may be called from another thread in another EGL context, so we need to
|
||||
// bind/unbind the texture in each draw call so that GLES understads it's a new texture.
|
||||
GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, oesTextureId);
|
||||
drawRectangle(viewportX, viewportY, viewportWidth, viewportHeight);
|
||||
GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, 0);
|
||||
}
|
||||
|
||||
/**
|
||||
* Draw a RGB(A) texture frame with specified texture transformation matrix. Required resources
|
||||
* are allocated at the first call to this function.
|
||||
*/
|
||||
@Override
|
||||
public void drawRgb(int textureId, float[] texMatrix, int frameWidth, int frameHeight,
|
||||
int viewportX, int viewportY, int viewportWidth, int viewportHeight) {
|
||||
prepareShader(RGB_FRAGMENT_SHADER_STRING, texMatrix);
|
||||
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
|
||||
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureId);
|
||||
drawRectangle(viewportX, viewportY, viewportWidth, viewportHeight);
|
||||
// Unbind the texture as a precaution.
|
||||
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
|
||||
}
|
||||
|
||||
/**
|
||||
* Draw a YUV frame with specified texture transformation matrix. Required resources are
|
||||
* allocated at the first call to this function.
|
||||
*/
|
||||
@Override
|
||||
public void drawYuv(int[] yuvTextures, float[] texMatrix, int frameWidth, int frameHeight,
|
||||
int viewportX, int viewportY, int viewportWidth, int viewportHeight) {
|
||||
prepareShader(YUV_FRAGMENT_SHADER_STRING, texMatrix);
|
||||
// Bind the textures.
|
||||
for (int i = 0; i < 3; ++i) {
|
||||
GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + i);
|
||||
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, yuvTextures[i]);
|
||||
}
|
||||
drawRectangle(viewportX, viewportY, viewportWidth, viewportHeight);
|
||||
// Unbind the textures as a precaution..
|
||||
for (int i = 0; i < 3; ++i) {
|
||||
GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + i);
|
||||
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
|
||||
}
|
||||
}
|
||||
|
||||
private void drawRectangle(int x, int y, int width, int height) {
|
||||
// Draw quad.
|
||||
GLES20.glViewport(x, y, width, height);
|
||||
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
|
||||
}
|
||||
|
||||
private void prepareShader(String fragmentShader, float[] texMatrix) {
|
||||
final Shader shader;
|
||||
if (shaders.containsKey(fragmentShader)) {
|
||||
shader = shaders.get(fragmentShader);
|
||||
} else {
|
||||
// Lazy allocation.
|
||||
shader = new Shader(fragmentShader);
|
||||
shaders.put(fragmentShader, shader);
|
||||
shader.glShader.useProgram();
|
||||
// Initialize fragment shader uniform values.
|
||||
if (fragmentShader == YUV_FRAGMENT_SHADER_STRING) {
|
||||
GLES20.glUniform1i(shader.glShader.getUniformLocation("y_tex"), 0);
|
||||
GLES20.glUniform1i(shader.glShader.getUniformLocation("u_tex"), 1);
|
||||
GLES20.glUniform1i(shader.glShader.getUniformLocation("v_tex"), 2);
|
||||
} else if (fragmentShader == RGB_FRAGMENT_SHADER_STRING) {
|
||||
GLES20.glUniform1i(shader.glShader.getUniformLocation("rgb_tex"), 0);
|
||||
} else if (fragmentShader == OES_FRAGMENT_SHADER_STRING) {
|
||||
GLES20.glUniform1i(shader.glShader.getUniformLocation("oes_tex"), 0);
|
||||
} else {
|
||||
throw new IllegalStateException("Unknown fragment shader: " + fragmentShader);
|
||||
}
|
||||
GlUtil.checkNoGLES2Error("Initialize fragment shader uniform values.");
|
||||
// Initialize vertex shader attributes.
|
||||
shader.glShader.setVertexAttribArray("in_pos", 2, FULL_RECTANGLE_BUF);
|
||||
shader.glShader.setVertexAttribArray("in_tc", 2, FULL_RECTANGLE_TEX_BUF);
|
||||
}
|
||||
shader.glShader.useProgram();
|
||||
// Copy the texture transformation matrix over.
|
||||
GLES20.glUniformMatrix4fv(shader.texMatrixLocation, 1, false, texMatrix, 0);
|
||||
}
|
||||
|
||||
/**
|
||||
* Release all GLES resources. This needs to be done manually, otherwise the resources are leaked.
|
||||
*/
|
||||
@Override
|
||||
public void release() {
|
||||
for (Shader shader : shaders.values()) {
|
||||
shader.glShader.release();
|
||||
}
|
||||
shaders.clear();
|
||||
}
|
||||
}
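A minimal sketch of how the drawer above is typically driven, assuming a valid EGL context is current on the calling thread and oesTextureId refers to an external texture updated elsewhere; the identity matrix stands in for the SurfaceTexture transform.

float[] texMatrix = {1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1}; // identity, column-major
GlRectDrawer drawer = new GlRectDrawer();
drawer.drawOes(oesTextureId, texMatrix, frameWidth, frameHeight,
    0 /* viewportX */, 0 /* viewportY */, viewportWidth, viewportHeight);
drawer.release(); // must be called manually, as noted in the class comment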
121
sdk/android/api/org/webrtc/GlShader.java
Normal file
@ -0,0 +1,121 @@
/*
 * Copyright 2015 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

package org.webrtc;

import android.opengl.GLES20;

import java.nio.FloatBuffer;

// Helper class for handling OpenGL shaders and shader programs.
public class GlShader {
  private static final String TAG = "GlShader";

  private static int compileShader(int shaderType, String source) {
    final int shader = GLES20.glCreateShader(shaderType);
    if (shader == 0) {
      throw new RuntimeException("glCreateShader() failed. GLES20 error: " + GLES20.glGetError());
    }
    GLES20.glShaderSource(shader, source);
    GLES20.glCompileShader(shader);
    int[] compileStatus = new int[] {GLES20.GL_FALSE};
    GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compileStatus, 0);
    if (compileStatus[0] != GLES20.GL_TRUE) {
      Logging.e(
          TAG, "Could not compile shader " + shaderType + ":" + GLES20.glGetShaderInfoLog(shader));
      throw new RuntimeException(GLES20.glGetShaderInfoLog(shader));
    }
    GlUtil.checkNoGLES2Error("compileShader");
    return shader;
  }

  private int program;

  public GlShader(String vertexSource, String fragmentSource) {
    final int vertexShader = compileShader(GLES20.GL_VERTEX_SHADER, vertexSource);
    final int fragmentShader = compileShader(GLES20.GL_FRAGMENT_SHADER, fragmentSource);
    program = GLES20.glCreateProgram();
    if (program == 0) {
      throw new RuntimeException("glCreateProgram() failed. GLES20 error: " + GLES20.glGetError());
    }
    GLES20.glAttachShader(program, vertexShader);
    GLES20.glAttachShader(program, fragmentShader);
    GLES20.glLinkProgram(program);
    int[] linkStatus = new int[] {GLES20.GL_FALSE};
    GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0);
    if (linkStatus[0] != GLES20.GL_TRUE) {
      Logging.e(TAG, "Could not link program: " + GLES20.glGetProgramInfoLog(program));
      throw new RuntimeException(GLES20.glGetProgramInfoLog(program));
    }
    // According to the documentation of glLinkProgram():
    // "After the link operation, applications are free to modify attached shader objects, compile
    // attached shader objects, detach shader objects, delete shader objects, and attach additional
    // shader objects. None of these operations affects the information log or the program that is
    // part of the program object."
    // But in practice, detaching shaders from the program seems to break some devices. Deleting
    // the shaders is fine, however - they are only destroyed once no longer attached to a program.
    GLES20.glDeleteShader(vertexShader);
    GLES20.glDeleteShader(fragmentShader);
    GlUtil.checkNoGLES2Error("Creating GlShader");
  }

  public int getAttribLocation(String label) {
    if (program == -1) {
      throw new RuntimeException("The program has been released");
    }
    int location = GLES20.glGetAttribLocation(program, label);
    if (location < 0) {
      throw new RuntimeException("Could not locate '" + label + "' in program");
    }
    return location;
  }

  /**
   * Enable and upload a vertex array for attribute |label|. The vertex data is specified in
   * |buffer| with |dimension| number of components per vertex.
   */
  public void setVertexAttribArray(String label, int dimension, FloatBuffer buffer) {
    if (program == -1) {
      throw new RuntimeException("The program has been released");
    }
    int location = getAttribLocation(label);
    GLES20.glEnableVertexAttribArray(location);
    GLES20.glVertexAttribPointer(location, dimension, GLES20.GL_FLOAT, false, 0, buffer);
    GlUtil.checkNoGLES2Error("setVertexAttribArray");
  }

  public int getUniformLocation(String label) {
    if (program == -1) {
      throw new RuntimeException("The program has been released");
    }
    int location = GLES20.glGetUniformLocation(program, label);
    if (location < 0) {
      throw new RuntimeException("Could not locate uniform '" + label + "' in program");
    }
    return location;
  }

  public void useProgram() {
    if (program == -1) {
      throw new RuntimeException("The program has been released");
    }
    GLES20.glUseProgram(program);
    GlUtil.checkNoGLES2Error("glUseProgram");
  }

  public void release() {
    Logging.d(TAG, "Deleting shader.");
    // Delete program, automatically detaching any shaders from it.
    if (program != -1) {
      GLES20.glDeleteProgram(program);
      program = -1;
    }
  }
}
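As a sketch of the intended use, and assuming a GL context is bound on the calling thread, GlShader can compile and drive a trivial pass-through program; the shader strings here are illustrative and not part of the SDK.

String vertex = "attribute vec4 in_pos;\nvoid main() { gl_Position = in_pos; }";
String fragment = "precision mediump float;\nvoid main() { gl_FragColor = vec4(1.0); }";
GlShader shader = new GlShader(vertex, fragment);
shader.useProgram();
// A full-screen quad; missing z/w components default to 0 and 1.
shader.setVertexAttribArray(
    "in_pos", 2, GlUtil.createFloatBuffer(new float[] {-1, -1, 1, -1, -1, 1, 1, 1}));
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
shader.release();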
117
sdk/android/api/org/webrtc/GlTextureFrameBuffer.java
Normal file
@ -0,0 +1,117 @@
/*
 * Copyright 2015 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

package org.webrtc;

import android.opengl.GLES20;

/**
 * Helper class for handling OpenGL framebuffer with only color attachment and no depth or stencil
 * buffer. Intended for simple tasks such as texture copy, texture downscaling, and texture color
 * conversion.
 */
// TODO(magjed): Add unittests for this class.
public class GlTextureFrameBuffer {
  private final int frameBufferId;
  private final int textureId;
  private final int pixelFormat;
  private int width;
  private int height;

  /**
   * Generate texture and framebuffer resources. An EGLContext must be bound on the current thread
   * when calling this function. The framebuffer is not complete until setSize() is called.
   */
  public GlTextureFrameBuffer(int pixelFormat) {
    switch (pixelFormat) {
      case GLES20.GL_LUMINANCE:
      case GLES20.GL_RGB:
      case GLES20.GL_RGBA:
        this.pixelFormat = pixelFormat;
        break;
      default:
        throw new IllegalArgumentException("Invalid pixel format: " + pixelFormat);
    }

    // Create texture.
    textureId = GlUtil.generateTexture(GLES20.GL_TEXTURE_2D);
    this.width = 0;
    this.height = 0;

    // Create framebuffer object.
    final int frameBuffers[] = new int[1];
    GLES20.glGenFramebuffers(1, frameBuffers, 0);
    frameBufferId = frameBuffers[0];
  }

  /**
   * (Re)allocate texture. Will do nothing if the requested size equals the current size. An
   * EGLContext must be bound on the current thread when calling this function. Must be called at
   * least once before using the framebuffer. May be called multiple times to change size.
   */
  public void setSize(int width, int height) {
    if (width == 0 || height == 0) {
      throw new IllegalArgumentException("Invalid size: " + width + "x" + height);
    }
    if (width == this.width && height == this.height) {
      return;
    }
    this.width = width;
    this.height = height;

    // Allocate texture.
    GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
    GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureId);
    GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, pixelFormat, width, height, 0, pixelFormat,
        GLES20.GL_UNSIGNED_BYTE, null);
    GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
    GlUtil.checkNoGLES2Error("GlTextureFrameBuffer setSize");

    // Attach the texture to the framebuffer as color attachment.
    GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, frameBufferId);
    GLES20.glFramebufferTexture2D(
        GLES20.GL_FRAMEBUFFER, GLES20.GL_COLOR_ATTACHMENT0, GLES20.GL_TEXTURE_2D, textureId, 0);

    // Check that the framebuffer is in a good state.
    final int status = GLES20.glCheckFramebufferStatus(GLES20.GL_FRAMEBUFFER);
    if (status != GLES20.GL_FRAMEBUFFER_COMPLETE) {
      throw new IllegalStateException("Framebuffer not complete, status: " + status);
    }

    GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
  }

  public int getWidth() {
    return width;
  }

  public int getHeight() {
    return height;
  }

  public int getFrameBufferId() {
    return frameBufferId;
  }

  public int getTextureId() {
    return textureId;
  }

  /**
   * Release texture and framebuffer. An EGLContext must be bound on the current thread when
   * calling this function. This object should not be used after this call.
   */
  public void release() {
    GLES20.glDeleteTextures(1, new int[] {textureId}, 0);
    GLES20.glDeleteFramebuffers(1, new int[] {frameBufferId}, 0);
    width = 0;
    height = 0;
  }
}
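A usage sketch, assuming an EGL context is current on the calling thread: allocate an RGBA render target, draw into it, then sample from or read back its texture.

GlTextureFrameBuffer frameBuffer = new GlTextureFrameBuffer(GLES20.GL_RGBA);
frameBuffer.setSize(1280, 720); // (re)allocates the backing texture
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, frameBuffer.getFrameBufferId());
// ... issue draw calls; the results land in frameBuffer.getTextureId() ...
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
frameBuffer.release(); // the object must not be used afterwards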
58
sdk/android/api/org/webrtc/GlUtil.java
Normal file
@ -0,0 +1,58 @@
/*
 * Copyright 2015 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

package org.webrtc;

import android.opengl.GLES20;

import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;

/**
 * Some OpenGL static utility functions.
 */
public class GlUtil {
  private GlUtil() {}

  // Assert that no OpenGL ES 2.0 error has been raised.
  public static void checkNoGLES2Error(String msg) {
    int error = GLES20.glGetError();
    if (error != GLES20.GL_NO_ERROR) {
      throw new RuntimeException(msg + ": GLES20 error: " + error);
    }
  }

  public static FloatBuffer createFloatBuffer(float[] coords) {
    // Allocate a direct ByteBuffer, using 4 bytes per float, and copy coords into it.
    ByteBuffer bb = ByteBuffer.allocateDirect(coords.length * 4);
    bb.order(ByteOrder.nativeOrder());
    FloatBuffer fb = bb.asFloatBuffer();
    fb.put(coords);
    fb.position(0);
    return fb;
  }

  /**
   * Generate texture with standard parameters.
   */
  public static int generateTexture(int target) {
    final int textureArray[] = new int[1];
    GLES20.glGenTextures(1, textureArray, 0);
    final int textureId = textureArray[0];
    GLES20.glBindTexture(target, textureId);
    GLES20.glTexParameterf(target, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
    GLES20.glTexParameterf(target, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
    GLES20.glTexParameterf(target, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
    GLES20.glTexParameterf(target, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
    checkNoGLES2Error("generateTexture");
    return textureId;
  }
}
117
sdk/android/api/org/webrtc/HardwareVideoDecoderFactory.java
Normal file
@ -0,0 +1,117 @@
/*
 * Copyright 2017 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

package org.webrtc;

import static org.webrtc.MediaCodecUtils.EXYNOS_PREFIX;
import static org.webrtc.MediaCodecUtils.INTEL_PREFIX;
import static org.webrtc.MediaCodecUtils.NVIDIA_PREFIX;
import static org.webrtc.MediaCodecUtils.QCOM_PREFIX;

import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaCodecInfo.CodecCapabilities;
import android.media.MediaCodecList;
import android.os.Build;

/** Factory for Android hardware VideoDecoders. */
@SuppressWarnings("deprecation") // API level 16 requires use of deprecated methods.
public class HardwareVideoDecoderFactory implements VideoDecoderFactory {
  private static final String TAG = "HardwareVideoDecoderFactory";

  private final EglBase.Context sharedContext;

  /** Creates a HardwareVideoDecoderFactory that does not use surface textures. */
  @Deprecated // Not removed yet to avoid breaking callers.
  public HardwareVideoDecoderFactory() {
    this(null);
  }

  /**
   * Creates a HardwareVideoDecoderFactory that supports surface texture rendering using the given
   * shared context. The context may be null. If it is null, then surface support is disabled.
   */
  public HardwareVideoDecoderFactory(EglBase.Context sharedContext) {
    this.sharedContext = sharedContext;
  }

  @Override
  public VideoDecoder createDecoder(String codecType) {
    VideoCodecType type = VideoCodecType.valueOf(codecType);
    MediaCodecInfo info = findCodecForType(type);

    if (info == null) {
      return null; // No support for this codec type.
    }

    CodecCapabilities capabilities = info.getCapabilitiesForType(type.mimeType());
    return new HardwareVideoDecoder(info.getName(), type,
        MediaCodecUtils.selectColorFormat(MediaCodecUtils.DECODER_COLOR_FORMATS, capabilities),
        sharedContext);
  }

  private MediaCodecInfo findCodecForType(VideoCodecType type) {
    // HW decoding is not supported on builds before KITKAT.
    if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) {
      return null;
    }

    for (int i = 0; i < MediaCodecList.getCodecCount(); ++i) {
      MediaCodecInfo info = null;
      try {
        info = MediaCodecList.getCodecInfoAt(i);
      } catch (IllegalArgumentException e) {
        Logging.e(TAG, "Cannot retrieve decoder codec info", e);
      }

      if (info == null || info.isEncoder()) {
        continue;
      }

      if (isSupportedCodec(info, type)) {
        return info;
      }
    }
    return null; // No support for this type.
  }

  // Returns true if the given MediaCodecInfo indicates a supported decoder for the given type.
  private boolean isSupportedCodec(MediaCodecInfo info, VideoCodecType type) {
    if (!MediaCodecUtils.codecSupportsType(info, type)) {
      return false;
    }
    // Check for a supported color format.
    if (MediaCodecUtils.selectColorFormat(
            MediaCodecUtils.DECODER_COLOR_FORMATS, info.getCapabilitiesForType(type.mimeType()))
        == null) {
      return false;
    }
    return isHardwareSupported(info, type);
  }

  private boolean isHardwareSupported(MediaCodecInfo info, VideoCodecType type) {
    String name = info.getName();
    switch (type) {
      case VP8:
        // QCOM, Intel, Exynos, and Nvidia all supported for VP8.
        return name.startsWith(QCOM_PREFIX) || name.startsWith(INTEL_PREFIX)
            || name.startsWith(EXYNOS_PREFIX) || name.startsWith(NVIDIA_PREFIX);
      case VP9:
        // QCOM and Exynos supported for VP9.
        return name.startsWith(QCOM_PREFIX) || name.startsWith(EXYNOS_PREFIX);
      case H264:
        // QCOM, Intel, and Exynos supported for H264.
        return name.startsWith(QCOM_PREFIX) || name.startsWith(INTEL_PREFIX)
            || name.startsWith(EXYNOS_PREFIX);
      default:
        return false;
    }
  }
}
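A sketch of how the factory above is consumed. The shared EGL context helper and the codec name are assumptions for illustration; the string passed to createDecoder must match a VideoCodecType constant such as "VP8", "VP9", or "H264".

EglBase eglBase = EglBase.create(); // assumed helper for obtaining a shared context
HardwareVideoDecoderFactory decoderFactory =
    new HardwareVideoDecoderFactory(eglBase.getEglBaseContext());
VideoDecoder decoder = decoderFactory.createDecoder("VP8");
if (decoder == null) {
  // No hardware support for this codec on the current device or SDK level.
}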
247
sdk/android/api/org/webrtc/HardwareVideoEncoderFactory.java
Normal file
@ -0,0 +1,247 @@
/*
|
||||
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
package org.webrtc;
|
||||
|
||||
import static org.webrtc.MediaCodecUtils.EXYNOS_PREFIX;
|
||||
import static org.webrtc.MediaCodecUtils.INTEL_PREFIX;
|
||||
import static org.webrtc.MediaCodecUtils.QCOM_PREFIX;
|
||||
|
||||
import android.media.MediaCodecInfo;
|
||||
import android.media.MediaCodecList;
|
||||
import android.os.Build;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
/** Factory for android hardware video encoders. */
|
||||
@SuppressWarnings("deprecation") // API 16 requires the use of deprecated methods.
|
||||
public class HardwareVideoEncoderFactory implements VideoEncoderFactory {
|
||||
private static final String TAG = "HardwareVideoEncoderFactory";
|
||||
|
||||
// Forced key frame interval - used to reduce color distortions on Qualcomm platforms.
|
||||
private static final int QCOM_VP8_KEY_FRAME_INTERVAL_ANDROID_L_MS = 15000;
|
||||
private static final int QCOM_VP8_KEY_FRAME_INTERVAL_ANDROID_M_MS = 20000;
|
||||
private static final int QCOM_VP8_KEY_FRAME_INTERVAL_ANDROID_N_MS = 15000;
|
||||
|
||||
// List of devices with poor H.264 encoder quality.
|
||||
// HW H.264 encoder on below devices has poor bitrate control - actual
|
||||
// bitrates deviates a lot from the target value.
|
||||
private static final List<String> H264_HW_EXCEPTION_MODELS =
|
||||
Arrays.asList("SAMSUNG-SGH-I337", "Nexus 7", "Nexus 4");
|
||||
|
||||
private final EglBase14.Context sharedContext;
|
||||
private final boolean enableIntelVp8Encoder;
|
||||
private final boolean enableH264HighProfile;
|
||||
|
||||
public HardwareVideoEncoderFactory(
|
||||
EglBase.Context sharedContext, boolean enableIntelVp8Encoder, boolean enableH264HighProfile) {
|
||||
// Texture mode requires EglBase14.
|
||||
if (sharedContext instanceof EglBase14.Context) {
|
||||
this.sharedContext = (EglBase14.Context) sharedContext;
|
||||
} else {
|
||||
Logging.w(TAG, "No shared EglBase.Context. Encoders will not use texture mode.");
|
||||
this.sharedContext = null;
|
||||
}
|
||||
this.enableIntelVp8Encoder = enableIntelVp8Encoder;
|
||||
this.enableH264HighProfile = enableH264HighProfile;
|
||||
}
|
||||
|
||||
@Deprecated
|
||||
public HardwareVideoEncoderFactory(boolean enableIntelVp8Encoder, boolean enableH264HighProfile) {
|
||||
this(null, enableIntelVp8Encoder, enableH264HighProfile);
|
||||
}
|
||||
|
||||
@Override
|
||||
public VideoEncoder createEncoder(VideoCodecInfo input) {
|
||||
VideoCodecType type = VideoCodecType.valueOf(input.name);
|
||||
MediaCodecInfo info = findCodecForType(type);
|
||||
|
||||
if (info == null) {
|
||||
return null; // No support for this type.
|
||||
}
|
||||
|
||||
String codecName = info.getName();
|
||||
String mime = type.mimeType();
|
||||
int colorFormat = MediaCodecUtils.selectColorFormat(sharedContext == null
|
||||
? MediaCodecUtils.ENCODER_COLOR_FORMATS
|
||||
: MediaCodecUtils.TEXTURE_COLOR_FORMATS,
|
||||
info.getCapabilitiesForType(mime));
|
||||
|
||||
return new HardwareVideoEncoder(codecName, type, colorFormat, input.params,
|
||||
getKeyFrameIntervalSec(type), getForcedKeyFrameIntervalMs(type, codecName),
|
||||
createBitrateAdjuster(type, codecName), sharedContext);
|
||||
}
|
||||
|
||||
@Override
|
||||
public VideoCodecInfo[] getSupportedCodecs() {
|
||||
List<VideoCodecInfo> supportedCodecInfos = new ArrayList<VideoCodecInfo>();
|
||||
// Generate a list of supported codecs in order of preference:
|
||||
// VP8, VP9, H264 (high profile), and H264 (baseline profile).
|
||||
for (VideoCodecType type :
|
||||
new VideoCodecType[] {VideoCodecType.VP8, VideoCodecType.VP9, VideoCodecType.H264}) {
|
||||
MediaCodecInfo codec = findCodecForType(type);
|
||||
if (codec != null) {
|
||||
String name = type.name();
|
||||
if (type == VideoCodecType.H264 && isH264HighProfileSupported(codec)) {
|
||||
supportedCodecInfos.add(new VideoCodecInfo(0, name, getCodecProperties(type, true)));
|
||||
}
|
||||
|
||||
supportedCodecInfos.add(new VideoCodecInfo(0, name, getCodecProperties(type, false)));
|
||||
}
|
||||
}
|
||||
return supportedCodecInfos.toArray(new VideoCodecInfo[supportedCodecInfos.size()]);
|
||||
}
|
||||
|
||||
private MediaCodecInfo findCodecForType(VideoCodecType type) {
|
||||
for (int i = 0; i < MediaCodecList.getCodecCount(); ++i) {
|
||||
MediaCodecInfo info = null;
|
||||
try {
|
||||
info = MediaCodecList.getCodecInfoAt(i);
|
||||
} catch (IllegalArgumentException e) {
|
||||
Logging.e(TAG, "Cannot retrieve encoder codec info", e);
|
||||
}
|
||||
|
||||
if (info == null || !info.isEncoder()) {
|
||||
continue;
|
||||
}
|
||||
|
||||
if (isSupportedCodec(info, type)) {
|
||||
return info;
|
||||
}
|
||||
}
|
||||
return null; // No support for this type.
|
||||
}
|
||||
|
||||
// Returns true if the given MediaCodecInfo indicates a supported encoder for the given type.
|
||||
private boolean isSupportedCodec(MediaCodecInfo info, VideoCodecType type) {
|
||||
if (!MediaCodecUtils.codecSupportsType(info, type)) {
|
||||
return false;
|
||||
}
|
||||
// Check for a supported color format.
|
||||
if (MediaCodecUtils.selectColorFormat(sharedContext == null
|
||||
? MediaCodecUtils.ENCODER_COLOR_FORMATS
|
||||
: MediaCodecUtils.TEXTURE_COLOR_FORMATS,
|
||||
info.getCapabilitiesForType(type.mimeType()))
|
||||
== null) {
|
||||
return false;
|
||||
}
|
||||
return isHardwareSupportedInCurrentSdk(info, type);
|
||||
}
|
||||
|
||||
// Returns true if the given MediaCodecInfo indicates a hardware module that is supported on the
|
||||
// current SDK.
|
||||
private boolean isHardwareSupportedInCurrentSdk(MediaCodecInfo info, VideoCodecType type) {
|
||||
switch (type) {
|
||||
case VP8:
|
||||
return isHardwareSupportedInCurrentSdkVp8(info);
|
||||
case VP9:
|
||||
return isHardwareSupportedInCurrentSdkVp9(info);
|
||||
case H264:
|
||||
return isHardwareSupportedInCurrentSdkH264(info);
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
private boolean isHardwareSupportedInCurrentSdkVp8(MediaCodecInfo info) {
|
||||
String name = info.getName();
|
||||
// QCOM Vp8 encoder is supported in KITKAT or later.
|
||||
return (name.startsWith(QCOM_PREFIX) && Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT)
|
||||
// Exynos VP8 encoder is supported in M or later.
|
||||
|| (name.startsWith(EXYNOS_PREFIX) && Build.VERSION.SDK_INT >= Build.VERSION_CODES.M)
|
||||
// Intel Vp8 encoder is supported in LOLLIPOP or later, with the intel encoder enabled.
|
||||
|| (name.startsWith(INTEL_PREFIX) && Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP
|
||||
&& enableIntelVp8Encoder);
|
||||
}
|
||||
|
||||
private boolean isHardwareSupportedInCurrentSdkVp9(MediaCodecInfo info) {
|
||||
String name = info.getName();
|
||||
return (name.startsWith(QCOM_PREFIX) || name.startsWith(EXYNOS_PREFIX))
|
||||
// Both QCOM and Exynos VP9 encoders are supported in N or later.
|
||||
&& Build.VERSION.SDK_INT >= Build.VERSION_CODES.N;
|
||||
}
|
||||
|
||||
private boolean isHardwareSupportedInCurrentSdkH264(MediaCodecInfo info) {
|
||||
// First, H264 hardware might perform poorly on this model.
|
||||
if (H264_HW_EXCEPTION_MODELS.contains(Build.MODEL)) {
|
||||
return false;
|
||||
}
|
||||
String name = info.getName();
|
||||
// QCOM H264 encoder is supported in KITKAT or later.
|
||||
return (name.startsWith(QCOM_PREFIX) && Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT)
|
||||
// Exynos H264 encoder is supported in LOLLIPOP or later.
|
||||
|| (name.startsWith(EXYNOS_PREFIX)
|
||||
&& Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP);
|
||||
}
|
||||
|
||||
private int getKeyFrameIntervalSec(VideoCodecType type) {
|
||||
switch (type) {
|
||||
case VP8: // Fallthrough intended.
|
||||
case VP9:
|
||||
return 100;
|
||||
case H264:
|
||||
return 20;
|
||||
}
|
||||
throw new IllegalArgumentException("Unsupported VideoCodecType " + type);
|
||||
}
|
||||
|
||||
private int getForcedKeyFrameIntervalMs(VideoCodecType type, String codecName) {
|
||||
if (type == VideoCodecType.VP8 && codecName.startsWith(QCOM_PREFIX)) {
|
||||
if (Build.VERSION.SDK_INT == Build.VERSION_CODES.LOLLIPOP
|
||||
|| Build.VERSION.SDK_INT == Build.VERSION_CODES.LOLLIPOP_MR1) {
|
||||
return QCOM_VP8_KEY_FRAME_INTERVAL_ANDROID_L_MS;
|
||||
} else if (Build.VERSION.SDK_INT == Build.VERSION_CODES.M) {
|
||||
return QCOM_VP8_KEY_FRAME_INTERVAL_ANDROID_M_MS;
|
||||
} else if (Build.VERSION.SDK_INT > Build.VERSION_CODES.M) {
|
||||
return QCOM_VP8_KEY_FRAME_INTERVAL_ANDROID_N_MS;
|
||||
}
|
||||
}
|
||||
// Other codecs don't need key frame forcing.
|
||||
return 0;
|
||||
}
|
||||
|
||||
private BitrateAdjuster createBitrateAdjuster(VideoCodecType type, String codecName) {
|
||||
if (codecName.startsWith(EXYNOS_PREFIX)) {
|
||||
if (type == VideoCodecType.VP8) {
|
||||
// Exynos VP8 encoders need dynamic bitrate adjustment.
|
||||
return new DynamicBitrateAdjuster();
|
||||
} else {
|
||||
// Exynos VP9 and H264 encoders need framerate-based bitrate adjustment.
|
||||
return new FramerateBitrateAdjuster();
|
||||
}
|
||||
}
|
||||
// Other codecs don't need bitrate adjustment.
|
||||
return new BaseBitrateAdjuster();
|
||||
}
|
||||
|
||||
private boolean isH264HighProfileSupported(MediaCodecInfo info) {
|
||||
return enableH264HighProfile && info.getName().startsWith(QCOM_PREFIX);
|
||||
}
|
||||
|
||||
private Map<String, String> getCodecProperties(VideoCodecType type, boolean highProfile) {
|
||||
switch (type) {
|
||||
case VP8:
|
||||
case VP9:
|
||||
return new HashMap<String, String>();
|
||||
case H264:
|
||||
Map<String, String> properties = new HashMap<>();
|
||||
properties.put(VideoCodecInfo.H264_FMTP_LEVEL_ASYMMETRY_ALLOWED, "1");
|
||||
properties.put(VideoCodecInfo.H264_FMTP_PACKETIZATION_MODE, "1");
|
||||
properties.put(VideoCodecInfo.H264_FMTP_PROFILE_LEVEL_ID,
|
||||
highProfile ? VideoCodecInfo.H264_CONSTRAINED_HIGH_3_1
|
||||
: VideoCodecInfo.H264_CONSTRAINED_BASELINE_3_1);
|
||||
return properties;
|
||||
default:
|
||||
throw new IllegalArgumentException("Unsupported codec: " + type);
|
||||
}
|
||||
}
|
||||
}
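Callers typically enumerate the supported codecs first and then create an encoder for one of them; a sketch, treating the shared context as optional as the constructor allows.

HardwareVideoEncoderFactory encoderFactory = new HardwareVideoEncoderFactory(
    null /* sharedContext */, true /* enableIntelVp8Encoder */, false /* enableH264HighProfile */);
for (VideoCodecInfo codecInfo : encoderFactory.getSupportedCodecs()) {
  VideoEncoder encoder = encoderFactory.createEncoder(codecInfo);
  // A null return means no hardware encoder could be matched for this codec on this device.
}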
41
sdk/android/api/org/webrtc/IceCandidate.java
Normal file
@ -0,0 +1,41 @@
/*
 * Copyright 2013 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

package org.webrtc;

/**
 * Representation of a single ICE Candidate, mirroring
 * {@code IceCandidateInterface} in the C++ API.
 */
public class IceCandidate {
  public final String sdpMid;
  public final int sdpMLineIndex;
  public final String sdp;
  public final String serverUrl;

  public IceCandidate(String sdpMid, int sdpMLineIndex, String sdp) {
    this.sdpMid = sdpMid;
    this.sdpMLineIndex = sdpMLineIndex;
    this.sdp = sdp;
    this.serverUrl = "";
  }

  // Only called internally from JNI.
  private IceCandidate(String sdpMid, int sdpMLineIndex, String sdp, String serverUrl) {
    this.sdpMid = sdpMid;
    this.sdpMLineIndex = sdpMLineIndex;
    this.sdp = sdp;
    this.serverUrl = serverUrl;
  }

  public String toString() {
    return sdpMid + ":" + sdpMLineIndex + ":" + sdp + ":" + serverUrl;
  }
}
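The three-argument constructor above is what applications use when trickling remote candidates; a sketch with an illustrative SDP string and an assumed PeerConnection instance.

IceCandidate candidate = new IceCandidate(
    "audio", 0, "candidate:842163049 1 udp 1677729535 203.0.113.7 46154 typ srflx"); // example SDP
peerConnection.addIceCandidate(candidate); // peerConnection is assumed to exist in the application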
751
sdk/android/api/org/webrtc/MediaCodecVideoDecoder.java
Normal file
@ -0,0 +1,751 @@
/*
|
||||
* Copyright 2014 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
package org.webrtc;
|
||||
|
||||
import android.media.MediaCodec;
|
||||
import android.media.MediaCodecInfo;
|
||||
import android.media.MediaCodecInfo.CodecCapabilities;
|
||||
import android.media.MediaCodecList;
|
||||
import android.media.MediaFormat;
|
||||
import android.os.Build;
|
||||
import android.os.SystemClock;
|
||||
import android.view.Surface;
|
||||
|
||||
import java.nio.ByteBuffer;
|
||||
import java.util.Arrays;
|
||||
import java.util.HashSet;
|
||||
import java.util.LinkedList;
|
||||
import java.util.List;
|
||||
import java.util.Queue;
|
||||
import java.util.Set;
|
||||
import java.util.concurrent.CountDownLatch;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
|
||||
// Java-side of peerconnection_jni.cc:MediaCodecVideoDecoder.
|
||||
// This class is an implementation detail of the Java PeerConnection API.
|
||||
@SuppressWarnings("deprecation")
|
||||
public class MediaCodecVideoDecoder {
|
||||
// This class is constructed, operated, and destroyed by its C++ incarnation,
|
||||
// so the class and its methods have non-public visibility. The API this
|
||||
// class exposes aims to mimic the webrtc::VideoDecoder API as closely as
|
||||
// possibly to minimize the amount of translation work necessary.
|
||||
|
||||
private static final String TAG = "MediaCodecVideoDecoder";
|
||||
private static final long MAX_DECODE_TIME_MS = 200;
|
||||
|
||||
// TODO(magjed): Use MediaFormat constants when part of the public API.
|
||||
private static final String FORMAT_KEY_STRIDE = "stride";
|
||||
private static final String FORMAT_KEY_SLICE_HEIGHT = "slice-height";
|
||||
private static final String FORMAT_KEY_CROP_LEFT = "crop-left";
|
||||
private static final String FORMAT_KEY_CROP_RIGHT = "crop-right";
|
||||
private static final String FORMAT_KEY_CROP_TOP = "crop-top";
|
||||
private static final String FORMAT_KEY_CROP_BOTTOM = "crop-bottom";
|
||||
|
||||
// Tracks webrtc::VideoCodecType.
|
||||
public enum VideoCodecType { VIDEO_CODEC_VP8, VIDEO_CODEC_VP9, VIDEO_CODEC_H264 }
|
||||
|
||||
// Timeout for input buffer dequeue.
|
||||
private static final int DEQUEUE_INPUT_TIMEOUT = 500000;
|
||||
// Timeout for codec releasing.
|
||||
private static final int MEDIA_CODEC_RELEASE_TIMEOUT_MS = 5000;
|
||||
// Max number of output buffers queued before starting to drop decoded frames.
|
||||
private static final int MAX_QUEUED_OUTPUTBUFFERS = 3;
|
||||
// Active running decoder instance. Set in initDecode() (called from native code)
|
||||
// and reset to null in release() call.
|
||||
private static MediaCodecVideoDecoder runningInstance = null;
|
||||
private static MediaCodecVideoDecoderErrorCallback errorCallback = null;
|
||||
private static int codecErrors = 0;
|
||||
// List of disabled codec types - can be set from application.
|
||||
private static Set<String> hwDecoderDisabledTypes = new HashSet<String>();
|
||||
|
||||
private Thread mediaCodecThread;
|
||||
private MediaCodec mediaCodec;
|
||||
private ByteBuffer[] inputBuffers;
|
||||
private ByteBuffer[] outputBuffers;
|
||||
private static final String VP8_MIME_TYPE = "video/x-vnd.on2.vp8";
|
||||
private static final String VP9_MIME_TYPE = "video/x-vnd.on2.vp9";
|
||||
private static final String H264_MIME_TYPE = "video/avc";
|
||||
// List of supported HW VP8 decoders.
|
||||
private static final String[] supportedVp8HwCodecPrefixes = {
|
||||
"OMX.qcom.", "OMX.Nvidia.", "OMX.Exynos.", "OMX.Intel."};
|
||||
// List of supported HW VP9 decoders.
|
||||
private static final String[] supportedVp9HwCodecPrefixes = {"OMX.qcom.", "OMX.Exynos."};
|
||||
// List of supported HW H.264 decoders.
|
||||
private static final String[] supportedH264HwCodecPrefixes = {
|
||||
"OMX.qcom.", "OMX.Intel.", "OMX.Exynos."};
|
||||
// List of supported HW H.264 high profile decoders.
|
||||
private static final String supportedQcomH264HighProfileHwCodecPrefix = "OMX.qcom.";
|
||||
private static final String supportedExynosH264HighProfileHwCodecPrefix = "OMX.Exynos.";
|
||||
|
||||
// NV12 color format supported by QCOM codec, but not declared in MediaCodec -
|
||||
// see /hardware/qcom/media/mm-core/inc/OMX_QCOMExtns.h
|
||||
private static final int COLOR_QCOM_FORMATYVU420PackedSemiPlanar32m4ka = 0x7FA30C01;
|
||||
private static final int COLOR_QCOM_FORMATYVU420PackedSemiPlanar16m4ka = 0x7FA30C02;
|
||||
private static final int COLOR_QCOM_FORMATYVU420PackedSemiPlanar64x32Tile2m8ka = 0x7FA30C03;
|
||||
private static final int COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m = 0x7FA30C04;
|
||||
// Allowable color formats supported by codec - in order of preference.
|
||||
private static final List<Integer> supportedColorList = Arrays.asList(
|
||||
CodecCapabilities.COLOR_FormatYUV420Planar, CodecCapabilities.COLOR_FormatYUV420SemiPlanar,
|
||||
CodecCapabilities.COLOR_QCOM_FormatYUV420SemiPlanar,
|
||||
COLOR_QCOM_FORMATYVU420PackedSemiPlanar32m4ka, COLOR_QCOM_FORMATYVU420PackedSemiPlanar16m4ka,
|
||||
COLOR_QCOM_FORMATYVU420PackedSemiPlanar64x32Tile2m8ka,
|
||||
COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m);
|
||||
|
||||
private int colorFormat;
|
||||
private int width;
|
||||
private int height;
|
||||
private int stride;
|
||||
private int sliceHeight;
|
||||
private boolean hasDecodedFirstFrame;
|
||||
private final Queue<TimeStamps> decodeStartTimeMs = new LinkedList<TimeStamps>();
|
||||
private boolean useSurface;
|
||||
|
||||
// The below variables are only used when decoding to a Surface.
|
||||
private TextureListener textureListener;
|
||||
private int droppedFrames;
|
||||
private Surface surface = null;
|
||||
private final Queue<DecodedOutputBuffer> dequeuedSurfaceOutputBuffers =
|
||||
new LinkedList<DecodedOutputBuffer>();
|
||||
|
||||
// MediaCodec error handler - invoked when critical error happens which may prevent
|
||||
// further use of media codec API. Now it means that one of media codec instances
|
||||
// is hanging and can no longer be used in the next call.
|
||||
public static interface MediaCodecVideoDecoderErrorCallback {
|
||||
void onMediaCodecVideoDecoderCriticalError(int codecErrors);
|
||||
}
|
||||
|
||||
public static void setErrorCallback(MediaCodecVideoDecoderErrorCallback errorCallback) {
|
||||
Logging.d(TAG, "Set error callback");
|
||||
MediaCodecVideoDecoder.errorCallback = errorCallback;
|
||||
}
|
||||
|
||||
// Functions to disable HW decoding - can be called from applications for platforms
|
||||
// which have known HW decoding problems.
|
||||
public static void disableVp8HwCodec() {
|
||||
Logging.w(TAG, "VP8 decoding is disabled by application.");
|
||||
hwDecoderDisabledTypes.add(VP8_MIME_TYPE);
|
||||
}
|
||||
|
||||
public static void disableVp9HwCodec() {
|
||||
Logging.w(TAG, "VP9 decoding is disabled by application.");
|
||||
hwDecoderDisabledTypes.add(VP9_MIME_TYPE);
|
||||
}
|
||||
|
||||
public static void disableH264HwCodec() {
|
||||
Logging.w(TAG, "H.264 decoding is disabled by application.");
|
||||
hwDecoderDisabledTypes.add(H264_MIME_TYPE);
|
||||
}
|
||||
|
||||
// Functions to query if HW decoding is supported.
|
||||
public static boolean isVp8HwSupported() {
|
||||
return !hwDecoderDisabledTypes.contains(VP8_MIME_TYPE)
|
||||
&& (findDecoder(VP8_MIME_TYPE, supportedVp8HwCodecPrefixes) != null);
|
||||
}
|
||||
|
||||
public static boolean isVp9HwSupported() {
|
||||
return !hwDecoderDisabledTypes.contains(VP9_MIME_TYPE)
|
||||
&& (findDecoder(VP9_MIME_TYPE, supportedVp9HwCodecPrefixes) != null);
|
||||
}
|
||||
|
||||
public static boolean isH264HwSupported() {
|
||||
return !hwDecoderDisabledTypes.contains(H264_MIME_TYPE)
|
||||
&& (findDecoder(H264_MIME_TYPE, supportedH264HwCodecPrefixes) != null);
|
||||
}
|
||||
|
||||
public static boolean isH264HighProfileHwSupported() {
|
||||
if (hwDecoderDisabledTypes.contains(H264_MIME_TYPE)) {
|
||||
return false;
|
||||
}
|
||||
// Support H.264 HP decoding on QCOM chips for Android L and above.
|
||||
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP
|
||||
&& findDecoder(H264_MIME_TYPE, new String[] {supportedQcomH264HighProfileHwCodecPrefix})
|
||||
!= null) {
|
||||
return true;
|
||||
}
|
||||
// Support H.264 HP decoding on Exynos chips for Android M and above.
|
||||
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M
|
||||
&& findDecoder(H264_MIME_TYPE, new String[] {supportedExynosH264HighProfileHwCodecPrefix})
|
||||
!= null) {
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
public static void printStackTrace() {
|
||||
if (runningInstance != null && runningInstance.mediaCodecThread != null) {
|
||||
StackTraceElement[] mediaCodecStackTraces = runningInstance.mediaCodecThread.getStackTrace();
|
||||
if (mediaCodecStackTraces.length > 0) {
|
||||
Logging.d(TAG, "MediaCodecVideoDecoder stacks trace:");
|
||||
for (StackTraceElement stackTrace : mediaCodecStackTraces) {
|
||||
Logging.d(TAG, stackTrace.toString());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Helper struct for findDecoder() below.
|
||||
private static class DecoderProperties {
|
||||
public DecoderProperties(String codecName, int colorFormat) {
|
||||
this.codecName = codecName;
|
||||
this.colorFormat = colorFormat;
|
||||
}
|
||||
public final String codecName; // OpenMax component name for VP8 codec.
|
||||
public final int colorFormat; // Color format supported by codec.
|
||||
}
|
||||
|
||||
private static DecoderProperties findDecoder(String mime, String[] supportedCodecPrefixes) {
|
||||
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) {
|
||||
return null; // MediaCodec.setParameters is missing.
|
||||
}
|
||||
Logging.d(TAG, "Trying to find HW decoder for mime " + mime);
|
||||
for (int i = 0; i < MediaCodecList.getCodecCount(); ++i) {
|
||||
MediaCodecInfo info = null;
|
||||
try {
|
||||
info = MediaCodecList.getCodecInfoAt(i);
|
||||
} catch (IllegalArgumentException e) {
|
||||
Logging.e(TAG, "Cannot retrieve decoder codec info", e);
|
||||
}
|
||||
if (info == null || info.isEncoder()) {
|
||||
continue;
|
||||
}
|
||||
String name = null;
|
||||
for (String mimeType : info.getSupportedTypes()) {
|
||||
if (mimeType.equals(mime)) {
|
||||
name = info.getName();
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (name == null) {
|
||||
continue; // No HW support in this codec; try the next one.
|
||||
}
|
||||
Logging.d(TAG, "Found candidate decoder " + name);
|
||||
|
||||
// Check if this is supported decoder.
|
||||
boolean supportedCodec = false;
|
||||
for (String codecPrefix : supportedCodecPrefixes) {
|
||||
if (name.startsWith(codecPrefix)) {
|
||||
supportedCodec = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (!supportedCodec) {
|
||||
continue;
|
||||
}
|
||||
|
||||
// Check if codec supports either yuv420 or nv12.
|
||||
CodecCapabilities capabilities;
|
||||
try {
|
||||
capabilities = info.getCapabilitiesForType(mime);
|
||||
} catch (IllegalArgumentException e) {
|
||||
Logging.e(TAG, "Cannot retrieve decoder capabilities", e);
|
||||
continue;
|
||||
}
|
||||
for (int colorFormat : capabilities.colorFormats) {
|
||||
Logging.v(TAG, " Color: 0x" + Integer.toHexString(colorFormat));
|
||||
}
|
||||
for (int supportedColorFormat : supportedColorList) {
|
||||
for (int codecColorFormat : capabilities.colorFormats) {
|
||||
if (codecColorFormat == supportedColorFormat) {
|
||||
// Found supported HW decoder.
|
||||
Logging.d(TAG, "Found target decoder " + name + ". Color: 0x"
|
||||
+ Integer.toHexString(codecColorFormat));
|
||||
return new DecoderProperties(name, codecColorFormat);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
Logging.d(TAG, "No HW decoder found for mime " + mime);
|
||||
return null; // No HW decoder.
|
||||
}
|
||||
|
||||
private void checkOnMediaCodecThread() throws IllegalStateException {
|
||||
if (mediaCodecThread.getId() != Thread.currentThread().getId()) {
|
||||
throw new IllegalStateException("MediaCodecVideoDecoder previously operated on "
|
||||
+ mediaCodecThread + " but is now called on " + Thread.currentThread());
|
||||
}
|
||||
}
|
||||
|
||||
// Pass null in |surfaceTextureHelper| to configure the codec for ByteBuffer output.
|
||||
private boolean initDecode(
|
||||
VideoCodecType type, int width, int height, SurfaceTextureHelper surfaceTextureHelper) {
|
||||
if (mediaCodecThread != null) {
|
||||
throw new RuntimeException("initDecode: Forgot to release()?");
|
||||
}
|
||||
|
||||
String mime = null;
|
||||
useSurface = (surfaceTextureHelper != null);
|
||||
String[] supportedCodecPrefixes = null;
|
||||
if (type == VideoCodecType.VIDEO_CODEC_VP8) {
|
||||
mime = VP8_MIME_TYPE;
|
||||
supportedCodecPrefixes = supportedVp8HwCodecPrefixes;
|
||||
} else if (type == VideoCodecType.VIDEO_CODEC_VP9) {
|
||||
mime = VP9_MIME_TYPE;
|
||||
supportedCodecPrefixes = supportedVp9HwCodecPrefixes;
|
||||
} else if (type == VideoCodecType.VIDEO_CODEC_H264) {
|
||||
mime = H264_MIME_TYPE;
|
||||
supportedCodecPrefixes = supportedH264HwCodecPrefixes;
|
||||
} else {
|
||||
throw new RuntimeException("initDecode: Non-supported codec " + type);
|
||||
}
|
||||
DecoderProperties properties = findDecoder(mime, supportedCodecPrefixes);
|
||||
if (properties == null) {
|
||||
throw new RuntimeException("Cannot find HW decoder for " + type);
|
||||
}
|
||||
|
||||
Logging.d(TAG, "Java initDecode: " + type + " : " + width + " x " + height + ". Color: 0x"
|
||||
+ Integer.toHexString(properties.colorFormat) + ". Use Surface: " + useSurface);
|
||||
|
||||
runningInstance = this; // Decoder is now running and can be queried for stack traces.
|
||||
mediaCodecThread = Thread.currentThread();
|
||||
try {
|
||||
this.width = width;
|
||||
this.height = height;
|
||||
stride = width;
|
||||
sliceHeight = height;
|
||||
|
||||
if (useSurface) {
|
||||
textureListener = new TextureListener(surfaceTextureHelper);
|
||||
surface = new Surface(surfaceTextureHelper.getSurfaceTexture());
|
||||
}
|
||||
|
||||
MediaFormat format = MediaFormat.createVideoFormat(mime, width, height);
|
||||
if (!useSurface) {
|
||||
format.setInteger(MediaFormat.KEY_COLOR_FORMAT, properties.colorFormat);
|
||||
}
|
||||
Logging.d(TAG, " Format: " + format);
|
||||
mediaCodec = MediaCodecVideoEncoder.createByCodecName(properties.codecName);
|
||||
if (mediaCodec == null) {
|
||||
Logging.e(TAG, "Can not create media decoder");
|
||||
return false;
|
||||
}
|
||||
mediaCodec.configure(format, surface, null, 0);
|
||||
mediaCodec.start();
|
||||
|
||||
colorFormat = properties.colorFormat;
|
||||
outputBuffers = mediaCodec.getOutputBuffers();
|
||||
inputBuffers = mediaCodec.getInputBuffers();
|
||||
decodeStartTimeMs.clear();
|
||||
hasDecodedFirstFrame = false;
|
||||
dequeuedSurfaceOutputBuffers.clear();
|
||||
droppedFrames = 0;
|
||||
Logging.d(TAG,
|
||||
"Input buffers: " + inputBuffers.length + ". Output buffers: " + outputBuffers.length);
|
||||
return true;
|
||||
} catch (IllegalStateException e) {
|
||||
Logging.e(TAG, "initDecode failed", e);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
// Resets the decoder so it can start decoding frames with new resolution.
|
||||
// Flushes MediaCodec and clears decoder output buffers.
|
||||
private void reset(int width, int height) {
|
||||
if (mediaCodecThread == null || mediaCodec == null) {
|
||||
throw new RuntimeException("Incorrect reset call for non-initialized decoder.");
|
||||
}
|
||||
Logging.d(TAG, "Java reset: " + width + " x " + height);
|
||||
|
||||
mediaCodec.flush();
|
||||
|
||||
this.width = width;
|
||||
this.height = height;
|
||||
decodeStartTimeMs.clear();
|
||||
dequeuedSurfaceOutputBuffers.clear();
|
||||
hasDecodedFirstFrame = false;
|
||||
droppedFrames = 0;
|
||||
}
|
||||
|
||||
private void release() {
|
||||
Logging.d(TAG, "Java releaseDecoder. Total number of dropped frames: " + droppedFrames);
|
||||
checkOnMediaCodecThread();
|
||||
|
||||
// Run MediaCodec stop() and release() on a separate thread since sometimes
|
||||
// MediaCodec.stop() may hang.
|
||||
final CountDownLatch releaseDone = new CountDownLatch(1);
|
||||
|
||||
Runnable runMediaCodecRelease = new Runnable() {
|
||||
@Override
|
||||
public void run() {
|
||||
try {
|
||||
Logging.d(TAG, "Java releaseDecoder on release thread");
|
||||
mediaCodec.stop();
|
||||
mediaCodec.release();
|
||||
Logging.d(TAG, "Java releaseDecoder on release thread done");
|
||||
} catch (Exception e) {
|
||||
Logging.e(TAG, "Media decoder release failed", e);
|
||||
}
|
||||
releaseDone.countDown();
|
||||
}
|
||||
};
|
||||
new Thread(runMediaCodecRelease).start();
|
||||
|
||||
if (!ThreadUtils.awaitUninterruptibly(releaseDone, MEDIA_CODEC_RELEASE_TIMEOUT_MS)) {
|
||||
Logging.e(TAG, "Media decoder release timeout");
|
||||
codecErrors++;
|
||||
if (errorCallback != null) {
|
||||
Logging.e(TAG, "Invoke codec error callback. Errors: " + codecErrors);
|
||||
errorCallback.onMediaCodecVideoDecoderCriticalError(codecErrors);
|
||||
}
|
||||
}
|
||||
|
||||
mediaCodec = null;
|
||||
mediaCodecThread = null;
|
||||
runningInstance = null;
|
||||
if (useSurface) {
|
||||
surface.release();
|
||||
surface = null;
|
||||
textureListener.release();
|
||||
}
|
||||
Logging.d(TAG, "Java releaseDecoder done");
|
||||
}
|
||||
|
||||
// Dequeue an input buffer and return its index, -1 if no input buffer is
|
||||
// available, or -2 if the codec is no longer operative.
|
||||
private int dequeueInputBuffer() {
|
||||
checkOnMediaCodecThread();
|
||||
try {
|
||||
return mediaCodec.dequeueInputBuffer(DEQUEUE_INPUT_TIMEOUT);
|
||||
} catch (IllegalStateException e) {
|
||||
Logging.e(TAG, "dequeueIntputBuffer failed", e);
|
||||
return -2;
|
||||
}
|
||||
}
|
||||
|
||||
private boolean queueInputBuffer(int inputBufferIndex, int size, long presentationTimeStampUs,
|
||||
long timeStampMs, long ntpTimeStamp) {
|
||||
checkOnMediaCodecThread();
|
||||
try {
|
||||
inputBuffers[inputBufferIndex].position(0);
|
||||
inputBuffers[inputBufferIndex].limit(size);
|
||||
decodeStartTimeMs.add(
|
||||
new TimeStamps(SystemClock.elapsedRealtime(), timeStampMs, ntpTimeStamp));
|
||||
mediaCodec.queueInputBuffer(inputBufferIndex, 0, size, presentationTimeStampUs, 0);
|
||||
return true;
|
||||
} catch (IllegalStateException e) {
|
||||
Logging.e(TAG, "decode failed", e);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
private static class TimeStamps {
|
||||
public TimeStamps(long decodeStartTimeMs, long timeStampMs, long ntpTimeStampMs) {
|
||||
this.decodeStartTimeMs = decodeStartTimeMs;
|
||||
this.timeStampMs = timeStampMs;
|
||||
this.ntpTimeStampMs = ntpTimeStampMs;
|
||||
}
|
||||
// Time when this frame was queued for decoding.
|
||||
private final long decodeStartTimeMs;
|
||||
// Only used for bookkeeping in Java. Stores C++ inputImage._timeStamp value for input frame.
|
||||
private final long timeStampMs;
|
||||
// Only used for bookkeeping in Java. Stores C++ inputImage.ntp_time_ms_ value for input frame.
|
||||
private final long ntpTimeStampMs;
|
||||
}
|
||||
|
||||
// Helper struct for dequeueOutputBuffer() below.
|
||||
private static class DecodedOutputBuffer {
|
||||
public DecodedOutputBuffer(int index, int offset, int size, long presentationTimeStampMs,
|
||||
long timeStampMs, long ntpTimeStampMs, long decodeTime, long endDecodeTime) {
|
||||
this.index = index;
|
||||
this.offset = offset;
|
||||
this.size = size;
|
||||
this.presentationTimeStampMs = presentationTimeStampMs;
|
||||
this.timeStampMs = timeStampMs;
|
||||
this.ntpTimeStampMs = ntpTimeStampMs;
|
||||
this.decodeTimeMs = decodeTime;
|
||||
this.endDecodeTimeMs = endDecodeTime;
|
||||
}
|
||||
|
||||
private final int index;
|
||||
private final int offset;
|
||||
private final int size;
|
||||
// Presentation timestamp returned in dequeueOutputBuffer call.
|
||||
private final long presentationTimeStampMs;
|
||||
// C++ inputImage._timeStamp value for output frame.
|
||||
private final long timeStampMs;
|
||||
// C++ inputImage.ntp_time_ms_ value for output frame.
|
||||
private final long ntpTimeStampMs;
|
||||
// Number of ms it took to decode this frame.
|
||||
private final long decodeTimeMs;
|
||||
// System time when this frame decoding finished.
|
||||
private final long endDecodeTimeMs;
|
||||
}
|
||||
|
||||
// Helper struct for dequeueTextureBuffer() below.
|
||||
private static class DecodedTextureBuffer {
|
||||
private final int textureID;
|
||||
private final float[] transformMatrix;
|
||||
// Presentation timestamp returned in dequeueOutputBuffer call.
|
||||
private final long presentationTimeStampMs;
|
||||
// C++ inputImage._timeStamp value for output frame.
|
||||
private final long timeStampMs;
|
||||
// C++ inputImage.ntp_time_ms_ value for output frame.
|
||||
private final long ntpTimeStampMs;
|
||||
// Number of ms it took to decode this frame.
|
||||
private final long decodeTimeMs;
|
||||
// Interval from when the frame finished decoding until this buffer has been created.
|
||||
// Since there is only one texture, this interval depends on the time from when
|
||||
// a frame is decoded and provided to C++ and until that frame is returned to the MediaCodec
|
||||
// so that the texture can be updated with the next decoded frame.
|
||||
private final long frameDelayMs;
|
||||
|
||||
// A DecodedTextureBuffer with zero |textureID| has special meaning and represents a frame
|
||||
// that was dropped.
|
||||
public DecodedTextureBuffer(int textureID, float[] transformMatrix,
|
||||
long presentationTimeStampMs, long timeStampMs, long ntpTimeStampMs, long decodeTimeMs,
|
||||
long frameDelay) {
|
||||
this.textureID = textureID;
|
||||
this.transformMatrix = transformMatrix;
|
||||
this.presentationTimeStampMs = presentationTimeStampMs;
|
||||
this.timeStampMs = timeStampMs;
|
||||
this.ntpTimeStampMs = ntpTimeStampMs;
|
||||
this.decodeTimeMs = decodeTimeMs;
|
||||
this.frameDelayMs = frameDelay;
|
||||
}
|
||||
}
|
||||
|
||||
// Poll based texture listener.
|
||||
private static class TextureListener
|
||||
implements SurfaceTextureHelper.OnTextureFrameAvailableListener {
|
||||
private final SurfaceTextureHelper surfaceTextureHelper;
|
||||
// |newFrameLock| is used to synchronize arrival of new frames with wait()/notifyAll().
|
||||
private final Object newFrameLock = new Object();
|
||||
// |bufferToRender| is non-null when waiting for transition between addBufferToRender() to
|
||||
// onTextureFrameAvailable().
|
||||
private DecodedOutputBuffer bufferToRender;
|
||||
private DecodedTextureBuffer renderedBuffer;
|
||||
|
||||
public TextureListener(SurfaceTextureHelper surfaceTextureHelper) {
|
||||
this.surfaceTextureHelper = surfaceTextureHelper;
|
||||
surfaceTextureHelper.startListening(this);
|
||||
}
|
||||
|
||||
public void addBufferToRender(DecodedOutputBuffer buffer) {
|
||||
if (bufferToRender != null) {
|
||||
Logging.e(TAG, "Unexpected addBufferToRender() called while waiting for a texture.");
|
||||
throw new IllegalStateException("Waiting for a texture.");
|
||||
}
|
||||
bufferToRender = buffer;
|
||||
}
|
||||
|
||||
public boolean isWaitingForTexture() {
|
||||
synchronized (newFrameLock) {
|
||||
return bufferToRender != null;
|
||||
}
|
||||
}
|
||||
|
||||
// Callback from |surfaceTextureHelper|. May be called on an arbitrary thread.
|
||||
@Override
|
||||
public void onTextureFrameAvailable(
|
||||
int oesTextureId, float[] transformMatrix, long timestampNs) {
|
||||
synchronized (newFrameLock) {
|
||||
if (renderedBuffer != null) {
|
||||
Logging.e(
|
||||
TAG, "Unexpected onTextureFrameAvailable() called while already holding a texture.");
|
||||
throw new IllegalStateException("Already holding a texture.");
|
||||
}
|
||||
// |timestampNs| is always zero on some Android versions.
|
||||
renderedBuffer = new DecodedTextureBuffer(oesTextureId, transformMatrix,
|
||||
bufferToRender.presentationTimeStampMs, bufferToRender.timeStampMs,
|
||||
bufferToRender.ntpTimeStampMs, bufferToRender.decodeTimeMs,
|
||||
SystemClock.elapsedRealtime() - bufferToRender.endDecodeTimeMs);
|
||||
bufferToRender = null;
|
||||
newFrameLock.notifyAll();
|
||||
}
|
||||
}
|
||||
|
||||
// Dequeues and returns a DecodedTextureBuffer if available, or null otherwise.
|
||||
public DecodedTextureBuffer dequeueTextureBuffer(int timeoutMs) {
|
||||
synchronized (newFrameLock) {
|
||||
if (renderedBuffer == null && timeoutMs > 0 && isWaitingForTexture()) {
|
||||
try {
|
||||
newFrameLock.wait(timeoutMs);
|
||||
} catch (InterruptedException e) {
|
||||
// Restore the interrupted status by reinterrupting the thread.
|
||||
Thread.currentThread().interrupt();
|
||||
}
|
||||
}
|
||||
DecodedTextureBuffer returnedBuffer = renderedBuffer;
|
||||
renderedBuffer = null;
|
||||
return returnedBuffer;
|
||||
}
|
||||
}
|
||||
|
||||
public void release() {
|
||||
// SurfaceTextureHelper.stopListening() will block until any onTextureFrameAvailable() in
|
||||
// progress is done. Therefore, the call must be outside any synchronized
|
||||
// statement that is also used in the onTextureFrameAvailable() above to avoid deadlocks.
|
||||
surfaceTextureHelper.stopListening();
|
||||
synchronized (newFrameLock) {
|
||||
if (renderedBuffer != null) {
|
||||
surfaceTextureHelper.returnTextureFrame();
|
||||
renderedBuffer = null;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
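// Illustrative sketch of the poll-based flow implemented above (not part of the original source):
// the MediaCodec thread hands a decoded buffer to the listener, renders it to the SurfaceTexture,
// and later polls for the resulting texture:
//   textureListener.addBufferToRender(buffer);
//   mediaCodec.releaseOutputBuffer(buffer.index, true /* render */);
//   DecodedTextureBuffer tex = textureListener.dequeueTextureBuffer(timeoutMs);
// onTextureFrameAvailable() arrives on the SurfaceTextureHelper thread and is bridged back to the
// polling thread through |newFrameLock|.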
|
||||
|
||||
// Returns null if no decoded buffer is available, and otherwise a DecodedOutputBuffer.
|
||||
// Throws IllegalStateException if call is made on the wrong thread, if color format changes to an
|
||||
// unsupported format, or if |mediaCodec| is not in the Executing state. Throws CodecException
|
||||
// upon codec error.
|
||||
private DecodedOutputBuffer dequeueOutputBuffer(int dequeueTimeoutMs) {
|
||||
checkOnMediaCodecThread();
|
||||
if (decodeStartTimeMs.isEmpty()) {
|
||||
return null;
|
||||
}
|
||||
// Drain the decoder until receiving a decoded buffer or hitting
|
||||
// MediaCodec.INFO_TRY_AGAIN_LATER.
|
||||
final MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
|
||||
while (true) {
|
||||
final int result =
|
||||
mediaCodec.dequeueOutputBuffer(info, TimeUnit.MILLISECONDS.toMicros(dequeueTimeoutMs));
|
||||
switch (result) {
|
||||
case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
|
||||
outputBuffers = mediaCodec.getOutputBuffers();
|
||||
Logging.d(TAG, "Decoder output buffers changed: " + outputBuffers.length);
|
||||
if (hasDecodedFirstFrame) {
|
||||
throw new RuntimeException("Unexpected output buffer change event.");
|
||||
}
|
||||
break;
|
||||
case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
|
||||
MediaFormat format = mediaCodec.getOutputFormat();
|
||||
Logging.d(TAG, "Decoder format changed: " + format.toString());
|
||||
final int newWidth;
|
||||
final int newHeight;
|
||||
if (format.containsKey(FORMAT_KEY_CROP_LEFT) && format.containsKey(FORMAT_KEY_CROP_RIGHT)
|
||||
&& format.containsKey(FORMAT_KEY_CROP_BOTTOM)
|
||||
&& format.containsKey(FORMAT_KEY_CROP_TOP)) {
|
||||
newWidth = 1 + format.getInteger(FORMAT_KEY_CROP_RIGHT)
|
||||
- format.getInteger(FORMAT_KEY_CROP_LEFT);
|
||||
newHeight = 1 + format.getInteger(FORMAT_KEY_CROP_BOTTOM)
|
||||
- format.getInteger(FORMAT_KEY_CROP_TOP);
|
||||
} else {
|
||||
newWidth = format.getInteger(MediaFormat.KEY_WIDTH);
|
||||
newHeight = format.getInteger(MediaFormat.KEY_HEIGHT);
|
||||
}
|
||||
if (hasDecodedFirstFrame && (newWidth != width || newHeight != height)) {
|
||||
throw new RuntimeException("Unexpected size change. Configured " + width + "*" + height
|
||||
+ ". New " + newWidth + "*" + newHeight);
|
||||
}
|
||||
width = newWidth;
|
||||
height = newHeight;
|
||||
|
||||
if (!useSurface && format.containsKey(MediaFormat.KEY_COLOR_FORMAT)) {
|
||||
colorFormat = format.getInteger(MediaFormat.KEY_COLOR_FORMAT);
|
||||
Logging.d(TAG, "Color: 0x" + Integer.toHexString(colorFormat));
|
||||
if (!supportedColorList.contains(colorFormat)) {
|
||||
throw new IllegalStateException("Non supported color format: " + colorFormat);
|
||||
}
|
||||
}
|
||||
if (format.containsKey(FORMAT_KEY_STRIDE)) {
|
||||
stride = format.getInteger(FORMAT_KEY_STRIDE);
|
||||
}
|
||||
if (format.containsKey(FORMAT_KEY_SLICE_HEIGHT)) {
|
||||
sliceHeight = format.getInteger(FORMAT_KEY_SLICE_HEIGHT);
|
||||
}
|
||||
Logging.d(TAG, "Frame stride and slice height: " + stride + " x " + sliceHeight);
|
||||
stride = Math.max(width, stride);
|
||||
sliceHeight = Math.max(height, sliceHeight);
|
||||
break;
|
||||
case MediaCodec.INFO_TRY_AGAIN_LATER:
|
||||
return null;
|
||||
default:
|
||||
hasDecodedFirstFrame = true;
|
||||
TimeStamps timeStamps = decodeStartTimeMs.remove();
|
||||
long decodeTimeMs = SystemClock.elapsedRealtime() - timeStamps.decodeStartTimeMs;
|
||||
if (decodeTimeMs > MAX_DECODE_TIME_MS) {
|
||||
Logging.e(TAG, "Very high decode time: " + decodeTimeMs + "ms"
|
||||
+ ". Q size: " + decodeStartTimeMs.size()
|
||||
+ ". Might be caused by resuming H264 decoding after a pause.");
|
||||
decodeTimeMs = MAX_DECODE_TIME_MS;
|
||||
}
|
||||
return new DecodedOutputBuffer(result, info.offset, info.size,
|
||||
TimeUnit.MICROSECONDS.toMillis(info.presentationTimeUs), timeStamps.timeStampMs,
|
||||
timeStamps.ntpTimeStampMs, decodeTimeMs, SystemClock.elapsedRealtime());
|
||||
}
|
||||
}
|
||||
}
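// Worked example for the crop handling in INFO_OUTPUT_FORMAT_CHANGED above (illustrative): the
// crop keys are inclusive, so crop-left = 0, crop-right = 639, crop-top = 0, crop-bottom = 479
// yields newWidth = 1 + 639 - 0 = 640 and newHeight = 1 + 479 - 0 = 480, which is preferred over
// KEY_WIDTH/KEY_HEIGHT since those may report the padded coded size.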
|
||||
|
||||
// Returns null if no decoded buffer is available, and otherwise a DecodedTextureBuffer.
|
||||
// Throws IllegalStateException if call is made on the wrong thread, if color format changes to an
|
||||
// unsupported format, or if |mediaCodec| is not in the Executing state. Throws CodecException
|
||||
// upon codec error. If |dequeueTimeoutMs| > 0, the oldest decoded frame will be dropped if
|
||||
// a frame can't be returned.
|
||||
private DecodedTextureBuffer dequeueTextureBuffer(int dequeueTimeoutMs) {
|
||||
checkOnMediaCodecThread();
|
||||
if (!useSurface) {
|
||||
throw new IllegalStateException("dequeueTexture() called for byte buffer decoding.");
|
||||
}
|
||||
DecodedOutputBuffer outputBuffer = dequeueOutputBuffer(dequeueTimeoutMs);
|
||||
if (outputBuffer != null) {
|
||||
dequeuedSurfaceOutputBuffers.add(outputBuffer);
|
||||
}
|
||||
|
||||
MaybeRenderDecodedTextureBuffer();
|
||||
// Check if a texture is ready now by waiting at most |dequeueTimeoutMs|.
|
||||
DecodedTextureBuffer renderedBuffer = textureListener.dequeueTextureBuffer(dequeueTimeoutMs);
|
||||
if (renderedBuffer != null) {
|
||||
MaybeRenderDecodedTextureBuffer();
|
||||
return renderedBuffer;
|
||||
}
|
||||
|
||||
if ((dequeuedSurfaceOutputBuffers.size()
|
||||
>= Math.min(MAX_QUEUED_OUTPUTBUFFERS, outputBuffers.length)
|
||||
|| (dequeueTimeoutMs > 0 && !dequeuedSurfaceOutputBuffers.isEmpty()))) {
|
||||
++droppedFrames;
|
||||
// Drop the oldest frame still in dequeuedSurfaceOutputBuffers.
|
||||
// The oldest frame is owned by |textureListener| and can't be dropped since
|
||||
// mediaCodec.releaseOutputBuffer has already been called.
|
||||
final DecodedOutputBuffer droppedFrame = dequeuedSurfaceOutputBuffers.remove();
|
||||
if (dequeueTimeoutMs > 0) {
|
||||
// TODO(perkj): Re-add the below log when VideoRenderGUI has been removed or fixed to
|
||||
// return the one and only texture even if it does not render.
|
||||
Logging.w(TAG, "Draining decoder. Dropping frame with TS: "
|
||||
+ droppedFrame.presentationTimeStampMs + ". Total number of dropped frames: "
|
||||
+ droppedFrames);
|
||||
} else {
|
||||
Logging.w(TAG, "Too many output buffers " + dequeuedSurfaceOutputBuffers.size()
|
||||
+ ". Dropping frame with TS: " + droppedFrame.presentationTimeStampMs
|
||||
+ ". Total number of dropped frames: " + droppedFrames);
|
||||
}
|
||||
|
||||
mediaCodec.releaseOutputBuffer(droppedFrame.index, false /* render */);
|
||||
return new DecodedTextureBuffer(0, null, droppedFrame.presentationTimeStampMs,
|
||||
droppedFrame.timeStampMs, droppedFrame.ntpTimeStampMs, droppedFrame.decodeTimeMs,
|
||||
SystemClock.elapsedRealtime() - droppedFrame.endDecodeTimeMs);
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
private void MaybeRenderDecodedTextureBuffer() {
|
||||
if (dequeuedSurfaceOutputBuffers.isEmpty() || textureListener.isWaitingForTexture()) {
|
||||
return;
|
||||
}
|
||||
// Get the first frame in the queue and render to the decoder output surface.
|
||||
final DecodedOutputBuffer buffer = dequeuedSurfaceOutputBuffers.remove();
|
||||
textureListener.addBufferToRender(buffer);
|
||||
mediaCodec.releaseOutputBuffer(buffer.index, true /* render */);
|
||||
}
|
||||
|
||||
// Release a dequeued output byte buffer back to the codec for re-use. Should only be called for
|
||||
// non-surface decoding.
|
||||
// Throws IllegalStateException if the call is made on the wrong thread, if codec is configured
|
||||
// for surface decoding, or if |mediaCodec| is not in the Executing state. Throws
|
||||
// MediaCodec.CodecException upon codec error.
|
||||
private void returnDecodedOutputBuffer(int index)
|
||||
throws IllegalStateException, MediaCodec.CodecException {
|
||||
checkOnMediaCodecThread();
|
||||
if (useSurface) {
|
||||
throw new IllegalStateException("returnDecodedOutputBuffer() called for surface decoding.");
|
||||
}
|
||||
mediaCodec.releaseOutputBuffer(index, false /* render */);
|
||||
}
|
||||
}
|
||||
927
sdk/android/api/org/webrtc/MediaCodecVideoEncoder.java
Normal file
@ -0,0 +1,927 @@
|
||||
/*
|
||||
* Copyright 2013 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
package org.webrtc;
|
||||
|
||||
import android.annotation.TargetApi;
|
||||
import android.graphics.Matrix;
|
||||
import android.media.MediaCodec;
|
||||
import android.media.MediaCodecInfo;
|
||||
import android.media.MediaCodecInfo.CodecCapabilities;
|
||||
import android.media.MediaCodecList;
|
||||
import android.media.MediaFormat;
|
||||
import android.opengl.GLES20;
|
||||
import android.os.Build;
|
||||
import android.os.Bundle;
|
||||
import android.view.Surface;
|
||||
import java.nio.ByteBuffer;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.HashSet;
|
||||
import java.util.List;
|
||||
import java.util.Set;
|
||||
import java.util.concurrent.CountDownLatch;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
|
||||
// Java-side of peerconnection_jni.cc:MediaCodecVideoEncoder.
|
||||
// This class is an implementation detail of the Java PeerConnection API.
|
||||
@TargetApi(19)
|
||||
@SuppressWarnings("deprecation")
|
||||
public class MediaCodecVideoEncoder {
|
||||
// This class is constructed, operated, and destroyed by its C++ incarnation,
|
||||
// so the class and its methods have non-public visibility. The API this
|
||||
// class exposes aims to mimic the webrtc::VideoEncoder API as closely as
|
||||
// possible in order to minimize the amount of translation work necessary.
|
||||
|
||||
private static final String TAG = "MediaCodecVideoEncoder";
|
||||
|
||||
// Tracks webrtc::VideoCodecType.
|
||||
public enum VideoCodecType { VIDEO_CODEC_VP8, VIDEO_CODEC_VP9, VIDEO_CODEC_H264 }
|
||||
|
||||
private static final int MEDIA_CODEC_RELEASE_TIMEOUT_MS = 5000; // Timeout for codec releasing.
|
||||
private static final int DEQUEUE_TIMEOUT = 0; // Non-blocking, no wait.
|
||||
private static final int BITRATE_ADJUSTMENT_FPS = 30;
|
||||
private static final int MAXIMUM_INITIAL_FPS = 30;
|
||||
private static final double BITRATE_CORRECTION_SEC = 3.0;
|
||||
// Maximum bitrate correction scale - no more than 4 times.
|
||||
private static final double BITRATE_CORRECTION_MAX_SCALE = 4;
|
||||
// Amount of correction steps to reach correction maximum scale.
|
||||
private static final int BITRATE_CORRECTION_STEPS = 20;
|
||||
// Forced key frame interval - used to reduce color distortions on Qualcomm platform.
|
||||
private static final long QCOM_VP8_KEY_FRAME_INTERVAL_ANDROID_L_MS = 15000;
|
||||
private static final long QCOM_VP8_KEY_FRAME_INTERVAL_ANDROID_M_MS = 20000;
|
||||
private static final long QCOM_VP8_KEY_FRAME_INTERVAL_ANDROID_N_MS = 15000;
|
||||
|
||||
// Active running encoder instance. Set in initEncode() (called from native code)
|
||||
// and reset to null in release() call.
|
||||
private static MediaCodecVideoEncoder runningInstance = null;
|
||||
private static MediaCodecVideoEncoderErrorCallback errorCallback = null;
|
||||
private static int codecErrors = 0;
|
||||
// List of disabled codec types - can be set from application.
|
||||
private static Set<String> hwEncoderDisabledTypes = new HashSet<String>();
|
||||
|
||||
private Thread mediaCodecThread;
|
||||
private MediaCodec mediaCodec;
|
||||
private ByteBuffer[] outputBuffers;
|
||||
private EglBase14 eglBase;
|
||||
private int profile;
|
||||
private int width;
|
||||
private int height;
|
||||
private Surface inputSurface;
|
||||
private GlRectDrawer drawer;
|
||||
|
||||
private static final String VP8_MIME_TYPE = "video/x-vnd.on2.vp8";
|
||||
private static final String VP9_MIME_TYPE = "video/x-vnd.on2.vp9";
|
||||
private static final String H264_MIME_TYPE = "video/avc";
|
||||
|
||||
private static final int VIDEO_AVCProfileHigh = 8;
|
||||
private static final int VIDEO_AVCLevel3 = 0x100;
|
||||
|
||||
// Type of bitrate adjustment for video encoder.
|
||||
public enum BitrateAdjustmentType {
|
||||
// No adjustment - video encoder has no known bitrate problem.
|
||||
NO_ADJUSTMENT,
|
||||
// Framerate-based bitrate adjustment is required - the HW encoder does not use frame
|
||||
// timestamps to calculate the frame bitrate budget and instead relies on the initial
|
||||
// fps configuration, assuming that all frames arrive at a fixed initial frame rate.
|
||||
FRAMERATE_ADJUSTMENT,
|
||||
// Dynamic bitrate adjustment is required - the HW encoder uses frame timestamps, but the actual
|
||||
// bitrate deviates too much from the target value.
|
||||
DYNAMIC_ADJUSTMENT
|
||||
}
|
||||
|
||||
// Should be in sync with webrtc::H264::Profile.
|
||||
public static enum H264Profile {
|
||||
CONSTRAINED_BASELINE(0),
|
||||
BASELINE(1),
|
||||
MAIN(2),
|
||||
CONSTRAINED_HIGH(3),
|
||||
HIGH(4);
|
||||
|
||||
private final int value;
|
||||
|
||||
H264Profile(int value) {
|
||||
this.value = value;
|
||||
}
|
||||
|
||||
public int getValue() {
|
||||
return value;
|
||||
}
|
||||
}
|
||||
|
||||
// Class describing supported media codec properties.
|
||||
private static class MediaCodecProperties {
|
||||
public final String codecPrefix;
|
||||
// Minimum Android SDK required for this codec to be used.
|
||||
public final int minSdk;
|
||||
// Flag set when the encoder implementation does not use frame timestamps to calculate the frame
|
||||
// bitrate budget and instead relies on the initial fps configuration, assuming that all frames
|
||||
// arrive at a fixed initial frame rate. Bitrate adjustment is required in this case.
|
||||
public final BitrateAdjustmentType bitrateAdjustmentType;
|
||||
|
||||
MediaCodecProperties(
|
||||
String codecPrefix, int minSdk, BitrateAdjustmentType bitrateAdjustmentType) {
|
||||
this.codecPrefix = codecPrefix;
|
||||
this.minSdk = minSdk;
|
||||
this.bitrateAdjustmentType = bitrateAdjustmentType;
|
||||
}
|
||||
}
|
||||
|
||||
// List of supported HW VP8 encoders.
|
||||
private static final MediaCodecProperties qcomVp8HwProperties = new MediaCodecProperties(
|
||||
"OMX.qcom.", Build.VERSION_CODES.KITKAT, BitrateAdjustmentType.NO_ADJUSTMENT);
|
||||
private static final MediaCodecProperties exynosVp8HwProperties = new MediaCodecProperties(
|
||||
"OMX.Exynos.", Build.VERSION_CODES.M, BitrateAdjustmentType.DYNAMIC_ADJUSTMENT);
|
||||
private static final MediaCodecProperties intelVp8HwProperties = new MediaCodecProperties(
|
||||
"OMX.Intel.", Build.VERSION_CODES.LOLLIPOP, BitrateAdjustmentType.NO_ADJUSTMENT);
|
||||
private static MediaCodecProperties[] vp8HwList() {
|
||||
final ArrayList<MediaCodecProperties> supported_codecs = new ArrayList<MediaCodecProperties>();
|
||||
supported_codecs.add(qcomVp8HwProperties);
|
||||
supported_codecs.add(exynosVp8HwProperties);
|
||||
if (PeerConnectionFactory.fieldTrialsFindFullName("WebRTC-IntelVP8").equals("Enabled")) {
|
||||
supported_codecs.add(intelVp8HwProperties);
|
||||
}
|
||||
return supported_codecs.toArray(new MediaCodecProperties[supported_codecs.size()]);
|
||||
}
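// Note (illustrative): the Intel VP8 encoder is only considered when the application has enabled
// the "WebRTC-IntelVP8" field trial, e.g. by supplying a field-trial string of the form
// "WebRTC-IntelVP8/Enabled/" when initializing PeerConnectionFactory; the exact initialization
// call is an assumption and depends on the application setup.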
|
||||
|
||||
// List of supported HW VP9 encoders.
|
||||
private static final MediaCodecProperties qcomVp9HwProperties = new MediaCodecProperties(
|
||||
"OMX.qcom.", Build.VERSION_CODES.N, BitrateAdjustmentType.NO_ADJUSTMENT);
|
||||
private static final MediaCodecProperties exynosVp9HwProperties = new MediaCodecProperties(
|
||||
"OMX.Exynos.", Build.VERSION_CODES.N, BitrateAdjustmentType.FRAMERATE_ADJUSTMENT);
|
||||
private static final MediaCodecProperties[] vp9HwList =
|
||||
new MediaCodecProperties[] {qcomVp9HwProperties, exynosVp9HwProperties};
|
||||
|
||||
// List of supported HW H.264 encoders.
|
||||
private static final MediaCodecProperties qcomH264HwProperties = new MediaCodecProperties(
|
||||
"OMX.qcom.", Build.VERSION_CODES.KITKAT, BitrateAdjustmentType.NO_ADJUSTMENT);
|
||||
private static final MediaCodecProperties exynosH264HwProperties = new MediaCodecProperties(
|
||||
"OMX.Exynos.", Build.VERSION_CODES.LOLLIPOP, BitrateAdjustmentType.FRAMERATE_ADJUSTMENT);
|
||||
private static final MediaCodecProperties[] h264HwList =
|
||||
new MediaCodecProperties[] {qcomH264HwProperties, exynosH264HwProperties};
|
||||
|
||||
// List of supported HW H.264 high profile encoders.
|
||||
private static final MediaCodecProperties exynosH264HighProfileHwProperties =
|
||||
new MediaCodecProperties(
|
||||
"OMX.Exynos.", Build.VERSION_CODES.M, BitrateAdjustmentType.FRAMERATE_ADJUSTMENT);
|
||||
private static final MediaCodecProperties[] h264HighProfileHwList =
|
||||
new MediaCodecProperties[] {exynosH264HighProfileHwProperties};
|
||||
|
||||
// List of devices with poor H.264 encoder quality.
|
||||
// The HW H.264 encoder on the devices below has poor bitrate control - the actual
|
||||
// bitrate deviates a lot from the target value.
|
||||
private static final String[] H264_HW_EXCEPTION_MODELS =
|
||||
new String[] {"SAMSUNG-SGH-I337", "Nexus 7", "Nexus 4"};
|
||||
|
||||
// Bitrate modes - should be in sync with OMX_VIDEO_CONTROLRATETYPE defined
|
||||
// in OMX_Video.h
|
||||
private static final int VIDEO_ControlRateConstant = 2;
|
||||
// NV12 color format supported by QCOM codec, but not declared in MediaCodec -
|
||||
// see /hardware/qcom/media/mm-core/inc/OMX_QCOMExtns.h
|
||||
private static final int COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m = 0x7FA30C04;
|
||||
// Allowable color formats supported by codec - in order of preference.
|
||||
private static final int[] supportedColorList = {CodecCapabilities.COLOR_FormatYUV420Planar,
|
||||
CodecCapabilities.COLOR_FormatYUV420SemiPlanar,
|
||||
CodecCapabilities.COLOR_QCOM_FormatYUV420SemiPlanar,
|
||||
COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m};
|
||||
private static final int[] supportedSurfaceColorList = {CodecCapabilities.COLOR_FormatSurface};
|
||||
private VideoCodecType type;
|
||||
private int colorFormat; // Used by native code.
|
||||
|
||||
// Variables used for dynamic bitrate adjustment.
|
||||
private BitrateAdjustmentType bitrateAdjustmentType = BitrateAdjustmentType.NO_ADJUSTMENT;
|
||||
private double bitrateAccumulator;
|
||||
private double bitrateAccumulatorMax;
|
||||
private double bitrateObservationTimeMs;
|
||||
private int bitrateAdjustmentScaleExp;
|
||||
private int targetBitrateBps;
|
||||
private int targetFps;
|
||||
|
||||
// Interval in ms to force key frame generation. Used to reduce the duration of color distortions
|
||||
// that sometimes happen when using the Qualcomm video encoder.
|
||||
private long forcedKeyFrameMs;
|
||||
private long lastKeyFrameMs;
|
||||
|
||||
// SPS and PPS NALs (Config frame) for H.264.
|
||||
private ByteBuffer configData = null;
|
||||
|
||||
// MediaCodec error handler - invoked when a critical error happens that may prevent
|
||||
// further use of the MediaCodec API. Currently this means that one of the MediaCodec instances
|
||||
// is hanging and can no longer be used for subsequent calls.
|
||||
public static interface MediaCodecVideoEncoderErrorCallback {
|
||||
void onMediaCodecVideoEncoderCriticalError(int codecErrors);
|
||||
}
|
||||
|
||||
public static void setErrorCallback(MediaCodecVideoEncoderErrorCallback errorCallback) {
|
||||
Logging.d(TAG, "Set error callback");
|
||||
MediaCodecVideoEncoder.errorCallback = errorCallback;
|
||||
}
|
||||
|
||||
// Functions to disable HW encoding - can be called from applications for platforms
|
||||
// which have known HW encoding problems.
|
||||
public static void disableVp8HwCodec() {
|
||||
Logging.w(TAG, "VP8 encoding is disabled by application.");
|
||||
hwEncoderDisabledTypes.add(VP8_MIME_TYPE);
|
||||
}
|
||||
|
||||
public static void disableVp9HwCodec() {
|
||||
Logging.w(TAG, "VP9 encoding is disabled by application.");
|
||||
hwEncoderDisabledTypes.add(VP9_MIME_TYPE);
|
||||
}
|
||||
|
||||
public static void disableH264HwCodec() {
|
||||
Logging.w(TAG, "H.264 encoding is disabled by application.");
|
||||
hwEncoderDisabledTypes.add(H264_MIME_TYPE);
|
||||
}
|
||||
|
||||
// Functions to query if HW encoding is supported.
|
||||
public static boolean isVp8HwSupported() {
|
||||
return !hwEncoderDisabledTypes.contains(VP8_MIME_TYPE)
|
||||
&& (findHwEncoder(VP8_MIME_TYPE, vp8HwList(), supportedColorList) != null);
|
||||
}
|
||||
|
||||
public static EncoderProperties vp8HwEncoderProperties() {
|
||||
if (hwEncoderDisabledTypes.contains(VP8_MIME_TYPE)) {
|
||||
return null;
|
||||
} else {
|
||||
return findHwEncoder(VP8_MIME_TYPE, vp8HwList(), supportedColorList);
|
||||
}
|
||||
}
|
||||
|
||||
public static boolean isVp9HwSupported() {
|
||||
return !hwEncoderDisabledTypes.contains(VP9_MIME_TYPE)
|
||||
&& (findHwEncoder(VP9_MIME_TYPE, vp9HwList, supportedColorList) != null);
|
||||
}
|
||||
|
||||
public static boolean isH264HwSupported() {
|
||||
return !hwEncoderDisabledTypes.contains(H264_MIME_TYPE)
|
||||
&& (findHwEncoder(H264_MIME_TYPE, h264HwList, supportedColorList) != null);
|
||||
}
|
||||
|
||||
public static boolean isH264HighProfileHwSupported() {
|
||||
return !hwEncoderDisabledTypes.contains(H264_MIME_TYPE)
|
||||
&& (findHwEncoder(H264_MIME_TYPE, h264HighProfileHwList, supportedColorList) != null);
|
||||
}
|
||||
|
||||
public static boolean isVp8HwSupportedUsingTextures() {
|
||||
return !hwEncoderDisabledTypes.contains(VP8_MIME_TYPE)
|
||||
&& (findHwEncoder(VP8_MIME_TYPE, vp8HwList(), supportedSurfaceColorList) != null);
|
||||
}
|
||||
|
||||
public static boolean isVp9HwSupportedUsingTextures() {
|
||||
return !hwEncoderDisabledTypes.contains(VP9_MIME_TYPE)
|
||||
&& (findHwEncoder(VP9_MIME_TYPE, vp9HwList, supportedSurfaceColorList) != null);
|
||||
}
|
||||
|
||||
public static boolean isH264HwSupportedUsingTextures() {
|
||||
return !hwEncoderDisabledTypes.contains(H264_MIME_TYPE)
|
||||
&& (findHwEncoder(H264_MIME_TYPE, h264HwList, supportedSurfaceColorList) != null);
|
||||
}
|
||||
|
||||
// Helper struct for findHwEncoder() below.
|
||||
public static class EncoderProperties {
|
||||
public EncoderProperties(
|
||||
String codecName, int colorFormat, BitrateAdjustmentType bitrateAdjustmentType) {
|
||||
this.codecName = codecName;
|
||||
this.colorFormat = colorFormat;
|
||||
this.bitrateAdjustmentType = bitrateAdjustmentType;
|
||||
}
|
||||
public final String codecName; // OpenMax component name for HW codec.
|
||||
public final int colorFormat; // Color format supported by codec.
|
||||
public final BitrateAdjustmentType bitrateAdjustmentType; // Bitrate adjustment type
|
||||
}
|
||||
|
||||
private static EncoderProperties findHwEncoder(
|
||||
String mime, MediaCodecProperties[] supportedHwCodecProperties, int[] colorList) {
|
||||
// MediaCodec.setParameters is missing for JB and below, so bitrate
|
||||
// cannot be adjusted dynamically.
|
||||
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) {
|
||||
return null;
|
||||
}
|
||||
|
||||
// Check if device is in H.264 exception list.
|
||||
if (mime.equals(H264_MIME_TYPE)) {
|
||||
List<String> exceptionModels = Arrays.asList(H264_HW_EXCEPTION_MODELS);
|
||||
if (exceptionModels.contains(Build.MODEL)) {
|
||||
Logging.w(TAG, "Model: " + Build.MODEL + " has black listed H.264 encoder.");
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
for (int i = 0; i < MediaCodecList.getCodecCount(); ++i) {
|
||||
MediaCodecInfo info = null;
|
||||
try {
|
||||
info = MediaCodecList.getCodecInfoAt(i);
|
||||
} catch (IllegalArgumentException e) {
|
||||
Logging.e(TAG, "Cannot retrieve encoder codec info", e);
|
||||
}
|
||||
if (info == null || !info.isEncoder()) {
|
||||
continue;
|
||||
}
|
||||
String name = null;
|
||||
for (String mimeType : info.getSupportedTypes()) {
|
||||
if (mimeType.equals(mime)) {
|
||||
name = info.getName();
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (name == null) {
|
||||
continue; // No HW support in this codec; try the next one.
|
||||
}
|
||||
Logging.v(TAG, "Found candidate encoder " + name);
|
||||
|
||||
// Check if this is a supported HW encoder.
|
||||
boolean supportedCodec = false;
|
||||
BitrateAdjustmentType bitrateAdjustmentType = BitrateAdjustmentType.NO_ADJUSTMENT;
|
||||
for (MediaCodecProperties codecProperties : supportedHwCodecProperties) {
|
||||
if (name.startsWith(codecProperties.codecPrefix)) {
|
||||
if (Build.VERSION.SDK_INT < codecProperties.minSdk) {
|
||||
Logging.w(
|
||||
TAG, "Codec " + name + " is disabled due to SDK version " + Build.VERSION.SDK_INT);
|
||||
continue;
|
||||
}
|
||||
if (codecProperties.bitrateAdjustmentType != BitrateAdjustmentType.NO_ADJUSTMENT) {
|
||||
bitrateAdjustmentType = codecProperties.bitrateAdjustmentType;
|
||||
Logging.w(
|
||||
TAG, "Codec " + name + " requires bitrate adjustment: " + bitrateAdjustmentType);
|
||||
}
|
||||
supportedCodec = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (!supportedCodec) {
|
||||
continue;
|
||||
}
|
||||
|
||||
// Check if HW codec supports known color format.
|
||||
CodecCapabilities capabilities;
|
||||
try {
|
||||
capabilities = info.getCapabilitiesForType(mime);
|
||||
} catch (IllegalArgumentException e) {
|
||||
Logging.e(TAG, "Cannot retrieve encoder capabilities", e);
|
||||
continue;
|
||||
}
|
||||
for (int colorFormat : capabilities.colorFormats) {
|
||||
Logging.v(TAG, " Color: 0x" + Integer.toHexString(colorFormat));
|
||||
}
|
||||
|
||||
for (int supportedColorFormat : colorList) {
|
||||
for (int codecColorFormat : capabilities.colorFormats) {
|
||||
if (codecColorFormat == supportedColorFormat) {
|
||||
// Found supported HW encoder.
|
||||
Logging.d(TAG, "Found target encoder for mime " + mime + " : " + name + ". Color: 0x"
|
||||
+ Integer.toHexString(codecColorFormat) + ". Bitrate adjustment: "
|
||||
+ bitrateAdjustmentType);
|
||||
return new EncoderProperties(name, codecColorFormat, bitrateAdjustmentType);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return null; // No HW encoder.
|
||||
}
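// Illustrative usage (mirrors the is*HwSupported() helpers below): a query such as
//   EncoderProperties properties = findHwEncoder(VP8_MIME_TYPE, vp8HwList(), supportedColorList);
// returns null when no whitelisted OpenMAX component advertises one of the preferred color
// formats, in which case hardware VP8 encoding is reported as unsupported.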
|
||||
|
||||
private void checkOnMediaCodecThread() {
|
||||
if (mediaCodecThread.getId() != Thread.currentThread().getId()) {
|
||||
throw new RuntimeException("MediaCodecVideoEncoder previously operated on " + mediaCodecThread
|
||||
+ " but is now called on " + Thread.currentThread());
|
||||
}
|
||||
}
|
||||
|
||||
public static void printStackTrace() {
|
||||
if (runningInstance != null && runningInstance.mediaCodecThread != null) {
|
||||
StackTraceElement[] mediaCodecStackTraces = runningInstance.mediaCodecThread.getStackTrace();
|
||||
if (mediaCodecStackTraces.length > 0) {
|
||||
Logging.d(TAG, "MediaCodecVideoEncoder stacks trace:");
|
||||
for (StackTraceElement stackTrace : mediaCodecStackTraces) {
|
||||
Logging.d(TAG, stackTrace.toString());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
static MediaCodec createByCodecName(String codecName) {
|
||||
try {
|
||||
// In the L SDK this call can throw IOException, so in order to work in
|
||||
// both cases we catch a generic Exception.
|
||||
return MediaCodec.createByCodecName(codecName);
|
||||
} catch (Exception e) {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
boolean initEncode(VideoCodecType type, int profile, int width, int height, int kbps, int fps,
|
||||
EglBase14.Context sharedContext) {
|
||||
final boolean useSurface = sharedContext != null;
|
||||
Logging.d(TAG,
|
||||
"Java initEncode: " + type + ". Profile: " + profile + " : " + width + " x " + height
|
||||
+ ". @ " + kbps + " kbps. Fps: " + fps + ". Encode from texture : " + useSurface);
|
||||
|
||||
this.profile = profile;
|
||||
this.width = width;
|
||||
this.height = height;
|
||||
if (mediaCodecThread != null) {
|
||||
throw new RuntimeException("Forgot to release()?");
|
||||
}
|
||||
EncoderProperties properties = null;
|
||||
String mime = null;
|
||||
int keyFrameIntervalSec = 0;
|
||||
boolean configureH264HighProfile = false;
|
||||
if (type == VideoCodecType.VIDEO_CODEC_VP8) {
|
||||
mime = VP8_MIME_TYPE;
|
||||
properties = findHwEncoder(
|
||||
VP8_MIME_TYPE, vp8HwList(), useSurface ? supportedSurfaceColorList : supportedColorList);
|
||||
keyFrameIntervalSec = 100;
|
||||
} else if (type == VideoCodecType.VIDEO_CODEC_VP9) {
|
||||
mime = VP9_MIME_TYPE;
|
||||
properties = findHwEncoder(
|
||||
VP9_MIME_TYPE, vp9HwList, useSurface ? supportedSurfaceColorList : supportedColorList);
|
||||
keyFrameIntervalSec = 100;
|
||||
} else if (type == VideoCodecType.VIDEO_CODEC_H264) {
|
||||
mime = H264_MIME_TYPE;
|
||||
properties = findHwEncoder(
|
||||
H264_MIME_TYPE, h264HwList, useSurface ? supportedSurfaceColorList : supportedColorList);
|
||||
if (profile == H264Profile.CONSTRAINED_HIGH.getValue()) {
|
||||
EncoderProperties h264HighProfileProperties = findHwEncoder(H264_MIME_TYPE,
|
||||
h264HighProfileHwList, useSurface ? supportedSurfaceColorList : supportedColorList);
|
||||
if (h264HighProfileProperties != null) {
|
||||
Logging.d(TAG, "High profile H.264 encoder supported.");
|
||||
configureH264HighProfile = true;
|
||||
} else {
|
||||
Logging.d(TAG, "High profile H.264 encoder requested, but not supported. Use baseline.");
|
||||
}
|
||||
}
|
||||
keyFrameIntervalSec = 20;
|
||||
}
|
||||
if (properties == null) {
|
||||
throw new RuntimeException("Can not find HW encoder for " + type);
|
||||
}
|
||||
runningInstance = this; // Encoder is now running and can be queried for stack traces.
|
||||
colorFormat = properties.colorFormat;
|
||||
bitrateAdjustmentType = properties.bitrateAdjustmentType;
|
||||
if (bitrateAdjustmentType == BitrateAdjustmentType.FRAMERATE_ADJUSTMENT) {
|
||||
fps = BITRATE_ADJUSTMENT_FPS;
|
||||
} else {
|
||||
fps = Math.min(fps, MAXIMUM_INITIAL_FPS);
|
||||
}
|
||||
|
||||
forcedKeyFrameMs = 0;
|
||||
lastKeyFrameMs = -1;
|
||||
if (type == VideoCodecType.VIDEO_CODEC_VP8
|
||||
&& properties.codecName.startsWith(qcomVp8HwProperties.codecPrefix)) {
|
||||
if (Build.VERSION.SDK_INT == Build.VERSION_CODES.LOLLIPOP
|
||||
|| Build.VERSION.SDK_INT == Build.VERSION_CODES.LOLLIPOP_MR1) {
|
||||
forcedKeyFrameMs = QCOM_VP8_KEY_FRAME_INTERVAL_ANDROID_L_MS;
|
||||
} else if (Build.VERSION.SDK_INT == Build.VERSION_CODES.M) {
|
||||
forcedKeyFrameMs = QCOM_VP8_KEY_FRAME_INTERVAL_ANDROID_M_MS;
|
||||
} else if (Build.VERSION.SDK_INT > Build.VERSION_CODES.M) {
|
||||
forcedKeyFrameMs = QCOM_VP8_KEY_FRAME_INTERVAL_ANDROID_N_MS;
|
||||
}
|
||||
}
|
||||
|
||||
Logging.d(TAG, "Color format: " + colorFormat + ". Bitrate adjustment: " + bitrateAdjustmentType
|
||||
+ ". Key frame interval: " + forcedKeyFrameMs + " . Initial fps: " + fps);
|
||||
targetBitrateBps = 1000 * kbps;
|
||||
targetFps = fps;
|
||||
bitrateAccumulatorMax = targetBitrateBps / 8.0;
|
||||
bitrateAccumulator = 0;
|
||||
bitrateObservationTimeMs = 0;
|
||||
bitrateAdjustmentScaleExp = 0;
|
||||
|
||||
mediaCodecThread = Thread.currentThread();
|
||||
try {
|
||||
MediaFormat format = MediaFormat.createVideoFormat(mime, width, height);
|
||||
format.setInteger(MediaFormat.KEY_BIT_RATE, targetBitrateBps);
|
||||
format.setInteger("bitrate-mode", VIDEO_ControlRateConstant);
|
||||
format.setInteger(MediaFormat.KEY_COLOR_FORMAT, properties.colorFormat);
|
||||
format.setInteger(MediaFormat.KEY_FRAME_RATE, targetFps);
|
||||
format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, keyFrameIntervalSec);
|
||||
if (configureH264HighProfile) {
|
||||
format.setInteger("profile", VIDEO_AVCProfileHigh);
|
||||
format.setInteger("level", VIDEO_AVCLevel3);
|
||||
}
|
||||
Logging.d(TAG, " Format: " + format);
|
||||
mediaCodec = createByCodecName(properties.codecName);
|
||||
this.type = type;
|
||||
if (mediaCodec == null) {
|
||||
Logging.e(TAG, "Can not create media encoder");
|
||||
release();
|
||||
return false;
|
||||
}
|
||||
mediaCodec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
|
||||
|
||||
if (useSurface) {
|
||||
eglBase = new EglBase14(sharedContext, EglBase.CONFIG_RECORDABLE);
|
||||
// Create an input surface and keep a reference since we must release the surface when done.
|
||||
inputSurface = mediaCodec.createInputSurface();
|
||||
eglBase.createSurface(inputSurface);
|
||||
drawer = new GlRectDrawer();
|
||||
}
|
||||
mediaCodec.start();
|
||||
outputBuffers = mediaCodec.getOutputBuffers();
|
||||
Logging.d(TAG, "Output buffers: " + outputBuffers.length);
|
||||
|
||||
} catch (IllegalStateException e) {
|
||||
Logging.e(TAG, "initEncode failed", e);
|
||||
release();
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
ByteBuffer[] getInputBuffers() {
|
||||
ByteBuffer[] inputBuffers = mediaCodec.getInputBuffers();
|
||||
Logging.d(TAG, "Input buffers: " + inputBuffers.length);
|
||||
return inputBuffers;
|
||||
}
|
||||
|
||||
void checkKeyFrameRequired(boolean requestedKeyFrame, long presentationTimestampUs) {
|
||||
long presentationTimestampMs = (presentationTimestampUs + 500) / 1000;
|
||||
if (lastKeyFrameMs < 0) {
|
||||
lastKeyFrameMs = presentationTimestampMs;
|
||||
}
|
||||
boolean forcedKeyFrame = false;
|
||||
if (!requestedKeyFrame && forcedKeyFrameMs > 0
|
||||
&& presentationTimestampMs > lastKeyFrameMs + forcedKeyFrameMs) {
|
||||
forcedKeyFrame = true;
|
||||
}
|
||||
if (requestedKeyFrame || forcedKeyFrame) {
|
||||
// Ideally MediaCodec would honor BUFFER_FLAG_SYNC_FRAME so we could
|
||||
// indicate this in queueInputBuffer() below and guarantee _this_ frame
|
||||
// be encoded as a key frame, but sadly that flag is ignored. Instead,
|
||||
// we request a key frame "soon".
|
||||
if (requestedKeyFrame) {
|
||||
Logging.d(TAG, "Sync frame request");
|
||||
} else {
|
||||
Logging.d(TAG, "Sync frame forced");
|
||||
}
|
||||
Bundle b = new Bundle();
|
||||
b.putInt(MediaCodec.PARAMETER_KEY_REQUEST_SYNC_FRAME, 0);
|
||||
mediaCodec.setParameters(b);
|
||||
lastKeyFrameMs = presentationTimestampMs;
|
||||
}
|
||||
}
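// Note on the rounding above (illustrative): (presentationTimestampUs + 500) / 1000 rounds to the
// nearest millisecond, e.g. 1499600 us -> 1500 ms, so the forced key frame interval comparison is
// done on whole milliseconds rather than raw microsecond timestamps.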
|
||||
|
||||
boolean encodeBuffer(
|
||||
boolean isKeyframe, int inputBuffer, int size, long presentationTimestampUs) {
|
||||
checkOnMediaCodecThread();
|
||||
try {
|
||||
checkKeyFrameRequired(isKeyframe, presentationTimestampUs);
|
||||
mediaCodec.queueInputBuffer(inputBuffer, 0, size, presentationTimestampUs, 0);
|
||||
return true;
|
||||
} catch (IllegalStateException e) {
|
||||
Logging.e(TAG, "encodeBuffer failed", e);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
boolean encodeTexture(boolean isKeyframe, int oesTextureId, float[] transformationMatrix,
|
||||
long presentationTimestampUs) {
|
||||
checkOnMediaCodecThread();
|
||||
try {
|
||||
checkKeyFrameRequired(isKeyframe, presentationTimestampUs);
|
||||
eglBase.makeCurrent();
|
||||
// TODO(perkj): glClear() shouldn't be necessary since every pixel is covered anyway,
|
||||
// but it's a workaround for bug webrtc:5147.
|
||||
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
|
||||
drawer.drawOes(oesTextureId, transformationMatrix, width, height, 0, 0, width, height);
|
||||
eglBase.swapBuffers(TimeUnit.MICROSECONDS.toNanos(presentationTimestampUs));
|
||||
return true;
|
||||
} catch (RuntimeException e) {
|
||||
Logging.e(TAG, "encodeTexture failed", e);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Encodes a new style VideoFrame. Called by JNI. |bufferIndex| is -1 if we are not encoding in
|
||||
* surface mode.
|
||||
*/
|
||||
boolean encodeFrame(long nativeEncoder, boolean isKeyframe, VideoFrame frame, int bufferIndex) {
|
||||
checkOnMediaCodecThread();
|
||||
try {
|
||||
long presentationTimestampUs = TimeUnit.NANOSECONDS.toMicros(frame.getTimestampNs());
|
||||
checkKeyFrameRequired(isKeyframe, presentationTimestampUs);
|
||||
|
||||
VideoFrame.Buffer buffer = frame.getBuffer();
|
||||
if (buffer instanceof VideoFrame.TextureBuffer) {
|
||||
VideoFrame.TextureBuffer textureBuffer = (VideoFrame.TextureBuffer) buffer;
|
||||
eglBase.makeCurrent();
|
||||
// TODO(perkj): glClear() shouldn't be necessary since every pixel is covered anyway,
|
||||
// but it's a workaround for bug webrtc:5147.
|
||||
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
|
||||
VideoFrameDrawer.drawTexture(drawer, textureBuffer, new Matrix() /* renderMatrix */, width,
|
||||
height, 0 /* viewportX */, 0 /* viewportY */, width, height);
|
||||
eglBase.swapBuffers(frame.getTimestampNs());
|
||||
} else {
|
||||
VideoFrame.I420Buffer i420Buffer = buffer.toI420();
|
||||
nativeFillBuffer(nativeEncoder, bufferIndex, i420Buffer.getDataY(), i420Buffer.getStrideY(),
|
||||
i420Buffer.getDataU(), i420Buffer.getStrideU(), i420Buffer.getDataV(),
|
||||
i420Buffer.getStrideV());
|
||||
i420Buffer.release();
|
||||
// I420 consists of one full-resolution and two half-resolution planes.
|
||||
// 1 + 1 / 4 + 1 / 4 = 3 / 2
|
||||
int yuvSize = width * height * 3 / 2;
|
||||
mediaCodec.queueInputBuffer(bufferIndex, 0, yuvSize, presentationTimestampUs, 0);
|
||||
}
|
||||
return true;
|
||||
} catch (RuntimeException e) {
|
||||
Logging.e(TAG, "encodeFrame failed", e);
|
||||
return false;
|
||||
}
|
||||
}
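// Worked example for the I420 size computation above (illustrative): a 640x480 frame gives
// yuvSize = 640 * 480 * 3 / 2 = 460800 bytes - one full-resolution Y plane (307200 bytes) plus
// two quarter-resolution chroma planes (76800 bytes each).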
|
||||
|
||||
void release() {
|
||||
Logging.d(TAG, "Java releaseEncoder");
|
||||
checkOnMediaCodecThread();
|
||||
|
||||
class CaughtException {
|
||||
Exception e;
|
||||
}
|
||||
final CaughtException caughtException = new CaughtException();
|
||||
boolean stopHung = false;
|
||||
|
||||
if (mediaCodec != null) {
|
||||
// Run MediaCodec stop() and release() on a separate thread since sometimes
|
||||
// MediaCodec.stop() may hang.
|
||||
final CountDownLatch releaseDone = new CountDownLatch(1);
|
||||
|
||||
Runnable runMediaCodecRelease = new Runnable() {
|
||||
@Override
|
||||
public void run() {
|
||||
Logging.d(TAG, "Java releaseEncoder on release thread");
|
||||
try {
|
||||
mediaCodec.stop();
|
||||
} catch (Exception e) {
|
||||
Logging.e(TAG, "Media encoder stop failed", e);
|
||||
}
|
||||
try {
|
||||
mediaCodec.release();
|
||||
} catch (Exception e) {
|
||||
Logging.e(TAG, "Media encoder release failed", e);
|
||||
caughtException.e = e;
|
||||
}
|
||||
Logging.d(TAG, "Java releaseEncoder on release thread done");
|
||||
|
||||
releaseDone.countDown();
|
||||
}
|
||||
};
|
||||
new Thread(runMediaCodecRelease).start();
|
||||
|
||||
if (!ThreadUtils.awaitUninterruptibly(releaseDone, MEDIA_CODEC_RELEASE_TIMEOUT_MS)) {
|
||||
Logging.e(TAG, "Media encoder release timeout");
|
||||
stopHung = true;
|
||||
}
|
||||
|
||||
mediaCodec = null;
|
||||
}
|
||||
|
||||
mediaCodecThread = null;
|
||||
if (drawer != null) {
|
||||
drawer.release();
|
||||
drawer = null;
|
||||
}
|
||||
if (eglBase != null) {
|
||||
eglBase.release();
|
||||
eglBase = null;
|
||||
}
|
||||
if (inputSurface != null) {
|
||||
inputSurface.release();
|
||||
inputSurface = null;
|
||||
}
|
||||
runningInstance = null;
|
||||
|
||||
if (stopHung) {
|
||||
codecErrors++;
|
||||
if (errorCallback != null) {
|
||||
Logging.e(TAG, "Invoke codec error callback. Errors: " + codecErrors);
|
||||
errorCallback.onMediaCodecVideoEncoderCriticalError(codecErrors);
|
||||
}
|
||||
throw new RuntimeException("Media encoder release timeout.");
|
||||
}
|
||||
|
||||
// Re-throw any runtime exception caught inside the other thread. Since this is an invoke, add
|
||||
// the stack trace of the waiting thread as well.
|
||||
if (caughtException.e != null) {
|
||||
final RuntimeException runtimeException = new RuntimeException(caughtException.e);
|
||||
runtimeException.setStackTrace(ThreadUtils.concatStackTraces(
|
||||
caughtException.e.getStackTrace(), runtimeException.getStackTrace()));
|
||||
throw runtimeException;
|
||||
}
|
||||
|
||||
Logging.d(TAG, "Java releaseEncoder done");
|
||||
}
|
||||
|
||||
private boolean setRates(int kbps, int frameRate) {
|
||||
checkOnMediaCodecThread();
|
||||
|
||||
int codecBitrateBps = 1000 * kbps;
|
||||
if (bitrateAdjustmentType == BitrateAdjustmentType.DYNAMIC_ADJUSTMENT) {
|
||||
bitrateAccumulatorMax = codecBitrateBps / 8.0;
|
||||
if (targetBitrateBps > 0 && codecBitrateBps < targetBitrateBps) {
|
||||
// Rescale the accumulator level if the accumulator max decreases
|
||||
bitrateAccumulator = bitrateAccumulator * codecBitrateBps / targetBitrateBps;
|
||||
}
|
||||
}
|
||||
targetBitrateBps = codecBitrateBps;
|
||||
targetFps = frameRate;
|
||||
|
||||
// Adjust actual encoder bitrate based on bitrate adjustment type.
|
||||
if (bitrateAdjustmentType == BitrateAdjustmentType.FRAMERATE_ADJUSTMENT && targetFps > 0) {
|
||||
codecBitrateBps = BITRATE_ADJUSTMENT_FPS * targetBitrateBps / targetFps;
|
||||
Logging.v(TAG,
|
||||
"setRates: " + kbps + " -> " + (codecBitrateBps / 1000) + " kbps. Fps: " + targetFps);
|
||||
} else if (bitrateAdjustmentType == BitrateAdjustmentType.DYNAMIC_ADJUSTMENT) {
|
||||
Logging.v(TAG, "setRates: " + kbps + " kbps. Fps: " + targetFps + ". ExpScale: "
|
||||
+ bitrateAdjustmentScaleExp);
|
||||
if (bitrateAdjustmentScaleExp != 0) {
|
||||
codecBitrateBps = (int) (codecBitrateBps * getBitrateScale(bitrateAdjustmentScaleExp));
|
||||
}
|
||||
} else {
|
||||
Logging.v(TAG, "setRates: " + kbps + " kbps. Fps: " + targetFps);
|
||||
}
|
||||
|
||||
try {
|
||||
Bundle params = new Bundle();
|
||||
params.putInt(MediaCodec.PARAMETER_KEY_VIDEO_BITRATE, codecBitrateBps);
|
||||
mediaCodec.setParameters(params);
|
||||
return true;
|
||||
} catch (IllegalStateException e) {
|
||||
Logging.e(TAG, "setRates failed", e);
|
||||
return false;
|
||||
}
|
||||
}
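// Worked example for the FRAMERATE_ADJUSTMENT branch above (illustrative): with a 500 kbps target
// at 15 fps the codec is configured with 30 * 500000 / 15 = 1000000 bps, so an encoder that
// assumes BITRATE_ADJUSTMENT_FPS (30 fps) still budgets roughly 500000 / 15 bits per actual frame.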
|
||||
|
||||
// Dequeue an input buffer and return its index, -1 if no input buffer is
|
||||
// available, or -2 if the codec is no longer operative.
|
||||
int dequeueInputBuffer() {
|
||||
checkOnMediaCodecThread();
|
||||
try {
|
||||
return mediaCodec.dequeueInputBuffer(DEQUEUE_TIMEOUT);
|
||||
} catch (IllegalStateException e) {
|
||||
Logging.e(TAG, "dequeueIntputBuffer failed", e);
|
||||
return -2;
|
||||
}
|
||||
}
|
||||
|
||||
// Helper struct for dequeueOutputBuffer() below.
|
||||
static class OutputBufferInfo {
|
||||
public OutputBufferInfo(
|
||||
int index, ByteBuffer buffer, boolean isKeyFrame, long presentationTimestampUs) {
|
||||
this.index = index;
|
||||
this.buffer = buffer;
|
||||
this.isKeyFrame = isKeyFrame;
|
||||
this.presentationTimestampUs = presentationTimestampUs;
|
||||
}
|
||||
|
||||
public final int index;
|
||||
public final ByteBuffer buffer;
|
||||
public final boolean isKeyFrame;
|
||||
public final long presentationTimestampUs;
|
||||
}
|
||||
|
||||
// Dequeue and return an output buffer, or null if no output is ready. Return
|
||||
// a fake OutputBufferInfo with index -1 if the codec is no longer operable.
|
||||
OutputBufferInfo dequeueOutputBuffer() {
|
||||
checkOnMediaCodecThread();
|
||||
try {
|
||||
MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
|
||||
int result = mediaCodec.dequeueOutputBuffer(info, DEQUEUE_TIMEOUT);
|
||||
// Check if this is a config frame and save the configuration data.
|
||||
if (result >= 0) {
|
||||
boolean isConfigFrame = (info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0;
|
||||
if (isConfigFrame) {
|
||||
Logging.d(TAG, "Config frame generated. Offset: " + info.offset + ". Size: " + info.size);
|
||||
configData = ByteBuffer.allocateDirect(info.size);
|
||||
outputBuffers[result].position(info.offset);
|
||||
outputBuffers[result].limit(info.offset + info.size);
|
||||
configData.put(outputBuffers[result]);
|
||||
// Log a few SPS header bytes to check profile and level.
|
||||
String spsData = "";
|
||||
for (int i = 0; i < (info.size < 8 ? info.size : 8); i++) {
|
||||
spsData += Integer.toHexString(configData.get(i) & 0xff) + " ";
|
||||
}
|
||||
Logging.d(TAG, spsData);
|
||||
// Release buffer back.
|
||||
mediaCodec.releaseOutputBuffer(result, false);
|
||||
// Query next output.
|
||||
result = mediaCodec.dequeueOutputBuffer(info, DEQUEUE_TIMEOUT);
|
||||
}
|
||||
}
|
||||
if (result >= 0) {
|
||||
// MediaCodec doesn't care about Buffer position/remaining/etc so we can
|
||||
// mess with them to get a slice and avoid having to pass extra
|
||||
// (BufferInfo-related) parameters back to C++.
|
||||
ByteBuffer outputBuffer = outputBuffers[result].duplicate();
|
||||
outputBuffer.position(info.offset);
|
||||
outputBuffer.limit(info.offset + info.size);
|
||||
reportEncodedFrame(info.size);
|
||||
|
||||
// Check key frame flag.
|
||||
boolean isKeyFrame = (info.flags & MediaCodec.BUFFER_FLAG_SYNC_FRAME) != 0;
|
||||
if (isKeyFrame) {
|
||||
Logging.d(TAG, "Sync frame generated");
|
||||
}
|
||||
if (isKeyFrame && type == VideoCodecType.VIDEO_CODEC_H264) {
|
||||
Logging.d(TAG, "Appending config frame of size " + configData.capacity()
|
||||
+ " to output buffer with offset " + info.offset + ", size " + info.size);
|
||||
// For an H.264 key frame, append the SPS and PPS NALs at the start.
|
||||
ByteBuffer keyFrameBuffer = ByteBuffer.allocateDirect(configData.capacity() + info.size);
|
||||
configData.rewind();
|
||||
keyFrameBuffer.put(configData);
|
||||
keyFrameBuffer.put(outputBuffer);
|
||||
keyFrameBuffer.position(0);
|
||||
return new OutputBufferInfo(result, keyFrameBuffer, isKeyFrame, info.presentationTimeUs);
|
||||
} else {
|
||||
return new OutputBufferInfo(
|
||||
result, outputBuffer.slice(), isKeyFrame, info.presentationTimeUs);
|
||||
}
|
||||
} else if (result == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
|
||||
outputBuffers = mediaCodec.getOutputBuffers();
|
||||
return dequeueOutputBuffer();
|
||||
} else if (result == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
|
||||
return dequeueOutputBuffer();
|
||||
} else if (result == MediaCodec.INFO_TRY_AGAIN_LATER) {
|
||||
return null;
|
||||
}
|
||||
throw new RuntimeException("dequeueOutputBuffer: " + result);
|
||||
} catch (IllegalStateException e) {
|
||||
Logging.e(TAG, "dequeueOutputBuffer failed", e);
|
||||
return new OutputBufferInfo(-1, null, false, -1);
|
||||
}
|
||||
}
|
||||
|
||||
private double getBitrateScale(int bitrateAdjustmentScaleExp) {
|
||||
return Math.pow(BITRATE_CORRECTION_MAX_SCALE,
|
||||
(double) bitrateAdjustmentScaleExp / BITRATE_CORRECTION_STEPS);
|
||||
}
|
||||
|
||||
private void reportEncodedFrame(int size) {
|
||||
if (targetFps == 0 || bitrateAdjustmentType != BitrateAdjustmentType.DYNAMIC_ADJUSTMENT) {
|
||||
return;
|
||||
}
|
||||
|
||||
// Accumulate the difference between actual and expected frame sizes.
|
||||
double expectedBytesPerFrame = targetBitrateBps / (8.0 * targetFps);
|
||||
bitrateAccumulator += (size - expectedBytesPerFrame);
|
||||
bitrateObservationTimeMs += 1000.0 / targetFps;
|
||||
|
||||
// Put a cap on the accumulator, i.e., don't let it grow beyond some level to avoid
|
||||
// using too old data for bitrate adjustment.
|
||||
double bitrateAccumulatorCap = BITRATE_CORRECTION_SEC * bitrateAccumulatorMax;
|
||||
bitrateAccumulator = Math.min(bitrateAccumulator, bitrateAccumulatorCap);
|
||||
bitrateAccumulator = Math.max(bitrateAccumulator, -bitrateAccumulatorCap);
|
||||
|
||||
// Do bitrate adjustment every 3 seconds if actual encoder bitrate deviates too much
|
||||
// from the target value.
|
||||
if (bitrateObservationTimeMs > 1000 * BITRATE_CORRECTION_SEC) {
|
||||
Logging.d(TAG, "Acc: " + (int) bitrateAccumulator + ". Max: " + (int) bitrateAccumulatorMax
|
||||
+ ". ExpScale: " + bitrateAdjustmentScaleExp);
|
||||
boolean bitrateAdjustmentScaleChanged = false;
|
||||
if (bitrateAccumulator > bitrateAccumulatorMax) {
|
||||
// Encoder generates too high bitrate - need to reduce the scale.
|
||||
int bitrateAdjustmentInc = (int) (bitrateAccumulator / bitrateAccumulatorMax + 0.5);
|
||||
bitrateAdjustmentScaleExp -= bitrateAdjustmentInc;
|
||||
bitrateAccumulator = bitrateAccumulatorMax;
|
||||
bitrateAdjustmentScaleChanged = true;
|
||||
} else if (bitrateAccumulator < -bitrateAccumulatorMax) {
|
||||
// Encoder generates too low bitrate - need to increase the scale.
|
||||
int bitrateAdjustmentInc = (int) (-bitrateAccumulator / bitrateAccumulatorMax + 0.5);
|
||||
bitrateAdjustmentScaleExp += bitrateAdjustmentInc;
|
||||
bitrateAccumulator = -bitrateAccumulatorMax;
|
||||
bitrateAdjustmentScaleChanged = true;
|
||||
}
|
||||
if (bitrateAdjustmentScaleChanged) {
|
||||
bitrateAdjustmentScaleExp = Math.min(bitrateAdjustmentScaleExp, BITRATE_CORRECTION_STEPS);
|
||||
bitrateAdjustmentScaleExp = Math.max(bitrateAdjustmentScaleExp, -BITRATE_CORRECTION_STEPS);
|
||||
Logging.d(TAG, "Adjusting bitrate scale to " + bitrateAdjustmentScaleExp + ". Value: "
|
||||
+ getBitrateScale(bitrateAdjustmentScaleExp));
|
||||
setRates(targetBitrateBps / 1000, targetFps);
|
||||
}
|
||||
bitrateObservationTimeMs = 0;
|
||||
}
|
||||
}
|
||||
|
||||
// Release a dequeued output buffer back to the codec for re-use. Return
|
||||
// false if the codec is no longer operable.
|
||||
boolean releaseOutputBuffer(int index) {
|
||||
checkOnMediaCodecThread();
|
||||
try {
|
||||
mediaCodec.releaseOutputBuffer(index, false);
|
||||
return true;
|
||||
} catch (IllegalStateException e) {
|
||||
Logging.e(TAG, "releaseOutputBuffer failed", e);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
/** Fills an inputBuffer with the given index with data from the byte buffers. */
|
||||
private static native void nativeFillBuffer(long nativeEncoder, int inputBuffer, ByteBuffer dataY,
|
||||
int strideY, ByteBuffer dataU, int strideU, ByteBuffer dataV, int strideV);
|
||||
}
|
||||
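The dynamic bitrate adjustment above scales the codec bitrate by BITRATE_CORRECTION_MAX_SCALE raised to bitrateAdjustmentScaleExp / BITRATE_CORRECTION_STEPS. A minimal sketch of that arithmetic follows; the constant values used here are assumptions for illustration, since they are not defined in this hunk.

// Hedged sketch; constant values below are assumed, not taken from this hunk.
public class BitrateScaleSketch {
  private static final double BITRATE_CORRECTION_MAX_SCALE = 4.0; // assumed
  private static final int BITRATE_CORRECTION_STEPS = 20; // assumed

  static double getBitrateScale(int bitrateAdjustmentScaleExp) {
    return Math.pow(
        BITRATE_CORRECTION_MAX_SCALE, (double) bitrateAdjustmentScaleExp / BITRATE_CORRECTION_STEPS);
  }

  public static void main(String[] args) {
    // exp = 0 leaves the bitrate unchanged; +/-STEPS clamps at 4x and 0.25x.
    System.out.println(getBitrateScale(0));   // 1.0
    System.out.println(getBitrateScale(5));   // 4^(5/20) ~= 1.41
    System.out.println(getBitrateScale(-20)); // 4^(-1)    = 0.25
  }
}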
84
sdk/android/api/org/webrtc/MediaConstraints.java
Normal file
@ -0,0 +1,84 @@
|
||||
/*
|
||||
* Copyright 2013 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
package org.webrtc;
|
||||
|
||||
import java.util.LinkedList;
|
||||
import java.util.List;
|
||||
|
||||
/**
|
||||
* Description of media constraints for {@code MediaStream} and
|
||||
* {@code PeerConnection}.
|
||||
*/
|
||||
public class MediaConstraints {
|
||||
/** Simple String key/value pair. */
|
||||
public static class KeyValuePair {
|
||||
private final String key;
|
||||
private final String value;
|
||||
|
||||
public KeyValuePair(String key, String value) {
|
||||
this.key = key;
|
||||
this.value = value;
|
||||
}
|
||||
|
||||
public String getKey() {
|
||||
return key;
|
||||
}
|
||||
|
||||
public String getValue() {
|
||||
return value;
|
||||
}
|
||||
|
||||
public String toString() {
|
||||
return key + ": " + value;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object other) {
|
||||
if (this == other) {
|
||||
return true;
|
||||
}
|
||||
if (other == null || getClass() != other.getClass()) {
|
||||
return false;
|
||||
}
|
||||
KeyValuePair that = (KeyValuePair) other;
|
||||
return key.equals(that.key) && value.equals(that.value);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return key.hashCode() + value.hashCode();
|
||||
}
|
||||
}
|
||||
|
||||
public final List<KeyValuePair> mandatory;
|
||||
public final List<KeyValuePair> optional;
|
||||
|
||||
public MediaConstraints() {
|
||||
mandatory = new LinkedList<KeyValuePair>();
|
||||
optional = new LinkedList<KeyValuePair>();
|
||||
}
|
||||
|
||||
private static String stringifyKeyValuePairList(List<KeyValuePair> list) {
|
||||
StringBuilder builder = new StringBuilder("[");
|
||||
for (KeyValuePair pair : list) {
|
||||
if (builder.length() > 1) {
|
||||
builder.append(", ");
|
||||
}
|
||||
builder.append(pair.toString());
|
||||
}
|
||||
return builder.append("]").toString();
|
||||
}
|
||||
|
||||
public String toString() {
|
||||
return "mandatory: " + stringifyKeyValuePairList(mandatory) + ", optional: "
|
||||
+ stringifyKeyValuePairList(optional);
|
||||
}
|
||||
}
|
||||
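A minimal usage sketch of the class above; the constraint key names are illustrative examples, not values mandated by this file.

// Hedged sketch: key/value names are assumptions for illustration only.
MediaConstraints constraints = new MediaConstraints();
constraints.mandatory.add(new MediaConstraints.KeyValuePair("OfferToReceiveAudio", "true"));
constraints.optional.add(new MediaConstraints.KeyValuePair("DtlsSrtpKeyAgreement", "true"));
// toString() prints both lists, e.g. "mandatory: [OfferToReceiveAudio: true], optional: [...]".
Logging.d("MediaConstraintsDemo", constraints.toString());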
33
sdk/android/api/org/webrtc/MediaSource.java
Normal file
@ -0,0 +1,33 @@
|
||||
/*
|
||||
* Copyright 2013 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
package org.webrtc;
|
||||
|
||||
/** Java wrapper for a C++ MediaSourceInterface. */
|
||||
public class MediaSource {
|
||||
/** Tracks MediaSourceInterface.SourceState */
|
||||
public enum State { INITIALIZING, LIVE, ENDED, MUTED }
|
||||
|
||||
final long nativeSource; // Package-protected for PeerConnectionFactory.
|
||||
|
||||
public MediaSource(long nativeSource) {
|
||||
this.nativeSource = nativeSource;
|
||||
}
|
||||
|
||||
public State state() {
|
||||
return nativeState(nativeSource);
|
||||
}
|
||||
|
||||
public void dispose() {
|
||||
JniCommon.nativeReleaseRef(nativeSource);
|
||||
}
|
||||
|
||||
private static native State nativeState(long pointer);
|
||||
}
|
||||
106
sdk/android/api/org/webrtc/MediaStream.java
Normal file
@ -0,0 +1,106 @@
|
||||
/*
|
||||
* Copyright 2013 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
package org.webrtc;
|
||||
|
||||
import java.util.LinkedList;
|
||||
|
||||
/** Java wrapper for a C++ MediaStreamInterface. */
|
||||
public class MediaStream {
|
||||
public final LinkedList<AudioTrack> audioTracks;
|
||||
public final LinkedList<VideoTrack> videoTracks;
|
||||
public final LinkedList<VideoTrack> preservedVideoTracks;
|
||||
// Package-protected for PeerConnection.
|
||||
final long nativeStream;
|
||||
|
||||
public MediaStream(long nativeStream) {
|
||||
audioTracks = new LinkedList<AudioTrack>();
|
||||
videoTracks = new LinkedList<VideoTrack>();
|
||||
preservedVideoTracks = new LinkedList<VideoTrack>();
|
||||
this.nativeStream = nativeStream;
|
||||
}
|
||||
|
||||
public boolean addTrack(AudioTrack track) {
|
||||
if (nativeAddAudioTrack(nativeStream, track.nativeTrack)) {
|
||||
audioTracks.add(track);
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
public boolean addTrack(VideoTrack track) {
|
||||
if (nativeAddVideoTrack(nativeStream, track.nativeTrack)) {
|
||||
videoTracks.add(track);
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
// Tracks added via addTrack() are automatically released once MediaStream.dispose()
// is called. If a video track needs to be preserved after the MediaStream is destroyed, it
// should instead be added to the MediaStream via addPreservedTrack().
|
||||
public boolean addPreservedTrack(VideoTrack track) {
|
||||
if (nativeAddVideoTrack(nativeStream, track.nativeTrack)) {
|
||||
preservedVideoTracks.add(track);
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
public boolean removeTrack(AudioTrack track) {
|
||||
audioTracks.remove(track);
|
||||
return nativeRemoveAudioTrack(nativeStream, track.nativeTrack);
|
||||
}
|
||||
|
||||
public boolean removeTrack(VideoTrack track) {
|
||||
videoTracks.remove(track);
|
||||
preservedVideoTracks.remove(track);
|
||||
return nativeRemoveVideoTrack(nativeStream, track.nativeTrack);
|
||||
}
|
||||
|
||||
public void dispose() {
|
||||
// Remove and release previously added audio and video tracks.
|
||||
while (!audioTracks.isEmpty()) {
|
||||
AudioTrack track = audioTracks.getFirst();
|
||||
removeTrack(track);
|
||||
track.dispose();
|
||||
}
|
||||
while (!videoTracks.isEmpty()) {
|
||||
VideoTrack track = videoTracks.getFirst();
|
||||
removeTrack(track);
|
||||
track.dispose();
|
||||
}
|
||||
// Remove, but do not release preserved video tracks.
|
||||
while (!preservedVideoTracks.isEmpty()) {
|
||||
removeTrack(preservedVideoTracks.getFirst());
|
||||
}
|
||||
free(nativeStream);
|
||||
}
|
||||
|
||||
public String label() {
|
||||
return nativeLabel(nativeStream);
|
||||
}
|
||||
|
||||
public String toString() {
|
||||
return "[" + label() + ":A=" + audioTracks.size() + ":V=" + videoTracks.size() + "]";
|
||||
}
|
||||
|
||||
private static native boolean nativeAddAudioTrack(long nativeStream, long nativeAudioTrack);
|
||||
|
||||
private static native boolean nativeAddVideoTrack(long nativeStream, long nativeVideoTrack);
|
||||
|
||||
private static native boolean nativeRemoveAudioTrack(long nativeStream, long nativeAudioTrack);
|
||||
|
||||
private static native boolean nativeRemoveVideoTrack(long nativeStream, long nativeVideoTrack);
|
||||
|
||||
private static native String nativeLabel(long nativeStream);
|
||||
|
||||
private static native void free(long nativeStream);
|
||||
}
|
||||
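A short sketch of the ownership rules encoded in dispose() above; `factory` and `videoSource` are assumed to exist elsewhere and are not defined in this file.

// Hedged sketch: `factory` and `videoSource` are hypothetical inputs.
MediaStream stream = factory.createLocalMediaStream("stream0");
VideoTrack video = factory.createVideoTrack("video0", videoSource);
stream.addPreservedTrack(video); // removed on dispose(), but not released
stream.dispose();                // releases the stream and any non-preserved tracks
video.dispose();                 // the caller releases preserved tracks itself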
62
sdk/android/api/org/webrtc/MediaStreamTrack.java
Normal file
@ -0,0 +1,62 @@
|
||||
/*
|
||||
* Copyright 2013 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
package org.webrtc;
|
||||
|
||||
/** Java wrapper for a C++ MediaStreamTrackInterface. */
|
||||
public class MediaStreamTrack {
|
||||
/** Tracks MediaStreamTrackInterface.TrackState */
|
||||
public enum State { LIVE, ENDED }
|
||||
|
||||
public enum MediaType {
|
||||
MEDIA_TYPE_AUDIO,
|
||||
MEDIA_TYPE_VIDEO,
|
||||
}
|
||||
|
||||
final long nativeTrack;
|
||||
|
||||
public MediaStreamTrack(long nativeTrack) {
|
||||
this.nativeTrack = nativeTrack;
|
||||
}
|
||||
|
||||
public String id() {
|
||||
return nativeId(nativeTrack);
|
||||
}
|
||||
|
||||
public String kind() {
|
||||
return nativeKind(nativeTrack);
|
||||
}
|
||||
|
||||
public boolean enabled() {
|
||||
return nativeEnabled(nativeTrack);
|
||||
}
|
||||
|
||||
public boolean setEnabled(boolean enable) {
|
||||
return nativeSetEnabled(nativeTrack, enable);
|
||||
}
|
||||
|
||||
public State state() {
|
||||
return nativeState(nativeTrack);
|
||||
}
|
||||
|
||||
public void dispose() {
|
||||
JniCommon.nativeReleaseRef(nativeTrack);
|
||||
}
|
||||
|
||||
private static native String nativeId(long nativeTrack);
|
||||
|
||||
private static native String nativeKind(long nativeTrack);
|
||||
|
||||
private static native boolean nativeEnabled(long nativeTrack);
|
||||
|
||||
private static native boolean nativeSetEnabled(long nativeTrack, boolean enabled);
|
||||
|
||||
private static native State nativeState(long nativeTrack);
|
||||
}
|
||||
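A brief usage sketch for the wrapper above; `track` is assumed to be a MediaStreamTrack obtained elsewhere (for example from a remote stream) and is not created in this file.

// Hedged sketch: `track` is a hypothetical MediaStreamTrack obtained elsewhere.
Logging.d("TrackDemo", track.kind() + " track " + track.id() + " state=" + track.state());
track.setEnabled(false); // disable without removing the track
// ... later ...
track.setEnabled(true);
track.dispose();         // releases the underlying native reference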
79
sdk/android/api/org/webrtc/Metrics.java
Normal file
@ -0,0 +1,79 @@
|
||||
/*
|
||||
* Copyright 2016 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
package org.webrtc;
|
||||
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
|
||||
// Java-side of androidmetrics_jni.cc.
|
||||
//
|
||||
// Rtc histograms can be queried through the API, getAndReset().
|
||||
// The returned map holds the name of a histogram and its samples.
|
||||
//
|
||||
// Example of |map| with one histogram:
|
||||
// |name|: "WebRTC.Video.InputFramesPerSecond"
|
||||
// |min|: 1
|
||||
// |max|: 100
|
||||
// |bucketCount|: 50
|
||||
// |samples|: [30]:1
|
||||
//
|
||||
// Most histograms are not updated frequently (e.g. most video metrics are an
|
||||
// average over the call and recorded when a stream is removed).
|
||||
// The metrics can, for example, be retrieved when a peer connection is closed.
|
||||
|
||||
public class Metrics {
|
||||
private static final String TAG = "Metrics";
|
||||
|
||||
static {
|
||||
System.loadLibrary("jingle_peerconnection_so");
|
||||
}
|
||||
public final Map<String, HistogramInfo> map =
|
||||
new HashMap<String, HistogramInfo>(); // <name, HistogramInfo>
|
||||
|
||||
/**
|
||||
* Class holding histogram information.
|
||||
*/
|
||||
public static class HistogramInfo {
|
||||
public final int min;
|
||||
public final int max;
|
||||
public final int bucketCount;
|
||||
public final Map<Integer, Integer> samples =
|
||||
new HashMap<Integer, Integer>(); // <value, # of events>
|
||||
|
||||
public HistogramInfo(int min, int max, int bucketCount) {
|
||||
this.min = min;
|
||||
this.max = max;
|
||||
this.bucketCount = bucketCount;
|
||||
}
|
||||
|
||||
public void addSample(int value, int numEvents) {
|
||||
samples.put(value, numEvents);
|
||||
}
|
||||
}
|
||||
|
||||
private void add(String name, HistogramInfo info) {
|
||||
map.put(name, info);
|
||||
}
|
||||
|
||||
// Enables gathering of metrics (which can be fetched with getAndReset()).
|
||||
// Must be called before PeerConnectionFactory is created.
|
||||
public static void enable() {
|
||||
nativeEnable();
|
||||
}
|
||||
|
||||
// Gets and clears native histograms.
|
||||
public static Metrics getAndReset() {
|
||||
return nativeGetAndReset();
|
||||
}
|
||||
|
||||
private static native void nativeEnable();
|
||||
private static native Metrics nativeGetAndReset();
|
||||
}
|
||||
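A minimal sketch of how the histogram map above is typically read back; the exact histogram names depend on the call and are not enumerated in this file (assumes java.util.Map is imported).

// Hedged sketch: call enable() before creating the PeerConnectionFactory.
Metrics.enable();
// ... run a call ...
Metrics metrics = Metrics.getAndReset();
for (Map.Entry<String, Metrics.HistogramInfo> entry : metrics.map.entrySet()) {
  Metrics.HistogramInfo info = entry.getValue();
  Logging.d("MetricsDemo",
      entry.getKey() + " [" + info.min + ".." + info.max + "] samples=" + info.samples);
}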
238
sdk/android/api/org/webrtc/NetworkMonitor.java
Normal file
@ -0,0 +1,238 @@
|
||||
/*
|
||||
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
package org.webrtc;
|
||||
|
||||
import static org.webrtc.NetworkMonitorAutoDetect.ConnectionType;
|
||||
import static org.webrtc.NetworkMonitorAutoDetect.INVALID_NET_ID;
|
||||
import static org.webrtc.NetworkMonitorAutoDetect.NetworkInformation;
|
||||
|
||||
import android.annotation.SuppressLint;
|
||||
import android.content.Context;
|
||||
import android.os.Build;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
/**
|
||||
* Borrowed from Chromium's src/net/android/java/src/org/chromium/net/NetworkChangeNotifier.java
|
||||
*
|
||||
* Triggers updates to the underlying network state from OS networking events.
|
||||
*
|
||||
* WARNING: This class is not thread-safe.
|
||||
*/
|
||||
public class NetworkMonitor {
|
||||
/**
|
||||
* Alerted when the connection type of the network changes.
|
||||
* The alert is fired on the UI thread.
|
||||
*/
|
||||
public interface NetworkObserver {
|
||||
public void onConnectionTypeChanged(ConnectionType connectionType);
|
||||
}
|
||||
|
||||
private static final String TAG = "NetworkMonitor";
|
||||
|
||||
// We only ever store the application context, so a static instance is okay.
|
||||
private static NetworkMonitor instance;
|
||||
|
||||
// Native observers of the connection type changes.
|
||||
private final ArrayList<Long> nativeNetworkObservers;
|
||||
// Java observers of the connection type changes.
|
||||
private final ArrayList<NetworkObserver> networkObservers;
|
||||
|
||||
// Object that detects the connection type changes.
|
||||
private NetworkMonitorAutoDetect autoDetector;
|
||||
|
||||
private ConnectionType currentConnectionType = ConnectionType.CONNECTION_UNKNOWN;
|
||||
|
||||
private NetworkMonitor() {
|
||||
nativeNetworkObservers = new ArrayList<Long>();
|
||||
networkObservers = new ArrayList<NetworkObserver>();
|
||||
}
|
||||
|
||||
// TODO(sakal): Remove once downstream dependencies have been updated.
|
||||
@Deprecated
|
||||
public static void init(Context context) {}
|
||||
|
||||
/**
|
||||
* Returns the singleton instance.
|
||||
*/
|
||||
public static NetworkMonitor getInstance() {
|
||||
if (instance == null) {
|
||||
instance = new NetworkMonitor();
|
||||
}
|
||||
return instance;
|
||||
}
|
||||
|
||||
private static void assertIsTrue(boolean condition) {
|
||||
if (!condition) {
|
||||
throw new AssertionError("Expected to be true");
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Called by the native code.
|
||||
*
|
||||
* Enables auto detection of the current network state based on notifications
|
||||
* from the system. Note that this requires that the embedding app has the
|
||||
* platform ACCESS_NETWORK_STATE permission.
|
||||
*/
|
||||
private void startMonitoring(long nativeObserver) {
|
||||
Logging.d(TAG, "Start monitoring from native observer " + nativeObserver);
|
||||
nativeNetworkObservers.add(nativeObserver);
|
||||
if (autoDetector == null) {
|
||||
createAutoDetector();
|
||||
}
|
||||
// The observers expect a network list update after they call startMonitoring.
|
||||
final NetworkMonitorAutoDetect.NetworkState networkState =
|
||||
autoDetector.getCurrentNetworkState();
|
||||
updateCurrentConnectionType(NetworkMonitorAutoDetect.getConnectionType(networkState));
|
||||
updateObserverActiveNetworkList(nativeObserver);
|
||||
}
|
||||
|
||||
// Called by the native code.
|
||||
private void stopMonitoring(long nativeObserver) {
|
||||
Logging.d(TAG, "Stop monitoring from native observer " + nativeObserver);
|
||||
nativeNetworkObservers.remove(nativeObserver);
|
||||
if (nativeNetworkObservers.isEmpty()) {
|
||||
autoDetector.destroy();
|
||||
autoDetector = null;
|
||||
}
|
||||
}
|
||||
|
||||
// Called by the native code to determine if network binding is supported
|
||||
// on this platform.
|
||||
private boolean networkBindingSupported() {
|
||||
return autoDetector != null && autoDetector.supportNetworkCallback();
|
||||
}
|
||||
|
||||
// Called by the native code to get the Android SDK version.
|
||||
private static int androidSdkInt() {
|
||||
return Build.VERSION.SDK_INT;
|
||||
}
|
||||
|
||||
private ConnectionType getCurrentConnectionType() {
|
||||
return currentConnectionType;
|
||||
}
|
||||
|
||||
private long getCurrentDefaultNetId() {
|
||||
return autoDetector == null ? INVALID_NET_ID : autoDetector.getDefaultNetId();
|
||||
}
|
||||
|
||||
private void createAutoDetector() {
|
||||
autoDetector = new NetworkMonitorAutoDetect(new NetworkMonitorAutoDetect.Observer() {
|
||||
|
||||
@Override
|
||||
public void onConnectionTypeChanged(ConnectionType newConnectionType) {
|
||||
updateCurrentConnectionType(newConnectionType);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onNetworkConnect(NetworkInformation networkInfo) {
|
||||
notifyObserversOfNetworkConnect(networkInfo);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onNetworkDisconnect(long networkHandle) {
|
||||
notifyObserversOfNetworkDisconnect(networkHandle);
|
||||
}
|
||||
}, ContextUtils.getApplicationContext());
|
||||
}
|
||||
|
||||
private void updateCurrentConnectionType(ConnectionType newConnectionType) {
|
||||
currentConnectionType = newConnectionType;
|
||||
notifyObserversOfConnectionTypeChange(newConnectionType);
|
||||
}
|
||||
|
||||
/**
|
||||
* Alerts all observers of a connection change.
|
||||
*/
|
||||
private void notifyObserversOfConnectionTypeChange(ConnectionType newConnectionType) {
|
||||
for (long nativeObserver : nativeNetworkObservers) {
|
||||
nativeNotifyConnectionTypeChanged(nativeObserver);
|
||||
}
|
||||
for (NetworkObserver observer : networkObservers) {
|
||||
observer.onConnectionTypeChanged(newConnectionType);
|
||||
}
|
||||
}
|
||||
|
||||
private void notifyObserversOfNetworkConnect(NetworkInformation networkInfo) {
|
||||
for (long nativeObserver : nativeNetworkObservers) {
|
||||
nativeNotifyOfNetworkConnect(nativeObserver, networkInfo);
|
||||
}
|
||||
}
|
||||
|
||||
private void notifyObserversOfNetworkDisconnect(long networkHandle) {
|
||||
for (long nativeObserver : nativeNetworkObservers) {
|
||||
nativeNotifyOfNetworkDisconnect(nativeObserver, networkHandle);
|
||||
}
|
||||
}
|
||||
|
||||
private void updateObserverActiveNetworkList(long nativeObserver) {
|
||||
List<NetworkInformation> networkInfoList = autoDetector.getActiveNetworkList();
|
||||
if (networkInfoList == null || networkInfoList.size() == 0) {
|
||||
return;
|
||||
}
|
||||
|
||||
NetworkInformation[] networkInfos = new NetworkInformation[networkInfoList.size()];
|
||||
networkInfos = networkInfoList.toArray(networkInfos);
|
||||
nativeNotifyOfActiveNetworkList(nativeObserver, networkInfos);
|
||||
}
|
||||
|
||||
/**
|
||||
* Adds an observer for any connection type changes.
|
||||
*/
|
||||
public static void addNetworkObserver(NetworkObserver observer) {
|
||||
getInstance().addNetworkObserverInternal(observer);
|
||||
}
|
||||
|
||||
private void addNetworkObserverInternal(NetworkObserver observer) {
|
||||
networkObservers.add(observer);
|
||||
}
|
||||
|
||||
/**
|
||||
* Removes an observer for any connection type changes.
|
||||
*/
|
||||
public static void removeNetworkObserver(NetworkObserver observer) {
|
||||
getInstance().removeNetworkObserverInternal(observer);
|
||||
}
|
||||
|
||||
private void removeNetworkObserverInternal(NetworkObserver observer) {
|
||||
networkObservers.remove(observer);
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks if there currently is connectivity.
|
||||
*/
|
||||
public static boolean isOnline() {
|
||||
ConnectionType connectionType = getInstance().getCurrentConnectionType();
|
||||
return connectionType != ConnectionType.CONNECTION_NONE;
|
||||
}
|
||||
|
||||
private native void nativeNotifyConnectionTypeChanged(long nativePtr);
|
||||
private native void nativeNotifyOfNetworkConnect(long nativePtr, NetworkInformation networkInfo);
|
||||
private native void nativeNotifyOfNetworkDisconnect(long nativePtr, long networkHandle);
|
||||
private native void nativeNotifyOfActiveNetworkList(
|
||||
long nativePtr, NetworkInformation[] networkInfos);
|
||||
|
||||
// For testing only.
|
||||
static void resetInstanceForTests() {
|
||||
instance = new NetworkMonitor();
|
||||
}
|
||||
|
||||
// For testing only.
|
||||
static void createAutoDetectorForTest() {
|
||||
getInstance().createAutoDetector();
|
||||
}
|
||||
|
||||
// For testing only.
|
||||
static NetworkMonitorAutoDetect getAutoDetectorForTest() {
|
||||
return getInstance().autoDetector;
|
||||
}
|
||||
}
|
||||
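A short sketch of the Java-side observer API above, using only methods defined in this file.

// Hedged sketch: register a Java observer and poll connectivity.
NetworkMonitor.addNetworkObserver(new NetworkMonitor.NetworkObserver() {
  @Override
  public void onConnectionTypeChanged(NetworkMonitorAutoDetect.ConnectionType connectionType) {
    Logging.d("NetMonDemo", "Connection type changed to " + connectionType);
  }
});
Logging.d("NetMonDemo", "online=" + NetworkMonitor.isOnline());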
740
sdk/android/api/org/webrtc/NetworkMonitorAutoDetect.java
Normal file
@ -0,0 +1,740 @@
|
||||
/*
|
||||
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
package org.webrtc;
|
||||
|
||||
import android.annotation.SuppressLint;
|
||||
import android.content.BroadcastReceiver;
|
||||
import android.content.Context;
|
||||
import android.content.Intent;
|
||||
import android.content.IntentFilter;
|
||||
import android.net.ConnectivityManager;
|
||||
import android.net.ConnectivityManager.NetworkCallback;
|
||||
import android.net.LinkAddress;
|
||||
import android.net.LinkProperties;
|
||||
import android.net.Network;
|
||||
import android.net.NetworkCapabilities;
|
||||
import android.net.NetworkInfo;
|
||||
import android.net.NetworkRequest;
|
||||
import android.net.wifi.WifiInfo;
|
||||
import android.net.wifi.WifiManager;
|
||||
import android.net.wifi.p2p.WifiP2pGroup;
|
||||
import android.net.wifi.p2p.WifiP2pManager;
|
||||
import android.os.Build;
|
||||
import android.telephony.TelephonyManager;
|
||||
import java.net.InetAddress;
|
||||
import java.net.NetworkInterface;
|
||||
import java.net.SocketException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
|
||||
/**
|
||||
* Borrowed from Chromium's
|
||||
* src/net/android/java/src/org/chromium/net/NetworkChangeNotifierAutoDetect.java
|
||||
*
|
||||
* Used by the NetworkMonitor to listen to platform changes in connectivity.
|
||||
* Note that use of this class requires that the app have the platform
|
||||
* ACCESS_NETWORK_STATE permission.
|
||||
*/
|
||||
public class NetworkMonitorAutoDetect extends BroadcastReceiver {
|
||||
public static enum ConnectionType {
|
||||
CONNECTION_UNKNOWN,
|
||||
CONNECTION_ETHERNET,
|
||||
CONNECTION_WIFI,
|
||||
CONNECTION_4G,
|
||||
CONNECTION_3G,
|
||||
CONNECTION_2G,
|
||||
CONNECTION_UNKNOWN_CELLULAR,
|
||||
CONNECTION_BLUETOOTH,
|
||||
CONNECTION_NONE
|
||||
}
|
||||
|
||||
public static class IPAddress {
|
||||
public final byte[] address;
|
||||
public IPAddress(byte[] address) {
|
||||
this.address = address;
|
||||
}
|
||||
}
|
||||
|
||||
/** Java version of NetworkMonitor.NetworkInformation */
|
||||
public static class NetworkInformation {
|
||||
public final String name;
|
||||
public final ConnectionType type;
|
||||
public final long handle;
|
||||
public final IPAddress[] ipAddresses;
|
||||
public NetworkInformation(
|
||||
String name, ConnectionType type, long handle, IPAddress[] addresses) {
|
||||
this.name = name;
|
||||
this.type = type;
|
||||
this.handle = handle;
|
||||
this.ipAddresses = addresses;
|
||||
}
|
||||
};
|
||||
|
||||
static class NetworkState {
|
||||
private final boolean connected;
|
||||
// Defined from ConnectivityManager.TYPE_XXX for non-mobile; for mobile, it is
|
||||
// further divided into 2G, 3G, or 4G from the subtype.
|
||||
private final int type;
|
||||
// Defined from NetworkInfo.subtype, which is one of the TelephonyManager.NETWORK_TYPE_XXXs.
|
||||
// Will be useful to find the maximum bandwidth.
|
||||
private final int subtype;
|
||||
|
||||
public NetworkState(boolean connected, int type, int subtype) {
|
||||
this.connected = connected;
|
||||
this.type = type;
|
||||
this.subtype = subtype;
|
||||
}
|
||||
|
||||
public boolean isConnected() {
|
||||
return connected;
|
||||
}
|
||||
|
||||
public int getNetworkType() {
|
||||
return type;
|
||||
}
|
||||
|
||||
public int getNetworkSubType() {
|
||||
return subtype;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* The methods in this class get called when the network changes if the callback
|
||||
* is registered with a proper network request. It is only available in Android Lollipop
|
||||
* and above.
|
||||
*/
|
||||
@SuppressLint("NewApi")
|
||||
private class SimpleNetworkCallback extends NetworkCallback {
|
||||
@Override
|
||||
public void onAvailable(Network network) {
|
||||
Logging.d(TAG, "Network becomes available: " + network.toString());
|
||||
onNetworkChanged(network);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onCapabilitiesChanged(Network network, NetworkCapabilities networkCapabilities) {
|
||||
// A capabilities change may indicate the ConnectionType has changed,
|
||||
// so forward the new NetworkInformation along to the observer.
|
||||
Logging.d(TAG, "capabilities changed: " + networkCapabilities.toString());
|
||||
onNetworkChanged(network);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onLinkPropertiesChanged(Network network, LinkProperties linkProperties) {
|
||||
// A link property change may indicate that the IP address has changed,
// so forward the new NetworkInformation to the observer.
|
||||
Logging.d(TAG, "link properties changed: " + linkProperties.toString());
|
||||
onNetworkChanged(network);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onLosing(Network network, int maxMsToLive) {
|
||||
// Signals that the network is going to be lost in maxMsToLive milliseconds.
|
||||
// We may use this signal later.
|
||||
Logging.d(
|
||||
TAG, "Network " + network.toString() + " is about to lose in " + maxMsToLive + "ms");
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onLost(Network network) {
|
||||
Logging.d(TAG, "Network " + network.toString() + " is disconnected");
|
||||
observer.onNetworkDisconnect(networkToNetId(network));
|
||||
}
|
||||
|
||||
private void onNetworkChanged(Network network) {
|
||||
NetworkInformation networkInformation = connectivityManagerDelegate.networkToInfo(network);
|
||||
if (networkInformation != null) {
|
||||
observer.onNetworkConnect(networkInformation);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/** Queries the ConnectivityManager for information about the current connection. */
|
||||
static class ConnectivityManagerDelegate {
|
||||
/**
|
||||
* Note: In some rare Android systems the connectivityManager is null. We handle that
|
||||
* gracefully below.
|
||||
*/
|
||||
private final ConnectivityManager connectivityManager;
|
||||
|
||||
ConnectivityManagerDelegate(Context context) {
|
||||
connectivityManager =
|
||||
(ConnectivityManager) context.getSystemService(Context.CONNECTIVITY_SERVICE);
|
||||
}
|
||||
|
||||
// For testing.
|
||||
ConnectivityManagerDelegate() {
|
||||
// All the methods below should be overridden.
|
||||
connectivityManager = null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns connection type and status information about the current
|
||||
* default network.
|
||||
*/
|
||||
NetworkState getNetworkState() {
|
||||
if (connectivityManager == null) {
|
||||
return new NetworkState(false, -1, -1);
|
||||
}
|
||||
return getNetworkState(connectivityManager.getActiveNetworkInfo());
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns connection type and status information about |network|.
|
||||
* Only callable on Lollipop and newer releases.
|
||||
*/
|
||||
@SuppressLint("NewApi")
|
||||
NetworkState getNetworkState(Network network) {
|
||||
if (connectivityManager == null) {
|
||||
return new NetworkState(false, -1, -1);
|
||||
}
|
||||
return getNetworkState(connectivityManager.getNetworkInfo(network));
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns connection type and status information gleaned from networkInfo.
|
||||
*/
|
||||
NetworkState getNetworkState(NetworkInfo networkInfo) {
|
||||
if (networkInfo == null || !networkInfo.isConnected()) {
|
||||
return new NetworkState(false, -1, -1);
|
||||
}
|
||||
return new NetworkState(true, networkInfo.getType(), networkInfo.getSubtype());
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns all connected networks.
|
||||
* Only callable on Lollipop and newer releases.
|
||||
*/
|
||||
@SuppressLint("NewApi")
|
||||
Network[] getAllNetworks() {
|
||||
if (connectivityManager == null) {
|
||||
return new Network[0];
|
||||
}
|
||||
return connectivityManager.getAllNetworks();
|
||||
}
|
||||
|
||||
List<NetworkInformation> getActiveNetworkList() {
|
||||
if (!supportNetworkCallback()) {
|
||||
return null;
|
||||
}
|
||||
ArrayList<NetworkInformation> netInfoList = new ArrayList<NetworkInformation>();
|
||||
for (Network network : getAllNetworks()) {
|
||||
NetworkInformation info = networkToInfo(network);
|
||||
if (info != null) {
|
||||
netInfoList.add(info);
|
||||
}
|
||||
}
|
||||
return netInfoList;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the NetID of the current default network. Returns
|
||||
* INVALID_NET_ID if no current default network connected.
|
||||
* Only callable on Lollipop and newer releases.
|
||||
*/
|
||||
@SuppressLint("NewApi")
|
||||
long getDefaultNetId() {
|
||||
if (!supportNetworkCallback()) {
|
||||
return INVALID_NET_ID;
|
||||
}
|
||||
// Android Lollipop had no API to get the default network; only an
|
||||
// API to return the NetworkInfo for the default network. To
|
||||
// determine the default network, one can find the network whose
// type matches that of the default network.
|
||||
final NetworkInfo defaultNetworkInfo = connectivityManager.getActiveNetworkInfo();
|
||||
if (defaultNetworkInfo == null) {
|
||||
return INVALID_NET_ID;
|
||||
}
|
||||
final Network[] networks = getAllNetworks();
|
||||
long defaultNetId = INVALID_NET_ID;
|
||||
for (Network network : networks) {
|
||||
if (!hasInternetCapability(network)) {
|
||||
continue;
|
||||
}
|
||||
final NetworkInfo networkInfo = connectivityManager.getNetworkInfo(network);
|
||||
if (networkInfo != null && networkInfo.getType() == defaultNetworkInfo.getType()) {
|
||||
// There should not be multiple connected networks of the
|
||||
// same type. At least as of Android Marshmallow this is
|
||||
// not supported. If this becomes supported this assertion
|
||||
// may trigger. At that point we could consider using
|
||||
// ConnectivityManager.getDefaultNetwork() though this
|
||||
// may give confusing results with VPNs and is only
|
||||
// available with Android Marshmallow.
|
||||
if (defaultNetId != INVALID_NET_ID) {
|
||||
throw new RuntimeException(
|
||||
"Multiple connected networks of same type are not supported.");
|
||||
}
|
||||
defaultNetId = networkToNetId(network);
|
||||
}
|
||||
}
|
||||
return defaultNetId;
|
||||
}
|
||||
|
||||
@SuppressLint("NewApi")
|
||||
private NetworkInformation networkToInfo(Network network) {
|
||||
LinkProperties linkProperties = connectivityManager.getLinkProperties(network);
|
||||
// getLinkProperties will return null if the network is unknown.
|
||||
if (linkProperties == null) {
|
||||
Logging.w(TAG, "Detected unknown network: " + network.toString());
|
||||
return null;
|
||||
}
|
||||
if (linkProperties.getInterfaceName() == null) {
|
||||
Logging.w(TAG, "Null interface name for network " + network.toString());
|
||||
return null;
|
||||
}
|
||||
|
||||
NetworkState networkState = getNetworkState(network);
|
||||
if (networkState.connected && networkState.getNetworkType() == ConnectivityManager.TYPE_VPN) {
|
||||
// If a VPN network is in place, we can find the underlying network type via querying the
|
||||
// active network info thanks to
|
||||
// https://android.googlesource.com/platform/frameworks/base/+/d6a7980d
|
||||
networkState = getNetworkState();
|
||||
}
|
||||
ConnectionType connectionType = getConnectionType(networkState);
|
||||
if (connectionType == ConnectionType.CONNECTION_NONE) {
|
||||
// This may not be an error. The OS may signal a network event with connection type
|
||||
// NONE when the network disconnects.
|
||||
Logging.d(TAG, "Network " + network.toString() + " is disconnected");
|
||||
return null;
|
||||
}
|
||||
|
||||
// Some Android devices may return a CONNECTION_UNKNOWN_CELLULAR or CONNECTION_UNKNOWN type,
// which appears to be usable. Just log such cases here.
|
||||
if (connectionType == ConnectionType.CONNECTION_UNKNOWN
|
||||
|| connectionType == ConnectionType.CONNECTION_UNKNOWN_CELLULAR) {
|
||||
Logging.d(TAG, "Network " + network.toString() + " connection type is " + connectionType
|
||||
+ " because it has type " + networkState.getNetworkType() + " and subtype "
|
||||
+ networkState.getNetworkSubType());
|
||||
}
|
||||
|
||||
NetworkInformation networkInformation =
|
||||
new NetworkInformation(linkProperties.getInterfaceName(), connectionType,
|
||||
networkToNetId(network), getIPAddresses(linkProperties));
|
||||
return networkInformation;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns true if {@code network} can provide Internet access. Can be used to
|
||||
* ignore specialized networks (e.g. IMS, FOTA).
|
||||
*/
|
||||
@SuppressLint("NewApi")
|
||||
boolean hasInternetCapability(Network network) {
|
||||
if (connectivityManager == null) {
|
||||
return false;
|
||||
}
|
||||
final NetworkCapabilities capabilities = connectivityManager.getNetworkCapabilities(network);
|
||||
return capabilities != null
|
||||
&& capabilities.hasCapability(NetworkCapabilities.NET_CAPABILITY_INTERNET);
|
||||
}
|
||||
|
||||
/** Only callable on Lollipop and newer releases. */
|
||||
@SuppressLint("NewApi")
|
||||
public void registerNetworkCallback(NetworkCallback networkCallback) {
|
||||
connectivityManager.registerNetworkCallback(
|
||||
new NetworkRequest.Builder()
|
||||
.addCapability(NetworkCapabilities.NET_CAPABILITY_INTERNET)
|
||||
.build(),
|
||||
networkCallback);
|
||||
}
|
||||
|
||||
/** Only callable on Lollipop and newer releases. */
|
||||
@SuppressLint("NewApi")
|
||||
public void requestMobileNetwork(NetworkCallback networkCallback) {
|
||||
NetworkRequest.Builder builder = new NetworkRequest.Builder();
|
||||
builder.addCapability(NetworkCapabilities.NET_CAPABILITY_INTERNET)
|
||||
.addTransportType(NetworkCapabilities.TRANSPORT_CELLULAR);
|
||||
connectivityManager.requestNetwork(builder.build(), networkCallback);
|
||||
}
|
||||
|
||||
@SuppressLint("NewApi")
|
||||
IPAddress[] getIPAddresses(LinkProperties linkProperties) {
|
||||
IPAddress[] ipAddresses = new IPAddress[linkProperties.getLinkAddresses().size()];
|
||||
int i = 0;
|
||||
for (LinkAddress linkAddress : linkProperties.getLinkAddresses()) {
|
||||
ipAddresses[i] = new IPAddress(linkAddress.getAddress().getAddress());
|
||||
++i;
|
||||
}
|
||||
return ipAddresses;
|
||||
}
|
||||
|
||||
@SuppressLint("NewApi")
|
||||
public void releaseCallback(NetworkCallback networkCallback) {
|
||||
if (supportNetworkCallback()) {
|
||||
Logging.d(TAG, "Unregister network callback");
|
||||
connectivityManager.unregisterNetworkCallback(networkCallback);
|
||||
}
|
||||
}
|
||||
|
||||
public boolean supportNetworkCallback() {
|
||||
return Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP && connectivityManager != null;
|
||||
}
|
||||
}
|
||||
|
||||
/** Queries the WifiManager for SSID of the current Wifi connection. */
|
||||
static class WifiManagerDelegate {
|
||||
private final Context context;
|
||||
WifiManagerDelegate(Context context) {
|
||||
this.context = context;
|
||||
}
|
||||
|
||||
// For testing.
|
||||
WifiManagerDelegate() {
|
||||
// All the methods below should be overridden.
|
||||
context = null;
|
||||
}
|
||||
|
||||
String getWifiSSID() {
|
||||
final Intent intent = context.registerReceiver(
|
||||
null, new IntentFilter(WifiManager.NETWORK_STATE_CHANGED_ACTION));
|
||||
if (intent != null) {
|
||||
final WifiInfo wifiInfo = intent.getParcelableExtra(WifiManager.EXTRA_WIFI_INFO);
|
||||
if (wifiInfo != null) {
|
||||
final String ssid = wifiInfo.getSSID();
|
||||
if (ssid != null) {
|
||||
return ssid;
|
||||
}
|
||||
}
|
||||
}
|
||||
return "";
|
||||
}
|
||||
}
|
||||
|
||||
/** Maintains the information about wifi direct (aka WifiP2p) networks. */
|
||||
static class WifiDirectManagerDelegate extends BroadcastReceiver {
|
||||
// Network "handle" for the Wifi P2p network. We have to bind to the default network id
|
||||
// (NETWORK_UNSPECIFIED) for these addresses.
|
||||
private static final int WIFI_P2P_NETWORK_HANDLE = 0;
|
||||
private final Context context;
|
||||
private final Observer observer;
|
||||
// Network information about a WifiP2p (aka WiFi-Direct) network, or null if no such network is
|
||||
// connected.
|
||||
private NetworkInformation wifiP2pNetworkInfo = null;
|
||||
|
||||
WifiDirectManagerDelegate(Observer observer, Context context) {
|
||||
this.context = context;
|
||||
this.observer = observer;
|
||||
IntentFilter intentFilter = new IntentFilter();
|
||||
intentFilter.addAction(WifiP2pManager.WIFI_P2P_STATE_CHANGED_ACTION);
|
||||
intentFilter.addAction(WifiP2pManager.WIFI_P2P_CONNECTION_CHANGED_ACTION);
|
||||
context.registerReceiver(this, intentFilter);
|
||||
}
|
||||
|
||||
// BroadcastReceiver
|
||||
@Override
|
||||
@SuppressLint("InlinedApi")
|
||||
public void onReceive(Context context, Intent intent) {
|
||||
if (WifiP2pManager.WIFI_P2P_CONNECTION_CHANGED_ACTION.equals(intent.getAction())) {
|
||||
WifiP2pGroup wifiP2pGroup = intent.getParcelableExtra(WifiP2pManager.EXTRA_WIFI_P2P_GROUP);
|
||||
onWifiP2pGroupChange(wifiP2pGroup);
|
||||
} else if (WifiP2pManager.WIFI_P2P_STATE_CHANGED_ACTION.equals(intent.getAction())) {
|
||||
int state = intent.getIntExtra(WifiP2pManager.EXTRA_WIFI_STATE, 0 /* default to unknown */);
|
||||
onWifiP2pStateChange(state);
|
||||
}
|
||||
}
|
||||
|
||||
/** Releases the broadcast receiver. */
|
||||
public void release() {
|
||||
context.unregisterReceiver(this);
|
||||
}
|
||||
|
||||
public List<NetworkInformation> getActiveNetworkList() {
|
||||
if (wifiP2pNetworkInfo != null) {
|
||||
return Collections.singletonList(wifiP2pNetworkInfo);
|
||||
}
|
||||
|
||||
return Collections.emptyList();
|
||||
}
|
||||
|
||||
/** Handle a change notification about the wifi p2p group. */
|
||||
private void onWifiP2pGroupChange(WifiP2pGroup wifiP2pGroup) {
|
||||
if (wifiP2pGroup == null || wifiP2pGroup.getInterface() == null) {
|
||||
return;
|
||||
}
|
||||
|
||||
NetworkInterface wifiP2pInterface;
|
||||
try {
|
||||
wifiP2pInterface = NetworkInterface.getByName(wifiP2pGroup.getInterface());
|
||||
} catch (SocketException e) {
|
||||
Logging.e(TAG, "Unable to get WifiP2p network interface", e);
|
||||
return;
|
||||
}
|
||||
|
||||
List<InetAddress> interfaceAddresses = Collections.list(wifiP2pInterface.getInetAddresses());
|
||||
IPAddress[] ipAddresses = new IPAddress[interfaceAddresses.size()];
|
||||
for (int i = 0; i < interfaceAddresses.size(); ++i) {
|
||||
ipAddresses[i] = new IPAddress(interfaceAddresses.get(i).getAddress());
|
||||
}
|
||||
|
||||
wifiP2pNetworkInfo =
|
||||
new NetworkInformation(
|
||||
wifiP2pGroup.getInterface(),
|
||||
ConnectionType.CONNECTION_WIFI,
|
||||
WIFI_P2P_NETWORK_HANDLE,
|
||||
ipAddresses);
|
||||
observer.onNetworkConnect(wifiP2pNetworkInfo);
|
||||
}
|
||||
|
||||
/** Handle a state change notification about wifi p2p. */
|
||||
private void onWifiP2pStateChange(int state) {
|
||||
if (state == WifiP2pManager.WIFI_P2P_STATE_DISABLED) {
|
||||
wifiP2pNetworkInfo = null;
|
||||
observer.onNetworkDisconnect(WIFI_P2P_NETWORK_HANDLE);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
static final long INVALID_NET_ID = -1;
|
||||
private static final String TAG = "NetworkMonitorAutoDetect";
|
||||
|
||||
// Observer for the connection type change.
|
||||
private final Observer observer;
|
||||
private final IntentFilter intentFilter;
|
||||
private final Context context;
|
||||
// Used to request a mobile network. It does nothing except keep
// the callback so the request can be released later.
|
||||
private final NetworkCallback mobileNetworkCallback;
|
||||
// Used to receive updates on all networks.
|
||||
private final NetworkCallback allNetworkCallback;
|
||||
// connectivityManagerDelegate and wifiManagerDelegate are only non-final for testing.
|
||||
private ConnectivityManagerDelegate connectivityManagerDelegate;
|
||||
private WifiManagerDelegate wifiManagerDelegate;
|
||||
private WifiDirectManagerDelegate wifiDirectManagerDelegate;
|
||||
|
||||
private boolean isRegistered;
|
||||
private ConnectionType connectionType;
|
||||
private String wifiSSID;
|
||||
|
||||
/**
|
||||
* Observer interface by which observer is notified of network changes.
|
||||
*/
|
||||
public static interface Observer {
|
||||
/**
|
||||
* Called when default network changes.
|
||||
*/
|
||||
public void onConnectionTypeChanged(ConnectionType newConnectionType);
|
||||
public void onNetworkConnect(NetworkInformation networkInfo);
|
||||
public void onNetworkDisconnect(long networkHandle);
|
||||
}
|
||||
|
||||
/**
|
||||
* Constructs a NetworkMonitorAutoDetect. Should only be called on UI thread.
|
||||
*/
|
||||
@SuppressLint("NewApi")
|
||||
public NetworkMonitorAutoDetect(Observer observer, Context context) {
|
||||
this.observer = observer;
|
||||
this.context = context;
|
||||
connectivityManagerDelegate = new ConnectivityManagerDelegate(context);
|
||||
wifiManagerDelegate = new WifiManagerDelegate(context);
|
||||
|
||||
final NetworkState networkState = connectivityManagerDelegate.getNetworkState();
|
||||
connectionType = getConnectionType(networkState);
|
||||
wifiSSID = getWifiSSID(networkState);
|
||||
intentFilter = new IntentFilter(ConnectivityManager.CONNECTIVITY_ACTION);
|
||||
|
||||
if (PeerConnectionFactory.fieldTrialsFindFullName("IncludeWifiDirect").equals("Enabled")) {
|
||||
wifiDirectManagerDelegate = new WifiDirectManagerDelegate(observer, context);
|
||||
}
|
||||
|
||||
registerReceiver();
|
||||
if (connectivityManagerDelegate.supportNetworkCallback()) {
|
||||
// On Android 6.0.0, requestNetwork requires the WRITE_SETTINGS permission,
// so the call below may fail with a SecurityException. This was fixed in Android 6.0.1.
|
||||
NetworkCallback tempNetworkCallback = new NetworkCallback();
|
||||
try {
|
||||
connectivityManagerDelegate.requestMobileNetwork(tempNetworkCallback);
|
||||
} catch (java.lang.SecurityException e) {
|
||||
Logging.w(TAG, "Unable to obtain permission to request a cellular network.");
|
||||
tempNetworkCallback = null;
|
||||
}
|
||||
mobileNetworkCallback = tempNetworkCallback;
|
||||
allNetworkCallback = new SimpleNetworkCallback();
|
||||
connectivityManagerDelegate.registerNetworkCallback(allNetworkCallback);
|
||||
} else {
|
||||
mobileNetworkCallback = null;
|
||||
allNetworkCallback = null;
|
||||
}
|
||||
}
|
||||
|
||||
public boolean supportNetworkCallback() {
|
||||
return connectivityManagerDelegate.supportNetworkCallback();
|
||||
}
|
||||
|
||||
/**
|
||||
* Allows overriding the ConnectivityManagerDelegate for tests.
|
||||
*/
|
||||
void setConnectivityManagerDelegateForTests(ConnectivityManagerDelegate delegate) {
|
||||
connectivityManagerDelegate = delegate;
|
||||
}
|
||||
|
||||
/**
|
||||
* Allows overriding the WifiManagerDelegate for tests.
|
||||
*/
|
||||
void setWifiManagerDelegateForTests(WifiManagerDelegate delegate) {
|
||||
wifiManagerDelegate = delegate;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns whether the object has registered to receive network connectivity intents.
|
||||
* Visible for testing.
|
||||
*/
|
||||
boolean isReceiverRegisteredForTesting() {
|
||||
return isRegistered;
|
||||
}
|
||||
|
||||
List<NetworkInformation> getActiveNetworkList() {
|
||||
List<NetworkInformation> connectivityManagerList =
|
||||
connectivityManagerDelegate.getActiveNetworkList();
|
||||
if (connectivityManagerList == null) {
|
||||
return null;
|
||||
}
|
||||
ArrayList<NetworkInformation> result =
|
||||
new ArrayList<NetworkInformation>(connectivityManagerList);
|
||||
if (wifiDirectManagerDelegate != null) {
|
||||
result.addAll(wifiDirectManagerDelegate.getActiveNetworkList());
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
public void destroy() {
|
||||
if (allNetworkCallback != null) {
|
||||
connectivityManagerDelegate.releaseCallback(allNetworkCallback);
|
||||
}
|
||||
if (mobileNetworkCallback != null) {
|
||||
connectivityManagerDelegate.releaseCallback(mobileNetworkCallback);
|
||||
}
|
||||
if (wifiDirectManagerDelegate != null) {
|
||||
wifiDirectManagerDelegate.release();
|
||||
}
|
||||
unregisterReceiver();
|
||||
}
|
||||
|
||||
/**
|
||||
* Registers a BroadcastReceiver in the given context.
|
||||
*/
|
||||
private void registerReceiver() {
|
||||
if (isRegistered)
|
||||
return;
|
||||
|
||||
isRegistered = true;
|
||||
context.registerReceiver(this, intentFilter);
|
||||
}
|
||||
|
||||
/**
|
||||
* Unregisters the BroadcastReceiver in the given context.
|
||||
*/
|
||||
private void unregisterReceiver() {
|
||||
if (!isRegistered)
|
||||
return;
|
||||
|
||||
isRegistered = false;
|
||||
context.unregisterReceiver(this);
|
||||
}
|
||||
|
||||
public NetworkState getCurrentNetworkState() {
|
||||
return connectivityManagerDelegate.getNetworkState();
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns NetID of device's current default connected network used for
|
||||
* communication.
|
||||
* Only implemented on Lollipop and newer releases, returns INVALID_NET_ID
|
||||
* when not implemented.
|
||||
*/
|
||||
public long getDefaultNetId() {
|
||||
return connectivityManagerDelegate.getDefaultNetId();
|
||||
}
|
||||
|
||||
public static ConnectionType getConnectionType(NetworkState networkState) {
|
||||
if (!networkState.isConnected()) {
|
||||
return ConnectionType.CONNECTION_NONE;
|
||||
}
|
||||
|
||||
switch (networkState.getNetworkType()) {
|
||||
case ConnectivityManager.TYPE_ETHERNET:
|
||||
return ConnectionType.CONNECTION_ETHERNET;
|
||||
case ConnectivityManager.TYPE_WIFI:
|
||||
return ConnectionType.CONNECTION_WIFI;
|
||||
case ConnectivityManager.TYPE_WIMAX:
|
||||
return ConnectionType.CONNECTION_4G;
|
||||
case ConnectivityManager.TYPE_BLUETOOTH:
|
||||
return ConnectionType.CONNECTION_BLUETOOTH;
|
||||
case ConnectivityManager.TYPE_MOBILE:
|
||||
// Use information from TelephonyManager to classify the connection.
|
||||
switch (networkState.getNetworkSubType()) {
|
||||
case TelephonyManager.NETWORK_TYPE_GPRS:
|
||||
case TelephonyManager.NETWORK_TYPE_EDGE:
|
||||
case TelephonyManager.NETWORK_TYPE_CDMA:
|
||||
case TelephonyManager.NETWORK_TYPE_1xRTT:
|
||||
case TelephonyManager.NETWORK_TYPE_IDEN:
|
||||
return ConnectionType.CONNECTION_2G;
|
||||
case TelephonyManager.NETWORK_TYPE_UMTS:
|
||||
case TelephonyManager.NETWORK_TYPE_EVDO_0:
|
||||
case TelephonyManager.NETWORK_TYPE_EVDO_A:
|
||||
case TelephonyManager.NETWORK_TYPE_HSDPA:
|
||||
case TelephonyManager.NETWORK_TYPE_HSUPA:
|
||||
case TelephonyManager.NETWORK_TYPE_HSPA:
|
||||
case TelephonyManager.NETWORK_TYPE_EVDO_B:
|
||||
case TelephonyManager.NETWORK_TYPE_EHRPD:
|
||||
case TelephonyManager.NETWORK_TYPE_HSPAP:
|
||||
return ConnectionType.CONNECTION_3G;
|
||||
case TelephonyManager.NETWORK_TYPE_LTE:
|
||||
return ConnectionType.CONNECTION_4G;
|
||||
default:
|
||||
return ConnectionType.CONNECTION_UNKNOWN_CELLULAR;
|
||||
}
|
||||
default:
|
||||
return ConnectionType.CONNECTION_UNKNOWN;
|
||||
}
|
||||
}
|
||||
|
||||
private String getWifiSSID(NetworkState networkState) {
|
||||
if (getConnectionType(networkState) != ConnectionType.CONNECTION_WIFI)
|
||||
return "";
|
||||
return wifiManagerDelegate.getWifiSSID();
|
||||
}
|
||||
|
||||
// BroadcastReceiver
|
||||
@Override
|
||||
public void onReceive(Context context, Intent intent) {
|
||||
final NetworkState networkState = getCurrentNetworkState();
|
||||
if (ConnectivityManager.CONNECTIVITY_ACTION.equals(intent.getAction())) {
|
||||
connectionTypeChanged(networkState);
|
||||
}
|
||||
}
|
||||
|
||||
private void connectionTypeChanged(NetworkState networkState) {
|
||||
ConnectionType newConnectionType = getConnectionType(networkState);
|
||||
String newWifiSSID = getWifiSSID(networkState);
|
||||
if (newConnectionType == connectionType && newWifiSSID.equals(wifiSSID))
|
||||
return;
|
||||
|
||||
connectionType = newConnectionType;
|
||||
wifiSSID = newWifiSSID;
|
||||
Logging.d(TAG, "Network connectivity changed, type is: " + connectionType);
|
||||
observer.onConnectionTypeChanged(newConnectionType);
|
||||
}
|
||||
|
||||
/**
|
||||
* Extracts the NetID of the network on Lollipop, and the NetworkHandle (which is a munged
|
||||
* NetID) on Marshmallow and newer releases. Only available on Lollipop and
|
||||
* newer releases. Returns long since getNetworkHandle returns long.
|
||||
*/
|
||||
@SuppressLint("NewApi")
|
||||
private static long networkToNetId(Network network) {
|
||||
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
|
||||
return network.getNetworkHandle();
|
||||
}
|
||||
|
||||
// NOTE(honghaiz): This depends on Android framework implementation details.
|
||||
// These details cannot change because Lollipop has been released.
|
||||
return Integer.parseInt(network.toString());
|
||||
}
|
||||
}
|
||||
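A small sketch of the static classification helper above; NetworkState is package-private, so this assumes code living in the org.webrtc package with android.net.ConnectivityManager and android.telephony.TelephonyManager imported.

// Hedged sketch (package org.webrtc): map raw ConnectivityManager/TelephonyManager
// constants to a ConnectionType via the table in getConnectionType().
NetworkMonitorAutoDetect.NetworkState lte = new NetworkMonitorAutoDetect.NetworkState(
    true /* connected */, ConnectivityManager.TYPE_MOBILE, TelephonyManager.NETWORK_TYPE_LTE);
// Expected result: CONNECTION_4G.
Logging.d("NetDetectDemo", "LTE maps to " + NetworkMonitorAutoDetect.getConnectionType(lte));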
3
sdk/android/api/org/webrtc/OWNERS
Normal file
@ -0,0 +1,3 @@
per-file Camera*=sakal@webrtc.org
per-file Histogram.java=sakal@webrtc.org
per-file Metrics.java=sakal@webrtc.org
553
sdk/android/api/org/webrtc/PeerConnection.java
Normal file
@ -0,0 +1,553 @@
|
||||
/*
|
||||
* Copyright 2013 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
package org.webrtc;
|
||||
|
||||
import java.util.Collections;
|
||||
import java.util.LinkedList;
|
||||
import java.util.List;
|
||||
|
||||
/**
|
||||
* Java-land version of the PeerConnection APIs; wraps the C++ API
|
||||
* http://www.webrtc.org/reference/native-apis, which in turn is inspired by the
|
||||
* JS APIs: http://dev.w3.org/2011/webrtc/editor/webrtc.html and
|
||||
* http://www.w3.org/TR/mediacapture-streams/
|
||||
*/
|
||||
public class PeerConnection {
|
||||
static {
|
||||
System.loadLibrary("jingle_peerconnection_so");
|
||||
}
|
||||
|
||||
/** Tracks PeerConnectionInterface::IceGatheringState */
|
||||
public enum IceGatheringState { NEW, GATHERING, COMPLETE }
|
||||
|
||||
/** Tracks PeerConnectionInterface::IceConnectionState */
|
||||
public enum IceConnectionState {
|
||||
NEW,
|
||||
CHECKING,
|
||||
CONNECTED,
|
||||
COMPLETED,
|
||||
FAILED,
|
||||
DISCONNECTED,
|
||||
CLOSED
|
||||
}
|
||||
|
||||
/** Tracks PeerConnectionInterface::TlsCertPolicy */
|
||||
public enum TlsCertPolicy {
|
||||
TLS_CERT_POLICY_SECURE,
|
||||
TLS_CERT_POLICY_INSECURE_NO_CHECK,
|
||||
}
|
||||
|
||||
/** Tracks PeerConnectionInterface::SignalingState */
|
||||
public enum SignalingState {
|
||||
STABLE,
|
||||
HAVE_LOCAL_OFFER,
|
||||
HAVE_LOCAL_PRANSWER,
|
||||
HAVE_REMOTE_OFFER,
|
||||
HAVE_REMOTE_PRANSWER,
|
||||
CLOSED
|
||||
}
|
||||
|
||||
/** Java version of PeerConnectionObserver. */
|
||||
public static interface Observer {
|
||||
/** Triggered when the SignalingState changes. */
|
||||
public void onSignalingChange(SignalingState newState);
|
||||
|
||||
/** Triggered when the IceConnectionState changes. */
|
||||
public void onIceConnectionChange(IceConnectionState newState);
|
||||
|
||||
/** Triggered when the ICE connection receiving status changes. */
|
||||
public void onIceConnectionReceivingChange(boolean receiving);
|
||||
|
||||
/** Triggered when the IceGatheringState changes. */
|
||||
public void onIceGatheringChange(IceGatheringState newState);
|
||||
|
||||
/** Triggered when a new ICE candidate has been found. */
|
||||
public void onIceCandidate(IceCandidate candidate);
|
||||
|
||||
/** Triggered when some ICE candidates have been removed. */
|
||||
public void onIceCandidatesRemoved(IceCandidate[] candidates);
|
||||
|
||||
/** Triggered when media is received on a new stream from a remote peer. */
|
||||
public void onAddStream(MediaStream stream);
|
||||
|
||||
/** Triggered when a remote peer closes a stream. */
|
||||
public void onRemoveStream(MediaStream stream);
|
||||
|
||||
/** Triggered when a remote peer opens a DataChannel. */
|
||||
public void onDataChannel(DataChannel dataChannel);
|
||||
|
||||
/** Triggered when renegotiation is necessary. */
|
||||
public void onRenegotiationNeeded();
|
||||
|
||||
/**
|
||||
* Triggered when a new track is signaled by the remote peer, as a result of
|
||||
* setRemoteDescription.
|
||||
*/
|
||||
public void onAddTrack(RtpReceiver receiver, MediaStream[] mediaStreams);
|
||||
}
|
||||
|
||||
/** Java version of PeerConnectionInterface.IceServer. */
|
||||
public static class IceServer {
|
||||
// List of URIs associated with this server. Valid formats are described
|
||||
// in RFC7064 and RFC7065, and more may be added in the future. The "host"
|
||||
// part of the URI may contain either an IP address or a hostname.
|
||||
@Deprecated public final String uri;
|
||||
public final List<String> urls;
|
||||
public final String username;
|
||||
public final String password;
|
||||
public final TlsCertPolicy tlsCertPolicy;
|
||||
|
||||
// If the URIs in |urls| only contain IP addresses, this field can be used
|
||||
// to indicate the hostname, which may be necessary for TLS (using the SNI
|
||||
// extension). If |urls| itself contains the hostname, this isn't
|
||||
// necessary.
|
||||
public final String hostname;
|
||||
|
||||
// List of protocols to be used in the TLS ALPN extension.
|
||||
public final List<String> tlsAlpnProtocols;
|
||||
|
||||
// List of elliptic curves to be used in the TLS elliptic curves extension.
|
||||
// Only curve names supported by OpenSSL should be used (e.g. "P-256", "X25519").
|
||||
public final List<String> tlsEllipticCurves;
|
||||
|
||||
/** Convenience constructor for STUN servers. */
|
||||
@Deprecated
|
||||
public IceServer(String uri) {
|
||||
this(uri, "", "");
|
||||
}
|
||||
|
||||
@Deprecated
|
||||
public IceServer(String uri, String username, String password) {
|
||||
this(uri, username, password, TlsCertPolicy.TLS_CERT_POLICY_SECURE);
|
||||
}
|
||||
|
||||
@Deprecated
|
||||
public IceServer(String uri, String username, String password, TlsCertPolicy tlsCertPolicy) {
|
||||
this(uri, username, password, tlsCertPolicy, "");
|
||||
}
|
||||
|
||||
@Deprecated
|
||||
public IceServer(String uri, String username, String password, TlsCertPolicy tlsCertPolicy,
|
||||
String hostname) {
|
||||
this(uri, Collections.singletonList(uri), username, password, tlsCertPolicy, hostname, null,
|
||||
null);
|
||||
}
|
||||
|
||||
private IceServer(String uri, List<String> urls, String username, String password,
|
||||
TlsCertPolicy tlsCertPolicy, String hostname, List<String> tlsAlpnProtocols,
|
||||
List<String> tlsEllipticCurves) {
|
||||
if (uri == null || urls == null || urls.isEmpty()) {
|
||||
throw new IllegalArgumentException("uri == null || urls == null || urls.isEmpty()");
|
||||
}
|
||||
for (String it : urls) {
|
||||
if (it == null) {
|
||||
throw new IllegalArgumentException("urls element is null: " + urls);
|
||||
}
|
||||
}
|
||||
if (username == null) {
|
||||
throw new IllegalArgumentException("username == null");
|
||||
}
|
||||
if (password == null) {
|
||||
throw new IllegalArgumentException("password == null");
|
||||
}
|
||||
if (hostname == null) {
|
||||
throw new IllegalArgumentException("hostname == null");
|
||||
}
|
||||
this.uri = uri;
|
||||
this.urls = urls;
|
||||
this.username = username;
|
||||
this.password = password;
|
||||
this.tlsCertPolicy = tlsCertPolicy;
|
||||
this.hostname = hostname;
|
||||
this.tlsAlpnProtocols = tlsAlpnProtocols;
|
||||
this.tlsEllipticCurves = tlsEllipticCurves;
|
||||
}
|
||||
|
||||
public String toString() {
|
||||
return urls + " [" + username + ":" + password + "] [" + tlsCertPolicy + "] [" + hostname
|
||||
+ "] [" + tlsAlpnProtocols + "] [" + tlsEllipticCurves + "]";
|
||||
}
|
||||
|
||||
public static Builder builder(String uri) {
|
||||
return new Builder(Collections.singletonList(uri));
|
||||
}
|
||||
|
||||
public static Builder builder(List<String> urls) {
|
||||
return new Builder(urls);
|
||||
}
|
||||
|
||||
public static class Builder {
|
||||
private final List<String> urls;
|
||||
private String username = "";
|
||||
private String password = "";
|
||||
private TlsCertPolicy tlsCertPolicy = TlsCertPolicy.TLS_CERT_POLICY_SECURE;
|
||||
private String hostname = "";
|
||||
private List<String> tlsAlpnProtocols;
|
||||
private List<String> tlsEllipticCurves;
|
||||
|
||||
private Builder(List<String> urls) {
|
||||
if (urls == null || urls.isEmpty()) {
|
||||
throw new IllegalArgumentException("urls == null || urls.isEmpty(): " + urls);
|
||||
}
|
||||
this.urls = urls;
|
||||
}
|
||||
|
||||
public Builder setUsername(String username) {
|
||||
this.username = username;
|
||||
return this;
|
||||
}
|
||||
|
||||
public Builder setPassword(String password) {
|
||||
this.password = password;
|
||||
return this;
|
||||
}
|
||||
|
||||
public Builder setTlsCertPolicy(TlsCertPolicy tlsCertPolicy) {
|
||||
this.tlsCertPolicy = tlsCertPolicy;
|
||||
return this;
|
||||
}
|
||||
|
||||
public Builder setHostname(String hostname) {
|
||||
this.hostname = hostname;
|
||||
return this;
|
||||
}
|
||||
|
||||
public Builder setTlsAlpnProtocols(List<String> tlsAlpnProtocols) {
|
||||
this.tlsAlpnProtocols = tlsAlpnProtocols;
|
||||
return this;
|
||||
}
|
||||
|
||||
public Builder setTlsEllipticCurves(List<String> tlsEllipticCurves) {
|
||||
this.tlsEllipticCurves = tlsEllipticCurves;
|
||||
return this;
|
||||
}
|
||||
|
||||
public IceServer createIceServer() {
|
||||
return new IceServer(urls.get(0), urls, username, password, tlsCertPolicy, hostname,
|
||||
tlsAlpnProtocols, tlsEllipticCurves);
|
||||
}
|
||||
}
|
||||
}
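// Illustrative usage sketch (not part of the original file): building an
// IceServer with the Builder API above. The TURN URI and credentials are
// placeholders.
//
//   PeerConnection.IceServer turnServer =
//       PeerConnection.IceServer.builder("turn:turn.example.com:3478")
//           .setUsername("user")
//           .setPassword("secret")
//           .createIceServer();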
|
||||
|
||||
/** Java version of PeerConnectionInterface.IceTransportsType */
|
||||
public enum IceTransportsType { NONE, RELAY, NOHOST, ALL }
|
||||
|
||||
/** Java version of PeerConnectionInterface.BundlePolicy */
|
||||
public enum BundlePolicy { BALANCED, MAXBUNDLE, MAXCOMPAT }
|
||||
|
||||
/** Java version of PeerConnectionInterface.RtcpMuxPolicy */
|
||||
public enum RtcpMuxPolicy { NEGOTIATE, REQUIRE }
|
||||
|
||||
/** Java version of PeerConnectionInterface.TcpCandidatePolicy */
|
||||
public enum TcpCandidatePolicy { ENABLED, DISABLED }
|
||||
|
||||
/** Java version of PeerConnectionInterface.CandidateNetworkPolicy */
|
||||
public enum CandidateNetworkPolicy { ALL, LOW_COST }
|
||||
|
||||
/** Java version of rtc::KeyType */
|
||||
public enum KeyType { RSA, ECDSA }
|
||||
|
||||
/** Java version of PeerConnectionInterface.ContinualGatheringPolicy */
|
||||
public enum ContinualGatheringPolicy { GATHER_ONCE, GATHER_CONTINUALLY }
|
||||
|
||||
/** Java version of rtc::IntervalRange */
|
||||
public static class IntervalRange {
|
||||
private final int min;
|
||||
private final int max;
|
||||
|
||||
public IntervalRange(int min, int max) {
|
||||
this.min = min;
|
||||
this.max = max;
|
||||
}
|
||||
|
||||
public int getMin() {
|
||||
return min;
|
||||
}
|
||||
|
||||
public int getMax() {
|
||||
return max;
|
||||
}
|
||||
}
|
||||
|
||||
/** Java version of PeerConnectionInterface.RTCConfiguration */
|
||||
public static class RTCConfiguration {
|
||||
public IceTransportsType iceTransportsType;
|
||||
public List<IceServer> iceServers;
|
||||
public BundlePolicy bundlePolicy;
|
||||
public RtcpMuxPolicy rtcpMuxPolicy;
|
||||
public TcpCandidatePolicy tcpCandidatePolicy;
|
||||
public CandidateNetworkPolicy candidateNetworkPolicy;
|
||||
public int audioJitterBufferMaxPackets;
|
||||
public boolean audioJitterBufferFastAccelerate;
|
||||
public int iceConnectionReceivingTimeout;
|
||||
public int iceBackupCandidatePairPingInterval;
|
||||
public KeyType keyType;
|
||||
public ContinualGatheringPolicy continualGatheringPolicy;
|
||||
public int iceCandidatePoolSize;
|
||||
public boolean pruneTurnPorts;
|
||||
public boolean presumeWritableWhenFullyRelayed;
|
||||
public Integer iceCheckMinInterval;
|
||||
public boolean disableIPv6OnWifi;
|
||||
// By default, PeerConnection will use a limited number of IPv6 network
|
||||
// interfaces, in order to avoid too many ICE candidate pairs being created
|
||||
// and delaying ICE completion.
|
||||
//
|
||||
// Can be set to Integer.MAX_VALUE to effectively disable the limit.
|
||||
public int maxIPv6Networks;
|
||||
public IntervalRange iceRegatherIntervalRange;
|
||||
|
||||
// TODO(deadbeef): Instead of duplicating the defaults here, we should do
|
||||
// something to pick up the defaults from C++. The Objective-C equivalent
|
||||
// of RTCConfiguration does that.
|
||||
public RTCConfiguration(List<IceServer> iceServers) {
|
||||
iceTransportsType = IceTransportsType.ALL;
|
||||
bundlePolicy = BundlePolicy.BALANCED;
|
||||
rtcpMuxPolicy = RtcpMuxPolicy.REQUIRE;
|
||||
tcpCandidatePolicy = TcpCandidatePolicy.ENABLED;
|
||||
candidateNetworkPolicy = CandidateNetworkPolicy.ALL;
|
||||
this.iceServers = iceServers;
|
||||
audioJitterBufferMaxPackets = 50;
|
||||
audioJitterBufferFastAccelerate = false;
|
||||
iceConnectionReceivingTimeout = -1;
|
||||
iceBackupCandidatePairPingInterval = -1;
|
||||
keyType = KeyType.ECDSA;
|
||||
continualGatheringPolicy = ContinualGatheringPolicy.GATHER_ONCE;
|
||||
iceCandidatePoolSize = 0;
|
||||
pruneTurnPorts = false;
|
||||
presumeWritableWhenFullyRelayed = false;
|
||||
iceCheckMinInterval = null;
|
||||
disableIPv6OnWifi = false;
|
||||
maxIPv6Networks = 5;
|
||||
iceRegatherIntervalRange = null;
|
||||
}
|
||||
};
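// Illustrative usage sketch (not part of the original file): constructing an
// RTCConfiguration and overriding one of the defaults set above. The STUN URI
// is a placeholder.
//
//   List<IceServer> iceServers = Collections.singletonList(
//       IceServer.builder("stun:stun.example.org:19302").createIceServer());
//   RTCConfiguration config = new RTCConfiguration(iceServers);
//   config.continualGatheringPolicy = ContinualGatheringPolicy.GATHER_CONTINUALLY;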
|
||||
|
||||
private final List<MediaStream> localStreams;
|
||||
private final long nativePeerConnection;
|
||||
private final long nativeObserver;
|
||||
private List<RtpSender> senders;
|
||||
private List<RtpReceiver> receivers;
|
||||
|
||||
PeerConnection(long nativePeerConnection, long nativeObserver) {
|
||||
this.nativePeerConnection = nativePeerConnection;
|
||||
this.nativeObserver = nativeObserver;
|
||||
localStreams = new LinkedList<MediaStream>();
|
||||
senders = new LinkedList<RtpSender>();
|
||||
receivers = new LinkedList<RtpReceiver>();
|
||||
}
|
||||
|
||||
// JsepInterface.
|
||||
public native SessionDescription getLocalDescription();
|
||||
|
||||
public native SessionDescription getRemoteDescription();
|
||||
|
||||
public native DataChannel createDataChannel(String label, DataChannel.Init init);
|
||||
|
||||
public native void createOffer(SdpObserver observer, MediaConstraints constraints);
|
||||
|
||||
public native void createAnswer(SdpObserver observer, MediaConstraints constraints);
|
||||
|
||||
public native void setLocalDescription(SdpObserver observer, SessionDescription sdp);
|
||||
|
||||
public native void setRemoteDescription(SdpObserver observer, SessionDescription sdp);
|
||||
|
||||
public boolean setConfiguration(RTCConfiguration config) {
|
||||
return nativeSetConfiguration(config, nativeObserver);
|
||||
}
|
||||
|
||||
public boolean addIceCandidate(IceCandidate candidate) {
|
||||
return nativeAddIceCandidate(candidate.sdpMid, candidate.sdpMLineIndex, candidate.sdp);
|
||||
}
|
||||
|
||||
public boolean removeIceCandidates(final IceCandidate[] candidates) {
|
||||
return nativeRemoveIceCandidates(candidates);
|
||||
}
|
||||
|
||||
public boolean addStream(MediaStream stream) {
|
||||
boolean ret = nativeAddLocalStream(stream.nativeStream);
|
||||
if (!ret) {
|
||||
return false;
|
||||
}
|
||||
localStreams.add(stream);
|
||||
return true;
|
||||
}
|
||||
|
||||
public void removeStream(MediaStream stream) {
|
||||
nativeRemoveLocalStream(stream.nativeStream);
|
||||
localStreams.remove(stream);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates an RtpSender without a track.
|
||||
* <p>
|
||||
* This method allows an application to cause the PeerConnection to negotiate
|
||||
* sending/receiving a specific media type, but without having a track to
|
||||
* send yet.
|
||||
* <p>
|
||||
* When the application does want to begin sending a track, it can call
|
||||
* RtpSender.setTrack, which doesn't require any additional SDP negotiation.
|
||||
* <p>
|
||||
* Example use:
|
||||
* <pre>
|
||||
* {@code
|
||||
* audioSender = pc.createSender("audio", "stream1");
|
||||
* videoSender = pc.createSender("video", "stream1");
|
||||
* // Do normal SDP offer/answer, which will kick off ICE/DTLS and negotiate
|
||||
* // media parameters....
|
||||
* // Later, when the endpoint is ready to actually begin sending:
|
||||
* audioSender.setTrack(audioTrack, false);
|
||||
* videoSender.setTrack(videoTrack, false);
|
||||
* }
|
||||
* </pre>
|
||||
* Note: This corresponds most closely to "addTransceiver" in the official
|
||||
* WebRTC API, in that it creates a sender without a track. It was
|
||||
* implemented before addTransceiver because it provides useful
|
||||
* functionality, and properly implementing transceivers would have required
|
||||
* a great deal more work.
|
||||
*
|
||||
* @param kind Corresponds to MediaStreamTrack kinds (must be "audio" or
|
||||
* "video").
|
||||
* @param stream_id The ID of the MediaStream that this sender's track will
|
||||
* be associated with when SDP is applied to the remote
|
||||
* PeerConnection. If createSender is used to create an
|
||||
* audio and video sender that should be synchronized, they
|
||||
* should use the same stream ID.
|
||||
* @return A new RtpSender object if successful, or null otherwise.
|
||||
*/
|
||||
public RtpSender createSender(String kind, String stream_id) {
|
||||
RtpSender new_sender = nativeCreateSender(kind, stream_id);
|
||||
if (new_sender != null) {
|
||||
senders.add(new_sender);
|
||||
}
|
||||
return new_sender;
|
||||
}
|
||||
|
||||
// Note that calling getSenders will dispose of the senders previously
|
||||
// returned (and same goes for getReceivers).
|
||||
public List<RtpSender> getSenders() {
|
||||
for (RtpSender sender : senders) {
|
||||
sender.dispose();
|
||||
}
|
||||
senders = nativeGetSenders();
|
||||
return Collections.unmodifiableList(senders);
|
||||
}
|
||||
|
||||
public List<RtpReceiver> getReceivers() {
|
||||
for (RtpReceiver receiver : receivers) {
|
||||
receiver.dispose();
|
||||
}
|
||||
receivers = nativeGetReceivers();
|
||||
return Collections.unmodifiableList(receivers);
|
||||
}
|
||||
|
||||
// Older, non-standard implementation of getStats.
|
||||
@Deprecated
|
||||
public boolean getStats(StatsObserver observer, MediaStreamTrack track) {
|
||||
return nativeOldGetStats(observer, (track == null) ? 0 : track.nativeTrack);
|
||||
}
|
||||
|
||||
// Gets stats using the new stats collection API, see webrtc/api/stats/. These
|
||||
// will replace the old stats collection API once the new API has matured enough.
|
||||
public void getStats(RTCStatsCollectorCallback callback) {
|
||||
nativeNewGetStats(callback);
|
||||
}
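// Illustrative usage sketch (not part of the original file): requesting a
// standards-based stats report and logging it when it is delivered.
//
//   peerConnection.getStats(new RTCStatsCollectorCallback() {
//     @Override
//     public void onStatsDelivered(RTCStatsReport report) {
//       Logging.d("StatsExample", report.toString());
//     }
//   });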
|
||||
|
||||
// Limits the bandwidth allocated for all RTP streams sent by this
|
||||
// PeerConnection. Pass null to leave a value unchanged.
|
||||
public native boolean setBitrate(Integer min, Integer current, Integer max);
|
||||
|
||||
// Starts recording an RTC event log. Ownership of the file is transferred to
|
||||
// the native code. If an RTC event log is already being recorded, it will be
|
||||
// stopped and a new one will start using the provided file. Logging will
|
||||
// continue until the stopRtcEventLog function is called. The max_size_bytes
|
||||
// argument is currently ignored; it is reserved for future use.
|
||||
public boolean startRtcEventLog(int file_descriptor, int max_size_bytes) {
|
||||
return nativeStartRtcEventLog(file_descriptor, max_size_bytes);
|
||||
}
|
||||
|
||||
// Stops recording an RTC event log. If no RTC event log is currently being
|
||||
// recorded, this call will have no effect.
|
||||
public void stopRtcEventLog() {
|
||||
nativeStopRtcEventLog();
|
||||
}
|
||||
|
||||
// TODO(fischman): add support for DTMF-related methods once that API
|
||||
// stabilizes.
|
||||
public native SignalingState signalingState();
|
||||
|
||||
public native IceConnectionState iceConnectionState();
|
||||
|
||||
public native IceGatheringState iceGatheringState();
|
||||
|
||||
public native void close();
|
||||
|
||||
/**
|
||||
* Free native resources associated with this PeerConnection instance.
|
||||
* <p>
|
||||
* This method removes a reference count from the C++ PeerConnection object,
|
||||
* which should result in it being destroyed. It also calls equivalent
|
||||
* "dispose" methods on the Java objects attached to this PeerConnection
|
||||
* (streams, senders, receivers), such that their associated C++ objects
|
||||
* will also be destroyed.
|
||||
* <p>
|
||||
* Note that this method cannot be safely called from an observer callback
|
||||
* (PeerConnection.Observer, DataChannel.Observer, etc.). If you want to, for
|
||||
* example, destroy the PeerConnection after an "ICE failed" callback, you
|
||||
* must do this asynchronously (in other words, unwind the stack first). See
|
||||
* <a href="https://bugs.chromium.org/p/webrtc/issues/detail?id=3721">bug
|
||||
* 3721</a> for more details.
|
||||
*/
|
||||
public void dispose() {
|
||||
close();
|
||||
for (MediaStream stream : localStreams) {
|
||||
nativeRemoveLocalStream(stream.nativeStream);
|
||||
stream.dispose();
|
||||
}
|
||||
localStreams.clear();
|
||||
for (RtpSender sender : senders) {
|
||||
sender.dispose();
|
||||
}
|
||||
senders.clear();
|
||||
for (RtpReceiver receiver : receivers) {
|
||||
receiver.dispose();
|
||||
}
|
||||
receivers.clear();
|
||||
JniCommon.nativeReleaseRef(nativePeerConnection);
|
||||
freeObserver(nativeObserver);
|
||||
}
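// Illustrative usage sketch (not part of the original file): disposing after
// an "ICE failed" callback by first unwinding the observer stack, as required
// by the comment above. Assumes android.os.Handler/Looper are available and
// that the main thread owns this PeerConnection.
//
//   @Override
//   public void onIceConnectionChange(IceConnectionState newState) {
//     if (newState == IceConnectionState.FAILED) {
//       new Handler(Looper.getMainLooper()).post(new Runnable() {
//         @Override
//         public void run() {
//           peerConnection.dispose();
//         }
//       });
//     }
//   }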
|
||||
|
||||
private static native void freeObserver(long nativeObserver);
|
||||
|
||||
public native boolean nativeSetConfiguration(RTCConfiguration config, long nativeObserver);
|
||||
|
||||
private native boolean nativeAddIceCandidate(
|
||||
String sdpMid, int sdpMLineIndex, String iceCandidateSdp);
|
||||
|
||||
private native boolean nativeRemoveIceCandidates(final IceCandidate[] candidates);
|
||||
|
||||
private native boolean nativeAddLocalStream(long nativeStream);
|
||||
|
||||
private native void nativeRemoveLocalStream(long nativeStream);
|
||||
|
||||
private native boolean nativeOldGetStats(StatsObserver observer, long nativeTrack);
|
||||
|
||||
private native void nativeNewGetStats(RTCStatsCollectorCallback callback);
|
||||
|
||||
private native RtpSender nativeCreateSender(String kind, String stream_id);
|
||||
|
||||
private native List<RtpSender> nativeGetSenders();
|
||||
|
||||
private native List<RtpReceiver> nativeGetReceivers();
|
||||
|
||||
private native boolean nativeStartRtcEventLog(int file_descriptor, int max_size_bytes);
|
||||
|
||||
private native void nativeStopRtcEventLog();
|
||||
}
|
||||
293
sdk/android/api/org/webrtc/PeerConnectionFactory.java
Normal file
@ -0,0 +1,293 @@
|
||||
/*
|
||||
* Copyright 2013 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
package org.webrtc;
|
||||
|
||||
import android.content.Context;
|
||||
import java.util.List;
|
||||
|
||||
/**
|
||||
* Java wrapper for a C++ PeerConnectionFactoryInterface. Main entry point to
|
||||
* the PeerConnection API for clients.
|
||||
*/
|
||||
public class PeerConnectionFactory {
|
||||
private static volatile boolean nativeLibLoaded;
|
||||
|
||||
static {
|
||||
try {
|
||||
System.loadLibrary("jingle_peerconnection_so");
|
||||
nativeLibLoaded = true;
|
||||
} catch (UnsatisfiedLinkError t) {
|
||||
nativeLibLoaded = false;
|
||||
}
|
||||
}
|
||||
|
||||
public static final String TRIAL_ENABLED = "Enabled";
|
||||
public static final String VIDEO_FRAME_EMIT_TRIAL = "VideoFrameEmit";
|
||||
|
||||
private static final String TAG = "PeerConnectionFactory";
|
||||
private static final String VIDEO_CAPTURER_THREAD_NAME = "VideoCapturerThread";
|
||||
private final long nativeFactory;
|
||||
private static Context applicationContext;
|
||||
private static Thread networkThread;
|
||||
private static Thread workerThread;
|
||||
private static Thread signalingThread;
|
||||
private EglBase localEglbase;
|
||||
private EglBase remoteEglbase;
|
||||
|
||||
public static class Options {
|
||||
// Keep in sync with webrtc/rtc_base/network.h!
|
||||
static final int ADAPTER_TYPE_UNKNOWN = 0;
|
||||
static final int ADAPTER_TYPE_ETHERNET = 1 << 0;
|
||||
static final int ADAPTER_TYPE_WIFI = 1 << 1;
|
||||
static final int ADAPTER_TYPE_CELLULAR = 1 << 2;
|
||||
static final int ADAPTER_TYPE_VPN = 1 << 3;
|
||||
static final int ADAPTER_TYPE_LOOPBACK = 1 << 4;
|
||||
|
||||
public int networkIgnoreMask;
|
||||
public boolean disableEncryption;
|
||||
public boolean disableNetworkMonitor;
|
||||
}
|
||||
|
||||
// Must be called at least once before creating a PeerConnectionFactory
|
||||
// (for example, at application startup time).
|
||||
public static native void nativeInitializeAndroidGlobals(
|
||||
Context context, boolean videoHwAcceleration);
|
||||
|
||||
public static void initializeAndroidGlobals(Context context, boolean videoHwAcceleration) {
|
||||
ContextUtils.initialize(context);
|
||||
nativeInitializeAndroidGlobals(context, videoHwAcceleration);
|
||||
}
|
||||
|
||||
// Older signature of initializeAndroidGlobals. The extra parameters are now meaningless.
|
||||
@Deprecated
|
||||
public static boolean initializeAndroidGlobals(Object context, boolean initializeAudio,
|
||||
boolean initializeVideo, boolean videoHwAcceleration) {
|
||||
initializeAndroidGlobals((Context) context, videoHwAcceleration);
|
||||
return true;
|
||||
}
|
||||
|
||||
// Field trial initialization. Must be called before PeerConnectionFactory
|
||||
// is created.
|
||||
public static native void initializeFieldTrials(String fieldTrialsInitString);
|
||||
// Wrapper of webrtc::field_trial::FindFullName. Develop the feature with default behaviour off.
|
||||
// Example usage:
|
||||
// if (PeerConnectionFactory.fieldTrialsFindFullName("WebRTCExperiment").equals("Enabled")) {
|
||||
// method1();
|
||||
// } else {
|
||||
// method2();
|
||||
// }
|
||||
public static String fieldTrialsFindFullName(String name) {
|
||||
return nativeLibLoaded ? nativeFieldTrialsFindFullName(name) : "";
|
||||
}
|
||||
private static native String nativeFieldTrialsFindFullName(String name);
|
||||
// Internal tracing initialization. Must be called before PeerConnectionFactory is created to
|
||||
// prevent racing with tracing code.
|
||||
public static native void initializeInternalTracer();
|
||||
// Internal tracing shutdown, called to prevent resource leaks. Must be called after
|
||||
// PeerConnectionFactory is gone to prevent races with code performing tracing.
|
||||
public static native void shutdownInternalTracer();
|
||||
// Start/stop internal capturing of internal tracing.
|
||||
public static native boolean startInternalTracingCapture(String tracing_filename);
|
||||
public static native void stopInternalTracingCapture();
|
||||
|
||||
@Deprecated
|
||||
public PeerConnectionFactory() {
|
||||
this(null);
|
||||
}
|
||||
|
||||
// Note: initializeAndroidGlobals must be called at least once before
|
||||
// constructing a PeerConnectionFactory.
|
||||
public PeerConnectionFactory(Options options) {
|
||||
this(options, null /* encoderFactory */, null /* decoderFactory */);
|
||||
}
|
||||
|
||||
public PeerConnectionFactory(
|
||||
Options options, VideoEncoderFactory encoderFactory, VideoDecoderFactory decoderFactory) {
|
||||
nativeFactory = nativeCreatePeerConnectionFactory(options, encoderFactory, decoderFactory);
|
||||
if (nativeFactory == 0) {
|
||||
throw new RuntimeException("Failed to initialize PeerConnectionFactory!");
|
||||
}
|
||||
}
|
||||
|
||||
public PeerConnection createPeerConnection(PeerConnection.RTCConfiguration rtcConfig,
|
||||
MediaConstraints constraints, PeerConnection.Observer observer) {
|
||||
long nativeObserver = nativeCreateObserver(observer);
|
||||
if (nativeObserver == 0) {
|
||||
return null;
|
||||
}
|
||||
long nativePeerConnection =
|
||||
nativeCreatePeerConnection(nativeFactory, rtcConfig, constraints, nativeObserver);
|
||||
if (nativePeerConnection == 0) {
|
||||
return null;
|
||||
}
|
||||
return new PeerConnection(nativePeerConnection, nativeObserver);
|
||||
}
|
||||
|
||||
public PeerConnection createPeerConnection(List<PeerConnection.IceServer> iceServers,
|
||||
MediaConstraints constraints, PeerConnection.Observer observer) {
|
||||
PeerConnection.RTCConfiguration rtcConfig = new PeerConnection.RTCConfiguration(iceServers);
|
||||
return createPeerConnection(rtcConfig, constraints, observer);
|
||||
}
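// Illustrative usage sketch (not part of the original file): a typical setup
// flow. The STUN URI is a placeholder and "observer" is assumed to be an
// application-provided PeerConnection.Observer.
//
//   PeerConnectionFactory.initializeAndroidGlobals(appContext, true /* videoHwAcceleration */);
//   PeerConnectionFactory factory = new PeerConnectionFactory(new PeerConnectionFactory.Options());
//   List<PeerConnection.IceServer> iceServers = java.util.Collections.singletonList(
//       PeerConnection.IceServer.builder("stun:stun.example.org:19302").createIceServer());
//   PeerConnection peerConnection =
//       factory.createPeerConnection(iceServers, new MediaConstraints(), observer);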
|
||||
|
||||
public MediaStream createLocalMediaStream(String label) {
|
||||
return new MediaStream(nativeCreateLocalMediaStream(nativeFactory, label));
|
||||
}
|
||||
|
||||
public VideoSource createVideoSource(VideoCapturer capturer) {
|
||||
final EglBase.Context eglContext =
|
||||
localEglbase == null ? null : localEglbase.getEglBaseContext();
|
||||
final SurfaceTextureHelper surfaceTextureHelper =
|
||||
SurfaceTextureHelper.create(VIDEO_CAPTURER_THREAD_NAME, eglContext);
|
||||
long nativeAndroidVideoTrackSource =
|
||||
nativeCreateVideoSource(nativeFactory, surfaceTextureHelper, capturer.isScreencast());
|
||||
VideoCapturer.CapturerObserver capturerObserver =
|
||||
new AndroidVideoTrackSourceObserver(nativeAndroidVideoTrackSource);
|
||||
capturer.initialize(
|
||||
surfaceTextureHelper, ContextUtils.getApplicationContext(), capturerObserver);
|
||||
return new VideoSource(nativeAndroidVideoTrackSource);
|
||||
}
|
||||
|
||||
public VideoTrack createVideoTrack(String id, VideoSource source) {
|
||||
return new VideoTrack(nativeCreateVideoTrack(nativeFactory, id, source.nativeSource));
|
||||
}
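// Illustrative usage sketch (not part of the original file): wiring a capturer
// to a video track. "capturer" is assumed to be a VideoCapturer created by the
// application (for example via Camera2Enumerator); the resolution, frame rate
// and track id below are arbitrary.
//
//   VideoSource videoSource = factory.createVideoSource(capturer);
//   capturer.startCapture(1280, 720, 30);
//   VideoTrack videoTrack = factory.createVideoTrack("video0", videoSource);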
|
||||
|
||||
public AudioSource createAudioSource(MediaConstraints constraints) {
|
||||
return new AudioSource(nativeCreateAudioSource(nativeFactory, constraints));
|
||||
}
|
||||
|
||||
public AudioTrack createAudioTrack(String id, AudioSource source) {
|
||||
return new AudioTrack(nativeCreateAudioTrack(nativeFactory, id, source.nativeSource));
|
||||
}
|
||||
|
||||
// Starts recording an AEC dump. Ownership of the file is transferred to the
|
||||
// native code. If an AEC dump is already in progress, it will be stopped and
|
||||
// a new one will start using the provided file.
|
||||
public boolean startAecDump(int file_descriptor, int filesize_limit_bytes) {
|
||||
return nativeStartAecDump(nativeFactory, file_descriptor, filesize_limit_bytes);
|
||||
}
|
||||
|
||||
// Stops recording an AEC dump. If no AEC dump is currently being recorded,
|
||||
// this call will have no effect.
|
||||
public void stopAecDump() {
|
||||
nativeStopAecDump(nativeFactory);
|
||||
}
|
||||
|
||||
@Deprecated
|
||||
public void setOptions(Options options) {
|
||||
nativeSetOptions(nativeFactory, options);
|
||||
}
|
||||
|
||||
/** Set the EGL context used by HW Video encoding and decoding.
|
||||
*
|
||||
* @param localEglContext Must be the same as used by VideoCapturerAndroid and any local video
|
||||
* renderer.
|
||||
* @param remoteEglContext Must be the same as used by any remote video renderer.
|
||||
*/
|
||||
public void setVideoHwAccelerationOptions(
|
||||
EglBase.Context localEglContext, EglBase.Context remoteEglContext) {
|
||||
if (localEglbase != null) {
|
||||
Logging.w(TAG, "Egl context already set.");
|
||||
localEglbase.release();
|
||||
}
|
||||
if (remoteEglbase != null) {
|
||||
Logging.w(TAG, "Egl context already set.");
|
||||
remoteEglbase.release();
|
||||
}
|
||||
localEglbase = EglBase.create(localEglContext);
|
||||
remoteEglbase = EglBase.create(remoteEglContext);
|
||||
nativeSetVideoHwAccelerationOptions(
|
||||
nativeFactory, localEglbase.getEglBaseContext(), remoteEglbase.getEglBaseContext());
|
||||
}
|
||||
|
||||
public void dispose() {
|
||||
nativeFreeFactory(nativeFactory);
|
||||
networkThread = null;
|
||||
workerThread = null;
|
||||
signalingThread = null;
|
||||
if (localEglbase != null)
|
||||
localEglbase.release();
|
||||
if (remoteEglbase != null)
|
||||
remoteEglbase.release();
|
||||
}
|
||||
|
||||
public void threadsCallbacks() {
|
||||
nativeThreadsCallbacks(nativeFactory);
|
||||
}
|
||||
|
||||
private static void printStackTrace(Thread thread, String threadName) {
|
||||
if (thread != null) {
|
||||
StackTraceElement[] stackTraces = thread.getStackTrace();
|
||||
if (stackTraces.length > 0) {
|
||||
Logging.d(TAG, threadName + " stack trace:");
|
||||
for (StackTraceElement stackTrace : stackTraces) {
|
||||
Logging.d(TAG, stackTrace.toString());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public static void printStackTraces() {
|
||||
printStackTrace(networkThread, "Network thread");
|
||||
printStackTrace(workerThread, "Worker thread");
|
||||
printStackTrace(signalingThread, "Signaling thread");
|
||||
}
|
||||
|
||||
private static void onNetworkThreadReady() {
|
||||
networkThread = Thread.currentThread();
|
||||
Logging.d(TAG, "onNetworkThreadReady");
|
||||
}
|
||||
|
||||
private static void onWorkerThreadReady() {
|
||||
workerThread = Thread.currentThread();
|
||||
Logging.d(TAG, "onWorkerThreadReady");
|
||||
}
|
||||
|
||||
private static void onSignalingThreadReady() {
|
||||
signalingThread = Thread.currentThread();
|
||||
Logging.d(TAG, "onSignalingThreadReady");
|
||||
}
|
||||
|
||||
private static native long nativeCreatePeerConnectionFactory(
|
||||
Options options, VideoEncoderFactory encoderFactory, VideoDecoderFactory decoderFactory);
|
||||
|
||||
private static native long nativeCreateObserver(PeerConnection.Observer observer);
|
||||
|
||||
private static native long nativeCreatePeerConnection(long nativeFactory,
|
||||
PeerConnection.RTCConfiguration rtcConfig, MediaConstraints constraints, long nativeObserver);
|
||||
|
||||
private static native long nativeCreateLocalMediaStream(long nativeFactory, String label);
|
||||
|
||||
private static native long nativeCreateVideoSource(
|
||||
long nativeFactory, SurfaceTextureHelper surfaceTextureHelper, boolean is_screencast);
|
||||
|
||||
private static native long nativeCreateVideoTrack(
|
||||
long nativeFactory, String id, long nativeVideoSource);
|
||||
|
||||
private static native long nativeCreateAudioSource(
|
||||
long nativeFactory, MediaConstraints constraints);
|
||||
|
||||
private static native long nativeCreateAudioTrack(
|
||||
long nativeFactory, String id, long nativeSource);
|
||||
|
||||
private static native boolean nativeStartAecDump(
|
||||
long nativeFactory, int file_descriptor, int filesize_limit_bytes);
|
||||
|
||||
private static native void nativeStopAecDump(long nativeFactory);
|
||||
|
||||
@Deprecated public native void nativeSetOptions(long nativeFactory, Options options);
|
||||
|
||||
private static native void nativeSetVideoHwAccelerationOptions(
|
||||
long nativeFactory, Object localEGLContext, Object remoteEGLContext);
|
||||
|
||||
private static native void nativeThreadsCallbacks(long nativeFactory);
|
||||
|
||||
private static native void nativeFreeFactory(long nativeFactory);
|
||||
}
|
||||
105
sdk/android/api/org/webrtc/RTCStats.java
Normal file
@ -0,0 +1,105 @@
|
||||
/*
|
||||
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
package org.webrtc;
|
||||
|
||||
import java.util.Map;
|
||||
|
||||
/**
|
||||
* Java version of webrtc::RTCStats. Represents an RTCStats object, as
|
||||
* described in https://w3c.github.io/webrtc-stats/. The |id|, |timestampUs|
|
||||
* and |type| accessors have the same meaning for this class as for the
|
||||
* RTCStats dictionary. Each RTCStatsReport produced by getStats contains
|
||||
* multiple RTCStats objects; one for each underlying object (codec, stream,
|
||||
* transport, etc.) that was inspected to produce the stats.
|
||||
*/
|
||||
public class RTCStats {
|
||||
private final long timestampUs;
|
||||
private final String type;
|
||||
private final String id;
|
||||
private final Map<String, Object> members;
|
||||
|
||||
public RTCStats(long timestampUs, String type, String id, Map<String, Object> members) {
|
||||
this.timestampUs = timestampUs;
|
||||
this.type = type;
|
||||
this.id = id;
|
||||
this.members = members;
|
||||
}
|
||||
|
||||
// Timestamp in microseconds.
|
||||
public double getTimestampUs() {
|
||||
return timestampUs;
|
||||
}
|
||||
|
||||
// Equivalent to RTCStatsType in the stats spec. Indicates the type of the
|
||||
// object that was inspected to produce the stats.
|
||||
public String getType() {
|
||||
return type;
|
||||
}
|
||||
|
||||
// Unique ID representing this stats object. May be referred to by members of
|
||||
// other stats objects.
|
||||
public String getId() {
|
||||
return id;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns map of member names to values. Returns as an ordered map so that
|
||||
* the stats object can be serialized with a consistent ordering.
|
||||
*
|
||||
* Values will be one of the following objects:
|
||||
* - Boolean
|
||||
* - Integer (for 32-bit signed integers)
|
||||
* - Long (for 32-bit unsigned and 64-bit signed integers)
|
||||
* - BigInteger (for 64-bit unsigned integers)
|
||||
* - Double
|
||||
* - String
|
||||
* - The array form of any of the above (e.g., Integer[])
|
||||
*/
|
||||
public Map<String, Object> getMembers() {
|
||||
return members;
|
||||
}
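// Illustrative usage sketch (not part of the original file): reading a single
// member from a stats object. "bytesSent" is just an example key; per the
// comment above, 64-bit unsigned values arrive as BigInteger.
//
//   Object bytesSent = stats.getMembers().get("bytesSent");
//   if (bytesSent instanceof java.math.BigInteger) {
//     Logging.d("StatsExample", "bytesSent=" + bytesSent);
//   }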
|
||||
|
||||
public String toString() {
|
||||
StringBuilder builder = new StringBuilder();
|
||||
builder.append("{ timestampUs: ")
|
||||
.append(timestampUs)
|
||||
.append(", type: ")
|
||||
.append(type)
|
||||
.append(", id: ")
|
||||
.append(id);
|
||||
boolean first = true;
|
||||
for (Map.Entry<String, Object> entry : members.entrySet()) {
|
||||
builder.append(", ").append(entry.getKey()).append(": ");
|
||||
appendValue(builder, entry.getValue());
|
||||
}
|
||||
builder.append(" }");
|
||||
return builder.toString();
|
||||
}
|
||||
|
||||
private static void appendValue(StringBuilder builder, Object value) {
|
||||
if (value instanceof Object[]) {
|
||||
Object[] arrayValue = (Object[]) value;
|
||||
builder.append('[');
|
||||
for (int i = 0; i < arrayValue.length; ++i) {
|
||||
if (i != 0) {
|
||||
builder.append(", ");
|
||||
}
|
||||
appendValue(builder, arrayValue[i]);
|
||||
}
|
||||
builder.append(']');
|
||||
} else if (value instanceof String) {
|
||||
// Enclose strings in quotes to make it clear they're strings.
|
||||
builder.append('"').append(value).append('"');
|
||||
} else {
|
||||
builder.append(value);
|
||||
}
|
||||
}
|
||||
}
|
||||
17
sdk/android/api/org/webrtc/RTCStatsCollectorCallback.java
Normal file
@ -0,0 +1,17 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/

package org.webrtc;

/** Interface for receiving stats reports (see webrtc::RTCStatsCollectorCallback). */
public interface RTCStatsCollectorCallback {
/** Called when the stats report is ready. */
public void onStatsDelivered(RTCStatsReport report);
}
54
sdk/android/api/org/webrtc/RTCStatsReport.java
Normal file
@ -0,0 +1,54 @@
|
||||
/*
|
||||
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
package org.webrtc;
|
||||
|
||||
import java.util.Map;
|
||||
|
||||
/**
|
||||
* Java version of webrtc::RTCStatsReport. Each RTCStatsReport produced by
|
||||
* getStats contains multiple RTCStats objects; one for each underlying object
|
||||
* (codec, stream, transport, etc.) that was inspected to produce the stats.
|
||||
*/
|
||||
public class RTCStatsReport {
|
||||
private final long timestampUs;
|
||||
private final Map<String, RTCStats> stats;
|
||||
|
||||
public RTCStatsReport(long timestampUs, Map<String, RTCStats> stats) {
|
||||
this.timestampUs = timestampUs;
|
||||
this.stats = stats;
|
||||
}
|
||||
|
||||
// Timestamp in microseconds.
|
||||
public double getTimestampUs() {
|
||||
return timestampUs;
|
||||
}
|
||||
|
||||
// Map of stats object IDs to stats objects. Can be used to easily look up
|
||||
// other stats objects, when they refer to each other by ID.
|
||||
public Map<String, RTCStats> getStatsMap() {
|
||||
return stats;
|
||||
}
|
||||
|
||||
public String toString() {
|
||||
StringBuilder builder = new StringBuilder();
|
||||
builder.append("{ timestampUs: ").append(timestampUs).append(", stats: [\n");
|
||||
boolean first = true;
|
||||
for (RTCStats stat : stats.values()) {
|
||||
if (!first) {
|
||||
builder.append(",\n");
|
||||
}
|
||||
builder.append(stat);
|
||||
first = false;
|
||||
}
|
||||
builder.append(" ] }");
|
||||
return builder.toString();
|
||||
}
|
||||
}
|
||||
290
sdk/android/api/org/webrtc/RendererCommon.java
Normal file
@ -0,0 +1,290 @@
|
||||
/*
|
||||
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
package org.webrtc;
|
||||
|
||||
import android.graphics.Point;
|
||||
import android.opengl.Matrix;
|
||||
import android.view.View;
|
||||
|
||||
/**
|
||||
* Static helper functions for renderer implementations.
|
||||
*/
|
||||
public class RendererCommon {
|
||||
/** Interface for reporting rendering events. */
|
||||
public static interface RendererEvents {
|
||||
/**
|
||||
* Callback fired once the first frame is rendered.
|
||||
*/
|
||||
public void onFirstFrameRendered();
|
||||
|
||||
/**
|
||||
* Callback fired when rendered frame resolution or rotation has changed.
|
||||
*/
|
||||
public void onFrameResolutionChanged(int videoWidth, int videoHeight, int rotation);
|
||||
}
|
||||
|
||||
/** Interface for rendering frames on an EGLSurface. */
|
||||
public static interface GlDrawer {
|
||||
/**
|
||||
* Functions for drawing frames with different sources. The rendering surface target is
|
||||
* implied by the current EGL context of the calling thread and requires no explicit argument.
|
||||
* The coordinates specify the viewport location on the surface target.
|
||||
*/
|
||||
void drawOes(int oesTextureId, float[] texMatrix, int frameWidth, int frameHeight,
|
||||
int viewportX, int viewportY, int viewportWidth, int viewportHeight);
|
||||
void drawRgb(int textureId, float[] texMatrix, int frameWidth, int frameHeight, int viewportX,
|
||||
int viewportY, int viewportWidth, int viewportHeight);
|
||||
void drawYuv(int[] yuvTextures, float[] texMatrix, int frameWidth, int frameHeight,
|
||||
int viewportX, int viewportY, int viewportWidth, int viewportHeight);
|
||||
|
||||
/**
|
||||
* Release all GL resources. This needs to be done manually, otherwise resources may leak.
|
||||
*/
|
||||
void release();
|
||||
}
|
||||
|
||||
/**
|
||||
* Helper class for determining layout size based on layout requirements, scaling type, and video
|
||||
* aspect ratio.
|
||||
*/
|
||||
public static class VideoLayoutMeasure {
|
||||
// The scaling type determines how the video will fill the allowed layout area in measure(). It
|
||||
// can be specified separately for the case when video has matched orientation with layout size
|
||||
// and when there is an orientation mismatch.
|
||||
private ScalingType scalingTypeMatchOrientation = ScalingType.SCALE_ASPECT_BALANCED;
|
||||
private ScalingType scalingTypeMismatchOrientation = ScalingType.SCALE_ASPECT_BALANCED;
|
||||
|
||||
public void setScalingType(ScalingType scalingType) {
|
||||
this.scalingTypeMatchOrientation = scalingType;
|
||||
this.scalingTypeMismatchOrientation = scalingType;
|
||||
}
|
||||
|
||||
public void setScalingType(
|
||||
ScalingType scalingTypeMatchOrientation, ScalingType scalingTypeMismatchOrientation) {
|
||||
this.scalingTypeMatchOrientation = scalingTypeMatchOrientation;
|
||||
this.scalingTypeMismatchOrientation = scalingTypeMismatchOrientation;
|
||||
}
|
||||
|
||||
public Point measure(int widthSpec, int heightSpec, int frameWidth, int frameHeight) {
|
||||
// Calculate max allowed layout size.
|
||||
final int maxWidth = View.getDefaultSize(Integer.MAX_VALUE, widthSpec);
|
||||
final int maxHeight = View.getDefaultSize(Integer.MAX_VALUE, heightSpec);
|
||||
if (frameWidth == 0 || frameHeight == 0 || maxWidth == 0 || maxHeight == 0) {
|
||||
return new Point(maxWidth, maxHeight);
|
||||
}
|
||||
// Calculate desired display size based on scaling type, video aspect ratio,
|
||||
// and maximum layout size.
|
||||
final float frameAspect = frameWidth / (float) frameHeight;
|
||||
final float displayAspect = maxWidth / (float) maxHeight;
|
||||
final ScalingType scalingType = (frameAspect > 1.0f) == (displayAspect > 1.0f)
|
||||
? scalingTypeMatchOrientation
|
||||
: scalingTypeMismatchOrientation;
|
||||
final Point layoutSize = getDisplaySize(scalingType, frameAspect, maxWidth, maxHeight);
|
||||
|
||||
// If the measure specification is forcing a specific size - yield.
|
||||
if (View.MeasureSpec.getMode(widthSpec) == View.MeasureSpec.EXACTLY) {
|
||||
layoutSize.x = maxWidth;
|
||||
}
|
||||
if (View.MeasureSpec.getMode(heightSpec) == View.MeasureSpec.EXACTLY) {
|
||||
layoutSize.y = maxHeight;
|
||||
}
|
||||
return layoutSize;
|
||||
}
|
||||
}
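// Illustrative usage sketch (not part of the original file): using
// VideoLayoutMeasure from a custom View. "frameWidth" and "frameHeight" are
// assumed to track the most recently rendered frame size.
//
//   private final RendererCommon.VideoLayoutMeasure videoLayoutMeasure =
//       new RendererCommon.VideoLayoutMeasure();
//
//   @Override
//   protected void onMeasure(int widthSpec, int heightSpec) {
//     videoLayoutMeasure.setScalingType(RendererCommon.ScalingType.SCALE_ASPECT_FIT);
//     Point size = videoLayoutMeasure.measure(widthSpec, heightSpec, frameWidth, frameHeight);
//     setMeasuredDimension(size.x, size.y);
//   }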
|
||||
|
||||
// Types of video scaling:
|
||||
// SCALE_ASPECT_FIT - video frame is scaled to fit the size of the view by
|
||||
// maintaining the aspect ratio (black borders may be displayed).
|
||||
// SCALE_ASPECT_FILL - video frame is scaled to fill the size of the view by
|
||||
// maintaining the aspect ratio. Some portion of the video frame may be
|
||||
// clipped.
|
||||
// SCALE_ASPECT_BALANCED - Compromise between FIT and FILL. Video frame will fill as much as
|
||||
// possible of the view while maintaining aspect ratio, under the constraint that at least
|
||||
// |BALANCED_VISIBLE_FRACTION| of the frame content will be shown.
|
||||
public static enum ScalingType { SCALE_ASPECT_FIT, SCALE_ASPECT_FILL, SCALE_ASPECT_BALANCED }
|
||||
// The minimum fraction of the frame content that will be shown for |SCALE_ASPECT_BALANCED|.
|
||||
// This limits excessive cropping when adjusting display size.
|
||||
private static float BALANCED_VISIBLE_FRACTION = 0.5625f;
|
||||
// clang-format off
|
||||
public static final float[] identityMatrix() {
|
||||
return new float[] {
|
||||
1, 0, 0, 0,
|
||||
0, 1, 0, 0,
|
||||
0, 0, 1, 0,
|
||||
0, 0, 0, 1};
|
||||
}
|
||||
// Matrix with transform y' = 1 - y.
|
||||
public static final float[] verticalFlipMatrix() {
|
||||
return new float[] {
|
||||
1, 0, 0, 0,
|
||||
0, -1, 0, 0,
|
||||
0, 0, 1, 0,
|
||||
0, 1, 0, 1};
|
||||
}
|
||||
|
||||
// Matrix with transform x' = 1 - x.
|
||||
public static final float[] horizontalFlipMatrix() {
|
||||
return new float[] {
|
||||
-1, 0, 0, 0,
|
||||
0, 1, 0, 0,
|
||||
0, 0, 1, 0,
|
||||
1, 0, 0, 1};
|
||||
}
|
||||
// clang-format on
|
||||
|
||||
/**
|
||||
* Returns texture matrix that will have the effect of rotating the frame |rotationDegree|
|
||||
* clockwise when rendered.
|
||||
*/
|
||||
public static float[] rotateTextureMatrix(float[] textureMatrix, float rotationDegree) {
|
||||
final float[] rotationMatrix = new float[16];
|
||||
Matrix.setRotateM(rotationMatrix, 0, rotationDegree, 0, 0, 1);
|
||||
adjustOrigin(rotationMatrix);
|
||||
return multiplyMatrices(textureMatrix, rotationMatrix);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns new matrix with the result of a * b.
|
||||
*/
|
||||
public static float[] multiplyMatrices(float[] a, float[] b) {
|
||||
final float[] resultMatrix = new float[16];
|
||||
Matrix.multiplyMM(resultMatrix, 0, a, 0, b, 0);
|
||||
return resultMatrix;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns layout transformation matrix that applies an optional mirror effect and compensates
|
||||
* for video vs display aspect ratio.
|
||||
*/
|
||||
public static float[] getLayoutMatrix(
|
||||
boolean mirror, float videoAspectRatio, float displayAspectRatio) {
|
||||
float scaleX = 1;
|
||||
float scaleY = 1;
|
||||
// Scale X or Y dimension so that video and display size have same aspect ratio.
|
||||
if (displayAspectRatio > videoAspectRatio) {
|
||||
scaleY = videoAspectRatio / displayAspectRatio;
|
||||
} else {
|
||||
scaleX = displayAspectRatio / videoAspectRatio;
|
||||
}
|
||||
// Apply optional horizontal flip.
|
||||
if (mirror) {
|
||||
scaleX *= -1;
|
||||
}
|
||||
final float matrix[] = new float[16];
|
||||
Matrix.setIdentityM(matrix, 0);
|
||||
Matrix.scaleM(matrix, 0, scaleX, scaleY, 1);
|
||||
adjustOrigin(matrix);
|
||||
return matrix;
|
||||
}
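// Illustrative usage sketch (not part of the original file): mirroring a local
// preview and compensating for a 4:3 frame shown in a 16:9 view. The aspect
// ratios are placeholders.
//
//   float[] layoutMatrix = RendererCommon.getLayoutMatrix(
//       true /* mirror */, 4.0f / 3.0f /* videoAspectRatio */, 16.0f / 9.0f /* displayAspectRatio */);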
|
||||
|
||||
/** Converts a float[16] matrix array to android.graphics.Matrix. */
|
||||
public static android.graphics.Matrix convertMatrixToAndroidGraphicsMatrix(float[] matrix4x4) {
|
||||
// clang-format off
|
||||
float[] values = {
|
||||
matrix4x4[0 * 4 + 0], matrix4x4[1 * 4 + 0], matrix4x4[3 * 4 + 0],
|
||||
matrix4x4[0 * 4 + 1], matrix4x4[1 * 4 + 1], matrix4x4[3 * 4 + 1],
|
||||
matrix4x4[0 * 4 + 3], matrix4x4[1 * 4 + 3], matrix4x4[3 * 4 + 3],
|
||||
};
|
||||
// clang-format on
|
||||
|
||||
android.graphics.Matrix matrix = new android.graphics.Matrix();
|
||||
matrix.setValues(values);
|
||||
return matrix;
|
||||
}
|
||||
|
||||
/** Converts android.graphics.Matrix to a float[16] matrix array. */
|
||||
public static float[] convertMatrixFromAndroidGraphicsMatrix(android.graphics.Matrix matrix) {
|
||||
float[] values = new float[9];
|
||||
matrix.getValues(values);
|
||||
|
||||
// The android.graphics.Matrix looks like this:
|
||||
// [x1 y1 w1]
|
||||
// [x2 y2 w2]
|
||||
// [x3 y3 w3]
|
||||
// We want to construct a matrix that looks like this:
|
||||
// [x1 y1 0 w1]
|
||||
// [x2 y2 0 w2]
|
||||
// [ 0 0 1 0]
|
||||
// [x3 y3 0 w3]
|
||||
// Since it is stored in column-major order, it looks like this:
|
||||
// [x1 x2 0 x3
|
||||
// y1 y2 0 y3
|
||||
// 0 0 1 0
|
||||
// w1 w2 0 w3]
|
||||
// clang-format off
|
||||
float[] matrix4x4 = {
|
||||
values[0 * 3 + 0], values[1 * 3 + 0], 0, values[2 * 3 + 0],
|
||||
values[0 * 3 + 1], values[1 * 3 + 1], 0, values[2 * 3 + 1],
|
||||
0, 0, 1, 0,
|
||||
values[0 * 3 + 2], values[1 * 3 + 2], 0, values[2 * 3 + 2],
|
||||
};
|
||||
// clang-format on
|
||||
return matrix4x4;
|
||||
}
|
||||
|
||||
/**
|
||||
* Calculate display size based on scaling type, video aspect ratio, and maximum display size.
|
||||
*/
|
||||
public static Point getDisplaySize(
|
||||
ScalingType scalingType, float videoAspectRatio, int maxDisplayWidth, int maxDisplayHeight) {
|
||||
return getDisplaySize(convertScalingTypeToVisibleFraction(scalingType), videoAspectRatio,
|
||||
maxDisplayWidth, maxDisplayHeight);
|
||||
}
|
||||
|
||||
/**
|
||||
* Move |matrix| transformation origin to (0.5, 0.5). This is the origin for texture coordinates
|
||||
* that are in the range 0 to 1.
|
||||
*/
|
||||
private static void adjustOrigin(float[] matrix) {
|
||||
// Note that OpenGL is using column-major order.
|
||||
// Pre translate with -0.5 to move coordinates to range [-0.5, 0.5].
|
||||
matrix[12] -= 0.5f * (matrix[0] + matrix[4]);
|
||||
matrix[13] -= 0.5f * (matrix[1] + matrix[5]);
|
||||
// Post translate with 0.5 to move coordinates to range [0, 1].
|
||||
matrix[12] += 0.5f;
|
||||
matrix[13] += 0.5f;
|
||||
}
|
||||
|
||||
/**
|
||||
* Each scaling type has a one-to-one correspondence to a numeric minimum fraction of the video
|
||||
* that must remain visible.
|
||||
*/
|
||||
private static float convertScalingTypeToVisibleFraction(ScalingType scalingType) {
|
||||
switch (scalingType) {
|
||||
case SCALE_ASPECT_FIT:
|
||||
return 1.0f;
|
||||
case SCALE_ASPECT_FILL:
|
||||
return 0.0f;
|
||||
case SCALE_ASPECT_BALANCED:
|
||||
return BALANCED_VISIBLE_FRACTION;
|
||||
default:
|
||||
throw new IllegalArgumentException();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Calculate display size based on minimum fraction of the video that must remain visible,
|
||||
* video aspect ratio, and maximum display size.
|
||||
*/
|
||||
private static Point getDisplaySize(
|
||||
float minVisibleFraction, float videoAspectRatio, int maxDisplayWidth, int maxDisplayHeight) {
|
||||
// If there is no constraint on the amount of cropping, fill the allowed display area.
|
||||
if (minVisibleFraction == 0 || videoAspectRatio == 0) {
|
||||
return new Point(maxDisplayWidth, maxDisplayHeight);
|
||||
}
|
||||
// Each dimension is constrained on max display size and how much we are allowed to crop.
|
||||
final int width = Math.min(
|
||||
maxDisplayWidth, Math.round(maxDisplayHeight / minVisibleFraction * videoAspectRatio));
|
||||
final int height = Math.min(
|
||||
maxDisplayHeight, Math.round(maxDisplayWidth / minVisibleFraction / videoAspectRatio));
|
||||
return new Point(width, height);
|
||||
}
|
||||
}
|
||||
61
sdk/android/api/org/webrtc/RtpParameters.java
Normal file
@ -0,0 +1,61 @@
|
||||
/*
|
||||
* Copyright 2013 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
package org.webrtc;
|
||||
|
||||
import java.util.LinkedList;
|
||||
|
||||
/**
|
||||
* The parameters for an {@code RtpSender}, as defined in
|
||||
* http://w3c.github.io/webrtc-pc/#rtcrtpsender-interface.
|
||||
*
|
||||
* Note: These structures use nullable Integer/etc. types because in the
|
||||
* future, they may be used to construct ORTC RtpSender/RtpReceivers, in
|
||||
* which case "null" will be used to represent "choose the implementation
|
||||
* default value".
|
||||
*/
|
||||
public class RtpParameters {
|
||||
public static class Encoding {
|
||||
// Set to true to cause this encoding to be sent, and false for it not to
|
||||
// be sent.
|
||||
public boolean active = true;
|
||||
// If non-null, this represents the Transport Independent Application
|
||||
// Specific maximum bandwidth defined in RFC3890. If null, there is no
|
||||
// maximum bitrate.
|
||||
public Integer maxBitrateBps;
|
||||
// SSRC to be used by this encoding.
|
||||
// Can't be changed between getParameters/setParameters.
|
||||
public Long ssrc;
|
||||
}
|
||||
|
||||
public static class Codec {
|
||||
// Payload type used to identify this codec in RTP packets.
|
||||
public int payloadType;
|
||||
// Name used to identify the codec. Equivalent to MIME subtype.
|
||||
public String name;
|
||||
// The media type of this codec. Equivalent to MIME top-level type.
|
||||
MediaStreamTrack.MediaType kind;
|
||||
// Clock rate in Hertz.
|
||||
public Integer clockRate;
|
||||
// The number of audio channels used. Set to null for video codecs.
|
||||
public Integer numChannels;
|
||||
}
|
||||
|
||||
public final LinkedList<Encoding> encodings;
|
||||
// Codec parameters can't currently be changed between getParameters and
|
||||
// setParameters. Though in the future it will be possible to reorder them or
|
||||
// remove them.
|
||||
public final LinkedList<Codec> codecs;
|
||||
|
||||
public RtpParameters() {
|
||||
encodings = new LinkedList<Encoding>();
|
||||
codecs = new LinkedList<Codec>();
|
||||
}
|
||||
}
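// Illustrative usage sketch (not part of the original file): capping the send
// bitrate of the first encoding. Assumes the RtpSender exposes getParameters()
// and setParameters(RtpParameters), analogous to RtpReceiver in this SDK.
//
//   RtpParameters parameters = sender.getParameters();
//   if (!parameters.encodings.isEmpty()) {
//     parameters.encodings.get(0).maxBitrateBps = 500 * 1000; // 500 kbps
//     sender.setParameters(parameters);
//   }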
|
||||
80
sdk/android/api/org/webrtc/RtpReceiver.java
Normal file
@ -0,0 +1,80 @@
|
||||
/*
|
||||
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
package org.webrtc;
|
||||
|
||||
/** Java wrapper for a C++ RtpReceiverInterface. */
|
||||
public class RtpReceiver {
|
||||
/** Java wrapper for a C++ RtpReceiverObserverInterface*/
|
||||
public static interface Observer {
|
||||
// Called when the first audio or video packet is received.
|
||||
public void onFirstPacketReceived(MediaStreamTrack.MediaType media_type);
|
||||
}
|
||||
|
||||
final long nativeRtpReceiver;
|
||||
private long nativeObserver;
|
||||
|
||||
private MediaStreamTrack cachedTrack;
|
||||
|
||||
public RtpReceiver(long nativeRtpReceiver) {
|
||||
this.nativeRtpReceiver = nativeRtpReceiver;
|
||||
long track = nativeGetTrack(nativeRtpReceiver);
|
||||
// We can assume that an RtpReceiver always has an associated track.
|
||||
cachedTrack = new MediaStreamTrack(track);
|
||||
}
|
||||
|
||||
public MediaStreamTrack track() {
|
||||
return cachedTrack;
|
||||
}
|
||||
|
||||
public boolean setParameters(RtpParameters parameters) {
|
||||
return nativeSetParameters(nativeRtpReceiver, parameters);
|
||||
}
|
||||
|
||||
public RtpParameters getParameters() {
|
||||
return nativeGetParameters(nativeRtpReceiver);
|
||||
}
|
||||
|
||||
public String id() {
|
||||
return nativeId(nativeRtpReceiver);
|
||||
}
|
||||
|
||||
public void dispose() {
|
||||
cachedTrack.dispose();
|
||||
if (nativeObserver != 0) {
|
||||
nativeUnsetObserver(nativeRtpReceiver, nativeObserver);
|
||||
nativeObserver = 0;
|
||||
}
|
||||
JniCommon.nativeReleaseRef(nativeRtpReceiver);
|
||||
}
|
||||
|
||||
public void SetObserver(Observer observer) {
|
||||
// Unset the existing one before setting a new one.
|
||||
if (nativeObserver != 0) {
|
||||
nativeUnsetObserver(nativeRtpReceiver, nativeObserver);
|
||||
}
|
||||
nativeObserver = nativeSetObserver(nativeRtpReceiver, observer);
|
||||
}
|
||||
|
||||
// This should increment the reference count of the track.
|
||||
// Will be released in dispose().
|
||||
private static native long nativeGetTrack(long nativeRtpReceiver);
|
||||
|
||||
private static native boolean nativeSetParameters(
|
||||
long nativeRtpReceiver, RtpParameters parameters);
|
||||
|
||||
private static native RtpParameters nativeGetParameters(long nativeRtpReceiver);
|
||||
|
||||
private static native String nativeId(long nativeRtpReceiver);
|
||||
|
||||
private static native long nativeSetObserver(long nativeRtpReceiver, Observer observer);
|
||||
|
||||
private static native long nativeUnsetObserver(long nativeRtpReceiver, long nativeObserver);
|
||||
};
|
||||
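As a usage sketch (assuming `receiver` is an RtpReceiver handed to the application, for example via PeerConnection.Observer.onAddTrack), the Observer interface above can be used to log the arrival of the first packet:

// Sketch: log when the first packet for this receiver arrives.
receiver.SetObserver(new RtpReceiver.Observer() {
  @Override
  public void onFirstPacketReceived(MediaStreamTrack.MediaType mediaType) {
    Logging.d("RtpReceiverExample", "First " + mediaType + " packet for " + receiver.id());
  }
});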
103
sdk/android/api/org/webrtc/RtpSender.java
Normal file
103
sdk/android/api/org/webrtc/RtpSender.java
Normal file
@ -0,0 +1,103 @@
|
||||
/*
|
||||
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
package org.webrtc;
|
||||
|
||||
/** Java wrapper for a C++ RtpSenderInterface. */
|
||||
public class RtpSender {
|
||||
final long nativeRtpSender;
|
||||
|
||||
private MediaStreamTrack cachedTrack;
|
||||
private boolean ownsTrack = true;
|
||||
|
||||
private final DtmfSender dtmfSender;
|
||||
|
||||
public RtpSender(long nativeRtpSender) {
|
||||
this.nativeRtpSender = nativeRtpSender;
|
||||
long track = nativeGetTrack(nativeRtpSender);
|
||||
// It may be possible for an RtpSender to be created without a track.
|
||||
cachedTrack = (track != 0) ? new MediaStreamTrack(track) : null;
|
||||
|
||||
long nativeDtmfSender = nativeGetDtmfSender(nativeRtpSender);
|
||||
dtmfSender = (nativeDtmfSender != 0) ? new DtmfSender(nativeDtmfSender) : null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Starts sending a new track, without requiring additional SDP negotiation.
|
||||
* <p>
|
||||
* Note: This is equivalent to replaceTrack in the official WebRTC API. It
|
||||
* was just implemented before the standards group settled on a name.
|
||||
*
|
||||
* @param takeOwnership If true, the RtpSender takes ownership of the track
|
||||
* from the caller, and will auto-dispose of it when no
|
||||
* longer needed. |takeOwnership| should only be used if
|
||||
* the caller owns the track; it is not appropriate when
|
||||
* the track is owned by, for example, another RtpSender
|
||||
* or a MediaStream.
|
||||
* @return true on success and false on failure.
|
||||
*/
|
||||
public boolean setTrack(MediaStreamTrack track, boolean takeOwnership) {
|
||||
if (!nativeSetTrack(nativeRtpSender, (track == null) ? 0 : track.nativeTrack)) {
|
||||
return false;
|
||||
}
|
||||
if (cachedTrack != null && ownsTrack) {
|
||||
cachedTrack.dispose();
|
||||
}
|
||||
cachedTrack = track;
|
||||
ownsTrack = takeOwnership;
|
||||
return true;
|
||||
}
|
||||
|
||||
public MediaStreamTrack track() {
|
||||
return cachedTrack;
|
||||
}
|
||||
|
||||
public boolean setParameters(RtpParameters parameters) {
|
||||
return nativeSetParameters(nativeRtpSender, parameters);
|
||||
}
|
||||
|
||||
public RtpParameters getParameters() {
|
||||
return nativeGetParameters(nativeRtpSender);
|
||||
}
|
||||
|
||||
public String id() {
|
||||
return nativeId(nativeRtpSender);
|
||||
}
|
||||
|
||||
public DtmfSender dtmf() {
|
||||
return dtmfSender;
|
||||
}
|
||||
|
||||
public void dispose() {
|
||||
if (dtmfSender != null) {
|
||||
dtmfSender.dispose();
|
||||
}
|
||||
if (cachedTrack != null && ownsTrack) {
|
||||
cachedTrack.dispose();
|
||||
}
|
||||
JniCommon.nativeReleaseRef(nativeRtpSender);
|
||||
}
|
||||
|
||||
private static native boolean nativeSetTrack(long nativeRtpSender, long nativeTrack);
|
||||
|
||||
// This should increment the reference count of the track.
|
||||
// Will be released in dispose() or setTrack().
|
||||
private static native long nativeGetTrack(long nativeRtpSender);
|
||||
|
||||
// This should increment the reference count of the DTMF sender.
|
||||
// Will be released in dispose().
|
||||
private static native long nativeGetDtmfSender(long nativeRtpSender);
|
||||
|
||||
private static native boolean nativeSetParameters(long nativeRtpSender, RtpParameters parameters);
|
||||
|
||||
private static native RtpParameters nativeGetParameters(long nativeRtpSender);
|
||||
|
||||
private static native String nativeId(long nativeRtpSender);
|
||||
};
|
||||
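A usage sketch for setTrack() and dtmf() (assumptions: `videoSender` and `audioSender` are RtpSenders owned by the application, `newTrack` is a freshly created VideoTrack, and DtmfSender.insertDtmf(String, int, int) is available on the companion DtmfSender class, which is not shown in this hunk):

// Sketch: swap the outgoing video track without SDP renegotiation.
if (!videoSender.setTrack(newTrack, true /* takeOwnership */)) {
  Logging.e("RtpSenderExample", "setTrack failed for sender " + videoSender.id());
}

// Sketch: send DTMF tones on an audio sender; dtmf() may return null.
DtmfSender dtmf = audioSender.dtmf();
if (dtmf != null) {
  dtmf.insertDtmf("1234#", 100 /* durationMs */, 70 /* interToneGapMs */); // signature assumed
}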
203
sdk/android/api/org/webrtc/ScreenCapturerAndroid.java
Normal file
203
sdk/android/api/org/webrtc/ScreenCapturerAndroid.java
Normal file
@ -0,0 +1,203 @@
|
||||
/*
|
||||
* Copyright 2016 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
package org.webrtc;
|
||||
|
||||
import android.annotation.TargetApi;
|
||||
import android.app.Activity;
|
||||
import android.content.Context;
|
||||
import android.content.Intent;
|
||||
import android.hardware.display.DisplayManager;
|
||||
import android.hardware.display.VirtualDisplay;
|
||||
import android.media.projection.MediaProjection;
|
||||
import android.media.projection.MediaProjectionManager;
|
||||
import android.view.Surface;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
/**
|
||||
* An implementation of VideoCapturer to capture the screen content as a video stream.
|
||||
* Capturing is done by {@code MediaProjection} on a {@code SurfaceTexture}. We interact with this
|
||||
* {@code SurfaceTexture} using a {@code SurfaceTextureHelper}.
|
||||
* The {@code SurfaceTextureHelper} is created by the native code and passed to this capturer in
|
||||
* {@code VideoCapturer.initialize()}. On receiving a new frame, this capturer passes it
|
||||
* as a texture to the native code via {@code CapturerObserver.onTextureFrameCaptured()}. This takes
|
||||
* place on the HandlerThread of the given {@code SurfaceTextureHelper}. When done with each frame,
|
||||
* the native code returns the buffer to the {@code SurfaceTextureHelper} to be used for new
|
||||
* frames. At any time, at most one frame is being processed.
|
||||
*/
|
||||
@TargetApi(21)
|
||||
public class ScreenCapturerAndroid
|
||||
implements VideoCapturer, SurfaceTextureHelper.OnTextureFrameAvailableListener {
|
||||
private static final int DISPLAY_FLAGS =
|
||||
DisplayManager.VIRTUAL_DISPLAY_FLAG_PUBLIC | DisplayManager.VIRTUAL_DISPLAY_FLAG_PRESENTATION;
|
||||
// DPI for VirtualDisplay, does not seem to matter for us.
|
||||
private static final int VIRTUAL_DISPLAY_DPI = 400;
|
||||
|
||||
private final Intent mediaProjectionPermissionResultData;
|
||||
private final MediaProjection.Callback mediaProjectionCallback;
|
||||
|
||||
private int width;
|
||||
private int height;
|
||||
private VirtualDisplay virtualDisplay;
|
||||
private SurfaceTextureHelper surfaceTextureHelper;
|
||||
private CapturerObserver capturerObserver;
|
||||
private long numCapturedFrames = 0;
|
||||
private MediaProjection mediaProjection;
|
||||
private boolean isDisposed = false;
|
||||
private MediaProjectionManager mediaProjectionManager;
|
||||
|
||||
/**
|
||||
* Constructs a new Screen Capturer.
|
||||
*
|
||||
* @param mediaProjectionPermissionResultData the result data of MediaProjection permission
|
||||
* activity; the calling app must validate that the result code is Activity.RESULT_OK before
|
||||
* calling this method.
|
||||
* @param mediaProjectionCallback MediaProjection callback to implement application specific
|
||||
* logic in events such as when the user revokes a previously granted capture permission.
|
||||
**/
|
||||
public ScreenCapturerAndroid(Intent mediaProjectionPermissionResultData,
|
||||
MediaProjection.Callback mediaProjectionCallback) {
|
||||
this.mediaProjectionPermissionResultData = mediaProjectionPermissionResultData;
|
||||
this.mediaProjectionCallback = mediaProjectionCallback;
|
||||
}
|
||||
|
||||
private void checkNotDisposed() {
|
||||
if (isDisposed) {
|
||||
throw new RuntimeException("capturer is disposed.");
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public synchronized void initialize(final SurfaceTextureHelper surfaceTextureHelper,
|
||||
final Context applicationContext, final VideoCapturer.CapturerObserver capturerObserver) {
|
||||
checkNotDisposed();
|
||||
|
||||
if (capturerObserver == null) {
|
||||
throw new RuntimeException("capturerObserver not set.");
|
||||
}
|
||||
this.capturerObserver = capturerObserver;
|
||||
|
||||
if (surfaceTextureHelper == null) {
|
||||
throw new RuntimeException("surfaceTextureHelper not set.");
|
||||
}
|
||||
this.surfaceTextureHelper = surfaceTextureHelper;
|
||||
|
||||
mediaProjectionManager = (MediaProjectionManager) applicationContext.getSystemService(
|
||||
Context.MEDIA_PROJECTION_SERVICE);
|
||||
}
|
||||
|
||||
@Override
|
||||
public synchronized void startCapture(
|
||||
final int width, final int height, final int ignoredFramerate) {
|
||||
checkNotDisposed();
|
||||
|
||||
this.width = width;
|
||||
this.height = height;
|
||||
|
||||
mediaProjection = mediaProjectionManager.getMediaProjection(
|
||||
Activity.RESULT_OK, mediaProjectionPermissionResultData);
|
||||
|
||||
// Let MediaProjection callback use the SurfaceTextureHelper thread.
|
||||
mediaProjection.registerCallback(mediaProjectionCallback, surfaceTextureHelper.getHandler());
|
||||
|
||||
createVirtualDisplay();
|
||||
capturerObserver.onCapturerStarted(true);
|
||||
surfaceTextureHelper.startListening(ScreenCapturerAndroid.this);
|
||||
}
|
||||
|
||||
@Override
|
||||
public synchronized void stopCapture() {
|
||||
checkNotDisposed();
|
||||
ThreadUtils.invokeAtFrontUninterruptibly(surfaceTextureHelper.getHandler(), new Runnable() {
|
||||
@Override
|
||||
public void run() {
|
||||
surfaceTextureHelper.stopListening();
|
||||
capturerObserver.onCapturerStopped();
|
||||
|
||||
if (virtualDisplay != null) {
|
||||
virtualDisplay.release();
|
||||
virtualDisplay = null;
|
||||
}
|
||||
|
||||
if (mediaProjection != null) {
|
||||
// Unregister the callback before stopping, otherwise the callback recursively
|
||||
// calls this method.
|
||||
mediaProjection.unregisterCallback(mediaProjectionCallback);
|
||||
mediaProjection.stop();
|
||||
mediaProjection = null;
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
@Override
|
||||
public synchronized void dispose() {
|
||||
isDisposed = true;
|
||||
}
|
||||
|
||||
/**
|
||||
* Changes output video format. This method can be used to scale the output
|
||||
* video, or to change the orientation when, for example, the captured screen is rotated.
|
||||
*
|
||||
* @param width new output video width
|
||||
* @param height new output video height
|
||||
* @param ignoredFramerate ignored
|
||||
*/
|
||||
@Override
|
||||
public synchronized void changeCaptureFormat(
|
||||
final int width, final int height, final int ignoredFramerate) {
|
||||
checkNotDisposed();
|
||||
|
||||
this.width = width;
|
||||
this.height = height;
|
||||
|
||||
if (virtualDisplay == null) {
|
||||
// Capturer is stopped, the virtual display will be created in startCapture().
|
||||
return;
|
||||
}
|
||||
|
||||
// Create a new virtual display on the surfaceTextureHelper thread to avoid interference
|
||||
// with frame processing, which happens on the same thread (we serialize events by running
|
||||
// them on the same thread).
|
||||
ThreadUtils.invokeAtFrontUninterruptibly(surfaceTextureHelper.getHandler(), new Runnable() {
|
||||
@Override
|
||||
public void run() {
|
||||
virtualDisplay.release();
|
||||
createVirtualDisplay();
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
private void createVirtualDisplay() {
|
||||
surfaceTextureHelper.getSurfaceTexture().setDefaultBufferSize(width, height);
|
||||
virtualDisplay = mediaProjection.createVirtualDisplay("WebRTC_ScreenCapture", width, height,
|
||||
VIRTUAL_DISPLAY_DPI, DISPLAY_FLAGS, new Surface(surfaceTextureHelper.getSurfaceTexture()),
|
||||
null /* callback */, null /* callback handler */);
|
||||
}
|
||||
|
||||
// This is called on the internal looper thread of {@code SurfaceTextureHelper}.
|
||||
@Override
|
||||
public void onTextureFrameAvailable(int oesTextureId, float[] transformMatrix, long timestampNs) {
|
||||
numCapturedFrames++;
|
||||
capturerObserver.onTextureFrameCaptured(
|
||||
width, height, oesTextureId, transformMatrix, 0 /* rotation */, timestampNs);
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isScreencast() {
|
||||
return true;
|
||||
}
|
||||
|
||||
public long getNumCapturedFrames() {
|
||||
return numCapturedFrames;
|
||||
}
|
||||
}
|
||||
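The constructor above expects the Intent returned by the MediaProjection permission flow. A minimal sketch of that flow, assuming the code lives in an Activity and CAPTURE_REQUEST_CODE is an arbitrary app-defined constant (Android and org.webrtc imports omitted):

public class ScreenShareActivity extends Activity {
  private static final int CAPTURE_REQUEST_CODE = 1;

  private void requestScreenCapture() {
    MediaProjectionManager manager =
        (MediaProjectionManager) getSystemService(Context.MEDIA_PROJECTION_SERVICE);
    startActivityForResult(manager.createScreenCaptureIntent(), CAPTURE_REQUEST_CODE);
  }

  @Override
  protected void onActivityResult(int requestCode, int resultCode, Intent data) {
    if (requestCode != CAPTURE_REQUEST_CODE || resultCode != RESULT_OK) {
      return;
    }
    VideoCapturer capturer = new ScreenCapturerAndroid(data, new MediaProjection.Callback() {
      @Override
      public void onStop() {
        Logging.d("ScreenShare", "User revoked the screen-capture permission");
      }
    });
    // The capturer is then initialized and started like any other VideoCapturer.
  }
}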
26
sdk/android/api/org/webrtc/SdpObserver.java
Normal file
26
sdk/android/api/org/webrtc/SdpObserver.java
Normal file
@ -0,0 +1,26 @@
|
||||
/*
|
||||
* Copyright 2013 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
package org.webrtc;
|
||||
|
||||
/** Interface for observing SDP-related events. */
|
||||
public interface SdpObserver {
|
||||
/** Called on success of Create{Offer,Answer}(). */
|
||||
public void onCreateSuccess(SessionDescription sdp);
|
||||
|
||||
/** Called on success of Set{Local,Remote}Description(). */
|
||||
public void onSetSuccess();
|
||||
|
||||
/** Called on error of Create{Offer,Answer}(). */
|
||||
public void onCreateFailure(String error);
|
||||
|
||||
/** Called on error of Set{Local,Remote}Description(). */
|
||||
public void onSetFailure(String error);
|
||||
}
|
||||
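Because all four callbacks must be implemented even when only one is interesting, applications commonly wrap this interface in a no-op adapter. A sketch, assuming `peerConnection` is the application's PeerConnection:

// Sketch: empty base class so call sites only override what they need.
class SdpObserverAdapter implements SdpObserver {
  @Override public void onCreateSuccess(SessionDescription sdp) {}
  @Override public void onSetSuccess() {}
  @Override public void onCreateFailure(String error) {}
  @Override public void onSetFailure(String error) {}
}

// Example call site:
peerConnection.createOffer(new SdpObserverAdapter() {
  @Override
  public void onCreateSuccess(SessionDescription sdp) {
    Logging.d("SdpExample", "Created " + sdp.type.canonicalForm());
  }
}, new MediaConstraints());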
43
sdk/android/api/org/webrtc/SessionDescription.java
Normal file
43
sdk/android/api/org/webrtc/SessionDescription.java
Normal file
@ -0,0 +1,43 @@
|
||||
/*
|
||||
* Copyright 2013 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
package org.webrtc;
|
||||
|
||||
import java.util.Locale;
|
||||
|
||||
/**
|
||||
* Description of an RFC 4566 Session.
|
||||
* SDPs are passed as serialized Strings in Java-land and are materialized
|
||||
* to SessionDescriptionInterface as appropriate in the JNI layer.
|
||||
*/
|
||||
public class SessionDescription {
|
||||
/** Java-land enum version of SessionDescriptionInterface's type() string. */
|
||||
public static enum Type {
|
||||
OFFER,
|
||||
PRANSWER,
|
||||
ANSWER;
|
||||
|
||||
public String canonicalForm() {
|
||||
return name().toLowerCase(Locale.US);
|
||||
}
|
||||
|
||||
public static Type fromCanonicalForm(String canonical) {
|
||||
return Type.valueOf(Type.class, canonical.toUpperCase(Locale.US));
|
||||
}
|
||||
}
|
||||
|
||||
public final Type type;
|
||||
public final String description;
|
||||
|
||||
public SessionDescription(Type type, String description) {
|
||||
this.type = type;
|
||||
this.description = description;
|
||||
}
|
||||
}
|
||||
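The canonical form is what typically travels over the application's signaling channel; a round-trip sketch (assuming `sdp` is an existing SessionDescription):

// Sketch: serialize a description for signaling and rebuild it on the remote side.
String typeString = sdp.type.canonicalForm();  // e.g. "offer"
String sdpString = sdp.description;
// ... transmit both strings to the peer over the app's own signaling channel ...
SessionDescription remoteSdp = new SessionDescription(
    SessionDescription.Type.fromCanonicalForm(typeString), sdpString);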
17
sdk/android/api/org/webrtc/StatsObserver.java
Normal file
17
sdk/android/api/org/webrtc/StatsObserver.java
Normal file
@ -0,0 +1,17 @@
|
||||
/*
|
||||
* Copyright 2013 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
package org.webrtc;
|
||||
|
||||
/** Interface for observing Stats reports (see webrtc::StatsObservers). */
|
||||
public interface StatsObserver {
|
||||
/** Called when the reports are ready. */
|
||||
public void onComplete(StatsReport[] reports);
|
||||
}
|
||||
59
sdk/android/api/org/webrtc/StatsReport.java
Normal file
59
sdk/android/api/org/webrtc/StatsReport.java
Normal file
@ -0,0 +1,59 @@
|
||||
/*
|
||||
* Copyright 2013 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
package org.webrtc;
|
||||
|
||||
/** Java version of webrtc::StatsReport. */
|
||||
public class StatsReport {
|
||||
/** Java version of webrtc::StatsReport::Value. */
|
||||
public static class Value {
|
||||
public final String name;
|
||||
public final String value;
|
||||
|
||||
public Value(String name, String value) {
|
||||
this.name = name;
|
||||
this.value = value;
|
||||
}
|
||||
|
||||
public String toString() {
|
||||
StringBuilder builder = new StringBuilder();
|
||||
builder.append("[").append(name).append(": ").append(value).append("]");
|
||||
return builder.toString();
|
||||
}
|
||||
}
|
||||
|
||||
public final String id;
|
||||
public final String type;
|
||||
// Time since 1970-01-01T00:00:00Z in milliseconds.
|
||||
public final double timestamp;
|
||||
public final Value[] values;
|
||||
|
||||
public StatsReport(String id, String type, double timestamp, Value[] values) {
|
||||
this.id = id;
|
||||
this.type = type;
|
||||
this.timestamp = timestamp;
|
||||
this.values = values;
|
||||
}
|
||||
|
||||
public String toString() {
|
||||
StringBuilder builder = new StringBuilder();
|
||||
builder.append("id: ")
|
||||
.append(id)
|
||||
.append(", type: ")
|
||||
.append(type)
|
||||
.append(", timestamp: ")
|
||||
.append(timestamp)
|
||||
.append(", values: ");
|
||||
for (int i = 0; i < values.length; ++i) {
|
||||
builder.append(values[i].toString()).append(", ");
|
||||
}
|
||||
return builder.toString();
|
||||
}
|
||||
}
|
||||
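A collection sketch (PeerConnection.getStats(StatsObserver, MediaStreamTrack) is assumed to be the entry point; that class is not part of this hunk):

// Sketch: dump every legacy stats report to the log.
peerConnection.getStats(new StatsObserver() {
  @Override
  public void onComplete(StatsReport[] reports) {
    for (StatsReport report : reports) {
      Logging.d("StatsExample", report.toString());
    }
  }
}, null /* track: null requests stats for all tracks */);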
311
sdk/android/api/org/webrtc/SurfaceTextureHelper.java
Normal file
311
sdk/android/api/org/webrtc/SurfaceTextureHelper.java
Normal file
@ -0,0 +1,311 @@
|
||||
/*
|
||||
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
package org.webrtc;
|
||||
|
||||
import android.annotation.TargetApi;
|
||||
import android.graphics.Matrix;
|
||||
import android.graphics.SurfaceTexture;
|
||||
import android.opengl.GLES11Ext;
|
||||
import android.opengl.GLES20;
|
||||
import android.os.Build;
|
||||
import android.os.Handler;
|
||||
import android.os.HandlerThread;
|
||||
import android.os.SystemClock;
|
||||
import java.nio.ByteBuffer;
|
||||
import java.nio.FloatBuffer;
|
||||
import java.util.concurrent.Callable;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
import org.webrtc.VideoFrame.I420Buffer;
|
||||
import org.webrtc.VideoFrame.TextureBuffer;
|
||||
|
||||
/**
|
||||
* Helper class to create and synchronize access to a SurfaceTexture. The caller will get notified
|
||||
* of new frames in onTextureFrameAvailable(), and should call returnTextureFrame() when done with
|
||||
* the frame. Only one texture frame can be in flight at once, so returnTextureFrame() must be
|
||||
* called in order to receive a new frame. Call stopListening() to stop receiving new frames. Call
|
||||
* dispose to release all resources once the texture frame is returned.
|
||||
* Note that there is a C++ counterpart of this class that can optionally be used. It is used for
|
||||
* wrapping texture frames into webrtc::VideoFrames and also handles calling returnTextureFrame()
|
||||
* when the webrtc::VideoFrame is no longer used.
|
||||
*/
|
||||
public class SurfaceTextureHelper {
|
||||
private static final String TAG = "SurfaceTextureHelper";
|
||||
/**
|
||||
* Callback interface for being notified that a new texture frame is available. The calls will be
|
||||
* made on the SurfaceTextureHelper handler thread, with a bound EGLContext. The callee is not
|
||||
* allowed to make another EGLContext current on the calling thread.
|
||||
*/
|
||||
public interface OnTextureFrameAvailableListener {
|
||||
abstract void onTextureFrameAvailable(
|
||||
int oesTextureId, float[] transformMatrix, long timestampNs);
|
||||
}
|
||||
|
||||
/**
|
||||
* Construct a new SurfaceTextureHelper sharing OpenGL resources with |sharedContext|. A dedicated
|
||||
* thread and handler are created for handling the SurfaceTexture. May return null if EGL fails to
|
||||
* initialize a pixel buffer surface and make it current.
|
||||
*/
|
||||
public static SurfaceTextureHelper create(
|
||||
final String threadName, final EglBase.Context sharedContext) {
|
||||
final HandlerThread thread = new HandlerThread(threadName);
|
||||
thread.start();
|
||||
final Handler handler = new Handler(thread.getLooper());
|
||||
|
||||
// The onFrameAvailable() callback will be executed on the SurfaceTexture ctor thread. See:
|
||||
// http://grepcode.com/file/repository.grepcode.com/java/ext/com.google.android/android/5.1.1_r1/android/graphics/SurfaceTexture.java#195.
|
||||
// Therefore, in order to control the callback thread on API lvl < 21, the SurfaceTextureHelper
|
||||
// is constructed on the |handler| thread.
|
||||
return ThreadUtils.invokeAtFrontUninterruptibly(handler, new Callable<SurfaceTextureHelper>() {
|
||||
@Override
|
||||
public SurfaceTextureHelper call() {
|
||||
try {
|
||||
return new SurfaceTextureHelper(sharedContext, handler);
|
||||
} catch (RuntimeException e) {
|
||||
Logging.e(TAG, threadName + " create failure", e);
|
||||
return null;
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
private final Handler handler;
|
||||
private final EglBase eglBase;
|
||||
private final SurfaceTexture surfaceTexture;
|
||||
private final int oesTextureId;
|
||||
private YuvConverter yuvConverter;
|
||||
|
||||
// These variables are only accessed from the |handler| thread.
|
||||
private OnTextureFrameAvailableListener listener;
|
||||
// The possible states of this class.
|
||||
private boolean hasPendingTexture = false;
|
||||
private volatile boolean isTextureInUse = false;
|
||||
private boolean isQuitting = false;
|
||||
// |pendingListener| is set in setListener() and the runnable is posted to the handler thread.
|
||||
// setListener() is not allowed to be called again before stopListening(), so this is thread safe.
|
||||
private OnTextureFrameAvailableListener pendingListener;
|
||||
final Runnable setListenerRunnable = new Runnable() {
|
||||
@Override
|
||||
public void run() {
|
||||
Logging.d(TAG, "Setting listener to " + pendingListener);
|
||||
listener = pendingListener;
|
||||
pendingListener = null;
|
||||
// May have a pending frame from the previous capture session - drop it.
|
||||
if (hasPendingTexture) {
|
||||
// Calling updateTexImage() is necessary in order to receive new frames.
|
||||
updateTexImage();
|
||||
hasPendingTexture = false;
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
private SurfaceTextureHelper(EglBase.Context sharedContext, Handler handler) {
|
||||
if (handler.getLooper().getThread() != Thread.currentThread()) {
|
||||
throw new IllegalStateException("SurfaceTextureHelper must be created on the handler thread");
|
||||
}
|
||||
this.handler = handler;
|
||||
|
||||
eglBase = EglBase.create(sharedContext, EglBase.CONFIG_PIXEL_BUFFER);
|
||||
try {
|
||||
// Both these statements have been observed to fail on rare occasions, see BUG=webrtc:5682.
|
||||
eglBase.createDummyPbufferSurface();
|
||||
eglBase.makeCurrent();
|
||||
} catch (RuntimeException e) {
|
||||
// Clean up before rethrowing the exception.
|
||||
eglBase.release();
|
||||
handler.getLooper().quit();
|
||||
throw e;
|
||||
}
|
||||
|
||||
oesTextureId = GlUtil.generateTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES);
|
||||
surfaceTexture = new SurfaceTexture(oesTextureId);
|
||||
setOnFrameAvailableListener(surfaceTexture, (SurfaceTexture st) -> {
|
||||
hasPendingTexture = true;
|
||||
tryDeliverTextureFrame();
|
||||
}, handler);
|
||||
}
|
||||
|
||||
@TargetApi(21)
|
||||
private static void setOnFrameAvailableListener(SurfaceTexture surfaceTexture,
|
||||
SurfaceTexture.OnFrameAvailableListener listener, Handler handler) {
|
||||
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
|
||||
surfaceTexture.setOnFrameAvailableListener(listener, handler);
|
||||
} else {
|
||||
// The documentation states that the listener will be called on an arbitrary thread, but in
|
||||
// practice, it is always the thread on which the SurfaceTexture was constructed. There are
|
||||
// assertions in place in case this ever changes. For API >= 21, we use the new API to
|
||||
// explicitly specify the handler.
|
||||
surfaceTexture.setOnFrameAvailableListener(listener);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Start to stream textures to the given |listener|. If you need to change listener, you need to
|
||||
* call stopListening() first.
|
||||
*/
|
||||
public void startListening(final OnTextureFrameAvailableListener listener) {
|
||||
if (this.listener != null || this.pendingListener != null) {
|
||||
throw new IllegalStateException("SurfaceTextureHelper listener has already been set.");
|
||||
}
|
||||
this.pendingListener = listener;
|
||||
handler.post(setListenerRunnable);
|
||||
}
|
||||
|
||||
/**
|
||||
* Stop listening. The listener set in startListening() is guaranteed to not receive any more
|
||||
* onTextureFrameAvailable() callbacks after this function returns.
|
||||
*/
|
||||
public void stopListening() {
|
||||
Logging.d(TAG, "stopListening()");
|
||||
handler.removeCallbacks(setListenerRunnable);
|
||||
ThreadUtils.invokeAtFrontUninterruptibly(handler, new Runnable() {
|
||||
@Override
|
||||
public void run() {
|
||||
listener = null;
|
||||
pendingListener = null;
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Retrieve the underlying SurfaceTexture. The SurfaceTexture should be passed in to a video
|
||||
* producer such as a camera or decoder.
|
||||
*/
|
||||
public SurfaceTexture getSurfaceTexture() {
|
||||
return surfaceTexture;
|
||||
}
|
||||
|
||||
/**
|
||||
* Retrieve the handler that calls onTextureFrameAvailable(). This handler is valid until
|
||||
* dispose() is called.
|
||||
*/
|
||||
public Handler getHandler() {
|
||||
return handler;
|
||||
}
|
||||
|
||||
/**
|
||||
* Call this function to signal that you are done with the frame received in
|
||||
* onTextureFrameAvailable(). Only one texture frame can be in flight at once, so you must call
|
||||
* this function in order to receive a new frame.
|
||||
*/
|
||||
public void returnTextureFrame() {
|
||||
handler.post(new Runnable() {
|
||||
@Override
|
||||
public void run() {
|
||||
isTextureInUse = false;
|
||||
if (isQuitting) {
|
||||
release();
|
||||
} else {
|
||||
tryDeliverTextureFrame();
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
public boolean isTextureInUse() {
|
||||
return isTextureInUse;
|
||||
}
|
||||
|
||||
/**
|
||||
* Call dispose() to stop receiving frames. OpenGL resources are released and the handler is
|
||||
* stopped when the texture frame has been returned by a call to returnTextureFrame(). You are
|
||||
* guaranteed to not receive any more onTextureFrameAvailable() after this function returns.
|
||||
*/
|
||||
public void dispose() {
|
||||
Logging.d(TAG, "dispose()");
|
||||
ThreadUtils.invokeAtFrontUninterruptibly(handler, new Runnable() {
|
||||
@Override
|
||||
public void run() {
|
||||
isQuitting = true;
|
||||
if (!isTextureInUse) {
|
||||
release();
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
public void textureToYUV(final ByteBuffer buf, final int width, final int height,
|
||||
final int stride, final int textureId, final float[] transformMatrix) {
|
||||
if (textureId != oesTextureId) {
|
||||
throw new IllegalStateException("textureToByteBuffer called with unexpected textureId");
|
||||
}
|
||||
|
||||
ThreadUtils.invokeAtFrontUninterruptibly(handler, new Runnable() {
|
||||
@Override
|
||||
public void run() {
|
||||
if (yuvConverter == null) {
|
||||
yuvConverter = new YuvConverter();
|
||||
}
|
||||
yuvConverter.convert(buf, width, height, stride, textureId, transformMatrix);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
private void updateTexImage() {
|
||||
// SurfaceTexture.updateTexImage apparently can compete and deadlock with eglSwapBuffers,
|
||||
// as observed on Nexus 5. Therefore, synchronize it with the EGL functions.
|
||||
// See https://bugs.chromium.org/p/webrtc/issues/detail?id=5702 for more info.
|
||||
synchronized (EglBase.lock) {
|
||||
surfaceTexture.updateTexImage();
|
||||
}
|
||||
}
|
||||
|
||||
private void tryDeliverTextureFrame() {
|
||||
if (handler.getLooper().getThread() != Thread.currentThread()) {
|
||||
throw new IllegalStateException("Wrong thread.");
|
||||
}
|
||||
if (isQuitting || !hasPendingTexture || isTextureInUse || listener == null) {
|
||||
return;
|
||||
}
|
||||
isTextureInUse = true;
|
||||
hasPendingTexture = false;
|
||||
|
||||
updateTexImage();
|
||||
|
||||
final float[] transformMatrix = new float[16];
|
||||
surfaceTexture.getTransformMatrix(transformMatrix);
|
||||
final long timestampNs = surfaceTexture.getTimestamp();
|
||||
listener.onTextureFrameAvailable(oesTextureId, transformMatrix, timestampNs);
|
||||
}
|
||||
|
||||
private void release() {
|
||||
if (handler.getLooper().getThread() != Thread.currentThread()) {
|
||||
throw new IllegalStateException("Wrong thread.");
|
||||
}
|
||||
if (isTextureInUse || !isQuitting) {
|
||||
throw new IllegalStateException("Unexpected release.");
|
||||
}
|
||||
if (yuvConverter != null) {
|
||||
yuvConverter.release();
|
||||
}
|
||||
GLES20.glDeleteTextures(1, new int[] {oesTextureId}, 0);
|
||||
surfaceTexture.release();
|
||||
eglBase.release();
|
||||
handler.getLooper().quit();
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a VideoFrame buffer backed by this helper's texture. The |width| and |height| should
|
||||
* match the dimensions of the data placed in the texture. The correct |transformMatrix| may be
|
||||
* obtained from callbacks to OnTextureFrameAvailableListener.
|
||||
*
|
||||
* The returned TextureBuffer holds a reference to the SurfaceTextureHelper that created it. The
|
||||
* buffer calls returnTextureFrame() when it is released.
|
||||
*/
|
||||
public TextureBuffer createTextureBuffer(int width, int height, Matrix transformMatrix) {
|
||||
return new TextureBufferImpl(
|
||||
width, height, TextureBuffer.Type.OES, oesTextureId, transformMatrix, this, new Runnable() {
|
||||
@Override
|
||||
public void run() {
|
||||
returnTextureFrame();
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
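A lifecycle sketch for the class comment above, assuming `eglBase` is an EglBase created elsewhere by the application:

// Sketch: create the helper, consume frames one at a time, and shut down.
final SurfaceTextureHelper helper =
    SurfaceTextureHelper.create("DemoCaptureThread", eglBase.getEglBaseContext());
helper.startListening(new SurfaceTextureHelper.OnTextureFrameAvailableListener() {
  @Override
  public void onTextureFrameAvailable(
      int oesTextureId, float[] transformMatrix, long timestampNs) {
    // Consume the OES texture here (e.g. hand it to an encoder or renderer).
    // Only one frame is in flight at a time, so it must be returned before the
    // next onTextureFrameAvailable() call can be delivered.
    helper.returnTextureFrame();
  }
});
// ... later, when no more frames are wanted:
helper.stopListening();
helper.dispose();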
385
sdk/android/api/org/webrtc/SurfaceViewRenderer.java
Normal file
385
sdk/android/api/org/webrtc/SurfaceViewRenderer.java
Normal file
@ -0,0 +1,385 @@
|
||||
/*
|
||||
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
package org.webrtc;
|
||||
|
||||
import android.content.Context;
|
||||
import android.content.res.Resources.NotFoundException;
|
||||
import android.graphics.Point;
|
||||
import android.util.AttributeSet;
|
||||
import android.view.SurfaceHolder;
|
||||
import android.view.SurfaceView;
|
||||
import java.util.concurrent.CountDownLatch;
|
||||
|
||||
/**
|
||||
* Implements org.webrtc.VideoRenderer.Callbacks by displaying the video stream on a SurfaceView.
|
||||
* renderFrame() is asynchronous to avoid blocking the calling thread.
|
||||
* This class is thread safe and handles access from potentially four different threads:
|
||||
* Interaction from the main app in init, release, setMirror, and setScalingType.
|
||||
* Interaction from C++ rtc::VideoSinkInterface in renderFrame.
|
||||
* Interaction from the Activity lifecycle in surfaceCreated, surfaceChanged, and surfaceDestroyed.
|
||||
* Interaction with the layout framework in onMeasure and onSizeChanged.
|
||||
*/
|
||||
public class SurfaceViewRenderer
|
||||
extends SurfaceView implements SurfaceHolder.Callback, VideoRenderer.Callbacks, VideoSink {
|
||||
private static final String TAG = "SurfaceViewRenderer";
|
||||
|
||||
// Cached resource name.
|
||||
private final String resourceName;
|
||||
private final RendererCommon.VideoLayoutMeasure videoLayoutMeasure =
|
||||
new RendererCommon.VideoLayoutMeasure();
|
||||
private final EglRenderer eglRenderer;
|
||||
|
||||
// Callback for reporting renderer events. Read-only after initialization so no lock required.
|
||||
private RendererCommon.RendererEvents rendererEvents;
|
||||
|
||||
private final Object layoutLock = new Object();
|
||||
private boolean isRenderingPaused = false;
|
||||
private boolean isFirstFrameRendered;
|
||||
private int rotatedFrameWidth;
|
||||
private int rotatedFrameHeight;
|
||||
private int frameRotation;
|
||||
|
||||
// Accessed only on the main thread.
|
||||
private boolean enableFixedSize;
|
||||
private int surfaceWidth;
|
||||
private int surfaceHeight;
|
||||
|
||||
/**
|
||||
* Standard View constructor. In order to render something, you must first call init().
|
||||
*/
|
||||
public SurfaceViewRenderer(Context context) {
|
||||
super(context);
|
||||
this.resourceName = getResourceName();
|
||||
eglRenderer = new EglRenderer(resourceName);
|
||||
getHolder().addCallback(this);
|
||||
}
|
||||
|
||||
/**
|
||||
* Standard View constructor. In order to render something, you must first call init().
|
||||
*/
|
||||
public SurfaceViewRenderer(Context context, AttributeSet attrs) {
|
||||
super(context, attrs);
|
||||
this.resourceName = getResourceName();
|
||||
eglRenderer = new EglRenderer(resourceName);
|
||||
getHolder().addCallback(this);
|
||||
}
|
||||
|
||||
/**
|
||||
* Initialize this class, sharing resources with |sharedContext|. It is allowed to call init() to
|
||||
* reinitialize the renderer after a previous init()/release() cycle.
|
||||
*/
|
||||
public void init(EglBase.Context sharedContext, RendererCommon.RendererEvents rendererEvents) {
|
||||
init(sharedContext, rendererEvents, EglBase.CONFIG_PLAIN, new GlRectDrawer());
|
||||
}
|
||||
|
||||
/**
|
||||
* Initialize this class, sharing resources with |sharedContext|. The custom |drawer| will be used
|
||||
* for drawing frames on the EGLSurface. This class is responsible for calling release() on
|
||||
* |drawer|. It is allowed to call init() to reinitialize the renderer after a previous
|
||||
* init()/release() cycle.
|
||||
*/
|
||||
public void init(final EglBase.Context sharedContext,
|
||||
RendererCommon.RendererEvents rendererEvents, final int[] configAttributes,
|
||||
RendererCommon.GlDrawer drawer) {
|
||||
ThreadUtils.checkIsOnMainThread();
|
||||
this.rendererEvents = rendererEvents;
|
||||
synchronized (layoutLock) {
|
||||
isFirstFrameRendered = false;
|
||||
rotatedFrameWidth = 0;
|
||||
rotatedFrameHeight = 0;
|
||||
frameRotation = 0;
|
||||
}
|
||||
eglRenderer.init(sharedContext, configAttributes, drawer);
|
||||
}
|
||||
|
||||
/**
|
||||
* Block until any pending frame is returned and all GL resources released, even if an interrupt
|
||||
* occurs. If an interrupt occurs during release(), the interrupt flag will be set. This function
|
||||
* should be called before the Activity is destroyed, while the EGLContext is still valid. If you
|
||||
* don't call this function, the GL resources might leak.
|
||||
*/
|
||||
public void release() {
|
||||
eglRenderer.release();
|
||||
}
|
||||
|
||||
/**
|
||||
* Register a callback to be invoked when a new video frame has been received.
|
||||
*
|
||||
* @param listener The callback to be invoked. The callback will be invoked on the render thread.
|
||||
* It should be lightweight and must not call removeFrameListener.
|
||||
* @param scale The scale of the Bitmap passed to the callback, or 0 if no Bitmap is
|
||||
* required.
|
||||
* @param drawerParam Custom drawer to use for this frame listener.
|
||||
*/
|
||||
public void addFrameListener(
|
||||
EglRenderer.FrameListener listener, float scale, RendererCommon.GlDrawer drawerParam) {
|
||||
eglRenderer.addFrameListener(listener, scale, drawerParam);
|
||||
}
|
||||
|
||||
/**
|
||||
* Register a callback to be invoked when a new video frame has been received. This version uses
|
||||
* the drawer of the EglRenderer that was passed in init.
|
||||
*
|
||||
* @param listener The callback to be invoked. The callback will be invoked on the render thread.
|
||||
* It should be lightweight and must not call removeFrameListener.
|
||||
* @param scale The scale of the Bitmap passed to the callback, or 0 if no Bitmap is
|
||||
* required.
|
||||
*/
|
||||
public void addFrameListener(EglRenderer.FrameListener listener, float scale) {
|
||||
eglRenderer.addFrameListener(listener, scale);
|
||||
}
|
||||
|
||||
public void removeFrameListener(EglRenderer.FrameListener listener) {
|
||||
eglRenderer.removeFrameListener(listener);
|
||||
}
|
||||
|
||||
/**
|
||||
* Enables fixed size for the surface. This provides better performance but might be buggy on some
|
||||
* devices. By default this is turned off.
|
||||
*/
|
||||
public void setEnableHardwareScaler(boolean enabled) {
|
||||
ThreadUtils.checkIsOnMainThread();
|
||||
enableFixedSize = enabled;
|
||||
updateSurfaceSize();
|
||||
}
|
||||
|
||||
/**
|
||||
* Set if the video stream should be mirrored or not.
|
||||
*/
|
||||
public void setMirror(final boolean mirror) {
|
||||
eglRenderer.setMirror(mirror);
|
||||
}
|
||||
|
||||
/**
|
||||
* Set how the video will fill the allowed layout area.
|
||||
*/
|
||||
public void setScalingType(RendererCommon.ScalingType scalingType) {
|
||||
ThreadUtils.checkIsOnMainThread();
|
||||
videoLayoutMeasure.setScalingType(scalingType);
|
||||
requestLayout();
|
||||
}
|
||||
|
||||
public void setScalingType(RendererCommon.ScalingType scalingTypeMatchOrientation,
|
||||
RendererCommon.ScalingType scalingTypeMismatchOrientation) {
|
||||
ThreadUtils.checkIsOnMainThread();
|
||||
videoLayoutMeasure.setScalingType(scalingTypeMatchOrientation, scalingTypeMismatchOrientation);
|
||||
requestLayout();
|
||||
}
|
||||
|
||||
/**
|
||||
* Limit render framerate.
|
||||
*
|
||||
* @param fps Limit render framerate to this value, or use Float.POSITIVE_INFINITY to disable fps
|
||||
* reduction.
|
||||
*/
|
||||
public void setFpsReduction(float fps) {
|
||||
synchronized (layoutLock) {
|
||||
isRenderingPaused = fps == 0f;
|
||||
}
|
||||
eglRenderer.setFpsReduction(fps);
|
||||
}
|
||||
|
||||
public void disableFpsReduction() {
|
||||
synchronized (layoutLock) {
|
||||
isRenderingPaused = false;
|
||||
}
|
||||
eglRenderer.disableFpsReduction();
|
||||
}
|
||||
|
||||
public void pauseVideo() {
|
||||
synchronized (layoutLock) {
|
||||
isRenderingPaused = true;
|
||||
}
|
||||
eglRenderer.pauseVideo();
|
||||
}
|
||||
|
||||
// VideoRenderer.Callbacks interface.
|
||||
@Override
|
||||
public void renderFrame(VideoRenderer.I420Frame frame) {
|
||||
updateFrameDimensionsAndReportEvents(frame);
|
||||
eglRenderer.renderFrame(frame);
|
||||
}
|
||||
|
||||
// VideoSink interface.
|
||||
@Override
|
||||
public void onFrame(VideoFrame frame) {
|
||||
updateFrameDimensionsAndReportEvents(frame);
|
||||
eglRenderer.onFrame(frame);
|
||||
}
|
||||
|
||||
// View layout interface.
|
||||
@Override
|
||||
protected void onMeasure(int widthSpec, int heightSpec) {
|
||||
ThreadUtils.checkIsOnMainThread();
|
||||
final Point size;
|
||||
synchronized (layoutLock) {
|
||||
size =
|
||||
videoLayoutMeasure.measure(widthSpec, heightSpec, rotatedFrameWidth, rotatedFrameHeight);
|
||||
}
|
||||
setMeasuredDimension(size.x, size.y);
|
||||
logD("onMeasure(). New size: " + size.x + "x" + size.y);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void onLayout(boolean changed, int left, int top, int right, int bottom) {
|
||||
ThreadUtils.checkIsOnMainThread();
|
||||
eglRenderer.setLayoutAspectRatio((right - left) / (float) (bottom - top));
|
||||
updateSurfaceSize();
|
||||
}
|
||||
|
||||
private void updateSurfaceSize() {
|
||||
ThreadUtils.checkIsOnMainThread();
|
||||
synchronized (layoutLock) {
|
||||
if (enableFixedSize && rotatedFrameWidth != 0 && rotatedFrameHeight != 0 && getWidth() != 0
|
||||
&& getHeight() != 0) {
|
||||
final float layoutAspectRatio = getWidth() / (float) getHeight();
|
||||
final float frameAspectRatio = rotatedFrameWidth / (float) rotatedFrameHeight;
|
||||
final int drawnFrameWidth;
|
||||
final int drawnFrameHeight;
|
||||
if (frameAspectRatio > layoutAspectRatio) {
|
||||
drawnFrameWidth = (int) (rotatedFrameHeight * layoutAspectRatio);
|
||||
drawnFrameHeight = rotatedFrameHeight;
|
||||
} else {
|
||||
drawnFrameWidth = rotatedFrameWidth;
|
||||
drawnFrameHeight = (int) (rotatedFrameWidth / layoutAspectRatio);
|
||||
}
|
||||
// Aspect ratio of the drawn frame and the view is the same.
|
||||
final int width = Math.min(getWidth(), drawnFrameWidth);
|
||||
final int height = Math.min(getHeight(), drawnFrameHeight);
|
||||
logD("updateSurfaceSize. Layout size: " + getWidth() + "x" + getHeight() + ", frame size: "
|
||||
+ rotatedFrameWidth + "x" + rotatedFrameHeight + ", requested surface size: " + width
|
||||
+ "x" + height + ", old surface size: " + surfaceWidth + "x" + surfaceHeight);
|
||||
if (width != surfaceWidth || height != surfaceHeight) {
|
||||
surfaceWidth = width;
|
||||
surfaceHeight = height;
|
||||
getHolder().setFixedSize(width, height);
|
||||
}
|
||||
} else {
|
||||
surfaceWidth = surfaceHeight = 0;
|
||||
getHolder().setSizeFromLayout();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// SurfaceHolder.Callback interface.
|
||||
@Override
|
||||
public void surfaceCreated(final SurfaceHolder holder) {
|
||||
ThreadUtils.checkIsOnMainThread();
|
||||
eglRenderer.createEglSurface(holder.getSurface());
|
||||
surfaceWidth = surfaceHeight = 0;
|
||||
updateSurfaceSize();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void surfaceDestroyed(SurfaceHolder holder) {
|
||||
ThreadUtils.checkIsOnMainThread();
|
||||
final CountDownLatch completionLatch = new CountDownLatch(1);
|
||||
eglRenderer.releaseEglSurface(new Runnable() {
|
||||
@Override
|
||||
public void run() {
|
||||
completionLatch.countDown();
|
||||
}
|
||||
});
|
||||
ThreadUtils.awaitUninterruptibly(completionLatch);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
|
||||
ThreadUtils.checkIsOnMainThread();
|
||||
logD("surfaceChanged: format: " + format + " size: " + width + "x" + height);
|
||||
}
|
||||
|
||||
private String getResourceName() {
|
||||
try {
|
||||
return getResources().getResourceEntryName(getId()) + ": ";
|
||||
} catch (NotFoundException e) {
|
||||
return "";
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Post a task to clear the SurfaceView to a transparent uniform color.
|
||||
*/
|
||||
public void clearImage() {
|
||||
eglRenderer.clearImage();
|
||||
}
|
||||
|
||||
// Update frame dimensions and report any changes to |rendererEvents|.
|
||||
private void updateFrameDimensionsAndReportEvents(VideoRenderer.I420Frame frame) {
|
||||
synchronized (layoutLock) {
|
||||
if (isRenderingPaused) {
|
||||
return;
|
||||
}
|
||||
if (!isFirstFrameRendered) {
|
||||
isFirstFrameRendered = true;
|
||||
logD("Reporting first rendered frame.");
|
||||
if (rendererEvents != null) {
|
||||
rendererEvents.onFirstFrameRendered();
|
||||
}
|
||||
}
|
||||
if (rotatedFrameWidth != frame.rotatedWidth() || rotatedFrameHeight != frame.rotatedHeight()
|
||||
|| frameRotation != frame.rotationDegree) {
|
||||
logD("Reporting frame resolution changed to " + frame.width + "x" + frame.height
|
||||
+ " with rotation " + frame.rotationDegree);
|
||||
if (rendererEvents != null) {
|
||||
rendererEvents.onFrameResolutionChanged(frame.width, frame.height, frame.rotationDegree);
|
||||
}
|
||||
rotatedFrameWidth = frame.rotatedWidth();
|
||||
rotatedFrameHeight = frame.rotatedHeight();
|
||||
frameRotation = frame.rotationDegree;
|
||||
post(new Runnable() {
|
||||
@Override
|
||||
public void run() {
|
||||
updateSurfaceSize();
|
||||
requestLayout();
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Update frame dimensions and report any changes to |rendererEvents|.
|
||||
private void updateFrameDimensionsAndReportEvents(VideoFrame frame) {
|
||||
synchronized (layoutLock) {
|
||||
if (isRenderingPaused) {
|
||||
return;
|
||||
}
|
||||
if (!isFirstFrameRendered) {
|
||||
isFirstFrameRendered = true;
|
||||
logD("Reporting first rendered frame.");
|
||||
if (rendererEvents != null) {
|
||||
rendererEvents.onFirstFrameRendered();
|
||||
}
|
||||
}
|
||||
if (rotatedFrameWidth != frame.getRotatedWidth()
|
||||
|| rotatedFrameHeight != frame.getRotatedHeight()
|
||||
|| frameRotation != frame.getRotation()) {
|
||||
logD("Reporting frame resolution changed to " + frame.getBuffer().getWidth() + "x"
|
||||
+ frame.getBuffer().getHeight() + " with rotation " + frame.getRotation());
|
||||
if (rendererEvents != null) {
|
||||
rendererEvents.onFrameResolutionChanged(
|
||||
frame.getBuffer().getWidth(), frame.getBuffer().getHeight(), frame.getRotation());
|
||||
}
|
||||
rotatedFrameWidth = frame.getRotatedWidth();
|
||||
rotatedFrameHeight = frame.getRotatedHeight();
|
||||
frameRotation = frame.getRotation();
|
||||
post(() -> {
|
||||
updateSurfaceSize();
|
||||
requestLayout();
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private void logD(String string) {
|
||||
Logging.d(TAG, resourceName + string);
|
||||
}
|
||||
}
|
||||
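A wiring sketch (assumptions: `eglBase` and `remoteVideoTrack` are created elsewhere by the application, and VideoTrack.addRenderer(VideoRenderer) plus the layout id are application-side details not shown in this hunk):

// Sketch: show a remote video track in a layout.
SurfaceViewRenderer remoteView = (SurfaceViewRenderer) findViewById(R.id.remote_view);
remoteView.init(eglBase.getEglBaseContext(), null /* rendererEvents */);
remoteView.setScalingType(RendererCommon.ScalingType.SCALE_ASPECT_FILL);
remoteView.setMirror(false);
remoteVideoTrack.addRenderer(new VideoRenderer(remoteView));

// When the Activity is destroyed, release GL resources explicitly:
remoteView.release();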
71
sdk/android/api/org/webrtc/VideoCapturer.java
Normal file
71
sdk/android/api/org/webrtc/VideoCapturer.java
Normal file
@ -0,0 +1,71 @@
|
||||
/*
|
||||
* Copyright 2013 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
package org.webrtc;
|
||||
|
||||
import android.content.Context;
|
||||
import java.util.List;
|
||||
|
||||
// Base interface for all VideoCapturers to implement.
|
||||
public interface VideoCapturer {
|
||||
// Interface used for providing callbacks to an observer.
|
||||
public interface CapturerObserver {
|
||||
// Notify whether the camera has been started successfully or not.
|
||||
// Called on a Java thread owned by VideoCapturer.
|
||||
void onCapturerStarted(boolean success);
|
||||
void onCapturerStopped();
|
||||
|
||||
// Delivers a captured frame. Called on a Java thread owned by VideoCapturer.
|
||||
void onByteBufferFrameCaptured(
|
||||
byte[] data, int width, int height, int rotation, long timeStamp);
|
||||
|
||||
// Delivers a captured frame in a texture with id |oesTextureId|. Called on a Java thread
|
||||
// owned by VideoCapturer.
|
||||
void onTextureFrameCaptured(int width, int height, int oesTextureId, float[] transformMatrix,
|
||||
int rotation, long timestamp);
|
||||
|
||||
// Delivers a captured frame. Called on a Java thread owned by VideoCapturer.
|
||||
void onFrameCaptured(VideoFrame frame);
|
||||
}
|
||||
|
||||
/**
|
||||
* This function is used to initialize the camera thread, the android application context, and the
|
||||
* capture observer. It will be called only once and before any startCapture() request. The
|
||||
* camera thread is guaranteed to be valid until dispose() is called. If the VideoCapturer wants
|
||||
* to deliver texture frames, it should do this by rendering on the SurfaceTexture in
|
||||
* |surfaceTextureHelper|, register itself as a listener, and forward the texture frames to
|
||||
* CapturerObserver.onTextureFrameCaptured().
|
||||
*/
|
||||
void initialize(SurfaceTextureHelper surfaceTextureHelper, Context applicationContext,
|
||||
CapturerObserver capturerObserver);
|
||||
|
||||
/**
|
||||
* Start capturing frames in a format that is as close as possible to |width| x |height| and
|
||||
* |framerate|.
|
||||
*/
|
||||
void startCapture(int width, int height, int framerate);
|
||||
|
||||
/**
|
||||
* Stop capturing. This function should block until capture is actually stopped.
|
||||
*/
|
||||
void stopCapture() throws InterruptedException;
|
||||
|
||||
void changeCaptureFormat(int width, int height, int framerate);
|
||||
|
||||
/**
|
||||
* Perform any final cleanup here. No more capturing will be done after this call.
|
||||
*/
|
||||
void dispose();
|
||||
|
||||
/**
|
||||
* @return true if-and-only-if this is a screen capturer.
|
||||
*/
|
||||
boolean isScreencast();
|
||||
}
|
||||
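The consumer-side flow implied by this interface, as a sketch (PeerConnectionFactory.createVideoSource() is assumed to call initialize() on the capturer internally; the factory class is not part of this hunk, and createCameraCapturer() is an app-defined helper):

// Sketch: feed a capturer into a video track and start it.
VideoCapturer capturer = createCameraCapturer();  // app-defined helper (assumption)
VideoSource videoSource = peerConnectionFactory.createVideoSource(capturer);
VideoTrack localVideoTrack = peerConnectionFactory.createVideoTrack("video0", videoSource);
capturer.startCapture(1280, 720, 30);
// ... later, when shutting down:
try {
  capturer.stopCapture();  // blocks until capture has actually stopped
} catch (InterruptedException e) {
  Thread.currentThread().interrupt();
}
capturer.dispose();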
41
sdk/android/api/org/webrtc/VideoCodecInfo.java
Normal file
41
sdk/android/api/org/webrtc/VideoCodecInfo.java
Normal file
@ -0,0 +1,41 @@
|
||||
/*
|
||||
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
package org.webrtc;
|
||||
|
||||
import java.util.Map;
|
||||
|
||||
/**
|
||||
* Represent a video codec as encoded in SDP.
|
||||
*/
|
||||
public class VideoCodecInfo {
|
||||
// Keys for H264 VideoCodecInfo properties.
|
||||
public static final String H264_FMTP_PROFILE_LEVEL_ID = "profile-level-id";
|
||||
public static final String H264_FMTP_LEVEL_ASYMMETRY_ALLOWED = "level-asymmetry-allowed";
|
||||
public static final String H264_FMTP_PACKETIZATION_MODE = "packetization-mode";
|
||||
|
||||
public static final String H264_PROFILE_CONSTRAINED_BASELINE = "4200";
|
||||
public static final String H264_PROFILE_CONSTRAINED_HIGH = "640c";
|
||||
public static final String H264_LEVEL_3_1 = "1f"; // 31 in hex.
|
||||
public static final String H264_CONSTRAINED_HIGH_3_1 =
|
||||
H264_PROFILE_CONSTRAINED_HIGH + H264_LEVEL_3_1;
|
||||
public static final String H264_CONSTRAINED_BASELINE_3_1 =
|
||||
H264_PROFILE_CONSTRAINED_BASELINE + H264_LEVEL_3_1;
|
||||
|
||||
public final int payload;
|
||||
public final String name;
|
||||
public final Map<String, String> params;
|
||||
|
||||
public VideoCodecInfo(int payload, String name, Map<String, String> params) {
|
||||
this.payload = payload;
|
||||
this.name = name;
|
||||
this.params = params;
|
||||
}
|
||||
}
|
||||
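A construction sketch using the H.264 keys above (the payload type 127 and the map contents are purely illustrative; java.util imports assumed):

// Sketch: describe H.264 constrained baseline, level 3.1, as it would appear in SDP.
Map<String, String> params = new HashMap<String, String>();
params.put(VideoCodecInfo.H264_FMTP_PROFILE_LEVEL_ID,
    VideoCodecInfo.H264_CONSTRAINED_BASELINE_3_1);
params.put(VideoCodecInfo.H264_FMTP_LEVEL_ASYMMETRY_ALLOWED, "1");
params.put(VideoCodecInfo.H264_FMTP_PACKETIZATION_MODE, "1");
VideoCodecInfo h264 = new VideoCodecInfo(127, "H264", params);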
41
sdk/android/api/org/webrtc/VideoCodecStatus.java
Normal file
41
sdk/android/api/org/webrtc/VideoCodecStatus.java
Normal file
@ -0,0 +1,41 @@
|
||||
/*
|
||||
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
package org.webrtc;
|
||||
|
||||
/**
|
||||
* Status codes reported by video encoding/decoding components. This should be kept in sync with
|
||||
* video_error_codes.h.
|
||||
*/
|
||||
public enum VideoCodecStatus {
|
||||
REQUEST_SLI(2),
|
||||
NO_OUTPUT(1),
|
||||
OK(0),
|
||||
ERROR(-1),
|
||||
LEVEL_EXCEEDED(-2),
|
||||
MEMORY(-3),
|
||||
ERR_PARAMETER(-4),
|
||||
ERR_SIZE(-5),
|
||||
TIMEOUT(-6),
|
||||
UNINITIALIZED(-7),
|
||||
ERR_REQUEST_SLI(-12),
|
||||
FALLBACK_SOFTWARE(-13),
|
||||
TARGET_BITRATE_OVERSHOOT(-14);
|
||||
|
||||
private final int number;
|
||||
|
||||
private VideoCodecStatus(int number) {
|
||||
this.number = number;
|
||||
}
|
||||
|
||||
public int getNumber() {
|
||||
return number;
|
||||
}
|
||||
}
|
||||
73
sdk/android/api/org/webrtc/VideoDecoder.java
Normal file
73
sdk/android/api/org/webrtc/VideoDecoder.java
Normal file
@ -0,0 +1,73 @@
/*
 * Copyright 2017 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

package org.webrtc;

/**
 * Interface for a video decoder that can be used in WebRTC. All calls to the class will be made on
 * a single decoding thread.
 */
public interface VideoDecoder {
  /** Settings passed to the decoder by WebRTC. */
  public class Settings {
    public final int numberOfCores;
    public final int width;
    public final int height;

    public Settings(int numberOfCores, int width, int height) {
      this.numberOfCores = numberOfCores;
      this.width = width;
      this.height = height;
    }
  }

  /** Additional info for decoding. */
  public class DecodeInfo {
    public final boolean isMissingFrames;
    public final long renderTimeMs;

    public DecodeInfo(boolean isMissingFrames, long renderTimeMs) {
      this.isMissingFrames = isMissingFrames;
      this.renderTimeMs = renderTimeMs;
    }
  }

  public interface Callback {
    /**
     * Call to return a decoded frame. Can be called on any thread.
     *
     * @param frame Decoded frame
     * @param decodeTimeMs Time it took to decode the frame in milliseconds or null if not available
     * @param qp QP value of the decoded frame or null if not available
     */
    void onDecodedFrame(VideoFrame frame, Integer decodeTimeMs, Integer qp);
  }

  /**
   * Initializes the decoding process with specified settings. Will be called on the decoding thread
   * before any decode calls.
   */
  VideoCodecStatus initDecode(Settings settings, Callback decodeCallback);
  /**
   * Called when the decoder is no longer needed. No more calls to decode will be made after this
   * call.
   */
  VideoCodecStatus release();
  /**
   * Requests the decoder to decode a frame.
   */
  VideoCodecStatus decode(EncodedImage frame, DecodeInfo info);
  /**
   * The decoder should return true if it prefers late decoding. That is, it cannot decode an
   * infinite number of frames before the decoded frame is consumed.
   */
  boolean getPrefersLateDecoding();
  /** Should return a descriptive name for the implementation. */
  String getImplementationName();
}
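The interface above spells out the full decoder contract: initDecode() runs once on the decoding thread, decode() is called per frame, and decoded output is returned asynchronously through Callback.onDecodedFrame(). As a hedged sketch only (the class name and the no-op behavior are invented for illustration and are not part of this change; the class is assumed to live in the org.webrtc package so the referenced types resolve), a minimal implementation could look like:

/** Hypothetical no-op decoder skeleton illustrating the VideoDecoder contract. */
class ExampleNoOpVideoDecoder implements VideoDecoder {
  private Callback callback;

  @Override
  public VideoCodecStatus initDecode(Settings settings, Callback decodeCallback) {
    // Remember the callback; a real decoder would also configure its codec here.
    this.callback = decodeCallback;
    return VideoCodecStatus.OK;
  }

  @Override
  public VideoCodecStatus decode(EncodedImage frame, DecodeInfo info) {
    // A real decoder would decode |frame| and eventually invoke
    // callback.onDecodedFrame(decodedFrame, decodeTimeMs, qp), possibly on another thread.
    return VideoCodecStatus.OK;
  }

  @Override
  public VideoCodecStatus release() {
    callback = null;
    return VideoCodecStatus.OK;
  }

  @Override
  public boolean getPrefersLateDecoding() {
    return true; // Deliver decoded frames as late as possible.
  }

  @Override
  public String getImplementationName() {
    return "ExampleNoOpVideoDecoder";
  }
}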
20
sdk/android/api/org/webrtc/VideoDecoderFactory.java
Normal file
@ -0,0 +1,20 @@
/*
 * Copyright 2017 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

package org.webrtc;

/** Factory for creating VideoDecoders. */
public interface VideoDecoderFactory {
  /**
   * Creates a VideoDecoder for the given codec. Supports the same codecs supported by
   * VideoEncoderFactory.
   */
  public VideoDecoder createDecoder(String codecType);
}
148
sdk/android/api/org/webrtc/VideoEncoder.java
Normal file
@ -0,0 +1,148 @@
|
||||
/*
|
||||
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
package org.webrtc;
|
||||
|
||||
/**
|
||||
* Interface for a video encoder that can be used with WebRTC. All calls will be made on the
|
||||
* encoding thread.
|
||||
*/
|
||||
public interface VideoEncoder {
|
||||
/** Settings passed to the encoder by WebRTC. */
|
||||
public class Settings {
|
||||
public final int numberOfCores;
|
||||
public final int width;
|
||||
public final int height;
|
||||
public final int startBitrate; // Kilobits per second.
|
||||
public final int maxFramerate;
|
||||
public final boolean automaticResizeOn;
|
||||
|
||||
public Settings(int numberOfCores, int width, int height, int startBitrate, int maxFramerate,
|
||||
boolean automaticResizeOn) {
|
||||
this.numberOfCores = numberOfCores;
|
||||
this.width = width;
|
||||
this.height = height;
|
||||
this.startBitrate = startBitrate;
|
||||
this.maxFramerate = maxFramerate;
|
||||
this.automaticResizeOn = automaticResizeOn;
|
||||
}
|
||||
}
|
||||
|
||||
/** Additional info for encoding. */
|
||||
public class EncodeInfo {
|
||||
public final EncodedImage.FrameType[] frameTypes;
|
||||
|
||||
public EncodeInfo(EncodedImage.FrameType[] frameTypes) {
|
||||
this.frameTypes = frameTypes;
|
||||
}
|
||||
}
|
||||
|
||||
// TODO(sakal): Add values to these classes as necessary.
|
||||
/** Codec specific information about the encoded frame. */
|
||||
public class CodecSpecificInfo {}
|
||||
|
||||
public class CodecSpecificInfoVP8 extends CodecSpecificInfo {}
|
||||
|
||||
public class CodecSpecificInfoVP9 extends CodecSpecificInfo {}
|
||||
|
||||
public class CodecSpecificInfoH264 extends CodecSpecificInfo {}
|
||||
|
||||
/**
|
||||
* Represents bitrate allocated for an encoder to produce frames. Bitrate can be divided between
|
||||
* spatial and temporal layers.
|
||||
*/
|
||||
public class BitrateAllocation {
|
||||
// First index is the spatial layer and second the temporal layer.
|
||||
public final int[][] bitratesBbs;
|
||||
|
||||
/**
|
||||
* Initializes the allocation with a two dimensional array of bitrates. The first index of the
|
||||
* array is the spatial layer and the second index in the temporal layer.
|
||||
*/
|
||||
public BitrateAllocation(int[][] bitratesBbs) {
|
||||
this.bitratesBbs = bitratesBbs;
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the total bitrate allocated for all layers.
|
||||
*/
|
||||
public int getSum() {
|
||||
int sum = 0;
|
||||
for (int[] spatialLayer : bitratesBbs) {
|
||||
for (int bitrate : spatialLayer) {
|
||||
sum += bitrate;
|
||||
}
|
||||
}
|
||||
return sum;
|
||||
}
|
||||
}
|
||||
|
||||
/** Settings for WebRTC quality based scaling. */
|
||||
public class ScalingSettings {
|
||||
public final boolean on;
|
||||
public final Integer low;
|
||||
public final Integer high;
|
||||
|
||||
/**
|
||||
* Creates quality based scaling setting.
|
||||
*
|
||||
* @param on True if quality scaling is turned on.
|
||||
*/
|
||||
public ScalingSettings(boolean on) {
|
||||
this.on = on;
|
||||
this.low = null;
|
||||
this.high = null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates quality based scaling settings with custom thresholds.
|
||||
*
|
||||
* @param on True if quality scaling is turned on.
|
||||
* @param low Average QP at which to scale up the resolution.
|
||||
* @param high Average QP at which to scale down the resolution.
|
||||
*/
|
||||
public ScalingSettings(boolean on, int low, int high) {
|
||||
this.on = on;
|
||||
this.low = low;
|
||||
this.high = high;
|
||||
}
|
||||
}
|
||||
|
||||
public interface Callback {
|
||||
/** Call to return an encoded frame. */
|
||||
void onEncodedFrame(EncodedImage frame, CodecSpecificInfo info);
|
||||
}
|
||||
|
||||
/**
|
||||
* Initializes the encoding process. Call before any calls to encode.
|
||||
*/
|
||||
VideoCodecStatus initEncode(Settings settings, Callback encodeCallback);
|
||||
/**
|
||||
* Releases the encoder. No more calls to encode will be made after this call.
|
||||
*/
|
||||
VideoCodecStatus release();
|
||||
/**
|
||||
* Requests the encoder to encode a frame.
|
||||
*/
|
||||
VideoCodecStatus encode(VideoFrame frame, EncodeInfo info);
|
||||
/**
|
||||
* Informs the encoder of the packet loss and the round-trip time of the network.
|
||||
*
|
||||
* @param packetLoss How many packets are lost on average per 255 packets.
|
||||
* @param roundTripTimeMs Round-trip time of the network in milliseconds.
|
||||
*/
|
||||
VideoCodecStatus setChannelParameters(short packetLoss, long roundTripTimeMs);
|
||||
/** Sets the bitrate allocation and the target framerate for the encoder. */
|
||||
VideoCodecStatus setRateAllocation(BitrateAllocation allocation, int framerate);
|
||||
/** Any encoder that wants to use WebRTC provided quality scaler must implement this method. */
|
||||
ScalingSettings getScalingSettings();
|
||||
/** Should return a descriptive name for the implementation. Gets called once and cached. */
|
||||
String getImplementationName();
|
||||
}
|
||||
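BitrateAllocation above is a plain data holder: the outer index selects the spatial layer, the inner index the temporal layer, and getSum() adds every entry. A hedged sketch with invented numbers, assuming |encoder| is some existing VideoEncoder instance:

// Two spatial layers, each with two temporal layers, in bits per second (example values).
int[][] bitratesBbs = new int[][] {
    {150000, 50000}, // spatial layer 0: temporal layers 0 and 1
    {400000, 100000} // spatial layer 1: temporal layers 0 and 1
};
VideoEncoder.BitrateAllocation allocation = new VideoEncoder.BitrateAllocation(bitratesBbs);
// getSum() returns 150000 + 50000 + 400000 + 100000 = 700000 bps.
encoder.setRateAllocation(allocation, 30 /* framerate */);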
23
sdk/android/api/org/webrtc/VideoEncoderFactory.java
Normal file
@ -0,0 +1,23 @@
/*
 * Copyright 2017 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

package org.webrtc;

/** Factory for creating VideoEncoders. */
public interface VideoEncoderFactory {
  /** Creates an encoder for the given video codec. */
  public VideoEncoder createEncoder(VideoCodecInfo info);

  /**
   * Enumerates the list of supported video codecs. This method will only be called once and the
   * result will be cached.
   */
  public VideoCodecInfo[] getSupportedCodecs();
}
174
sdk/android/api/org/webrtc/VideoFileRenderer.java
Normal file
@ -0,0 +1,174 @@
|
||||
/*
|
||||
* Copyright 2016 The WebRTC Project Authors. All rights reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
package org.webrtc;
|
||||
|
||||
import android.os.Handler;
|
||||
import android.os.HandlerThread;
|
||||
import java.io.FileOutputStream;
|
||||
import java.io.IOException;
|
||||
import java.nio.ByteBuffer;
|
||||
import java.util.concurrent.CountDownLatch;
|
||||
import java.util.ArrayList;
|
||||
|
||||
/**
|
||||
* Can be used to save the video frames to file.
|
||||
*/
|
||||
public class VideoFileRenderer implements VideoRenderer.Callbacks {
|
||||
static {
|
||||
System.loadLibrary("jingle_peerconnection_so");
|
||||
}
|
||||
|
||||
private static final String TAG = "VideoFileRenderer";
|
||||
|
||||
private final HandlerThread renderThread;
|
||||
private final Object handlerLock = new Object();
|
||||
private final Handler renderThreadHandler;
|
||||
private final FileOutputStream videoOutFile;
|
||||
private final String outputFileName;
|
||||
private final int outputFileWidth;
|
||||
private final int outputFileHeight;
|
||||
private final int outputFrameSize;
|
||||
private final ByteBuffer outputFrameBuffer;
|
||||
private EglBase eglBase;
|
||||
private YuvConverter yuvConverter;
|
||||
private ArrayList<ByteBuffer> rawFrames = new ArrayList<>();
|
||||
|
||||
public VideoFileRenderer(String outputFile, int outputFileWidth, int outputFileHeight,
|
||||
final EglBase.Context sharedContext) throws IOException {
|
||||
if ((outputFileWidth % 2) == 1 || (outputFileHeight % 2) == 1) {
|
||||
throw new IllegalArgumentException("Does not support uneven width or height");
|
||||
}
|
||||
|
||||
this.outputFileName = outputFile;
|
||||
this.outputFileWidth = outputFileWidth;
|
||||
this.outputFileHeight = outputFileHeight;
|
||||
|
||||
outputFrameSize = outputFileWidth * outputFileHeight * 3 / 2;
|
||||
outputFrameBuffer = ByteBuffer.allocateDirect(outputFrameSize);
|
||||
|
||||
videoOutFile = new FileOutputStream(outputFile);
|
||||
videoOutFile.write(
|
||||
("YUV4MPEG2 C420 W" + outputFileWidth + " H" + outputFileHeight + " Ip F30:1 A1:1\n")
|
||||
.getBytes());
|
||||
|
||||
renderThread = new HandlerThread(TAG);
|
||||
renderThread.start();
|
||||
renderThreadHandler = new Handler(renderThread.getLooper());
|
||||
|
||||
ThreadUtils.invokeAtFrontUninterruptibly(renderThreadHandler, new Runnable() {
|
||||
@Override
|
||||
public void run() {
|
||||
eglBase = EglBase.create(sharedContext, EglBase.CONFIG_PIXEL_BUFFER);
|
||||
eglBase.createDummyPbufferSurface();
|
||||
eglBase.makeCurrent();
|
||||
yuvConverter = new YuvConverter();
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
@Override
|
||||
public void renderFrame(final VideoRenderer.I420Frame frame) {
|
||||
renderThreadHandler.post(new Runnable() {
|
||||
@Override
|
||||
public void run() {
|
||||
renderFrameOnRenderThread(frame);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
private void renderFrameOnRenderThread(VideoRenderer.I420Frame frame) {
|
||||
final float frameAspectRatio = (float) frame.rotatedWidth() / (float) frame.rotatedHeight();
|
||||
|
||||
final float[] rotatedSamplingMatrix =
|
||||
RendererCommon.rotateTextureMatrix(frame.samplingMatrix, frame.rotationDegree);
|
||||
final float[] layoutMatrix = RendererCommon.getLayoutMatrix(
|
||||
false, frameAspectRatio, (float) outputFileWidth / outputFileHeight);
|
||||
final float[] texMatrix = RendererCommon.multiplyMatrices(rotatedSamplingMatrix, layoutMatrix);
|
||||
|
||||
try {
|
||||
ByteBuffer buffer = nativeCreateNativeByteBuffer(outputFrameSize);
|
||||
if (!frame.yuvFrame) {
|
||||
yuvConverter.convert(outputFrameBuffer, outputFileWidth, outputFileHeight, outputFileWidth,
|
||||
frame.textureId, texMatrix);
|
||||
|
||||
int stride = outputFileWidth;
|
||||
byte[] data = outputFrameBuffer.array();
|
||||
int offset = outputFrameBuffer.arrayOffset();
|
||||
|
||||
// Write Y
|
||||
buffer.put(data, offset, outputFileWidth * outputFileHeight);
|
||||
|
||||
// Write U
|
||||
for (int r = outputFileHeight; r < outputFileHeight * 3 / 2; ++r) {
|
||||
buffer.put(data, offset + r * stride, stride / 2);
|
||||
}
|
||||
|
||||
// Write V
|
||||
for (int r = outputFileHeight; r < outputFileHeight * 3 / 2; ++r) {
|
||||
buffer.put(data, offset + r * stride + stride / 2, stride / 2);
|
||||
}
|
||||
} else {
|
||||
nativeI420Scale(frame.yuvPlanes[0], frame.yuvStrides[0], frame.yuvPlanes[1],
|
||||
frame.yuvStrides[1], frame.yuvPlanes[2], frame.yuvStrides[2], frame.width, frame.height,
|
||||
outputFrameBuffer, outputFileWidth, outputFileHeight);
|
||||
|
||||
buffer.put(outputFrameBuffer.array(), outputFrameBuffer.arrayOffset(), outputFrameSize);
|
||||
}
|
||||
buffer.rewind();
|
||||
rawFrames.add(buffer);
|
||||
} finally {
|
||||
VideoRenderer.renderFrameDone(frame);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Release all resources. All already posted frames will be rendered first.
|
||||
*/
|
||||
public void release() {
|
||||
final CountDownLatch cleanupBarrier = new CountDownLatch(1);
|
||||
renderThreadHandler.post(new Runnable() {
|
||||
@Override
|
||||
public void run() {
|
||||
yuvConverter.release();
|
||||
eglBase.release();
|
||||
renderThread.quit();
|
||||
cleanupBarrier.countDown();
|
||||
}
|
||||
});
|
||||
ThreadUtils.awaitUninterruptibly(cleanupBarrier);
|
||||
try {
|
||||
for (ByteBuffer buffer : rawFrames) {
|
||||
videoOutFile.write("FRAME\n".getBytes());
|
||||
|
||||
byte[] data = new byte[outputFrameSize];
|
||||
buffer.get(data);
|
||||
|
||||
videoOutFile.write(data);
|
||||
|
||||
nativeFreeNativeByteBuffer(buffer);
|
||||
}
|
||||
videoOutFile.close();
|
||||
Logging.d(TAG, "Video written to disk as " + outputFileName + ". Number frames are "
|
||||
+ rawFrames.size() + " and the dimension of the frames are " + outputFileWidth + "x"
|
||||
+ outputFileHeight + ".");
|
||||
} catch (IOException e) {
|
||||
Logging.e(TAG, "Error writing video to disk", e);
|
||||
}
|
||||
}
|
||||
|
||||
public static native void nativeI420Scale(ByteBuffer srcY, int strideY, ByteBuffer srcU,
|
||||
int strideU, ByteBuffer srcV, int strideV, int width, int height, ByteBuffer dst,
|
||||
int dstWidth, int dstHeight);
|
||||
|
||||
public static native ByteBuffer nativeCreateNativeByteBuffer(int size);
|
||||
|
||||
public static native void nativeFreeNativeByteBuffer(ByteBuffer buffer);
|
||||
}
|
||||
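VideoFileRenderer above buffers incoming frames and writes them out as a Y4M file when release() is called. A hedged usage sketch: the file path, resolution and the surrounding method are invented for illustration, |track| and |eglContext| are assumed to exist in the application, and java.io.IOException is assumed to be imported.

void recordToFile(VideoTrack track, EglBase.Context eglContext) throws IOException {
  VideoFileRenderer fileRenderer =
      new VideoFileRenderer("/sdcard/recorded.y4m", 640, 480, eglContext);
  VideoRenderer renderer = new VideoRenderer(fileRenderer);
  track.addRenderer(renderer);
  // ... let frames flow for a while ...
  track.removeRenderer(renderer); // Also disposes the VideoRenderer wrapper.
  fileRenderer.release(); // Flushes all buffered frames to the .y4m file.
}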
181
sdk/android/api/org/webrtc/VideoFrame.java
Normal file
@ -0,0 +1,181 @@
|
||||
/*
|
||||
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
package org.webrtc;
|
||||
|
||||
import android.graphics.Matrix;
|
||||
import java.nio.ByteBuffer;
|
||||
|
||||
/**
|
||||
* Java version of webrtc::VideoFrame and webrtc::VideoFrameBuffer. A difference from the C++
|
||||
* version is that no explicit tag is used, and clients are expected to use 'instanceof' to find the
|
||||
* right subclass of the buffer. This allows clients to create custom VideoFrame.Buffer in
|
||||
* arbitrary format in their custom VideoSources, and then cast it back to the correct subclass in
|
||||
* their custom VideoSinks. All implementations must also implement the toI420() function,
|
||||
* converting from the underlying representation if necessary. I420 is the most widely accepted
|
||||
* format and serves as a fallback for video sinks that can only handle I420, e.g. the internal
|
||||
* WebRTC software encoders.
|
||||
*/
|
||||
public class VideoFrame {
|
||||
public interface Buffer {
|
||||
/**
|
||||
* Resolution of the buffer in pixels.
|
||||
*/
|
||||
int getWidth();
|
||||
int getHeight();
|
||||
|
||||
/**
|
||||
* Returns a memory-backed frame in I420 format. If the pixel data is in another format, a
|
||||
* conversion will take place. All implementations must provide a fallback to I420 for
|
||||
* compatibility with e.g. the internal WebRTC software encoders.
|
||||
*/
|
||||
I420Buffer toI420();
|
||||
|
||||
/**
|
||||
* Reference counting is needed since a video buffer can be shared between multiple VideoSinks,
|
||||
* and the buffer needs to be returned to the VideoSource as soon as all references are gone.
|
||||
*/
|
||||
void retain();
|
||||
void release();
|
||||
|
||||
/**
|
||||
* Crops a region defined by |cropx|, |cropY|, |cropWidth| and |cropHeight|. Scales it to size
|
||||
* |scaleWidth| x |scaleHeight|.
|
||||
*/
|
||||
Buffer cropAndScale(
|
||||
int cropX, int cropY, int cropWidth, int cropHeight, int scaleWidth, int scaleHeight);
|
||||
}
|
||||
|
||||
/**
|
||||
* Interface for I420 buffers.
|
||||
*/
|
||||
public interface I420Buffer extends Buffer {
|
||||
ByteBuffer getDataY();
|
||||
ByteBuffer getDataU();
|
||||
ByteBuffer getDataV();
|
||||
|
||||
int getStrideY();
|
||||
int getStrideU();
|
||||
int getStrideV();
|
||||
}
|
||||
|
||||
/**
|
||||
* Interface for buffers that are stored as a single texture, either in OES or RGB format.
|
||||
*/
|
||||
public interface TextureBuffer extends Buffer {
|
||||
enum Type { OES, RGB }
|
||||
|
||||
Type getType();
|
||||
int getTextureId();
|
||||
|
||||
/**
|
||||
* Retrieve the transform matrix associated with the frame. This transform matrix maps 2D
|
||||
* homogeneous coordinates of the form (s, t, 1) with s and t in the inclusive range [0, 1] to
|
||||
* the coordinate that should be used to sample that location from the buffer.
|
||||
*/
|
||||
public Matrix getTransformMatrix();
|
||||
}
|
||||
|
||||
private final Buffer buffer;
|
||||
private final int rotation;
|
||||
private final long timestampNs;
|
||||
|
||||
public VideoFrame(Buffer buffer, int rotation, long timestampNs) {
|
||||
if (buffer == null) {
|
||||
throw new IllegalArgumentException("buffer not allowed to be null");
|
||||
}
|
||||
if (rotation % 90 != 0) {
|
||||
throw new IllegalArgumentException("rotation must be a multiple of 90");
|
||||
}
|
||||
this.buffer = buffer;
|
||||
this.rotation = rotation;
|
||||
this.timestampNs = timestampNs;
|
||||
}
|
||||
|
||||
public Buffer getBuffer() {
|
||||
return buffer;
|
||||
}
|
||||
|
||||
/**
|
||||
* Rotation of the frame in degrees.
|
||||
*/
|
||||
public int getRotation() {
|
||||
return rotation;
|
||||
}
|
||||
|
||||
/**
|
||||
* Timestamp of the frame in nano seconds.
|
||||
*/
|
||||
public long getTimestampNs() {
|
||||
return timestampNs;
|
||||
}
|
||||
|
||||
public int getRotatedWidth() {
|
||||
if (rotation % 180 == 0) {
|
||||
return buffer.getWidth();
|
||||
}
|
||||
return buffer.getHeight();
|
||||
}
|
||||
|
||||
public int getRotatedHeight() {
|
||||
if (rotation % 180 == 0) {
|
||||
return buffer.getHeight();
|
||||
}
|
||||
return buffer.getWidth();
|
||||
}
|
||||
|
||||
/**
|
||||
* Reference counting of the underlying buffer.
|
||||
*/
|
||||
public void retain() {
|
||||
buffer.retain();
|
||||
}
|
||||
|
||||
public void release() {
|
||||
buffer.release();
|
||||
}
|
||||
|
||||
public static VideoFrame.Buffer cropAndScaleI420(final I420Buffer buffer, int cropX, int cropY,
|
||||
int cropWidth, int cropHeight, int scaleWidth, int scaleHeight) {
|
||||
if (cropWidth == scaleWidth && cropHeight == scaleHeight) {
|
||||
// No scaling.
|
||||
ByteBuffer dataY = buffer.getDataY();
|
||||
ByteBuffer dataU = buffer.getDataU();
|
||||
ByteBuffer dataV = buffer.getDataV();
|
||||
|
||||
dataY.position(cropX + cropY * buffer.getStrideY());
|
||||
dataU.position(cropX / 2 + cropY / 2 * buffer.getStrideU());
|
||||
dataV.position(cropX / 2 + cropY / 2 * buffer.getStrideV());
|
||||
|
||||
buffer.retain();
|
||||
return new I420BufferImpl(buffer.getWidth(), buffer.getHeight(), dataY.slice(),
|
||||
buffer.getStrideY(), dataU.slice(), buffer.getStrideU(), dataV.slice(),
|
||||
buffer.getStrideV(), new Runnable() {
|
||||
@Override
|
||||
public void run() {
|
||||
buffer.release();
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
I420BufferImpl newBuffer = I420BufferImpl.allocate(scaleWidth, scaleHeight);
|
||||
nativeCropAndScaleI420(buffer.getDataY(), buffer.getStrideY(), buffer.getDataU(),
|
||||
buffer.getStrideU(), buffer.getDataV(), buffer.getStrideV(), cropX, cropY, cropWidth,
|
||||
cropHeight, newBuffer.getDataY(), newBuffer.getStrideY(), newBuffer.getDataU(),
|
||||
newBuffer.getStrideU(), newBuffer.getDataV(), newBuffer.getStrideV(), scaleWidth,
|
||||
scaleHeight);
|
||||
return newBuffer;
|
||||
}
|
||||
|
||||
private static native void nativeCropAndScaleI420(ByteBuffer srcY, int srcStrideY,
|
||||
ByteBuffer srcU, int srcStrideU, ByteBuffer srcV, int srcStrideV, int cropX, int cropY,
|
||||
int cropWidth, int cropHeight, ByteBuffer dstY, int dstStrideY, ByteBuffer dstU,
|
||||
int dstStrideU, ByteBuffer dstV, int dstStrideV, int scaleWidth, int scaleHeight);
|
||||
}
|
||||
227
sdk/android/api/org/webrtc/VideoFrameDrawer.java
Normal file
@ -0,0 +1,227 @@
|
||||
/*
|
||||
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
package org.webrtc;
|
||||
|
||||
import android.graphics.Matrix;
|
||||
import android.graphics.Point;
|
||||
import android.opengl.GLES20;
|
||||
import java.nio.ByteBuffer;
|
||||
|
||||
/**
|
||||
* Helper class to draw VideoFrames. Calls either drawer.drawOes, drawer.drawRgb, or
|
||||
* drawer.drawYuv depending on the type of the buffer. The frame will be rendered with rotation
|
||||
* taken into account. You can supply an additional render matrix for custom transformations.
|
||||
*/
|
||||
public class VideoFrameDrawer {
|
||||
/**
|
||||
* Draws a VideoFrame.TextureBuffer. Calls either drawer.drawOes or drawer.drawRgb
|
||||
* depending on the type of the buffer. You can supply an additional render matrix. This is
|
||||
* used multiplied together with the transformation matrix of the frame. (M = renderMatrix *
|
||||
* transformationMatrix)
|
||||
*/
|
||||
static void drawTexture(RendererCommon.GlDrawer drawer, VideoFrame.TextureBuffer buffer,
|
||||
Matrix renderMatrix, int frameWidth, int frameHeight, int viewportX, int viewportY,
|
||||
int viewportWidth, int viewportHeight) {
|
||||
Matrix finalMatrix = new Matrix(buffer.getTransformMatrix());
|
||||
finalMatrix.preConcat(renderMatrix);
|
||||
float[] finalGlMatrix = RendererCommon.convertMatrixFromAndroidGraphicsMatrix(finalMatrix);
|
||||
switch (buffer.getType()) {
|
||||
case OES:
|
||||
drawer.drawOes(buffer.getTextureId(), finalGlMatrix, frameWidth, frameHeight, viewportX,
|
||||
viewportY, viewportWidth, viewportHeight);
|
||||
break;
|
||||
case RGB:
|
||||
drawer.drawRgb(buffer.getTextureId(), finalGlMatrix, frameWidth, frameHeight, viewportX,
|
||||
viewportY, viewportWidth, viewportHeight);
|
||||
break;
|
||||
default:
|
||||
throw new RuntimeException("Unknown texture type.");
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Helper class for uploading YUV bytebuffer frames to textures that handles stride > width. This
|
||||
* class keeps an internal ByteBuffer to avoid unnecessary allocations for intermediate copies.
|
||||
*/
|
||||
private static class YuvUploader {
|
||||
// Intermediate copy buffer for uploading yuv frames that are not packed, i.e. stride > width.
|
||||
// TODO(magjed): Investigate when GL_UNPACK_ROW_LENGTH is available, or make a custom shader
|
||||
// that handles stride and compare performance with intermediate copy.
|
||||
private ByteBuffer copyBuffer;
|
||||
private int[] yuvTextures;
|
||||
|
||||
/**
|
||||
* Upload |planes| into OpenGL textures, taking stride into consideration.
|
||||
*
|
||||
* @return Array of three texture indices corresponding to Y-, U-, and V-plane respectively.
|
||||
*/
|
||||
public int[] uploadYuvData(int width, int height, int[] strides, ByteBuffer[] planes) {
|
||||
final int[] planeWidths = new int[] {width, width / 2, width / 2};
|
||||
final int[] planeHeights = new int[] {height, height / 2, height / 2};
|
||||
// Make a first pass to see if we need a temporary copy buffer.
|
||||
int copyCapacityNeeded = 0;
|
||||
for (int i = 0; i < 3; ++i) {
|
||||
if (strides[i] > planeWidths[i]) {
|
||||
copyCapacityNeeded = Math.max(copyCapacityNeeded, planeWidths[i] * planeHeights[i]);
|
||||
}
|
||||
}
|
||||
// Allocate copy buffer if necessary.
|
||||
if (copyCapacityNeeded > 0
|
||||
&& (copyBuffer == null || copyBuffer.capacity() < copyCapacityNeeded)) {
|
||||
copyBuffer = ByteBuffer.allocateDirect(copyCapacityNeeded);
|
||||
}
|
||||
// Make sure YUV textures are allocated.
|
||||
if (yuvTextures == null) {
|
||||
yuvTextures = new int[3];
|
||||
for (int i = 0; i < 3; i++) {
|
||||
yuvTextures[i] = GlUtil.generateTexture(GLES20.GL_TEXTURE_2D);
|
||||
}
|
||||
}
|
||||
// Upload each plane.
|
||||
for (int i = 0; i < 3; ++i) {
|
||||
GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + i);
|
||||
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, yuvTextures[i]);
|
||||
// GLES only accepts packed data, i.e. stride == planeWidth.
|
||||
final ByteBuffer packedByteBuffer;
|
||||
if (strides[i] == planeWidths[i]) {
|
||||
// Input is packed already.
|
||||
packedByteBuffer = planes[i];
|
||||
} else {
|
||||
VideoRenderer.nativeCopyPlane(
|
||||
planes[i], planeWidths[i], planeHeights[i], strides[i], copyBuffer, planeWidths[i]);
|
||||
packedByteBuffer = copyBuffer;
|
||||
}
|
||||
GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE, planeWidths[i],
|
||||
planeHeights[i], 0, GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE, packedByteBuffer);
|
||||
}
|
||||
return yuvTextures;
|
||||
}
|
||||
|
||||
public int[] uploadFromBuffer(VideoFrame.I420Buffer buffer) {
|
||||
int[] strides = {buffer.getStrideY(), buffer.getStrideU(), buffer.getStrideV()};
|
||||
ByteBuffer[] planes = {buffer.getDataY(), buffer.getDataU(), buffer.getDataV()};
|
||||
return uploadYuvData(buffer.getWidth(), buffer.getHeight(), strides, planes);
|
||||
}
|
||||
|
||||
public int[] getYuvTextures() {
|
||||
return yuvTextures;
|
||||
}
|
||||
|
||||
/**
|
||||
* Releases cached resources. Uploader can still be used and the resources will be reallocated
|
||||
* on first use.
|
||||
*/
|
||||
public void release() {
|
||||
copyBuffer = null;
|
||||
if (yuvTextures != null) {
|
||||
GLES20.glDeleteTextures(3, yuvTextures, 0);
|
||||
yuvTextures = null;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private static int distance(float x0, float y0, float x1, float y1) {
|
||||
return (int) Math.round(Math.hypot(x1 - x0, y1 - y0));
|
||||
}
|
||||
|
||||
// These points are used to calculate the size of the part of the frame we are rendering.
|
||||
final static float[] srcPoints =
|
||||
new float[] {0f /* x0 */, 0f /* y0 */, 1f /* x1 */, 0f /* y1 */, 0f /* x2 */, 1f /* y2 */};
|
||||
private final float[] dstPoints = new float[6];
|
||||
private final Point renderSize = new Point();
|
||||
private int renderWidth;
|
||||
private int renderHeight;
|
||||
|
||||
// Calculate the frame size after |renderMatrix| is applied. Stores the output in member variables
|
||||
// |renderWidth| and |renderHeight| to avoid allocations since this function is called for every
|
||||
// frame.
|
||||
private void calculateTransformedRenderSize(
|
||||
int frameWidth, int frameHeight, Matrix renderMatrix) {
|
||||
if (renderMatrix == null) {
|
||||
renderWidth = frameWidth;
|
||||
renderHeight = frameHeight;
|
||||
return;
|
||||
}
|
||||
// Transform the texture coordinates (in the range [0, 1]) according to |renderMatrix|.
|
||||
renderMatrix.mapPoints(dstPoints, srcPoints);
|
||||
|
||||
// Multiply with the width and height to get the positions in terms of pixels.
|
||||
for (int i = 0; i < 3; ++i) {
|
||||
dstPoints[i * 2 + 0] *= frameWidth;
|
||||
dstPoints[i * 2 + 1] *= frameHeight;
|
||||
}
|
||||
|
||||
// Get the length of the sides of the transformed rectangle in terms of pixels.
|
||||
renderWidth = distance(dstPoints[0], dstPoints[1], dstPoints[2], dstPoints[3]);
|
||||
renderHeight = distance(dstPoints[0], dstPoints[1], dstPoints[4], dstPoints[5]);
|
||||
}
|
||||
|
||||
private final YuvUploader yuvUploader = new YuvUploader();
|
||||
// This variable will only be used for checking reference equality and is used for caching I420
|
||||
// textures.
|
||||
private VideoFrame lastI420Frame;
|
||||
private final Matrix renderMatrix = new Matrix();
|
||||
|
||||
public void drawFrame(VideoFrame frame, RendererCommon.GlDrawer drawer) {
|
||||
drawFrame(frame, drawer, null /* additionalRenderMatrix */);
|
||||
}
|
||||
|
||||
public void drawFrame(
|
||||
VideoFrame frame, RendererCommon.GlDrawer drawer, Matrix additionalRenderMatrix) {
|
||||
drawFrame(frame, drawer, additionalRenderMatrix, 0 /* viewportX */, 0 /* viewportY */,
|
||||
frame.getRotatedWidth(), frame.getRotatedHeight());
|
||||
}
|
||||
|
||||
public void drawFrame(VideoFrame frame, RendererCommon.GlDrawer drawer,
|
||||
Matrix additionalRenderMatrix, int viewportX, int viewportY, int viewportWidth,
|
||||
int viewportHeight) {
|
||||
final int width = frame.getRotatedWidth();
|
||||
final int height = frame.getRotatedHeight();
|
||||
|
||||
calculateTransformedRenderSize(width, height, additionalRenderMatrix);
|
||||
|
||||
final boolean isTextureFrame = frame.getBuffer() instanceof VideoFrame.TextureBuffer;
|
||||
renderMatrix.reset();
|
||||
renderMatrix.preTranslate(0.5f, 0.5f);
|
||||
if (!isTextureFrame) {
|
||||
renderMatrix.preScale(1f, -1f); // I420-frames are upside down
|
||||
}
|
||||
renderMatrix.preRotate(frame.getRotation());
|
||||
renderMatrix.preTranslate(-0.5f, -0.5f);
|
||||
if (additionalRenderMatrix != null) {
|
||||
renderMatrix.preConcat(additionalRenderMatrix);
|
||||
}
|
||||
|
||||
if (isTextureFrame) {
|
||||
lastI420Frame = null;
|
||||
drawTexture(drawer, (VideoFrame.TextureBuffer) frame.getBuffer(), renderMatrix, renderWidth,
|
||||
renderHeight, viewportX, viewportY, viewportWidth, viewportHeight);
|
||||
} else {
|
||||
// Only upload the I420 data to textures once per frame, if we are called multiple times
|
||||
// with the same frame.
|
||||
if (frame != lastI420Frame) {
|
||||
lastI420Frame = frame;
|
||||
final VideoFrame.I420Buffer i420Buffer = frame.getBuffer().toI420();
|
||||
yuvUploader.uploadFromBuffer(i420Buffer);
|
||||
i420Buffer.release();
|
||||
}
|
||||
|
||||
drawer.drawYuv(yuvUploader.getYuvTextures(),
|
||||
RendererCommon.convertMatrixFromAndroidGraphicsMatrix(renderMatrix), renderWidth,
|
||||
renderHeight, viewportX, viewportY, viewportWidth, viewportHeight);
|
||||
}
|
||||
}
|
||||
|
||||
public void release() {
|
||||
yuvUploader.release();
|
||||
lastI420Frame = null;
|
||||
}
|
||||
}
|
||||
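VideoFrameDrawer combines the frame's own transform with its rotation and an optional extra render matrix in texture-coordinate space. A hedged fragment showing a horizontally mirrored draw; it assumes a current EGL context on the calling thread, a VideoFrame |frame| (e.g. received in VideoSink.onFrame()), and that GlRectDrawer, the stock RendererCommon.GlDrawer implementation elsewhere in the SDK, is available.

VideoFrameDrawer frameDrawer = new VideoFrameDrawer();
RendererCommon.GlDrawer drawer = new GlRectDrawer(); // Assumed available in org.webrtc.
Matrix mirror = new Matrix(); // android.graphics.Matrix
mirror.preScale(-1f, 1f, 0.5f, 0.5f); // Mirror horizontally around the texture center.
frameDrawer.drawFrame(frame, drawer, mirror);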
223
sdk/android/api/org/webrtc/VideoRenderer.java
Normal file
@ -0,0 +1,223 @@
|
||||
/*
|
||||
* Copyright 2013 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
package org.webrtc;
|
||||
|
||||
import java.nio.ByteBuffer;
|
||||
|
||||
/**
|
||||
* Java version of VideoSinkInterface. In addition to allowing clients to
|
||||
* define their own rendering behavior (by passing in a Callbacks object), this
|
||||
* class also provides a createGui() method for creating a GUI-rendering window
|
||||
* on various platforms.
|
||||
*/
|
||||
public class VideoRenderer {
|
||||
/**
|
||||
* Java version of webrtc::VideoFrame. Frames are only constructed from native code and test
|
||||
* code.
|
||||
*/
|
||||
public static class I420Frame {
|
||||
public final int width;
|
||||
public final int height;
|
||||
public final int[] yuvStrides;
|
||||
public ByteBuffer[] yuvPlanes;
|
||||
public final boolean yuvFrame;
|
||||
// Matrix that transforms standard coordinates to their proper sampling locations in
|
||||
// the texture. This transform compensates for any properties of the video source that
|
||||
// cause it to appear different from a normalized texture. This matrix does not take
|
||||
// |rotationDegree| into account.
|
||||
public final float[] samplingMatrix;
|
||||
public int textureId;
|
||||
// Frame pointer in C++.
|
||||
private long nativeFramePointer;
|
||||
|
||||
// rotationDegree is the degree that the frame must be rotated clockwise
|
||||
// to be rendered correctly.
|
||||
public int rotationDegree;
|
||||
|
||||
// If this I420Frame was constructed from VideoFrame.Buffer, this points to
|
||||
// the backing buffer.
|
||||
private final VideoFrame.Buffer backingBuffer;
|
||||
|
||||
/**
|
||||
* Construct a frame of the given dimensions with the specified planar data.
|
||||
*/
|
||||
public I420Frame(int width, int height, int rotationDegree, int[] yuvStrides,
|
||||
ByteBuffer[] yuvPlanes, long nativeFramePointer) {
|
||||
this.width = width;
|
||||
this.height = height;
|
||||
this.yuvStrides = yuvStrides;
|
||||
this.yuvPlanes = yuvPlanes;
|
||||
this.yuvFrame = true;
|
||||
this.rotationDegree = rotationDegree;
|
||||
this.nativeFramePointer = nativeFramePointer;
|
||||
backingBuffer = null;
|
||||
if (rotationDegree % 90 != 0) {
|
||||
throw new IllegalArgumentException("Rotation degree not multiple of 90: " + rotationDegree);
|
||||
}
|
||||
// The convention in WebRTC is that the first element in a ByteBuffer corresponds to the
|
||||
// top-left corner of the image, but in glTexImage2D() the first element corresponds to the
|
||||
// bottom-left corner. This discrepancy is corrected by setting a vertical flip as sampling
|
||||
// matrix.
|
||||
samplingMatrix = RendererCommon.verticalFlipMatrix();
|
||||
}
|
||||
|
||||
/**
|
||||
* Construct a texture frame of the given dimensions with data in SurfaceTexture
|
||||
*/
|
||||
public I420Frame(int width, int height, int rotationDegree, int textureId,
|
||||
float[] samplingMatrix, long nativeFramePointer) {
|
||||
this.width = width;
|
||||
this.height = height;
|
||||
this.yuvStrides = null;
|
||||
this.yuvPlanes = null;
|
||||
this.samplingMatrix = samplingMatrix;
|
||||
this.textureId = textureId;
|
||||
this.yuvFrame = false;
|
||||
this.rotationDegree = rotationDegree;
|
||||
this.nativeFramePointer = nativeFramePointer;
|
||||
backingBuffer = null;
|
||||
if (rotationDegree % 90 != 0) {
|
||||
throw new IllegalArgumentException("Rotation degree not multiple of 90: " + rotationDegree);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Construct a frame from VideoFrame.Buffer.
|
||||
*/
|
||||
public I420Frame(int rotationDegree, VideoFrame.Buffer buffer, long nativeFramePointer) {
|
||||
this.width = buffer.getWidth();
|
||||
this.height = buffer.getHeight();
|
||||
this.rotationDegree = rotationDegree;
|
||||
if (rotationDegree % 90 != 0) {
|
||||
throw new IllegalArgumentException("Rotation degree not multiple of 90: " + rotationDegree);
|
||||
}
|
||||
if (buffer instanceof VideoFrame.TextureBuffer
|
||||
&& ((VideoFrame.TextureBuffer) buffer).getType() == VideoFrame.TextureBuffer.Type.OES) {
|
||||
VideoFrame.TextureBuffer textureBuffer = (VideoFrame.TextureBuffer) buffer;
|
||||
this.yuvFrame = false;
|
||||
this.textureId = textureBuffer.getTextureId();
|
||||
this.samplingMatrix = RendererCommon.convertMatrixFromAndroidGraphicsMatrix(
|
||||
textureBuffer.getTransformMatrix());
|
||||
|
||||
this.yuvStrides = null;
|
||||
this.yuvPlanes = null;
|
||||
} else if (buffer instanceof VideoFrame.I420Buffer) {
|
||||
VideoFrame.I420Buffer i420Buffer = (VideoFrame.I420Buffer) buffer;
|
||||
this.yuvFrame = true;
|
||||
this.yuvStrides =
|
||||
new int[] {i420Buffer.getStrideY(), i420Buffer.getStrideU(), i420Buffer.getStrideV()};
|
||||
this.yuvPlanes =
|
||||
new ByteBuffer[] {i420Buffer.getDataY(), i420Buffer.getDataU(), i420Buffer.getDataV()};
|
||||
// The convention in WebRTC is that the first element in a ByteBuffer corresponds to the
|
||||
// top-left corner of the image, but in glTexImage2D() the first element corresponds to the
|
||||
// bottom-left corner. This discrepancy is corrected by multiplying the sampling matrix with
|
||||
// a vertical flip matrix.
|
||||
this.samplingMatrix = RendererCommon.verticalFlipMatrix();
|
||||
|
||||
this.textureId = 0;
|
||||
} else {
|
||||
this.yuvFrame = false;
|
||||
this.textureId = 0;
|
||||
this.samplingMatrix = null;
|
||||
this.yuvStrides = null;
|
||||
this.yuvPlanes = null;
|
||||
}
|
||||
this.nativeFramePointer = nativeFramePointer;
|
||||
backingBuffer = buffer;
|
||||
}
|
||||
|
||||
public int rotatedWidth() {
|
||||
return (rotationDegree % 180 == 0) ? width : height;
|
||||
}
|
||||
|
||||
public int rotatedHeight() {
|
||||
return (rotationDegree % 180 == 0) ? height : width;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
final String type = yuvFrame
|
||||
? "Y: " + yuvStrides[0] + ", U: " + yuvStrides[1] + ", V: " + yuvStrides[2]
|
||||
: "Texture: " + textureId;
|
||||
return width + "x" + height + ", " + type;
|
||||
}
|
||||
|
||||
/**
|
||||
* Convert the frame to VideoFrame. It is no longer safe to use the I420Frame after calling
|
||||
* this.
|
||||
*/
|
||||
VideoFrame toVideoFrame() {
|
||||
final VideoFrame.Buffer buffer;
|
||||
if (backingBuffer != null) {
|
||||
// We were constructed from a VideoFrame.Buffer, just return it.
|
||||
// Make sure webrtc::VideoFrame object is released.
|
||||
backingBuffer.retain();
|
||||
VideoRenderer.renderFrameDone(this);
|
||||
buffer = backingBuffer;
|
||||
} else if (yuvFrame) {
|
||||
buffer = new I420BufferImpl(width, height, yuvPlanes[0], yuvStrides[0], yuvPlanes[1],
|
||||
yuvStrides[1], yuvPlanes[2], yuvStrides[2],
|
||||
() -> { VideoRenderer.renderFrameDone(this); });
|
||||
} else {
|
||||
// Note: surfaceTextureHelper being null means calling toI420 will crash.
|
||||
buffer = new TextureBufferImpl(width, height, VideoFrame.TextureBuffer.Type.OES, textureId,
|
||||
RendererCommon.convertMatrixToAndroidGraphicsMatrix(samplingMatrix),
|
||||
null /* surfaceTextureHelper */, () -> { VideoRenderer.renderFrameDone(this); });
|
||||
}
|
||||
return new VideoFrame(buffer, rotationDegree, 0 /* timestampNs */);
|
||||
}
|
||||
}
|
||||
|
||||
// Helper native function to do a video frame plane copying.
|
||||
public static native void nativeCopyPlane(
|
||||
ByteBuffer src, int width, int height, int srcStride, ByteBuffer dst, int dstStride);
|
||||
|
||||
/** The real meat of VideoSinkInterface. */
|
||||
public static interface Callbacks {
|
||||
// |frame| might have pending rotation and implementation of Callbacks
|
||||
// should handle that by applying rotation during rendering. The callee
|
||||
// is responsible for signaling when it is done with |frame| by calling
|
||||
// renderFrameDone(frame).
|
||||
public void renderFrame(I420Frame frame);
|
||||
}
|
||||
|
||||
/**
|
||||
* This must be called after every renderFrame() to release the frame.
|
||||
*/
|
||||
public static void renderFrameDone(I420Frame frame) {
|
||||
frame.yuvPlanes = null;
|
||||
frame.textureId = 0;
|
||||
if (frame.nativeFramePointer != 0) {
|
||||
releaseNativeFrame(frame.nativeFramePointer);
|
||||
frame.nativeFramePointer = 0;
|
||||
}
|
||||
}
|
||||
|
||||
long nativeVideoRenderer;
|
||||
|
||||
public VideoRenderer(Callbacks callbacks) {
|
||||
nativeVideoRenderer = nativeWrapVideoRenderer(callbacks);
|
||||
}
|
||||
|
||||
public void dispose() {
|
||||
if (nativeVideoRenderer == 0) {
|
||||
// Already disposed.
|
||||
return;
|
||||
}
|
||||
|
||||
freeWrappedVideoRenderer(nativeVideoRenderer);
|
||||
nativeVideoRenderer = 0;
|
||||
}
|
||||
|
||||
private static native long nativeWrapVideoRenderer(Callbacks callbacks);
|
||||
private static native void freeWrappedVideoRenderer(long nativeVideoRenderer);
|
||||
private static native void releaseNativeFrame(long nativeFramePointer);
|
||||
}
|
||||
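The Callbacks javadoc above makes the ownership rule explicit: every frame passed to renderFrame() must be handed back with renderFrameDone(). A hedged minimal sketch of a callback that simply drops frames (the variable names are invented):

VideoRenderer.Callbacks dropAllFrames = new VideoRenderer.Callbacks() {
  @Override
  public void renderFrame(VideoRenderer.I420Frame frame) {
    // A real implementation would draw the frame here, applying frame.rotationDegree.
    VideoRenderer.renderFrameDone(frame); // Required after every renderFrame().
  }
};
VideoRenderer renderer = new VideoRenderer(dropAllFrames);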
23
sdk/android/api/org/webrtc/VideoSink.java
Normal file
@ -0,0 +1,23 @@
/*
 * Copyright 2017 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

package org.webrtc;

/**
 * Java version of rtc::VideoSinkInterface.
 */
public interface VideoSink {
  /**
   * Implementations should call frame.retain() if they need to hold a reference to the frame after
   * this function returns. Each call to retain() should be followed by a call to frame.release()
   * when the reference is no longer needed.
   */
  void onFrame(VideoFrame frame);
}
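The onFrame() javadoc above defines the reference-counting contract. A hedged sketch of a sink that keeps only the most recent frame alive; the class name is invented and it is assumed to sit in the org.webrtc package so VideoFrame resolves.

class LastFrameSink implements VideoSink {
  private VideoFrame lastFrame;

  @Override
  public synchronized void onFrame(VideoFrame frame) {
    frame.retain(); // Keep a reference beyond the end of onFrame().
    if (lastFrame != null) {
      lastFrame.release(); // Drop the previously held frame.
    }
    lastFrame = frame;
  }
}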
33
sdk/android/api/org/webrtc/VideoSource.java
Normal file
@ -0,0 +1,33 @@
/*
 * Copyright 2013 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

package org.webrtc;

/**
 * Java wrapper of native AndroidVideoTrackSource.
 */
public class VideoSource extends MediaSource {
  public VideoSource(long nativeSource) {
    super(nativeSource);
  }

  /**
   * Calling this function will cause frames to be scaled down to the requested resolution. Also,
   * frames will be cropped to match the requested aspect ratio, and frames will be dropped to match
   * the requested fps. The requested aspect ratio is orientation agnostic and will be adjusted to
   * maintain the input orientation, so it doesn't matter if e.g. 1280x720 or 720x1280 is requested.
   */
  public void adaptOutputFormat(int width, int height, int fps) {
    nativeAdaptOutputFormat(nativeSource, width, height, fps);
  }

  private static native void nativeAdaptOutputFormat(
      long nativeSource, int width, int height, int fps);
}
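adaptOutputFormat() above is the knob for capping what the source delivers. A hedged one-line usage sketch; |videoSource| would typically come from PeerConnectionFactory.createVideoSource(), which is outside this excerpt.

// Cap delivery at 640x480 and 15 fps; because the requested aspect ratio is orientation
// agnostic, 480x640 (portrait) input is adapted the same way.
videoSource.adaptOutputFormat(640, 480, 15);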
82
sdk/android/api/org/webrtc/VideoTrack.java
Normal file
@ -0,0 +1,82 @@
|
||||
/*
|
||||
* Copyright 2013 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
package org.webrtc;
|
||||
|
||||
import java.util.IdentityHashMap;
|
||||
import java.util.LinkedList;
|
||||
|
||||
/** Java version of VideoTrackInterface. */
|
||||
public class VideoTrack extends MediaStreamTrack {
|
||||
private final LinkedList<VideoRenderer> renderers = new LinkedList<VideoRenderer>();
|
||||
private final IdentityHashMap<VideoSink, Long> sinks = new IdentityHashMap<VideoSink, Long>();
|
||||
|
||||
public VideoTrack(long nativeTrack) {
|
||||
super(nativeTrack);
|
||||
}
|
||||
|
||||
/**
|
||||
* Adds a VideoSink to the track.
|
||||
*
|
||||
* A track can have any number of VideoSinks. VideoSinks will replace
|
||||
* renderers. However, converting old style texture frames will involve costly
|
||||
* conversion to I420 so it is not recommended to upgrade before all your
|
||||
* sources produce VideoFrames.
|
||||
*/
|
||||
public void addSink(VideoSink sink) {
|
||||
final long nativeSink = nativeWrapSink(sink);
|
||||
sinks.put(sink, nativeSink);
|
||||
nativeAddSink(nativeTrack, nativeSink);
|
||||
}
|
||||
|
||||
/**
|
||||
* Removes a VideoSink from the track.
|
||||
*
|
||||
* If the VideoSink was not attached to the track, this is a no-op.
|
||||
*/
|
||||
public void removeSink(VideoSink sink) {
|
||||
final long nativeSink = sinks.remove(sink);
|
||||
if (nativeSink != 0) {
|
||||
nativeRemoveSink(nativeTrack, nativeSink);
|
||||
nativeFreeSink(nativeSink);
|
||||
}
|
||||
}
|
||||
|
||||
public void addRenderer(VideoRenderer renderer) {
|
||||
renderers.add(renderer);
|
||||
nativeAddSink(nativeTrack, renderer.nativeVideoRenderer);
|
||||
}
|
||||
|
||||
public void removeRenderer(VideoRenderer renderer) {
|
||||
if (!renderers.remove(renderer)) {
|
||||
return;
|
||||
}
|
||||
nativeRemoveSink(nativeTrack, renderer.nativeVideoRenderer);
|
||||
renderer.dispose();
|
||||
}
|
||||
|
||||
public void dispose() {
|
||||
while (!renderers.isEmpty()) {
|
||||
removeRenderer(renderers.getFirst());
|
||||
}
|
||||
for (long nativeSink : sinks.values()) {
|
||||
nativeRemoveSink(nativeTrack, nativeSink);
|
||||
nativeFreeSink(nativeSink);
|
||||
}
|
||||
sinks.clear();
|
||||
super.dispose();
|
||||
}
|
||||
|
||||
private static native void nativeAddSink(long nativeTrack, long nativeSink);
|
||||
private static native void nativeRemoveSink(long nativeTrack, long nativeSink);
|
||||
|
||||
private static native long nativeWrapSink(VideoSink sink);
|
||||
private static native void nativeFreeSink(long nativeSink);
|
||||
}
|
||||
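addSink()/removeSink() above are the VideoFrame-based replacement for the older renderer methods. A hedged usage sketch, using a lambda since VideoSink has a single method; |videoTrack| is assumed to exist and the log tag is invented.

VideoSink sink = frame
    -> Logging.d("Example", "Got frame " + frame.getRotatedWidth() + "x" + frame.getRotatedHeight());
videoTrack.addSink(sink);
// ... later, during teardown ...
videoTrack.removeSink(sink); // No-op if the sink was never added.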
41
sdk/android/instrumentationtests/AndroidManifest.xml
Normal file
@ -0,0 +1,41 @@
|
||||
<!--
|
||||
* Copyright 2017 The WebRTC Project Authors. All rights reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
-->
|
||||
|
||||
<manifest
|
||||
xmlns:android="http://schemas.android.com/apk/res/android"
|
||||
xmlns:tools="http://schemas.android.com/tools"
|
||||
package="org.webrtc">
|
||||
<uses-feature android:name="android.hardware.camera" />
|
||||
<uses-feature android:name="android.hardware.camera.autofocus" />
|
||||
<uses-feature android:glEsVersion="0x00020000" android:required="true" />
|
||||
|
||||
<uses-sdk android:minSdkVersion="16" android:targetSdkVersion="21" />
|
||||
|
||||
<uses-permission android:name="android.permission.CAMERA" />
|
||||
<uses-permission android:name="android.permission.MODIFY_AUDIO_SETTINGS" />
|
||||
<uses-permission android:name="android.permission.RECORD_AUDIO" />
|
||||
<uses-permission android:name="android.permission.INTERNET" />
|
||||
<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE"/>
|
||||
<uses-permission android:name="android.permission.ACCESS_NETWORK_STATE" />
|
||||
<uses-permission android:name="android.permission.CHANGE_NETWORK_STATE" />
|
||||
<uses-permission android:name="android.permission.RUN_INSTRUMENTATION" />
|
||||
|
||||
<application>
|
||||
<uses-library android:name="android.test.runner" />
|
||||
</application>
|
||||
|
||||
<!-- tools:ignore needed for chromium-junit4 tag. crbug.com/640116
|
||||
TODO(sakal): Remove once the tag is no longer needed. -->
|
||||
<instrumentation android:name="android.support.test.runner.AndroidJUnitRunner"
|
||||
tools:ignore="MissingPrefix"
|
||||
android:targetPackage="org.webrtc"
|
||||
android:label="Tests for WebRTC Android SDK"
|
||||
chromium-junit4="true"/>
|
||||
</manifest>
|
||||
18
sdk/android/instrumentationtests/ant.properties
Normal file
@ -0,0 +1,18 @@
|
||||
# This file is used to override default values used by the Ant build system.
|
||||
#
|
||||
# This file must be checked into Version Control Systems, as it is
|
||||
# integral to the build system of your project.
|
||||
|
||||
# This file is only used by the Ant script.
|
||||
|
||||
# You can use this to override default values such as
|
||||
# 'source.dir' for the location of your java source folder and
|
||||
# 'out.dir' for the location of your output folder.
|
||||
|
||||
# You can also use it define how the release builds are signed by declaring
|
||||
# the following properties:
|
||||
# 'key.store' for the location of your keystore and
|
||||
# 'key.alias' for the name of the key to use.
|
||||
# The password will be asked during the build when you use the 'release' target.
|
||||
|
||||
source.dir=../java/testcommon/src;src
|
||||
92
sdk/android/instrumentationtests/build.xml
Normal file
@ -0,0 +1,92 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<project name="libjingle_peerconnection_android_unittest" default="help">
|
||||
|
||||
<!-- The local.properties file is created and updated by the 'android' tool.
|
||||
It contains the path to the SDK. It should *NOT* be checked into
|
||||
Version Control Systems. -->
|
||||
<property file="local.properties" />
|
||||
|
||||
<!-- The ant.properties file can be created by you. It is only edited by the
|
||||
'android' tool to add properties to it.
|
||||
This is the place to change some Ant specific build properties.
|
||||
Here are some properties you may want to change/update:
|
||||
|
||||
source.dir
|
||||
The name of the source directory. Default is 'src'.
|
||||
out.dir
|
||||
The name of the output directory. Default is 'bin'.
|
||||
|
||||
For other overridable properties, look at the beginning of the rules
|
||||
files in the SDK, at tools/ant/build.xml
|
||||
|
||||
Properties related to the SDK location or the project target should
|
||||
be updated using the 'android' tool with the 'update' action.
|
||||
|
||||
This file is an integral part of the build system for your
|
||||
application and should be checked into Version Control Systems.
|
||||
|
||||
-->
|
||||
<property file="ant.properties" />
|
||||
|
||||
<!-- if sdk.dir was not set from one of the property file, then
|
||||
get it from the ANDROID_HOME env var.
|
||||
This must be done before we load project.properties since
|
||||
the proguard config can use sdk.dir -->
|
||||
<property environment="env" />
|
||||
<condition property="sdk.dir" value="${env.ANDROID_SDK_ROOT}">
|
||||
<isset property="env.ANDROID_SDK_ROOT" />
|
||||
</condition>
|
||||
|
||||
<!-- The project.properties file is created and updated by the 'android'
|
||||
tool, as well as ADT.
|
||||
|
||||
This contains project specific properties such as project target, and library
|
||||
dependencies. Lower level build properties are stored in ant.properties
|
||||
(or in .classpath for Eclipse projects).
|
||||
|
||||
This file is an integral part of the build system for your
|
||||
application and should be checked into Version Control Systems. -->
|
||||
<loadproperties srcFile="project.properties" />
|
||||
|
||||
<!-- quick check on sdk.dir -->
|
||||
<fail
|
||||
message="sdk.dir is missing. Make sure to generate local.properties using 'android update project' or to inject it through the ANDROID_HOME environment variable."
|
||||
unless="sdk.dir"
|
||||
/>
|
||||
|
||||
<!--
|
||||
Import per project custom build rules if present at the root of the project.
|
||||
This is the place to put custom intermediary targets such as:
|
||||
-pre-build
|
||||
-pre-compile
|
||||
-post-compile (This is typically used for code obfuscation.
|
||||
Compiled code location: ${out.classes.absolute.dir}
|
||||
If this is not done in place, override ${out.dex.input.absolute.dir})
|
||||
-post-package
|
||||
-post-build
|
||||
-pre-clean
|
||||
-->
|
||||
<import file="custom_rules.xml" optional="true" />
|
||||
|
||||
<!-- Import the actual build file.
|
||||
|
||||
To customize existing targets, there are two options:
|
||||
- Customize only one target:
|
||||
- copy/paste the target into this file, *before* the
|
||||
<import> task.
|
||||
- customize it to your needs.
|
||||
- Customize the whole content of build.xml
|
||||
- copy/paste the content of the rules files (minus the top node)
|
||||
into this file, replacing the <import> task.
|
||||
- customize to your needs.
|
||||
|
||||
***********************
|
||||
****** IMPORTANT ******
|
||||
***********************
|
||||
In all cases you must update the value of version-tag below to read 'custom' instead of an integer,
|
||||
in order to avoid having your file be overridden by tools such as "android update project"
|
||||
-->
|
||||
<!-- version-tag: 1 -->
|
||||
<import file="${sdk.dir}/tools/ant/build.xml" />
|
||||
|
||||
</project>
|
||||
16
sdk/android/instrumentationtests/project.properties
Normal file
@ -0,0 +1,16 @@
|
||||
# This file is automatically generated by Android Tools.
|
||||
# Do not modify this file -- YOUR CHANGES WILL BE ERASED!
|
||||
#
|
||||
# This file must be checked in Version Control Systems.
|
||||
#
|
||||
# To customize properties used by the Ant build system edit
|
||||
# "ant.properties", and override values to adapt the script to your
|
||||
# project structure.
|
||||
#
|
||||
# To enable ProGuard to shrink and obfuscate your code, uncomment this (available properties: sdk.dir, user.home):
|
||||
#proguard.config=${sdk.dir}/tools/proguard/proguard-android.txt:proguard-project.txt
|
||||
|
||||
# Project target.
|
||||
target=android-22
|
||||
|
||||
java.compilerargs=-Xlint:all -Werror
|
||||
@ -0,0 +1,204 @@
|
||||
/*
|
||||
* Copyright 2016 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
package org.webrtc;
|
||||
|
||||
import android.content.Context;
|
||||
import android.support.test.InstrumentationRegistry;
|
||||
import android.support.test.filters.LargeTest;
|
||||
import android.support.test.filters.MediumTest;
|
||||
import android.support.test.filters.SmallTest;
|
||||
import java.io.IOException;
|
||||
import org.chromium.base.test.BaseJUnit4ClassRunner;
|
||||
import org.junit.After;
|
||||
import org.junit.Before;
|
||||
import org.junit.Test;
|
||||
import org.junit.runner.RunWith;
|
||||
|
||||
@RunWith(BaseJUnit4ClassRunner.class)
|
||||
public class Camera1CapturerUsingByteBufferTest {
|
||||
static final String TAG = "Camera1CapturerUsingByteBufferTest";
|
||||
|
||||
private class TestObjectFactory extends CameraVideoCapturerTestFixtures.TestObjectFactory {
|
||||
@Override
|
||||
public boolean isCapturingToTexture() {
|
||||
return false;
|
||||
}
|
||||
|
||||
@Override
|
||||
public CameraEnumerator getCameraEnumerator() {
|
||||
return new Camera1Enumerator(false);
|
||||
}
|
||||
|
||||
@Override
|
||||
public Context getAppContext() {
|
||||
return InstrumentationRegistry.getTargetContext();
|
||||
}
|
||||
|
||||
@SuppressWarnings("deprecation")
|
||||
@Override
|
||||
public Object rawOpenCamera(String cameraName) {
|
||||
return android.hardware.Camera.open(Camera1Enumerator.getCameraIndex(cameraName));
|
||||
}
|
||||
|
||||
@SuppressWarnings("deprecation")
|
||||
@Override
|
||||
public void rawCloseCamera(Object camera) {
|
||||
((android.hardware.Camera) camera).release();
|
||||
}
|
||||
}
|
||||
|
||||
private CameraVideoCapturerTestFixtures fixtures;
|
||||
|
||||
@Before
|
||||
public void setUp() {
|
||||
// Enable VideoFrame capture.
|
||||
PeerConnectionFactory.initializeFieldTrials(PeerConnectionFactory.VIDEO_FRAME_EMIT_TRIAL + "/"
|
||||
+ PeerConnectionFactory.TRIAL_ENABLED + "/");
|
||||
fixtures = new CameraVideoCapturerTestFixtures(new TestObjectFactory());
|
||||
}
|
||||
|
||||
@After
|
||||
public void tearDown() {
|
||||
fixtures.dispose();
|
||||
}
|
||||
|
||||
@Test
|
||||
@SmallTest
|
||||
public void testCreateAndDispose() throws InterruptedException {
|
||||
fixtures.createCapturerAndDispose();
|
||||
}
|
||||
|
||||
@Test
|
||||
@SmallTest
|
||||
public void testCreateNonExistingCamera() throws InterruptedException {
|
||||
fixtures.createNonExistingCamera();
|
||||
}
|
||||
|
||||
// This test verifies that the camera can be started and that the frames are forwarded
|
||||
// to a Java video renderer using a "default" capturer.
|
||||
// It tests both the Java and the C++ layer.
|
||||
@Test
|
||||
@MediumTest
|
||||
public void testCreateCapturerAndRender() throws InterruptedException {
|
||||
fixtures.createCapturerAndRender();
|
||||
}
|
||||
|
||||
// This test verifies that the camera can be started and that the frames are forwarded
|
||||
// to a Java video renderer using the front facing video capturer.
|
||||
// It tests both the Java and the C++ layer.
|
||||
@Test
|
||||
@MediumTest
|
||||
public void testStartFrontFacingVideoCapturer() throws InterruptedException {
|
||||
fixtures.createFrontFacingCapturerAndRender();
|
||||
}
|
||||
|
||||
// This test verifies that the camera can be started and that the frames are forwarded
|
||||
// to a Java video renderer using the back facing video capturer.
|
||||
// It tests both the Java and the C++ layer.
|
||||
@Test
|
||||
@MediumTest
|
||||
public void testStartBackFacingVideoCapturer() throws InterruptedException {
|
||||
fixtures.createBackFacingCapturerAndRender();
|
||||
}
|
||||
|
||||
// This test verifies that the default camera can be started and that the camera can
|
||||
// later be switched to another camera.
|
||||
// It tests both the Java and the C++ layer.
|
||||
@Test
|
||||
@MediumTest
|
||||
public void testSwitchVideoCapturer() throws InterruptedException {
|
||||
fixtures.switchCamera();
|
||||
}
|
||||
|
||||
@Test
|
||||
@MediumTest
|
||||
public void testCameraEvents() throws InterruptedException {
|
||||
fixtures.cameraEventsInvoked();
|
||||
}
|
||||
|
||||
@Test
|
||||
@MediumTest
|
||||
public void testUpdateMediaRecorder() throws InterruptedException, IOException {
|
||||
fixtures.updateMediaRecorder(false /* useSurfaceCapture */);
|
||||
}
|
||||
|
||||
// Test what happens when attempting to call e.g. switchCamera() after camera has been stopped.
|
||||
@Test
|
||||
@MediumTest
|
||||
public void testCameraCallsAfterStop() throws InterruptedException {
|
||||
fixtures.cameraCallsAfterStop();
|
||||
}
|
||||
|
||||
// This test verifies that the VideoSource that the CameraVideoCapturer is connected to can
|
||||
// be stopped and restarted. It tests both the Java and the C++ layer.
|
||||
@Test
|
||||
@LargeTest
|
||||
public void testStopRestartVideoSource() throws InterruptedException {
|
||||
fixtures.stopRestartVideoSource();
|
||||
}
|
||||
|
||||
// This test verifies that the camera can be started at different resolutions.
|
||||
// It does not test or use the C++ layer.
|
||||
@Test
|
||||
@LargeTest
|
||||
public void testStartStopWithDifferentResolutions() throws InterruptedException {
|
||||
fixtures.startStopWithDifferentResolutions();
|
||||
}
|
||||
|
||||
// This test checks what happens if buffers are returned after the capturer has
|
||||
// been stopped and restarted. It does not test or use the C++ layer.
|
||||
@Test
|
||||
@LargeTest
|
||||
public void testReturnBufferLate() throws InterruptedException {
|
||||
fixtures.returnBufferLate();
|
||||
}
|
||||
|
||||
// This test verifies that we can capture frames, keep the frames in a local renderer, stop capturing,
|
||||
// and then return the frames. The difference from testReturnBufferLate() is that we
|
||||
// also test the JNI and C++ AndroidVideoCapturer parts.
|
||||
@Test
|
||||
@MediumTest
|
||||
public void testReturnBufferLateEndToEnd() throws InterruptedException {
|
||||
fixtures.returnBufferLateEndToEnd();
|
||||
}
|
||||
|
||||
// This test verifies that frames forwarded to a renderer are scaled if adaptOutputFormat is
|
||||
// called. It tests both the Java and C++ parts of the stack.
|
||||
@Test
|
||||
@MediumTest
|
||||
public void testScaleCameraOutput() throws InterruptedException {
|
||||
fixtures.scaleCameraOutput();
|
||||
}
|
||||
|
||||
// This test verifies that an error is reported if the camera is already opened
|
||||
// when CameraVideoCapturer is started.
|
||||
@Test
|
||||
@LargeTest
|
||||
public void testStartWhileCameraIsAlreadyOpen() throws InterruptedException {
|
||||
fixtures.startWhileCameraIsAlreadyOpen();
|
||||
}
|
||||
|
||||
// This test verifies that CameraVideoCapturer can be started, even if the camera is already opened,
|
||||
// if the camera is closed while CameraVideoCapturer is re-trying to start.
|
||||
@Test
|
||||
@LargeTest
|
||||
public void testStartWhileCameraIsAlreadyOpenAndCloseCamera() throws InterruptedException {
|
||||
fixtures.startWhileCameraIsAlreadyOpenAndCloseCamera();
|
||||
}
|
||||
|
||||
// This test verifies that CameraVideoCapturer.stop can be called while CameraVideoCapturer is
|
||||
// re-trying to start.
|
||||
@Test
|
||||
@MediumTest
|
||||
public void testStartWhileCameraIsAlreadyOpenAndStop() throws InterruptedException {
|
||||
fixtures.startWhileCameraIsAlreadyOpenAndStop();
|
||||
}
|
||||
}
|
||||
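The texture-based Camera1 test that follows is identical to the class above apart from its TestObjectFactory: it keeps the default isCapturingToTexture() == true and constructs its enumerator without the captureToTexture flag. A minimal illustration of the two configurations, assuming Camera1Enumerator's no-argument constructor enables texture capture (which is how the two classes use it):

// ByteBuffer path, as configured in Camera1CapturerUsingByteBufferTest above.
CameraEnumerator byteBufferEnumerator = new Camera1Enumerator(false /* captureToTexture */);
// Texture path, as configured in Camera1CapturerUsingTextureTest below.
CameraEnumerator textureEnumerator = new Camera1Enumerator();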
@ -0,0 +1,207 @@
|
||||
/*
|
||||
* Copyright 2016 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
package org.webrtc;
|
||||
|
||||
import android.content.Context;
|
||||
import android.support.test.InstrumentationRegistry;
|
||||
import android.support.test.filters.LargeTest;
|
||||
import android.support.test.filters.MediumTest;
|
||||
import android.support.test.filters.SmallTest;
|
||||
import java.io.IOException;
|
||||
import org.chromium.base.test.BaseJUnit4ClassRunner;
|
||||
import org.junit.After;
|
||||
import org.junit.Before;
|
||||
import org.junit.Test;
|
||||
import org.junit.runner.RunWith;
|
||||
|
||||
@RunWith(BaseJUnit4ClassRunner.class)
|
||||
public class Camera1CapturerUsingTextureTest {
|
||||
static final String TAG = "Camera1CapturerUsingTextureTest";
|
||||
|
||||
private class TestObjectFactory extends CameraVideoCapturerTestFixtures.TestObjectFactory {
|
||||
@Override
|
||||
public CameraEnumerator getCameraEnumerator() {
|
||||
return new Camera1Enumerator();
|
||||
}
|
||||
|
||||
@Override
|
||||
public Context getAppContext() {
|
||||
return InstrumentationRegistry.getTargetContext();
|
||||
}
|
||||
|
||||
@SuppressWarnings("deprecation")
|
||||
@Override
|
||||
public Object rawOpenCamera(String cameraName) {
|
||||
return android.hardware.Camera.open(Camera1Enumerator.getCameraIndex(cameraName));
|
||||
}
|
||||
|
||||
@SuppressWarnings("deprecation")
|
||||
@Override
|
||||
public void rawCloseCamera(Object camera) {
|
||||
((android.hardware.Camera) camera).release();
|
||||
}
|
||||
}
|
||||
|
||||
private CameraVideoCapturerTestFixtures fixtures;
|
||||
|
||||
@Before
|
||||
public void setUp() {
|
||||
// Enable VideoFrame capture.
|
||||
PeerConnectionFactory.initializeFieldTrials(PeerConnectionFactory.VIDEO_FRAME_EMIT_TRIAL + "/"
|
||||
+ PeerConnectionFactory.TRIAL_ENABLED + "/");
|
||||
fixtures = new CameraVideoCapturerTestFixtures(new TestObjectFactory());
|
||||
}
|
||||
|
||||
@After
|
||||
public void tearDown() {
|
||||
fixtures.dispose();
|
||||
}
|
||||
|
||||
@Test
|
||||
@SmallTest
|
||||
public void testCreateAndDispose() throws InterruptedException {
|
||||
fixtures.createCapturerAndDispose();
|
||||
}
|
||||
|
||||
@Test
|
||||
@SmallTest
|
||||
public void testCreateNonExistingCamera() throws InterruptedException {
|
||||
fixtures.createNonExistingCamera();
|
||||
}
|
||||
|
||||
// This test verifies that the camera can be started and that the frames are forwarded
|
||||
// to a Java video renderer using a "default" capturer.
|
||||
// It tests both the Java and the C++ layer.
|
||||
@Test
|
||||
@MediumTest
|
||||
public void testCreateCapturerAndRender() throws InterruptedException {
|
||||
fixtures.createCapturerAndRender();
|
||||
}
|
||||
|
||||
// This test verifies that the camera can be started and that the frames are forwarded
|
||||
// to a Java video renderer using the front facing video capturer.
|
||||
// It tests both the Java and the C++ layer.
|
||||
@Test
|
||||
@MediumTest
|
||||
public void testStartFrontFacingVideoCapturer() throws InterruptedException {
|
||||
fixtures.createFrontFacingCapturerAndRender();
|
||||
}
|
||||
|
||||
// This test verifies that the camera can be started and that the frames are forwarded
|
||||
// to a Java video renderer using the back facing video capturer.
|
||||
// It tests both the Java and the C++ layer.
|
||||
@Test
|
||||
@MediumTest
|
||||
public void testStartBackFacingVideoCapturer() throws InterruptedException {
|
||||
fixtures.createBackFacingCapturerAndRender();
|
||||
}
|
||||
|
||||
// This test verifies that the default camera can be started and that the camera can
|
||||
// later be switched to another camera.
|
||||
// It tests both the Java and the C++ layer.
|
||||
@Test
|
||||
@MediumTest
|
||||
public void testSwitchVideoCapturer() throws InterruptedException {
|
||||
fixtures.switchCamera();
|
||||
}
|
||||
|
||||
@Test
|
||||
@MediumTest
|
||||
public void testCameraEvents() throws InterruptedException {
|
||||
fixtures.cameraEventsInvoked();
|
||||
}
|
||||
|
||||
@Test
|
||||
@MediumTest
|
||||
public void testUpdateMediaRecorder() throws InterruptedException, IOException {
|
||||
fixtures.updateMediaRecorder(false /* useSurfaceCapture */);
|
||||
}
|
||||
|
||||
// Test what happens when attempting to call e.g. switchCamera() after camera has been stopped.
|
||||
@Test
|
||||
@MediumTest
|
||||
public void testCameraCallsAfterStop() throws InterruptedException {
|
||||
fixtures.cameraCallsAfterStop();
|
||||
}
|
||||
|
||||
// This test verifies that the VideoSource that the CameraVideoCapturer is connected to can
|
||||
// be stopped and restarted. It tests both the Java and the C++ layer.
|
||||
@Test
|
||||
@LargeTest
|
||||
public void testStopRestartVideoSource() throws InterruptedException {
|
||||
fixtures.stopRestartVideoSource();
|
||||
}
|
||||
|
||||
// This test verifies that the camera can be started at different resolutions.
|
||||
// It does not test or use the C++ layer.
|
||||
@Test
|
||||
@LargeTest
|
||||
public void testStartStopWithDifferentResolutions() throws InterruptedException {
|
||||
fixtures.startStopWithDifferentResolutions();
|
||||
}
|
||||
|
||||
// This test checks what happens if buffers are returned after the capturer has
|
||||
// been stopped and restarted. It does not test or use the C++ layer.
|
||||
@Test
|
||||
@LargeTest
|
||||
public void testReturnBufferLate() throws InterruptedException {
|
||||
fixtures.returnBufferLate();
|
||||
}
|
||||
|
||||
// This test verifies that we can capture frames, keep the frames in a local renderer, stop capturing,
|
||||
// and then return the frames. The difference from testReturnBufferLate() is that we
|
||||
// also test the JNI and C++ AndroidVideoCapturer parts.
|
||||
@Test
|
||||
@MediumTest
|
||||
public void testReturnBufferLateEndToEnd() throws InterruptedException {
|
||||
fixtures.returnBufferLateEndToEnd();
|
||||
}
|
||||
|
||||
// This test verifies that CameraEventsHandler.onError is triggered if video buffers are not returned to
|
||||
// the capturer.
|
||||
@Test
|
||||
@LargeTest
|
||||
public void testCameraFreezedEventOnBufferStarvation() throws InterruptedException {
|
||||
fixtures.cameraFreezedEventOnBufferStarvation();
|
||||
}
|
||||
|
||||
// This test verifies that frames forwarded to a renderer are scaled if adaptOutputFormat is
|
||||
// called. It tests both the Java and C++ parts of the stack.
|
||||
@Test
|
||||
@MediumTest
|
||||
public void testScaleCameraOutput() throws InterruptedException {
|
||||
fixtures.scaleCameraOutput();
|
||||
}
|
||||
|
||||
// This test verifies that an error is reported if the camera is already opened
|
||||
// when CameraVideoCapturer is started.
|
||||
@Test
|
||||
@LargeTest
|
||||
public void testStartWhileCameraIsAlreadyOpen() throws InterruptedException {
|
||||
fixtures.startWhileCameraIsAlreadyOpen();
|
||||
}
|
||||
|
||||
// This test verifies that CameraVideoCapturer can be started, even if the camera is already opened,
|
||||
// if the camera is closed while CameraVideoCapturer is re-trying to start.
|
||||
@Test
|
||||
@LargeTest
|
||||
public void testStartWhileCameraIsAlreadyOpenAndCloseCamera() throws InterruptedException {
|
||||
fixtures.startWhileCameraIsAlreadyOpenAndCloseCamera();
|
||||
}
|
||||
|
||||
// This test verifies that CameraVideoCapturer.stop can be called while CameraVideoCapturer is
|
||||
// re-trying to start.
|
||||
@Test
|
||||
@MediumTest
|
||||
public void testStartWhileCameraIsAlreadyOpenAndStop() throws InterruptedException {
|
||||
fixtures.startWhileCameraIsAlreadyOpenAndStop();
|
||||
}
|
||||
}
|
||||
@ -0,0 +1,336 @@
|
||||
/*
|
||||
* Copyright 2016 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
package org.webrtc;
|
||||
|
||||
import static org.junit.Assert.assertEquals;
|
||||
import static org.junit.Assert.assertFalse;
|
||||
import static org.junit.Assert.fail;
|
||||
|
||||
import android.annotation.TargetApi;
|
||||
import android.content.Context;
|
||||
import android.hardware.camera2.CameraAccessException;
|
||||
import android.hardware.camera2.CameraDevice;
|
||||
import android.hardware.camera2.CameraManager;
|
||||
import android.os.Handler;
|
||||
import android.os.Looper;
|
||||
import android.support.test.InstrumentationRegistry;
|
||||
import android.support.test.filters.LargeTest;
|
||||
import android.support.test.filters.MediumTest;
|
||||
import android.support.test.filters.SmallTest;
|
||||
import java.io.IOException;
|
||||
import java.util.concurrent.CountDownLatch;
|
||||
import org.chromium.base.test.BaseJUnit4ClassRunner;
|
||||
import org.junit.After;
|
||||
import org.junit.Before;
|
||||
import org.junit.Test;
|
||||
import org.junit.runner.RunWith;
|
||||
|
||||
@TargetApi(21)
|
||||
@RunWith(BaseJUnit4ClassRunner.class)
|
||||
public class Camera2CapturerTest {
|
||||
static final String TAG = "Camera2CapturerTest";
|
||||
|
||||
/**
|
||||
* Simple camera2 implementation that only knows how to open the camera and close it.
|
||||
*/
|
||||
private class SimpleCamera2 {
|
||||
final CameraManager cameraManager;
|
||||
final LooperThread looperThread;
|
||||
final CountDownLatch openDoneSignal;
|
||||
final Object cameraDeviceLock;
|
||||
CameraDevice cameraDevice; // Guarded by cameraDeviceLock
|
||||
boolean openSucceeded; // Guarded by cameraDeviceLock
|
||||
|
||||
private class LooperThread extends Thread {
|
||||
final CountDownLatch startedSignal = new CountDownLatch(1);
|
||||
private Handler handler;
|
||||
|
||||
@Override
|
||||
public void run() {
|
||||
Looper.prepare();
|
||||
handler = new Handler();
|
||||
startedSignal.countDown();
|
||||
Looper.loop();
|
||||
}
|
||||
|
||||
public void waitToStart() {
|
||||
ThreadUtils.awaitUninterruptibly(startedSignal);
|
||||
}
|
||||
|
||||
public void requestStop() {
|
||||
handler.getLooper().quit();
|
||||
}
|
||||
|
||||
public Handler getHandler() {
|
||||
return handler;
|
||||
}
|
||||
}
|
||||
|
||||
private class CameraStateCallback extends CameraDevice.StateCallback {
|
||||
@Override
|
||||
public void onClosed(CameraDevice cameraDevice) {
|
||||
Logging.d(TAG, "Simple camera2 closed.");
|
||||
|
||||
synchronized (cameraDeviceLock) {
|
||||
SimpleCamera2.this.cameraDevice = null;
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onDisconnected(CameraDevice cameraDevice) {
|
||||
Logging.d(TAG, "Simple camera2 disconnected.");
|
||||
|
||||
synchronized (cameraDeviceLock) {
|
||||
SimpleCamera2.this.cameraDevice = null;
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onError(CameraDevice cameraDevice, int errorCode) {
|
||||
Logging.w(TAG, "Simple camera2 error: " + errorCode);
|
||||
|
||||
synchronized (cameraDeviceLock) {
|
||||
SimpleCamera2.this.cameraDevice = cameraDevice;
|
||||
openSucceeded = false;
|
||||
}
|
||||
|
||||
openDoneSignal.countDown();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onOpened(CameraDevice cameraDevice) {
|
||||
Logging.d(TAG, "Simple camera2 opened.");
|
||||
|
||||
synchronized (cameraDeviceLock) {
|
||||
SimpleCamera2.this.cameraDevice = cameraDevice;
|
||||
openSucceeded = true;
|
||||
}
|
||||
|
||||
openDoneSignal.countDown();
|
||||
}
|
||||
}
|
||||
|
||||
SimpleCamera2(Context context, String deviceName) {
|
||||
cameraManager = (CameraManager) context.getSystemService(Context.CAMERA_SERVICE);
|
||||
looperThread = new LooperThread();
|
||||
looperThread.start();
|
||||
looperThread.waitToStart();
|
||||
cameraDeviceLock = new Object();
|
||||
openDoneSignal = new CountDownLatch(1);
|
||||
cameraDevice = null;
|
||||
Logging.d(TAG, "Opening simple camera2.");
|
||||
try {
|
||||
cameraManager.openCamera(deviceName, new CameraStateCallback(), looperThread.getHandler());
|
||||
} catch (CameraAccessException e) {
|
||||
fail("Simple camera2 CameraAccessException: " + e.getMessage());
|
||||
}
|
||||
|
||||
Logging.d(TAG, "Waiting for simple camera2 to open.");
|
||||
ThreadUtils.awaitUninterruptibly(openDoneSignal);
|
||||
synchronized (cameraDeviceLock) {
|
||||
if (!openSucceeded) {
|
||||
fail("Opening simple camera2 failed.");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public void close() {
|
||||
Logging.d(TAG, "Closing simple camera2.");
|
||||
synchronized (cameraDeviceLock) {
|
||||
if (cameraDevice != null) {
|
||||
cameraDevice.close();
|
||||
}
|
||||
}
|
||||
|
||||
looperThread.requestStop();
|
||||
ThreadUtils.joinUninterruptibly(looperThread);
|
||||
}
|
||||
}
|
||||
|
||||
private class TestObjectFactory extends CameraVideoCapturerTestFixtures.TestObjectFactory {
|
||||
@Override
|
||||
public CameraEnumerator getCameraEnumerator() {
|
||||
return new Camera2Enumerator(getAppContext());
|
||||
}
|
||||
|
||||
@Override
|
||||
public Context getAppContext() {
|
||||
return InstrumentationRegistry.getTargetContext();
|
||||
}
|
||||
|
||||
@SuppressWarnings("deprecation")
|
||||
@Override
|
||||
public Object rawOpenCamera(String cameraName) {
|
||||
return new SimpleCamera2(getAppContext(), cameraName);
|
||||
}
|
||||
|
||||
@SuppressWarnings("deprecation")
|
||||
@Override
|
||||
public void rawCloseCamera(Object camera) {
|
||||
((SimpleCamera2) camera).close();
|
||||
}
|
||||
}
|
||||
|
||||
private CameraVideoCapturerTestFixtures fixtures;
|
||||
|
||||
@Before
|
||||
public void setUp() {
|
||||
// Enable VideoFrame capture.
|
||||
PeerConnectionFactory.initializeFieldTrials(PeerConnectionFactory.VIDEO_FRAME_EMIT_TRIAL + "/"
|
||||
+ PeerConnectionFactory.TRIAL_ENABLED + "/");
|
||||
fixtures = new CameraVideoCapturerTestFixtures(new TestObjectFactory());
|
||||
}
|
||||
|
||||
@After
|
||||
public void tearDown() {
|
||||
fixtures.dispose();
|
||||
}
|
||||
|
||||
@Test
|
||||
@SmallTest
|
||||
public void testCreateAndDispose() throws InterruptedException {
|
||||
fixtures.createCapturerAndDispose();
|
||||
}
|
||||
|
||||
@Test
|
||||
@SmallTest
|
||||
public void testCreateNonExistingCamera() throws InterruptedException {
|
||||
fixtures.createNonExistingCamera();
|
||||
}
|
||||
|
||||
// This test verifies that the camera can be started and that the frames are forwarded
|
||||
// to a Java video renderer using a "default" capturer.
|
||||
// It tests both the Java and the C++ layer.
|
||||
@Test
|
||||
@MediumTest
|
||||
public void testCreateCapturerAndRender() throws InterruptedException {
|
||||
fixtures.createCapturerAndRender();
|
||||
}
|
||||
|
||||
// This test verifies that the camera can be started and that the frames are forwarded
|
||||
// to a Java video renderer using the front facing video capturer.
|
||||
// It tests both the Java and the C++ layer.
|
||||
@Test
|
||||
@MediumTest
|
||||
public void testStartFrontFacingVideoCapturer() throws InterruptedException {
|
||||
fixtures.createFrontFacingCapturerAndRender();
|
||||
}
|
||||
|
||||
// This test verifies that the camera can be started and that the frames are forwarded
|
||||
// to a Java video renderer using the back facing video capturer.
|
||||
// It tests both the Java and the C++ layer.
|
||||
@Test
|
||||
@MediumTest
|
||||
public void testStartBackFacingVideoCapturer() throws InterruptedException {
|
||||
fixtures.createBackFacingCapturerAndRender();
|
||||
}
|
||||
|
||||
// This test verifies that the default camera can be started and that the camera can
|
||||
// later be switched to another camera.
|
||||
// It tests both the Java and the C++ layer.
|
||||
@Test
|
||||
@MediumTest
|
||||
public void testSwitchVideoCapturer() throws InterruptedException {
|
||||
fixtures.switchCamera();
|
||||
}
|
||||
|
||||
@Test
|
||||
@MediumTest
|
||||
public void testCameraEvents() throws InterruptedException {
|
||||
fixtures.cameraEventsInvoked();
|
||||
}
|
||||
|
||||
@Test
|
||||
@MediumTest
|
||||
public void testUpdateMediaRecorder() throws InterruptedException, IOException {
|
||||
fixtures.updateMediaRecorder(true /* useSurfaceCapture */);
|
||||
}
|
||||
|
||||
// Test what happens when attempting to call e.g. switchCamera() after camera has been stopped.
|
||||
@Test
|
||||
@MediumTest
|
||||
public void testCameraCallsAfterStop() throws InterruptedException {
|
||||
fixtures.cameraCallsAfterStop();
|
||||
}
|
||||
|
||||
// This test verifies that the VideoSource that the CameraVideoCapturer is connected to can
|
||||
// be stopped and restarted. It tests both the Java and the C++ layer.
|
||||
@Test
|
||||
@LargeTest
|
||||
public void testStopRestartVideoSource() throws InterruptedException {
|
||||
fixtures.stopRestartVideoSource();
|
||||
}
|
||||
|
||||
// This test verifies that the camera can be started at different resolutions.
|
||||
// It does not test or use the C++ layer.
|
||||
@Test
|
||||
@LargeTest
|
||||
public void testStartStopWithDifferentResolutions() throws InterruptedException {
|
||||
fixtures.startStopWithDifferentResolutions();
|
||||
}
|
||||
|
||||
// This test checks what happens if buffers are returned after the capturer has
|
||||
// been stopped and restarted. It does not test or use the C++ layer.
|
||||
@Test
|
||||
@LargeTest
|
||||
public void testReturnBufferLate() throws InterruptedException {
|
||||
fixtures.returnBufferLate();
|
||||
}
|
||||
|
||||
// This test verifies that we can capture frames, keep the frames in a local renderer, stop capturing,
|
||||
// and then return the frames. The difference from testReturnBufferLate() is that we
|
||||
// also test the JNI and C++ AndroidVideoCapturer parts.
|
||||
@Test
|
||||
@MediumTest
|
||||
public void testReturnBufferLateEndToEnd() throws InterruptedException {
|
||||
fixtures.returnBufferLateEndToEnd();
|
||||
}
|
||||
|
||||
// This test verifies that CameraEventsHandler.onError is triggered if video buffers are not returned to
|
||||
// the capturer.
|
||||
@Test
|
||||
@LargeTest
|
||||
public void testCameraFreezedEventOnBufferStarvation() throws InterruptedException {
|
||||
fixtures.cameraFreezedEventOnBufferStarvation();
|
||||
}
|
||||
|
||||
// This test verifies that frames forwarded to a renderer are scaled if adaptOutputFormat is
|
||||
// called. It tests both the Java and C++ parts of the stack.
|
||||
@Test
|
||||
@MediumTest
|
||||
public void testScaleCameraOutput() throws InterruptedException {
|
||||
fixtures.scaleCameraOutput();
|
||||
}
|
||||
|
||||
// This test verifies that an error is reported if the camera is already opened
|
||||
// when CameraVideoCapturer is started.
|
||||
@Test
|
||||
@LargeTest
|
||||
public void testStartWhileCameraIsAlreadyOpen() throws InterruptedException {
|
||||
fixtures.startWhileCameraIsAlreadyOpen();
|
||||
}
|
||||
|
||||
// This test verifies that CameraVideoCapturer can be started, even if the camera is already opened,
|
||||
// if the camera is closed while CameraVideoCapturer is re-trying to start.
|
||||
@Test
|
||||
@LargeTest
|
||||
public void testStartWhileCameraIsAlreadyOpenAndCloseCamera() throws InterruptedException {
|
||||
fixtures.startWhileCameraIsAlreadyOpenAndCloseCamera();
|
||||
}
|
||||
|
||||
// This test verifies that CameraVideoCapturer.stop can be called while CameraVideoCapturer is
|
||||
// re-trying to start.
|
||||
@Test
|
||||
@MediumTest
|
||||
public void testStartWhileCameraIsAlreadyOpenAndStop() throws InterruptedException {
|
||||
fixtures.startWhileCameraIsAlreadyOpenAndStop();
|
||||
}
|
||||
}
|
||||
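Unlike the Camera1 tests, Camera2CapturerTest above cannot open its competing camera with android.hardware.Camera.open(): the camera2 API delivers CameraDevice.StateCallback on a Handler, so SimpleCamera2 starts a dedicated Looper thread before calling CameraManager.openCamera(). A condensed, self-contained sketch of that pattern follows; the class, package, and method names are illustrative, only the Android framework APIs are real, and failure signalling is left out for brevity.

package org.webrtc.example;

import android.content.Context;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CameraManager;
import android.os.Handler;
import android.os.HandlerThread;

// Illustrative only: opens a camera2 device and holds it, competing with a CameraVideoCapturer.
final class CompetingCamera2Sketch {
  static void open(Context context, String deviceName) throws CameraAccessException {
    // camera2 callbacks must run on a thread with a Looper; HandlerThread provides one.
    HandlerThread callbackThread = new HandlerThread("Camera2Callbacks");
    callbackThread.start();
    Handler handler = new Handler(callbackThread.getLooper());

    CameraManager manager = (CameraManager) context.getSystemService(Context.CAMERA_SERVICE);
    manager.openCamera(deviceName, new CameraDevice.StateCallback() {
      @Override
      public void onOpened(CameraDevice device) {
        // The device is now held open; close it later to release the camera.
      }

      @Override
      public void onDisconnected(CameraDevice device) {
        device.close();
      }

      @Override
      public void onError(CameraDevice device, int errorCode) {
        device.close();
      }
    }, handler);
  }
}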
@ -0,0 +1,844 @@
|
||||
/*
|
||||
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
package org.webrtc;
|
||||
|
||||
import static org.junit.Assert.assertEquals;
|
||||
import static org.junit.Assert.assertFalse;
|
||||
import static org.junit.Assert.assertTrue;
|
||||
import static org.junit.Assert.fail;
|
||||
|
||||
import android.annotation.TargetApi;
|
||||
import android.content.Context;
|
||||
import android.media.CamcorderProfile;
|
||||
import android.media.MediaRecorder;
|
||||
import android.os.Environment;
|
||||
import java.io.File;
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import java.util.concurrent.CountDownLatch;
|
||||
import org.chromium.base.test.BaseJUnit4ClassRunner;
|
||||
import org.junit.runner.RunWith;
|
||||
import org.webrtc.CameraEnumerationAndroid.CaptureFormat;
|
||||
import org.webrtc.VideoRenderer.I420Frame;
|
||||
|
||||
class CameraVideoCapturerTestFixtures {
|
||||
static final String TAG = "CameraVideoCapturerTestFixtures";
|
||||
// Default values used for starting capturing
|
||||
static final int DEFAULT_WIDTH = 640;
|
||||
static final int DEFAULT_HEIGHT = 480;
|
||||
static final int DEFAULT_FPS = 15;
|
||||
|
||||
static private class RendererCallbacks implements VideoRenderer.Callbacks {
|
||||
private int framesRendered = 0;
|
||||
private final Object frameLock = new Object();
|
||||
private int width = 0;
|
||||
private int height = 0;
|
||||
|
||||
@Override
|
||||
public void renderFrame(I420Frame frame) {
|
||||
synchronized (frameLock) {
|
||||
++framesRendered;
|
||||
width = frame.rotatedWidth();
|
||||
height = frame.rotatedHeight();
|
||||
frameLock.notify();
|
||||
}
|
||||
VideoRenderer.renderFrameDone(frame);
|
||||
}
|
||||
|
||||
public int frameWidth() {
|
||||
synchronized (frameLock) {
|
||||
return width;
|
||||
}
|
||||
}
|
||||
|
||||
public int frameHeight() {
|
||||
synchronized (frameLock) {
|
||||
return height;
|
||||
}
|
||||
}
|
||||
|
||||
public int waitForNextFrameToRender() throws InterruptedException {
|
||||
Logging.d(TAG, "Waiting for the next frame to render");
|
||||
synchronized (frameLock) {
|
||||
frameLock.wait();
|
||||
return framesRendered;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
static private class FakeAsyncRenderer implements VideoRenderer.Callbacks {
|
||||
private final List<I420Frame> pendingFrames = new ArrayList<I420Frame>();
|
||||
|
||||
@Override
|
||||
public void renderFrame(I420Frame frame) {
|
||||
synchronized (pendingFrames) {
|
||||
pendingFrames.add(frame);
|
||||
pendingFrames.notifyAll();
|
||||
}
|
||||
}
|
||||
|
||||
// Wait until at least one frame has been received before returning the frames.
|
||||
public List<I420Frame> waitForPendingFrames() throws InterruptedException {
|
||||
Logging.d(TAG, "Waiting for pending frames");
|
||||
synchronized (pendingFrames) {
|
||||
while (pendingFrames.isEmpty()) {
|
||||
pendingFrames.wait();
|
||||
}
|
||||
return new ArrayList<I420Frame>(pendingFrames);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
static private class FakeCapturerObserver implements CameraVideoCapturer.CapturerObserver {
|
||||
private int framesCaptured = 0;
|
||||
private VideoFrame videoFrame;
|
||||
final private Object frameLock = new Object();
|
||||
final private Object capturerStartLock = new Object();
|
||||
private boolean capturerStartResult = false;
|
||||
final private List<Long> timestamps = new ArrayList<Long>();
|
||||
|
||||
@Override
|
||||
public void onCapturerStarted(boolean success) {
|
||||
Logging.d(TAG, "onCapturerStarted: " + success);
|
||||
|
||||
synchronized (capturerStartLock) {
|
||||
capturerStartResult = success;
|
||||
capturerStartLock.notifyAll();
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onCapturerStopped() {
|
||||
Logging.d(TAG, "onCapturerStopped");
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onByteBufferFrameCaptured(
|
||||
byte[] frame, int width, int height, int rotation, long timeStamp) {
|
||||
throw new RuntimeException("onByteBufferFrameCaptured called");
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onTextureFrameCaptured(int width, int height, int oesTextureId,
|
||||
float[] transformMatrix, int rotation, long timeStamp) {
|
||||
throw new RuntimeException("onTextureFrameCaptured called");
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onFrameCaptured(VideoFrame frame) {
|
||||
synchronized (frameLock) {
|
||||
++framesCaptured;
|
||||
if (videoFrame != null) {
|
||||
videoFrame.release();
|
||||
}
|
||||
videoFrame = frame;
|
||||
videoFrame.retain();
|
||||
timestamps.add(videoFrame.getTimestampNs());
|
||||
frameLock.notify();
|
||||
}
|
||||
}
|
||||
|
||||
public boolean waitForCapturerToStart() throws InterruptedException {
|
||||
Logging.d(TAG, "Waiting for the capturer to start");
|
||||
synchronized (capturerStartLock) {
|
||||
capturerStartLock.wait();
|
||||
return capturerStartResult;
|
||||
}
|
||||
}
|
||||
|
||||
public int waitForNextCapturedFrame() throws InterruptedException {
|
||||
Logging.d(TAG, "Waiting for the next captured frame");
|
||||
synchronized (frameLock) {
|
||||
frameLock.wait();
|
||||
return framesCaptured;
|
||||
}
|
||||
}
|
||||
|
||||
int frameWidth() {
|
||||
synchronized (frameLock) {
|
||||
return videoFrame.getBuffer().getWidth();
|
||||
}
|
||||
}
|
||||
|
||||
int frameHeight() {
|
||||
synchronized (frameLock) {
|
||||
return videoFrame.getBuffer().getHeight();
|
||||
}
|
||||
}
|
||||
|
||||
void releaseFrame() {
|
||||
synchronized (frameLock) {
|
||||
if (videoFrame != null) {
|
||||
videoFrame.release();
|
||||
videoFrame = null;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
List<Long> getCopyAndResetListOftimeStamps() {
|
||||
synchronized (frameLock) {
|
||||
ArrayList<Long> list = new ArrayList<Long>(timestamps);
|
||||
timestamps.clear();
|
||||
return list;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
static class CameraEvents implements CameraVideoCapturer.CameraEventsHandler {
|
||||
public boolean onCameraOpeningCalled;
|
||||
public boolean onFirstFrameAvailableCalled;
|
||||
public final Object onCameraFreezedLock = new Object();
|
||||
private String onCameraFreezedDescription;
|
||||
public final Object cameraClosedLock = new Object();
|
||||
private boolean cameraClosed = true;
|
||||
|
||||
@Override
|
||||
public void onCameraError(String errorDescription) {
|
||||
Logging.w(TAG, "Camera error: " + errorDescription);
|
||||
cameraClosed = true;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onCameraDisconnected() {}
|
||||
|
||||
@Override
|
||||
public void onCameraFreezed(String errorDescription) {
|
||||
synchronized (onCameraFreezedLock) {
|
||||
onCameraFreezedDescription = errorDescription;
|
||||
onCameraFreezedLock.notifyAll();
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onCameraOpening(String cameraName) {
|
||||
onCameraOpeningCalled = true;
|
||||
synchronized (cameraClosedLock) {
|
||||
cameraClosed = false;
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onFirstFrameAvailable() {
|
||||
onFirstFrameAvailableCalled = true;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onCameraClosed() {
|
||||
synchronized (cameraClosedLock) {
|
||||
cameraClosed = true;
|
||||
cameraClosedLock.notifyAll();
|
||||
}
|
||||
}
|
||||
|
||||
public String waitForCameraFreezed() throws InterruptedException {
|
||||
Logging.d(TAG, "Waiting for the camera to freeze");
|
||||
synchronized (onCameraFreezedLock) {
|
||||
onCameraFreezedLock.wait();
|
||||
return onCameraFreezedDescription;
|
||||
}
|
||||
}
|
||||
|
||||
public void waitForCameraClosed() throws InterruptedException {
|
||||
synchronized (cameraClosedLock) {
|
||||
while (!cameraClosed) {
|
||||
Logging.d(TAG, "Waiting for the camera to close.");
|
||||
cameraClosedLock.wait();
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Class to collect all classes related to single capturer instance.
|
||||
*/
|
||||
static private class CapturerInstance {
|
||||
public CameraVideoCapturer capturer;
|
||||
public CameraEvents cameraEvents;
|
||||
public SurfaceTextureHelper surfaceTextureHelper;
|
||||
public FakeCapturerObserver observer;
|
||||
public List<CaptureFormat> supportedFormats;
|
||||
public CaptureFormat format;
|
||||
}
|
||||
|
||||
/**
|
||||
* Class used for collecting a VideoSource, a VideoTrack and a renderer. The class
|
||||
* is used for testing local rendering from a capturer.
|
||||
*/
|
||||
static private class VideoTrackWithRenderer {
|
||||
public VideoSource source;
|
||||
public VideoTrack track;
|
||||
public RendererCallbacks rendererCallbacks;
|
||||
public FakeAsyncRenderer fakeAsyncRenderer;
|
||||
}
|
||||
|
||||
public abstract static class TestObjectFactory {
|
||||
final CameraEnumerator cameraEnumerator;
|
||||
|
||||
TestObjectFactory() {
|
||||
cameraEnumerator = getCameraEnumerator();
|
||||
}
|
||||
|
||||
public CameraVideoCapturer createCapturer(
|
||||
String name, CameraVideoCapturer.CameraEventsHandler eventsHandler) {
|
||||
return cameraEnumerator.createCapturer(name, eventsHandler);
|
||||
}
|
||||
|
||||
public String getNameOfFrontFacingDevice() {
|
||||
for (String deviceName : cameraEnumerator.getDeviceNames()) {
|
||||
if (cameraEnumerator.isFrontFacing(deviceName)) {
|
||||
return deviceName;
|
||||
}
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
public String getNameOfBackFacingDevice() {
|
||||
for (String deviceName : cameraEnumerator.getDeviceNames()) {
|
||||
if (cameraEnumerator.isBackFacing(deviceName)) {
|
||||
return deviceName;
|
||||
}
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
public boolean haveTwoCameras() {
|
||||
return cameraEnumerator.getDeviceNames().length >= 2;
|
||||
}
|
||||
|
||||
public boolean isCapturingToTexture() {
|
||||
// In the future, we plan to only support capturing to texture, so default to true
|
||||
return true;
|
||||
}
|
||||
|
||||
abstract public CameraEnumerator getCameraEnumerator();
|
||||
abstract public Context getAppContext();
|
||||
|
||||
// CameraVideoCapturer API is too slow for some of our tests where we need to open a competing
|
||||
// camera. These methods are used instead.
|
||||
abstract public Object rawOpenCamera(String cameraName);
|
||||
abstract public void rawCloseCamera(Object camera);
|
||||
}
|
||||
|
||||
private PeerConnectionFactory peerConnectionFactory;
|
||||
private TestObjectFactory testObjectFactory;
|
||||
|
||||
CameraVideoCapturerTestFixtures(TestObjectFactory testObjectFactory) {
|
||||
PeerConnectionFactory.initializeAndroidGlobals(testObjectFactory.getAppContext(), true);
|
||||
|
||||
this.peerConnectionFactory = new PeerConnectionFactory(null /* options */);
|
||||
this.testObjectFactory = testObjectFactory;
|
||||
}
|
||||
|
||||
public void dispose() {
|
||||
this.peerConnectionFactory.dispose();
|
||||
}
|
||||
|
||||
// Internal helper methods
|
||||
private CapturerInstance createCapturer(String name, boolean initialize) {
|
||||
CapturerInstance instance = new CapturerInstance();
|
||||
instance.cameraEvents = new CameraEvents();
|
||||
instance.capturer = testObjectFactory.createCapturer(name, instance.cameraEvents);
|
||||
instance.surfaceTextureHelper = SurfaceTextureHelper.create(
|
||||
"SurfaceTextureHelper test" /* threadName */, null /* sharedContext */);
|
||||
instance.observer = new FakeCapturerObserver();
|
||||
if (initialize) {
|
||||
instance.capturer.initialize(
|
||||
instance.surfaceTextureHelper, testObjectFactory.getAppContext(), instance.observer);
|
||||
}
|
||||
instance.supportedFormats = testObjectFactory.cameraEnumerator.getSupportedFormats(name);
|
||||
return instance;
|
||||
}
|
||||
|
||||
private CapturerInstance createCapturer(boolean initialize) {
|
||||
String name = testObjectFactory.cameraEnumerator.getDeviceNames()[0];
|
||||
return createCapturer(name, initialize);
|
||||
}
|
||||
|
||||
private void startCapture(CapturerInstance instance) {
|
||||
startCapture(instance, 0);
|
||||
}
|
||||
|
||||
private void startCapture(CapturerInstance instance, int formatIndex) {
|
||||
final CameraEnumerationAndroid.CaptureFormat format =
|
||||
instance.supportedFormats.get(formatIndex);
|
||||
|
||||
instance.capturer.startCapture(format.width, format.height, format.framerate.max);
|
||||
instance.format = format;
|
||||
}
|
||||
|
||||
private void disposeCapturer(CapturerInstance instance) throws InterruptedException {
|
||||
instance.capturer.stopCapture();
|
||||
instance.cameraEvents.waitForCameraClosed();
|
||||
instance.capturer.dispose();
|
||||
instance.observer.releaseFrame();
|
||||
instance.surfaceTextureHelper.dispose();
|
||||
}
|
||||
|
||||
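// Wires the capturer into the full local pipeline (capturer -> VideoSource -> VideoTrack ->
// renderer) and starts capture at the default resolution and frame rate.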
private VideoTrackWithRenderer createVideoTrackWithRenderer(
|
||||
CameraVideoCapturer capturer, VideoRenderer.Callbacks rendererCallbacks) {
|
||||
VideoTrackWithRenderer videoTrackWithRenderer = new VideoTrackWithRenderer();
|
||||
videoTrackWithRenderer.source = peerConnectionFactory.createVideoSource(capturer);
|
||||
capturer.startCapture(DEFAULT_WIDTH, DEFAULT_HEIGHT, DEFAULT_FPS);
|
||||
videoTrackWithRenderer.track =
|
||||
peerConnectionFactory.createVideoTrack("dummy", videoTrackWithRenderer.source);
|
||||
videoTrackWithRenderer.track.addRenderer(new VideoRenderer(rendererCallbacks));
|
||||
return videoTrackWithRenderer;
|
||||
}
|
||||
|
||||
private VideoTrackWithRenderer createVideoTrackWithRenderer(CameraVideoCapturer capturer) {
|
||||
RendererCallbacks rendererCallbacks = new RendererCallbacks();
|
||||
VideoTrackWithRenderer videoTrackWithRenderer =
|
||||
createVideoTrackWithRenderer(capturer, rendererCallbacks);
|
||||
videoTrackWithRenderer.rendererCallbacks = rendererCallbacks;
|
||||
return videoTrackWithRenderer;
|
||||
}
|
||||
|
||||
private VideoTrackWithRenderer createVideoTrackWithFakeAsyncRenderer(
|
||||
CameraVideoCapturer capturer) {
|
||||
FakeAsyncRenderer fakeAsyncRenderer = new FakeAsyncRenderer();
|
||||
VideoTrackWithRenderer videoTrackWithRenderer =
|
||||
createVideoTrackWithRenderer(capturer, fakeAsyncRenderer);
|
||||
videoTrackWithRenderer.fakeAsyncRenderer = fakeAsyncRenderer;
|
||||
return videoTrackWithRenderer;
|
||||
}
|
||||
|
||||
private void disposeVideoTrackWithRenderer(VideoTrackWithRenderer videoTrackWithRenderer) {
|
||||
videoTrackWithRenderer.track.dispose();
|
||||
videoTrackWithRenderer.source.dispose();
|
||||
}
|
||||
|
||||
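// Posts a no-op task to the SurfaceTextureHelper thread and blocks until it runs, ensuring that
// any work already queued on that thread has been processed.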
private void waitUntilIdle(CapturerInstance capturerInstance) throws InterruptedException {
|
||||
final CountDownLatch barrier = new CountDownLatch(1);
|
||||
capturerInstance.surfaceTextureHelper.getHandler().post(new Runnable() {
|
||||
@Override
|
||||
public void run() {
|
||||
barrier.countDown();
|
||||
}
|
||||
});
|
||||
barrier.await();
|
||||
}
|
||||
|
||||
private void createCapturerAndRender(String name) throws InterruptedException {
|
||||
if (name == null) {
|
||||
Logging.w(TAG, "Skipping video capturer test because device name is null.");
|
||||
return;
|
||||
}
|
||||
|
||||
final CapturerInstance capturerInstance = createCapturer(name, false /* initialize */);
|
||||
final VideoTrackWithRenderer videoTrackWithRenderer =
|
||||
createVideoTrackWithRenderer(capturerInstance.capturer);
|
||||
assertTrue(videoTrackWithRenderer.rendererCallbacks.waitForNextFrameToRender() > 0);
|
||||
disposeCapturer(capturerInstance);
|
||||
disposeVideoTrackWithRenderer(videoTrackWithRenderer);
|
||||
}
|
||||
|
||||
// Test methods
|
||||
public void createCapturerAndDispose() throws InterruptedException {
|
||||
disposeCapturer(createCapturer(true /* initialize */));
|
||||
}
|
||||
|
||||
public void createNonExistingCamera() throws InterruptedException {
|
||||
try {
|
||||
disposeCapturer(createCapturer("non-existing camera", false /* initialize */));
|
||||
} catch (IllegalArgumentException e) {
|
||||
return;
|
||||
}
|
||||
|
||||
fail("Expected illegal argument exception when creating non-existing camera.");
|
||||
}
|
||||
|
||||
public void createCapturerAndRender() throws InterruptedException {
|
||||
String name = testObjectFactory.cameraEnumerator.getDeviceNames()[0];
|
||||
createCapturerAndRender(name);
|
||||
}
|
||||
|
||||
public void createFrontFacingCapturerAndRender() throws InterruptedException {
|
||||
createCapturerAndRender(testObjectFactory.getNameOfFrontFacingDevice());
|
||||
}
|
||||
|
||||
public void createBackFacingCapturerAndRender() throws InterruptedException {
|
||||
createCapturerAndRender(testObjectFactory.getNameOfBackFacingDevice());
|
||||
}
|
||||
|
||||
public void switchCamera() throws InterruptedException {
|
||||
if (!testObjectFactory.haveTwoCameras()) {
|
||||
Logging.w(
|
||||
TAG, "Skipping test switch video capturer because the device doesn't have two cameras.");
|
||||
return;
|
||||
}
|
||||
|
||||
final CapturerInstance capturerInstance = createCapturer(false /* initialize */);
|
||||
final VideoTrackWithRenderer videoTrackWithRenderer =
|
||||
createVideoTrackWithRenderer(capturerInstance.capturer);
|
||||
// Wait for the camera to start so we can switch it
|
||||
assertTrue(videoTrackWithRenderer.rendererCallbacks.waitForNextFrameToRender() > 0);
|
||||
|
||||
// Single-element array to work around the final-variable restriction in nested classes.
|
||||
final boolean[] cameraSwitchSuccessful = new boolean[1];
|
||||
final CountDownLatch barrier = new CountDownLatch(1);
|
||||
capturerInstance.capturer.switchCamera(new CameraVideoCapturer.CameraSwitchHandler() {
|
||||
@Override
|
||||
public void onCameraSwitchDone(boolean isFrontCamera) {
|
||||
cameraSwitchSuccessful[0] = true;
|
||||
barrier.countDown();
|
||||
}
|
||||
@Override
|
||||
public void onCameraSwitchError(String errorDescription) {
|
||||
cameraSwitchSuccessful[0] = false;
|
||||
barrier.countDown();
|
||||
}
|
||||
});
|
||||
// Wait until the camera has been switched.
|
||||
barrier.await();
|
||||
|
||||
// Check result.
|
||||
assertTrue(cameraSwitchSuccessful[0]);
|
||||
// Ensure that frames are received.
|
||||
assertTrue(videoTrackWithRenderer.rendererCallbacks.waitForNextFrameToRender() > 0);
|
||||
disposeCapturer(capturerInstance);
|
||||
disposeVideoTrackWithRenderer(videoTrackWithRenderer);
|
||||
}
|
||||
|
||||
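// Configures |mediaRecorder| to record 480p H.264 into |outputFile|; the video source is
// SURFACE when useSurfaceCapture is true and CAMERA otherwise.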
@TargetApi(21)
|
||||
private static void prepareMediaRecorderForTests(
|
||||
MediaRecorder mediaRecorder, File outputFile, boolean useSurfaceCapture) throws IOException {
|
||||
mediaRecorder.setVideoSource(
|
||||
useSurfaceCapture ? MediaRecorder.VideoSource.SURFACE : MediaRecorder.VideoSource.CAMERA);
|
||||
CamcorderProfile profile = CamcorderProfile.get(CamcorderProfile.QUALITY_480P);
|
||||
profile.videoCodec = MediaRecorder.VideoEncoder.H264;
|
||||
profile.videoBitRate = 2500000;
|
||||
profile.videoFrameWidth = 640;
|
||||
profile.videoFrameHeight = 480;
|
||||
mediaRecorder.setOutputFormat(MediaRecorder.OutputFormat.MPEG_4);
|
||||
mediaRecorder.setVideoFrameRate(profile.videoFrameRate);
|
||||
mediaRecorder.setVideoSize(profile.videoFrameWidth, profile.videoFrameHeight);
|
||||
mediaRecorder.setVideoEncodingBitRate(profile.videoBitRate);
|
||||
mediaRecorder.setVideoEncoder(profile.videoCodec);
|
||||
mediaRecorder.setOutputFile(outputFile.getPath());
|
||||
mediaRecorder.prepare();
|
||||
}
|
||||
|
||||
@TargetApi(21)
|
||||
public void updateMediaRecorder(boolean useSurfaceCapture)
|
||||
throws InterruptedException, IOException {
|
||||
final CapturerInstance capturerInstance = createCapturer(false /* initialize */);
|
||||
final VideoTrackWithRenderer videoTrackWithRenderer =
|
||||
createVideoTrackWithRenderer(capturerInstance.capturer);
|
||||
// Wait for the camera to start so we can add and remove MediaRecorder.
|
||||
assertTrue(videoTrackWithRenderer.rendererCallbacks.waitForNextFrameToRender() > 0);
|
||||
|
||||
final String videoOutPath = Environment.getExternalStorageDirectory().getPath()
|
||||
+ "/chromium_tests_root/testmediarecorder.mp4";
|
||||
File outputFile = new File(videoOutPath);
|
||||
|
||||
// Create MediaRecorder object
|
||||
MediaRecorder mediaRecorder = new MediaRecorder();
|
||||
if (useSurfaceCapture) {
|
||||
// When using surface capture, the media recorder has to be prepared before adding it to the
|
||||
// camera.
|
||||
prepareMediaRecorderForTests(mediaRecorder, outputFile, useSurfaceCapture);
|
||||
}
|
||||
|
||||
// Add MediaRecorder to camera pipeline.
|
||||
final boolean[] addMediaRecorderSuccessful = new boolean[1];
|
||||
final CountDownLatch addBarrier = new CountDownLatch(1);
|
||||
CameraVideoCapturer.MediaRecorderHandler addMediaRecorderHandler =
|
||||
new CameraVideoCapturer.MediaRecorderHandler() {
|
||||
@Override
|
||||
public void onMediaRecorderSuccess() {
|
||||
addMediaRecorderSuccessful[0] = true;
|
||||
addBarrier.countDown();
|
||||
}
|
||||
@Override
|
||||
public void onMediaRecorderError(String errorDescription) {
|
||||
Logging.e(TAG, errorDescription);
|
||||
addMediaRecorderSuccessful[0] = false;
|
||||
addBarrier.countDown();
|
||||
}
|
||||
};
|
||||
capturerInstance.capturer.addMediaRecorderToCamera(mediaRecorder, addMediaRecorderHandler);
|
||||
// Wait until the MediaRecorder has been added.
|
||||
addBarrier.await();
|
||||
// Check result.
|
||||
assertTrue(addMediaRecorderSuccessful[0]);
|
||||
|
||||
// Start MediaRecorder and wait for a few frames to capture.
|
||||
if (!useSurfaceCapture) {
|
||||
// When using camera capture, the media recorder has to be prepared after adding it to the
|
||||
// camera.
|
||||
prepareMediaRecorderForTests(mediaRecorder, outputFile, useSurfaceCapture);
|
||||
}
|
||||
mediaRecorder.start();
|
||||
for (int i = 0; i < 5; i++) {
|
||||
assertTrue(videoTrackWithRenderer.rendererCallbacks.waitForNextFrameToRender() > 0);
|
||||
}
|
||||
mediaRecorder.stop();
|
||||
|
||||
// Remove MediaRecorder from camera pipeline.
|
||||
final boolean[] removeMediaRecorderSuccessful = new boolean[1];
|
||||
final CountDownLatch removeBarrier = new CountDownLatch(1);
|
||||
CameraVideoCapturer.MediaRecorderHandler removeMediaRecorderHandler =
|
||||
new CameraVideoCapturer.MediaRecorderHandler() {
|
||||
@Override
|
||||
public void onMediaRecorderSuccess() {
|
||||
removeMediaRecorderSuccessful[0] = true;
|
||||
removeBarrier.countDown();
|
||||
}
|
||||
@Override
|
||||
public void onMediaRecorderError(String errorDescription) {
|
||||
removeMediaRecorderSuccessful[0] = false;
|
||||
removeBarrier.countDown();
|
||||
}
|
||||
};
|
||||
capturerInstance.capturer.removeMediaRecorderFromCamera(removeMediaRecorderHandler);
|
||||
// Wait until the MediaRecorder has been removed.
|
||||
removeBarrier.await();
|
||||
// Check result.
|
||||
assertTrue(removeMediaRecorderSuccessful[0]);
|
||||
// Ensure that frames are received after removing MediaRecorder.
|
||||
assertTrue(videoTrackWithRenderer.rendererCallbacks.waitForNextFrameToRender() > 0);
|
||||
// Check that recorded file contains some data.
|
||||
assertTrue(outputFile.length() > 0);
|
||||
|
||||
disposeCapturer(capturerInstance);
|
||||
disposeVideoTrackWithRenderer(videoTrackWithRenderer);
|
||||
}
|
||||
|
||||
public void cameraEventsInvoked() throws InterruptedException {
|
||||
final CapturerInstance capturerInstance = createCapturer(true /* initialize */);
|
||||
startCapture(capturerInstance);
|
||||
// Make sure camera is started and first frame is received and then stop it.
|
||||
assertTrue(capturerInstance.observer.waitForCapturerToStart());
|
||||
capturerInstance.observer.waitForNextCapturedFrame();
|
||||
disposeCapturer(capturerInstance);
|
||||
|
||||
assertTrue(capturerInstance.cameraEvents.onCameraOpeningCalled);
|
||||
assertTrue(capturerInstance.cameraEvents.onFirstFrameAvailableCalled);
|
||||
}
|
||||
|
||||
public void cameraCallsAfterStop() throws InterruptedException {
|
||||
final CapturerInstance capturerInstance = createCapturer(true /* initialize */);
|
||||
startCapture(capturerInstance);
|
||||
// Make sure camera is started and then stop it.
|
||||
assertTrue(capturerInstance.observer.waitForCapturerToStart());
|
||||
capturerInstance.capturer.stopCapture();
|
||||
capturerInstance.observer.releaseFrame();
|
||||
|
||||
// We can't change |capturer| at this point, but we should not crash.
|
||||
capturerInstance.capturer.switchCamera(null /* switchEventsHandler */);
|
||||
capturerInstance.capturer.changeCaptureFormat(DEFAULT_WIDTH, DEFAULT_HEIGHT, DEFAULT_FPS);
|
||||
|
||||
disposeCapturer(capturerInstance);
|
||||
}
|
||||
|
||||
public void stopRestartVideoSource() throws InterruptedException {
|
||||
final CapturerInstance capturerInstance = createCapturer(false /* initialize */);
|
||||
final VideoTrackWithRenderer videoTrackWithRenderer =
|
||||
createVideoTrackWithRenderer(capturerInstance.capturer);
|
||||
|
||||
assertTrue(videoTrackWithRenderer.rendererCallbacks.waitForNextFrameToRender() > 0);
|
||||
assertEquals(MediaSource.State.LIVE, videoTrackWithRenderer.source.state());
|
||||
|
||||
capturerInstance.capturer.stopCapture();
|
||||
assertEquals(MediaSource.State.ENDED, videoTrackWithRenderer.source.state());
|
||||
|
||||
startCapture(capturerInstance);
|
||||
assertTrue(videoTrackWithRenderer.rendererCallbacks.waitForNextFrameToRender() > 0);
|
||||
assertEquals(MediaSource.State.LIVE, videoTrackWithRenderer.source.state());
|
||||
|
||||
disposeCapturer(capturerInstance);
|
||||
disposeVideoTrackWithRenderer(videoTrackWithRenderer);
|
||||
}
|
||||
|
||||
public void startStopWithDifferentResolutions() throws InterruptedException {
|
||||
final CapturerInstance capturerInstance = createCapturer(true /* initialize */);
|
||||
|
||||
for (int i = 0; i < 3; ++i) {
|
||||
startCapture(capturerInstance, i);
|
||||
assertTrue(capturerInstance.observer.waitForCapturerToStart());
|
||||
capturerInstance.observer.waitForNextCapturedFrame();
|
||||
|
||||
// Check the frame size. The actual width and height depend on how the capturer is mounted.
|
||||
final boolean identicalResolution =
|
||||
(capturerInstance.observer.frameWidth() == capturerInstance.format.width
|
||||
&& capturerInstance.observer.frameHeight() == capturerInstance.format.height);
|
||||
final boolean flippedResolution =
|
||||
(capturerInstance.observer.frameWidth() == capturerInstance.format.height
|
||||
&& capturerInstance.observer.frameHeight() == capturerInstance.format.width);
|
||||
if (!identicalResolution && !flippedResolution) {
|
||||
fail("Wrong resolution, got: " + capturerInstance.observer.frameWidth() + "x"
|
||||
+ capturerInstance.observer.frameHeight() + " expected: "
|
||||
+ capturerInstance.format.width + "x" + capturerInstance.format.height + " or "
|
||||
+ capturerInstance.format.height + "x" + capturerInstance.format.width);
|
||||
}
|
||||
|
||||
capturerInstance.capturer.stopCapture();
|
||||
capturerInstance.observer.releaseFrame();
|
||||
}
|
||||
disposeCapturer(capturerInstance);
|
||||
}
|
||||
|
||||
public void returnBufferLate() throws InterruptedException {
|
||||
final CapturerInstance capturerInstance = createCapturer(true /* initialize */);
|
||||
startCapture(capturerInstance);
|
||||
assertTrue(capturerInstance.observer.waitForCapturerToStart());
|
||||
|
||||
capturerInstance.observer.waitForNextCapturedFrame();
|
||||
capturerInstance.capturer.stopCapture();
|
||||
List<Long> listOftimestamps = capturerInstance.observer.getCopyAndResetListOftimeStamps();
|
||||
assertTrue(listOftimestamps.size() >= 1);
|
||||
|
||||
startCapture(capturerInstance, 1);
|
||||
capturerInstance.observer.waitForCapturerToStart();
|
||||
capturerInstance.observer.releaseFrame();
|
||||
|
||||
capturerInstance.observer.waitForNextCapturedFrame();
|
||||
capturerInstance.capturer.stopCapture();
|
||||
|
||||
listOftimestamps = capturerInstance.observer.getCopyAndResetListOftimeStamps();
|
||||
assertTrue(listOftimestamps.size() >= 1);
|
||||
|
||||
disposeCapturer(capturerInstance);
|
||||
}
|
||||
|
||||

  public void returnBufferLateEndToEnd() throws InterruptedException {
    final CapturerInstance capturerInstance = createCapturer(false /* initialize */);
    final VideoTrackWithRenderer videoTrackWithRenderer =
        createVideoTrackWithFakeAsyncRenderer(capturerInstance.capturer);
    // Wait for at least one frame that has not been returned.
    assertFalse(videoTrackWithRenderer.fakeAsyncRenderer.waitForPendingFrames().isEmpty());

    capturerInstance.capturer.stopCapture();

    // Dispose everything.
    disposeCapturer(capturerInstance);
    disposeVideoTrackWithRenderer(videoTrackWithRenderer);

    // Return the frame(s), on a different thread out of spite.
    final List<I420Frame> pendingFrames =
        videoTrackWithRenderer.fakeAsyncRenderer.waitForPendingFrames();
    final Thread returnThread = new Thread(new Runnable() {
      @Override
      public void run() {
        for (I420Frame frame : pendingFrames) {
          VideoRenderer.renderFrameDone(frame);
        }
      }
    });
    returnThread.start();
    returnThread.join();
  }

  public void cameraFreezedEventOnBufferStarvation() throws InterruptedException {
    final CapturerInstance capturerInstance = createCapturer(true /* initialize */);
    startCapture(capturerInstance);
    // Make sure camera is started.
    assertTrue(capturerInstance.observer.waitForCapturerToStart());
    // Since we don't return the buffer, we should get a starvation message if we are
    // capturing to a texture.
    assertEquals("Camera failure. Client must return video buffers.",
        capturerInstance.cameraEvents.waitForCameraFreezed());

    capturerInstance.capturer.stopCapture();
    disposeCapturer(capturerInstance);
  }

  public void scaleCameraOutput() throws InterruptedException {
    final CapturerInstance capturerInstance = createCapturer(false /* initialize */);
    final VideoTrackWithRenderer videoTrackWithRenderer =
        createVideoTrackWithRenderer(capturerInstance.capturer);
    assertTrue(videoTrackWithRenderer.rendererCallbacks.waitForNextFrameToRender() > 0);

    final int startWidth = videoTrackWithRenderer.rendererCallbacks.frameWidth();
    final int startHeight = videoTrackWithRenderer.rendererCallbacks.frameHeight();
    final int frameRate = 30;
    final int scaledWidth = startWidth / 2;
    final int scaledHeight = startHeight / 2;

    // Request the captured frames to be scaled.
    videoTrackWithRenderer.source.adaptOutputFormat(scaledWidth, scaledHeight, frameRate);

    boolean gotExpectedResolution = false;
    int numberOfInspectedFrames = 0;

    do {
      videoTrackWithRenderer.rendererCallbacks.waitForNextFrameToRender();
      ++numberOfInspectedFrames;

      gotExpectedResolution = (videoTrackWithRenderer.rendererCallbacks.frameWidth() == scaledWidth
          && videoTrackWithRenderer.rendererCallbacks.frameHeight() == scaledHeight);
    } while (!gotExpectedResolution && numberOfInspectedFrames < 30);

    disposeCapturer(capturerInstance);
    disposeVideoTrackWithRenderer(videoTrackWithRenderer);

    assertTrue(gotExpectedResolution);
  }

  public void startWhileCameraIsAlreadyOpen() throws InterruptedException {
    final String cameraName = testObjectFactory.getNameOfBackFacingDevice();
    // At this point camera is not actually opened.
    final CapturerInstance capturerInstance = createCapturer(cameraName, true /* initialize */);

    final Object competingCamera = testObjectFactory.rawOpenCamera(cameraName);

    startCapture(capturerInstance);

    if (android.os.Build.VERSION.SDK_INT > android.os.Build.VERSION_CODES.LOLLIPOP_MR1) {
      // The first opened camera client will be evicted.
      assertTrue(capturerInstance.observer.waitForCapturerToStart());
    } else {
      assertFalse(capturerInstance.observer.waitForCapturerToStart());
    }

    testObjectFactory.rawCloseCamera(competingCamera);
    disposeCapturer(capturerInstance);
  }

  public void startWhileCameraIsAlreadyOpenAndCloseCamera() throws InterruptedException {
    final String cameraName = testObjectFactory.getNameOfBackFacingDevice();
    // At this point camera is not actually opened.
    final CapturerInstance capturerInstance = createCapturer(cameraName, false /* initialize */);

    Logging.d(TAG, "startWhileCameraIsAlreadyOpenAndCloseCamera: Opening competing camera.");
    final Object competingCamera = testObjectFactory.rawOpenCamera(cameraName);

    Logging.d(TAG, "startWhileCameraIsAlreadyOpenAndCloseCamera: Opening camera.");
    final VideoTrackWithRenderer videoTrackWithRenderer =
        createVideoTrackWithRenderer(capturerInstance.capturer);
    waitUntilIdle(capturerInstance);

    Logging.d(TAG, "startWhileCameraIsAlreadyOpenAndCloseCamera: Closing competing camera.");
    testObjectFactory.rawCloseCamera(competingCamera);

    // Make sure camera is started and first frame is received and then stop it.
    Logging.d(TAG, "startWhileCameraIsAlreadyOpenAndCloseCamera: Waiting for capture to start.");
    videoTrackWithRenderer.rendererCallbacks.waitForNextFrameToRender();
    Logging.d(TAG, "startWhileCameraIsAlreadyOpenAndCloseCamera: Stopping capture.");
    disposeCapturer(capturerInstance);
  }

  public void startWhileCameraIsAlreadyOpenAndStop() throws InterruptedException {
    final String cameraName = testObjectFactory.getNameOfBackFacingDevice();
    // At this point camera is not actually opened.
    final CapturerInstance capturerInstance = createCapturer(cameraName, true /* initialize */);

    final Object competingCamera = testObjectFactory.rawOpenCamera(cameraName);

    startCapture(capturerInstance);
    disposeCapturer(capturerInstance);

    testObjectFactory.rawCloseCamera(competingCamera);
  }
}
@@ -0,0 +1,338 @@
|
||||
/*
|
||||
* Copyright 2016 The WebRTC Project Authors. All rights reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
package org.webrtc;
|
||||
|
||||
import static org.junit.Assert.assertEquals;
|
||||
import static org.junit.Assert.assertFalse;
|
||||
import static org.junit.Assert.assertNotNull;
|
||||
import static org.junit.Assert.assertNull;
|
||||
import static org.junit.Assert.assertTrue;
|
||||
import static org.junit.Assert.fail;
|
||||
|
||||
import android.graphics.Bitmap;
|
||||
import android.graphics.SurfaceTexture;
|
||||
import android.opengl.GLES11Ext;
|
||||
import android.opengl.GLES20;
|
||||
import android.support.test.InstrumentationRegistry;
|
||||
import android.support.test.filters.SmallTest;
|
||||
import java.nio.ByteBuffer;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.concurrent.CountDownLatch;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
import org.chromium.base.test.BaseJUnit4ClassRunner;
|
||||
import org.junit.After;
|
||||
import org.junit.Before;
|
||||
import org.junit.Test;
|
||||
import org.junit.runner.RunWith;
|
||||
|
||||
// EmptyActivity is needed for the surface.
|
||||
@RunWith(BaseJUnit4ClassRunner.class)
|
||||
public class EglRendererTest {
|
||||
final static String TAG = "EglRendererTest";
|
||||
final static int RENDER_WAIT_MS = 1000;
|
||||
final static int SURFACE_WAIT_MS = 1000;
|
||||
final static int TEST_FRAME_WIDTH = 4;
|
||||
final static int TEST_FRAME_HEIGHT = 4;
|
||||
final static int REMOVE_FRAME_LISTENER_RACY_NUM_TESTS = 10;
|
||||
// Some arbitrary frames.
|
||||
final static ByteBuffer[][] TEST_FRAMES = {
|
||||
{
|
||||
ByteBuffer.wrap(new byte[] {
|
||||
11, -12, 13, -14, -15, 16, -17, 18, 19, -110, 111, -112, -113, 114, -115, 116}),
|
||||
ByteBuffer.wrap(new byte[] {117, 118, 119, 120}),
|
||||
ByteBuffer.wrap(new byte[] {121, 122, 123, 124}),
|
||||
},
|
||||
{
|
||||
ByteBuffer.wrap(new byte[] {-11, -12, -13, -14, -15, -16, -17, -18, -19, -110, -111, -112,
|
||||
-113, -114, -115, -116}),
|
||||
ByteBuffer.wrap(new byte[] {-121, -122, -123, -124}),
|
||||
ByteBuffer.wrap(new byte[] {-117, -118, -119, -120}),
|
||||
},
|
||||
{
|
||||
ByteBuffer.wrap(new byte[] {-11, -12, -13, -14, -15, -16, -17, -18, -19, -110, -111, -112,
|
||||
-113, -114, -115, -116}),
|
||||
ByteBuffer.wrap(new byte[] {117, 118, 119, 120}),
|
||||
ByteBuffer.wrap(new byte[] {121, 122, 123, 124}),
|
||||
},
|
||||
};
|
||||
|
||||
private class TestFrameListener implements EglRenderer.FrameListener {
|
||||
final private ArrayList<Bitmap> bitmaps = new ArrayList<Bitmap>();
|
||||
boolean bitmapReceived;
|
||||
Bitmap storedBitmap;
|
||||
|
||||
@Override
|
||||
public synchronized void onFrame(Bitmap bitmap) {
|
||||
if (bitmapReceived) {
|
||||
fail("Unexpected bitmap was received.");
|
||||
}
|
||||
|
||||
bitmapReceived = true;
|
||||
storedBitmap = bitmap;
|
||||
notify();
|
||||
}
|
||||
|
||||
public synchronized boolean waitForBitmap(int timeoutMs) throws InterruptedException {
|
||||
if (!bitmapReceived) {
|
||||
wait(timeoutMs);
|
||||
}
|
||||
return bitmapReceived;
|
||||
}
|
||||
|
||||
public synchronized Bitmap resetAndGetBitmap() {
|
||||
bitmapReceived = false;
|
||||
return storedBitmap;
|
||||
}
|
||||
}
|
||||
|
||||
final TestFrameListener testFrameListener = new TestFrameListener();
|
||||
|
||||
EglRenderer eglRenderer;
|
||||
CountDownLatch surfaceReadyLatch = new CountDownLatch(1);
|
||||
int oesTextureId;
|
||||
SurfaceTexture surfaceTexture;
|
||||
|
||||
@Before
|
||||
public void setUp() throws Exception {
|
||||
PeerConnectionFactory.initializeAndroidGlobals(
|
||||
InstrumentationRegistry.getTargetContext(), true /* videoHwAcceleration */);
|
||||
eglRenderer = new EglRenderer("TestRenderer: ");
|
||||
eglRenderer.init(null /* sharedContext */, EglBase.CONFIG_RGBA, new GlRectDrawer());
|
||||
oesTextureId = GlUtil.generateTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES);
|
||||
surfaceTexture = new SurfaceTexture(oesTextureId);
|
||||
surfaceTexture.setDefaultBufferSize(1 /* width */, 1 /* height */);
|
||||
eglRenderer.createEglSurface(surfaceTexture);
|
||||
}
|
||||
|
||||
@After
|
||||
public void tearDown() {
|
||||
surfaceTexture.release();
|
||||
GLES20.glDeleteTextures(1 /* n */, new int[] {oesTextureId}, 0 /* offset */);
|
||||
eglRenderer.release();
|
||||
}
|
||||
|
||||
/** Checks the bitmap is not null and the correct size. */
|
||||
private static void checkBitmap(Bitmap bitmap, float scale) {
|
||||
assertNotNull(bitmap);
|
||||
assertEquals((int) (TEST_FRAME_WIDTH * scale), bitmap.getWidth());
|
||||
assertEquals((int) (TEST_FRAME_HEIGHT * scale), bitmap.getHeight());
|
||||
}
|
||||
|
||||
/**
|
||||
* Does linear sampling on U/V plane of test data.
|
||||
*
|
||||
* @param data Plane data to be sampled from.
|
||||
* @param planeWidth Width of the plane data. This is also assumed to be the stride.
|
||||
* @param planeHeight Height of the plane data.
|
||||
* @param x X-coordinate in range [0, 1].
|
||||
* @param y Y-coordinate in range [0, 1].
|
||||
*/
|
||||
private static float linearSample(
|
||||
ByteBuffer plane, int planeWidth, int planeHeight, float x, float y) {
|
||||
final int stride = planeWidth;
|
||||
|
||||
final float coordX = x * planeWidth;
|
||||
final float coordY = y * planeHeight;
|
||||
|
||||
int lowIndexX = (int) Math.floor(coordX - 0.5f);
|
||||
int lowIndexY = (int) Math.floor(coordY - 0.5f);
|
||||
int highIndexX = lowIndexX + 1;
|
||||
int highIndexY = lowIndexY + 1;
|
||||
|
||||
final float highWeightX = coordX - lowIndexX - 0.5f;
|
||||
final float highWeightY = coordY - lowIndexY - 0.5f;
|
||||
final float lowWeightX = 1f - highWeightX;
|
||||
final float lowWeightY = 1f - highWeightY;
|
||||
|
||||
// Clamp on the edges.
|
||||
lowIndexX = Math.max(0, lowIndexX);
|
||||
lowIndexY = Math.max(0, lowIndexY);
|
||||
highIndexX = Math.min(planeWidth - 1, highIndexX);
|
||||
highIndexY = Math.min(planeHeight - 1, highIndexY);
|
||||
|
||||
float lowYValue = (plane.get(lowIndexY * stride + lowIndexX) & 0xFF) * lowWeightX
|
||||
+ (plane.get(lowIndexY * stride + highIndexX) & 0xFF) * highWeightX;
|
||||
float highYValue = (plane.get(highIndexY * stride + lowIndexX) & 0xFF) * lowWeightX
|
||||
+ (plane.get(highIndexY * stride + highIndexX) & 0xFF) * highWeightX;
|
||||
|
||||
return (lowWeightY * lowYValue + highWeightY * highYValue) / 255f;
|
||||
}
|
||||
|
||||
private static byte saturatedFloatToByte(float c) {
|
||||
return (byte) Math.round(255f * Math.max(0f, Math.min(1f, c)));
|
||||
}
|
||||
|
||||
/**
|
||||
* Converts test data YUV frame to expected RGBA frame. Tries to match the behavior of OpenGL
|
||||
* YUV drawer shader. Does linear sampling on the U- and V-planes.
|
||||
*
|
||||
* @param yuvFrame Array of size 3 containing Y-, U-, V-planes for image of size
|
||||
* (TEST_FRAME_WIDTH, TEST_FRAME_HEIGHT). U- and V-planes should be half the size
|
||||
* of the Y-plane.
|
||||
*/
|
||||
private static byte[] convertYUVFrameToRGBA(ByteBuffer[] yuvFrame) {
|
||||
final byte[] argbFrame = new byte[TEST_FRAME_WIDTH * TEST_FRAME_HEIGHT * 4];
|
||||
final int argbStride = TEST_FRAME_WIDTH * 4;
|
||||
final int yStride = TEST_FRAME_WIDTH;
|
||||
|
||||
final int vStride = TEST_FRAME_WIDTH / 2;
|
||||
|
||||
for (int y = 0; y < TEST_FRAME_HEIGHT; y++) {
|
||||
for (int x = 0; x < TEST_FRAME_WIDTH; x++) {
|
||||
final int x2 = x / 2;
|
||||
final int y2 = y / 2;
|
||||
|
||||
final float yC = (yuvFrame[0].get(y * yStride + x) & 0xFF) / 255f;
|
||||
final float uC = linearSample(yuvFrame[1], TEST_FRAME_WIDTH / 2, TEST_FRAME_HEIGHT / 2,
|
||||
(x + 0.5f) / TEST_FRAME_WIDTH, (y + 0.5f) / TEST_FRAME_HEIGHT)
|
||||
- 0.5f;
|
||||
final float vC = linearSample(yuvFrame[2], TEST_FRAME_WIDTH / 2, TEST_FRAME_HEIGHT / 2,
|
||||
(x + 0.5f) / TEST_FRAME_WIDTH, (y + 0.5f) / TEST_FRAME_HEIGHT)
|
||||
- 0.5f;
|
||||
final float rC = yC + 1.403f * vC;
|
||||
final float gC = yC - 0.344f * uC - 0.714f * vC;
|
||||
final float bC = yC + 1.77f * uC;
|
||||
|
||||
argbFrame[y * argbStride + x * 4 + 0] = saturatedFloatToByte(rC);
|
||||
argbFrame[y * argbStride + x * 4 + 1] = saturatedFloatToByte(gC);
|
||||
argbFrame[y * argbStride + x * 4 + 2] = saturatedFloatToByte(bC);
|
||||
argbFrame[y * argbStride + x * 4 + 3] = (byte) 255;
|
||||
}
|
||||
}
|
||||
|
||||
return argbFrame;
|
||||
}
|
||||
|
||||
/** Checks that the bitmap content matches the test frame with the given index. */
|
||||
private static void checkBitmapContent(Bitmap bitmap, int frame) {
|
||||
checkBitmap(bitmap, 1f);
|
||||
|
||||
byte[] expectedRGBA = convertYUVFrameToRGBA(TEST_FRAMES[frame]);
|
||||
ByteBuffer bitmapBuffer = ByteBuffer.allocateDirect(bitmap.getByteCount());
|
||||
bitmap.copyPixelsToBuffer(bitmapBuffer);
|
||||
|
||||
for (int i = 0; i < expectedRGBA.length; i++) {
|
||||
int expected = expectedRGBA[i] & 0xFF;
|
||||
int value = bitmapBuffer.get(i) & 0xFF;
|
||||
// Due to unknown conversion differences check value matches +-1.
|
||||
if (Math.abs(value - expected) > 1) {
|
||||
Logging.d(TAG, "Expected bitmap content: " + Arrays.toString(expectedRGBA));
|
||||
Logging.d(TAG, "Bitmap content: " + Arrays.toString(bitmapBuffer.array()));
|
||||
fail("Frame doesn't match original frame on byte " + i + ". Expected: " + expected
|
||||
+ " Result: " + value);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/** Tells eglRenderer to render test frame with given index. */
|
||||
private void feedFrame(int i) {
|
||||
eglRenderer.renderFrame(new VideoRenderer.I420Frame(TEST_FRAME_WIDTH, TEST_FRAME_HEIGHT, 0,
|
||||
new int[] {TEST_FRAME_WIDTH, TEST_FRAME_WIDTH / 2, TEST_FRAME_WIDTH / 2}, TEST_FRAMES[i],
|
||||
0));
|
||||
}
|
||||
|
||||
@Test
|
||||
@SmallTest
|
||||
public void testAddFrameListener() throws Exception {
|
||||
eglRenderer.addFrameListener(testFrameListener, 0f /* scaleFactor */);
|
||||
feedFrame(0);
|
||||
assertTrue(testFrameListener.waitForBitmap(RENDER_WAIT_MS));
|
||||
assertNull(testFrameListener.resetAndGetBitmap());
|
||||
eglRenderer.addFrameListener(testFrameListener, 0f /* scaleFactor */);
|
||||
feedFrame(1);
|
||||
assertTrue(testFrameListener.waitForBitmap(RENDER_WAIT_MS));
|
||||
assertNull(testFrameListener.resetAndGetBitmap());
|
||||
feedFrame(2);
|
||||
// Check we get no more bitmaps than two.
|
||||
assertFalse(testFrameListener.waitForBitmap(RENDER_WAIT_MS));
|
||||
}
|
||||
|
||||
@Test
|
||||
@SmallTest
|
||||
public void testAddFrameListenerBitmap() throws Exception {
|
||||
eglRenderer.addFrameListener(testFrameListener, 1f /* scaleFactor */);
|
||||
feedFrame(0);
|
||||
assertTrue(testFrameListener.waitForBitmap(RENDER_WAIT_MS));
|
||||
checkBitmapContent(testFrameListener.resetAndGetBitmap(), 0);
|
||||
eglRenderer.addFrameListener(testFrameListener, 1f /* scaleFactor */);
|
||||
feedFrame(1);
|
||||
assertTrue(testFrameListener.waitForBitmap(RENDER_WAIT_MS));
|
||||
checkBitmapContent(testFrameListener.resetAndGetBitmap(), 1);
|
||||
}
|
||||
|
||||
@Test
|
||||
@SmallTest
|
||||
public void testAddFrameListenerBitmapScale() throws Exception {
|
||||
for (int i = 0; i < 3; ++i) {
|
||||
float scale = i * 0.5f + 0.5f;
|
||||
eglRenderer.addFrameListener(testFrameListener, scale);
|
||||
feedFrame(i);
|
||||
assertTrue(testFrameListener.waitForBitmap(RENDER_WAIT_MS));
|
||||
checkBitmap(testFrameListener.resetAndGetBitmap(), scale);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks that the frame listener will not be called with a frame that was delivered before the
|
||||
* frame listener was added.
|
||||
*/
|
||||
@Test
|
||||
@SmallTest
|
||||
public void testFrameListenerNotCalledWithOldFrames() throws Exception {
|
||||
feedFrame(0);
|
||||
eglRenderer.addFrameListener(testFrameListener, 0f);
|
||||
// Check the old frame does not trigger frame listener.
|
||||
assertFalse(testFrameListener.waitForBitmap(RENDER_WAIT_MS));
|
||||
}
|
||||
|
||||
/** Checks that the frame listener will not be called after it is removed. */
|
||||
@Test
|
||||
@SmallTest
|
||||
public void testRemoveFrameListenerNotRacy() throws Exception {
|
||||
for (int i = 0; i < REMOVE_FRAME_LISTENER_RACY_NUM_TESTS; i++) {
|
||||
feedFrame(0);
|
||||
eglRenderer.addFrameListener(testFrameListener, 0f);
|
||||
eglRenderer.removeFrameListener(testFrameListener);
|
||||
feedFrame(1);
|
||||
}
|
||||
// Check the frame listener hasn't triggered.
|
||||
assertFalse(testFrameListener.waitForBitmap(RENDER_WAIT_MS));
|
||||
}
|
||||
|
||||
@Test
|
||||
@SmallTest
|
||||
public void testFrameListenersFpsReduction() throws Exception {
|
||||
// Test that normal frame listeners receive frames while the renderer is paused.
|
||||
eglRenderer.pauseVideo();
|
||||
eglRenderer.addFrameListener(testFrameListener, 1f /* scaleFactor */);
|
||||
feedFrame(0);
|
||||
assertTrue(testFrameListener.waitForBitmap(RENDER_WAIT_MS));
|
||||
checkBitmapContent(testFrameListener.resetAndGetBitmap(), 0);
|
||||
|
||||
// Test that frame listeners with FPS reduction applied receive frames while the renderer is not
|
||||
// paused.
|
||||
eglRenderer.disableFpsReduction();
|
||||
eglRenderer.addFrameListener(
|
||||
testFrameListener, 1f /* scaleFactor */, null, true /* applyFpsReduction */);
|
||||
feedFrame(1);
|
||||
assertTrue(testFrameListener.waitForBitmap(RENDER_WAIT_MS));
|
||||
checkBitmapContent(testFrameListener.resetAndGetBitmap(), 1);
|
||||
|
||||
// Test that frame listeners with FPS reduction applied will not receive frames while the
|
||||
// renderer is paused.
|
||||
eglRenderer.pauseVideo();
|
||||
eglRenderer.addFrameListener(
|
||||
testFrameListener, 1f /* scaleFactor */, null, true /* applyFpsReduction */);
|
||||
feedFrame(1);
|
||||
assertFalse(testFrameListener.waitForBitmap(RENDER_WAIT_MS));
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,120 @@
/*
 * Copyright 2016 The WebRTC Project Authors. All rights reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

package org.webrtc;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;

import android.os.Environment;
import android.support.test.filters.LargeTest;
import android.support.test.filters.MediumTest;
import android.support.test.filters.SmallTest;
import java.io.IOException;
import java.lang.Thread;
import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.Arrays;
import org.chromium.base.test.BaseJUnit4ClassRunner;
import org.junit.Test;
import org.junit.runner.RunWith;

@RunWith(BaseJUnit4ClassRunner.class)
public class FileVideoCapturerTest {
  private static class Frame {
    public byte[] data;
    public int width;
    public int height;
  }

  public class MockCapturerObserver implements VideoCapturer.CapturerObserver {
    private final ArrayList<Frame> frameDatas = new ArrayList<Frame>();

    @Override
    public void onCapturerStarted(boolean success) {
      assertTrue(success);
    }

    @Override
    public void onCapturerStopped() {
      // Empty on purpose.
    }

    @Override
    public synchronized void onByteBufferFrameCaptured(
        byte[] data, int width, int height, int rotation, long timeStamp) {
      Frame frame = new Frame();
      frame.data = data;
      frame.width = width;
      frame.height = height;
      assertTrue(data.length != 0);
      frameDatas.add(frame);
      notify();
    }

    @Override
    public void onTextureFrameCaptured(int width, int height, int oesTextureId,
        float[] transformMatrix, int rotation, long timestamp) {
      // Empty on purpose.
    }

    @Override
    public void onFrameCaptured(VideoFrame frame) {
      // Empty on purpose.
    }

    public synchronized ArrayList<Frame> getMinimumFramesBlocking(int minFrames)
        throws InterruptedException {
      while (frameDatas.size() < minFrames) {
        wait();
      }
      return new ArrayList<Frame>(frameDatas);
    }
  }

  @Test
  @SmallTest
  public void testVideoCaptureFromFile() throws InterruptedException, IOException {
    final int FRAME_WIDTH = 4;
    final int FRAME_HEIGHT = 4;
    final FileVideoCapturer fileVideoCapturer =
        new FileVideoCapturer(Environment.getExternalStorageDirectory().getPath()
            + "/chromium_tests_root/webrtc/sdk/android/instrumentationtests/src/org/webrtc/"
            + "capturetestvideo.y4m");
    final MockCapturerObserver capturerObserver = new MockCapturerObserver();
    fileVideoCapturer.initialize(null, null, capturerObserver);
    fileVideoCapturer.startCapture(FRAME_WIDTH, FRAME_HEIGHT, 33);

    final String[] expectedFrames = {
        "THIS IS JUST SOME TEXT x", "THE SECOND FRAME qwerty.", "HERE IS THE THRID FRAME!"};

    final ArrayList<Frame> frameDatas;
    frameDatas = capturerObserver.getMinimumFramesBlocking(expectedFrames.length);

    assertEquals(expectedFrames.length, frameDatas.size());

    fileVideoCapturer.stopCapture();
    fileVideoCapturer.dispose();

    for (int i = 0; i < expectedFrames.length; ++i) {
      Frame frame = frameDatas.get(i);

      assertEquals(FRAME_WIDTH, frame.width);
      assertEquals(FRAME_HEIGHT, frame.height);
      assertEquals(FRAME_WIDTH * FRAME_HEIGHT * 3 / 2, frame.data.length);

      byte[] expectedNV12Bytes = new byte[frame.data.length];
      FileVideoCapturer.nativeI420ToNV21(expectedFrames[i].getBytes(Charset.forName("US-ASCII")),
          FRAME_WIDTH, FRAME_HEIGHT, expectedNV12Bytes);

      assertTrue(Arrays.equals(expectedNV12Bytes, frame.data));
    }
  }
}
@@ -0,0 +1,305 @@
|
||||
/*
|
||||
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
package org.webrtc;
|
||||
|
||||
import static org.junit.Assert.assertEquals;
|
||||
import static org.junit.Assert.assertTrue;
|
||||
import static org.junit.Assert.fail;
|
||||
|
||||
import android.graphics.SurfaceTexture;
|
||||
import android.opengl.GLES20;
|
||||
import android.support.test.filters.MediumTest;
|
||||
import android.support.test.filters.SmallTest;
|
||||
import java.nio.ByteBuffer;
|
||||
import java.util.Random;
|
||||
import org.chromium.base.test.BaseJUnit4ClassRunner;
|
||||
import org.junit.Test;
|
||||
import org.junit.runner.RunWith;
|
||||
|
||||
@RunWith(BaseJUnit4ClassRunner.class)
|
||||
public class GlRectDrawerTest {
|
||||
// Resolution of the test image.
|
||||
private static final int WIDTH = 16;
|
||||
private static final int HEIGHT = 16;
|
||||
// Seed for random pixel creation.
|
||||
private static final int SEED = 42;
|
||||
// When comparing pixels, allow some slack for float arithmetic and integer rounding.
|
||||
private static final float MAX_DIFF = 1.5f;
|
||||
|
||||
private static float normalizedByte(byte b) {
|
||||
return (b & 0xFF) / 255.0f;
|
||||
}
|
||||
|
||||
private static float saturatedConvert(float c) {
|
||||
return 255.0f * Math.max(0, Math.min(c, 1));
|
||||
}
|
||||
|
||||
// Assert RGB ByteBuffers are pixel perfect identical.
|
||||
private static void assertByteBufferEquals(
|
||||
int width, int height, ByteBuffer actual, ByteBuffer expected) {
|
||||
actual.rewind();
|
||||
expected.rewind();
|
||||
assertEquals(actual.remaining(), width * height * 3);
|
||||
assertEquals(expected.remaining(), width * height * 3);
|
||||
for (int y = 0; y < height; ++y) {
|
||||
for (int x = 0; x < width; ++x) {
|
||||
final int actualR = actual.get() & 0xFF;
|
||||
final int actualG = actual.get() & 0xFF;
|
||||
final int actualB = actual.get() & 0xFF;
|
||||
final int expectedR = expected.get() & 0xFF;
|
||||
final int expectedG = expected.get() & 0xFF;
|
||||
final int expectedB = expected.get() & 0xFF;
|
||||
if (actualR != expectedR || actualG != expectedG || actualB != expectedB) {
|
||||
fail("ByteBuffers of size " + width + "x" + height + " not equal at position "
|
||||
+ "(" + x + ", " + y + "). Expected color (R,G,B): "
|
||||
+ "(" + expectedR + ", " + expectedG + ", " + expectedB + ")"
|
||||
+ " but was: "
|
||||
+ "(" + actualR + ", " + actualG + ", " + actualB + ").");
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Convert RGBA ByteBuffer to RGB ByteBuffer.
|
||||
private static ByteBuffer stripAlphaChannel(ByteBuffer rgbaBuffer) {
|
||||
rgbaBuffer.rewind();
|
||||
assertEquals(rgbaBuffer.remaining() % 4, 0);
|
||||
final int numberOfPixels = rgbaBuffer.remaining() / 4;
|
||||
final ByteBuffer rgbBuffer = ByteBuffer.allocateDirect(numberOfPixels * 3);
|
||||
while (rgbaBuffer.hasRemaining()) {
|
||||
// Copy RGB.
|
||||
for (int channel = 0; channel < 3; ++channel) {
|
||||
rgbBuffer.put(rgbaBuffer.get());
|
||||
}
|
||||
// Drop alpha.
|
||||
rgbaBuffer.get();
|
||||
}
|
||||
return rgbBuffer;
|
||||
}
|
||||
|
||||
@Test
|
||||
@SmallTest
|
||||
public void testRgbRendering() {
|
||||
// Create EGL base with a pixel buffer as display output.
|
||||
final EglBase eglBase = EglBase.create(null, EglBase.CONFIG_PIXEL_BUFFER);
|
||||
eglBase.createPbufferSurface(WIDTH, HEIGHT);
|
||||
eglBase.makeCurrent();
|
||||
|
||||
// Create RGB byte buffer plane with random content.
|
||||
final ByteBuffer rgbPlane = ByteBuffer.allocateDirect(WIDTH * HEIGHT * 3);
|
||||
final Random random = new Random(SEED);
|
||||
random.nextBytes(rgbPlane.array());
|
||||
|
||||
// Upload the RGB byte buffer data as a texture.
|
||||
final int rgbTexture = GlUtil.generateTexture(GLES20.GL_TEXTURE_2D);
|
||||
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
|
||||
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, rgbTexture);
|
||||
GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGB, WIDTH, HEIGHT, 0, GLES20.GL_RGB,
|
||||
GLES20.GL_UNSIGNED_BYTE, rgbPlane);
|
||||
GlUtil.checkNoGLES2Error("glTexImage2D");
|
||||
|
||||
// Draw the RGB frame onto the pixel buffer.
|
||||
final GlRectDrawer drawer = new GlRectDrawer();
|
||||
drawer.drawRgb(rgbTexture, RendererCommon.identityMatrix(), WIDTH, HEIGHT, 0 /* viewportX */,
|
||||
0 /* viewportY */, WIDTH, HEIGHT);
|
||||
|
||||
// Download the pixels in the pixel buffer as RGBA. Not all platforms support RGB, e.g. Nexus 9.
|
||||
final ByteBuffer rgbaData = ByteBuffer.allocateDirect(WIDTH * HEIGHT * 4);
|
||||
GLES20.glReadPixels(0, 0, WIDTH, HEIGHT, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, rgbaData);
|
||||
GlUtil.checkNoGLES2Error("glReadPixels");
|
||||
|
||||
// Assert rendered image is pixel perfect to source RGB.
|
||||
assertByteBufferEquals(WIDTH, HEIGHT, stripAlphaChannel(rgbaData), rgbPlane);
|
||||
|
||||
drawer.release();
|
||||
GLES20.glDeleteTextures(1, new int[] {rgbTexture}, 0);
|
||||
eglBase.release();
|
||||
}
|
||||
|
||||
@Test
|
||||
@SmallTest
|
||||
public void testYuvRendering() {
|
||||
// Create EGL base with a pixel buffer as display output.
|
||||
EglBase eglBase = EglBase.create(null, EglBase.CONFIG_PIXEL_BUFFER);
|
||||
eglBase.createPbufferSurface(WIDTH, HEIGHT);
|
||||
eglBase.makeCurrent();
|
||||
|
||||
// Create YUV byte buffer planes with random content.
|
||||
final ByteBuffer[] yuvPlanes = new ByteBuffer[3];
|
||||
final Random random = new Random(SEED);
|
||||
for (int i = 0; i < 3; ++i) {
|
||||
yuvPlanes[i] = ByteBuffer.allocateDirect(WIDTH * HEIGHT);
|
||||
random.nextBytes(yuvPlanes[i].array());
|
||||
}
|
||||
|
||||
// Generate 3 texture ids for Y/U/V.
|
||||
final int yuvTextures[] = new int[3];
|
||||
for (int i = 0; i < 3; i++) {
|
||||
yuvTextures[i] = GlUtil.generateTexture(GLES20.GL_TEXTURE_2D);
|
||||
}
|
||||
|
||||
// Upload the YUV byte buffer data as textures.
|
||||
for (int i = 0; i < 3; ++i) {
|
||||
GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + i);
|
||||
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, yuvTextures[i]);
|
||||
GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE, WIDTH, HEIGHT, 0,
|
||||
GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE, yuvPlanes[i]);
|
||||
GlUtil.checkNoGLES2Error("glTexImage2D");
|
||||
}
|
||||
|
||||
// Draw the YUV frame onto the pixel buffer.
|
||||
final GlRectDrawer drawer = new GlRectDrawer();
|
||||
drawer.drawYuv(yuvTextures, RendererCommon.identityMatrix(), WIDTH, HEIGHT, 0 /* viewportX */,
|
||||
0 /* viewportY */, WIDTH, HEIGHT);
|
||||
|
||||
// Download the pixels in the pixel buffer as RGBA. Not all platforms support RGB, e.g. Nexus 9.
|
||||
final ByteBuffer data = ByteBuffer.allocateDirect(WIDTH * HEIGHT * 4);
|
||||
GLES20.glReadPixels(0, 0, WIDTH, HEIGHT, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, data);
|
||||
GlUtil.checkNoGLES2Error("glReadPixels");
|
||||
|
||||
// Compare the YUV data with the RGBA result.
|
||||
for (int y = 0; y < HEIGHT; ++y) {
|
||||
for (int x = 0; x < WIDTH; ++x) {
|
||||
// YUV color space. Y in [0, 1], UV in [-0.5, 0.5]. The constants are taken from the YUV
|
||||
// fragment shader code in GlRectDrawer.
|
||||
final float y_luma = normalizedByte(yuvPlanes[0].get());
|
||||
final float u_chroma = normalizedByte(yuvPlanes[1].get()) - 0.5f;
|
||||
final float v_chroma = normalizedByte(yuvPlanes[2].get()) - 0.5f;
|
||||
// Expected color in unrounded RGB [0.0f, 255.0f].
|
||||
final float expectedRed = saturatedConvert(y_luma + 1.403f * v_chroma);
|
||||
final float expectedGreen =
|
||||
saturatedConvert(y_luma - 0.344f * u_chroma - 0.714f * v_chroma);
|
||||
final float expectedBlue = saturatedConvert(y_luma + 1.77f * u_chroma);
|
||||
|
||||
// Actual color in RGB8888.
|
||||
final int actualRed = data.get() & 0xFF;
|
||||
final int actualGreen = data.get() & 0xFF;
|
||||
final int actualBlue = data.get() & 0xFF;
|
||||
final int actualAlpha = data.get() & 0xFF;
|
||||
|
||||
// Assert rendered image is close to pixel perfect from source YUV.
|
||||
assertTrue(Math.abs(actualRed - expectedRed) < MAX_DIFF);
|
||||
assertTrue(Math.abs(actualGreen - expectedGreen) < MAX_DIFF);
|
||||
assertTrue(Math.abs(actualBlue - expectedBlue) < MAX_DIFF);
|
||||
assertEquals(actualAlpha, 255);
|
||||
}
|
||||
}
|
||||
|
||||
drawer.release();
|
||||
GLES20.glDeleteTextures(3, yuvTextures, 0);
|
||||
eglBase.release();
|
||||
}
|
||||
|
||||
/**
|
||||
* The purpose here is to test GlRectDrawer.oesDraw(). Unfortunately, there is no easy way to
|
||||
* create an OES texture, which is needed for input to oesDraw(). Most of the test is concerned
|
||||
* with creating OES textures in the following way:
|
||||
* - Create SurfaceTexture with help from SurfaceTextureHelper.
|
||||
* - Create an EglBase with the SurfaceTexture as EGLSurface.
|
||||
* - Upload RGB texture with known content.
|
||||
* - Draw the RGB texture onto the EglBase with the SurfaceTexture as target.
|
||||
* - Wait for an OES texture to be produced.
|
||||
* The actual oesDraw() test is this:
|
||||
* - Create an EglBase with a pixel buffer as target.
|
||||
* - Render the OES texture onto the pixel buffer.
|
||||
* - Read back the pixel buffer and compare it with the known RGB data.
|
||||
*/
|
||||
@Test
|
||||
@MediumTest
|
||||
public void testOesRendering() throws InterruptedException {
|
||||
/**
|
||||
* Stub class to convert RGB ByteBuffers to OES textures by drawing onto a SurfaceTexture.
|
||||
*/
|
||||
class StubOesTextureProducer {
|
||||
private final EglBase eglBase;
|
||||
private final GlRectDrawer drawer;
|
||||
private final int rgbTexture;
|
||||
|
||||
public StubOesTextureProducer(
|
||||
EglBase.Context sharedContext, SurfaceTexture surfaceTexture, int width, int height) {
|
||||
eglBase = EglBase.create(sharedContext, EglBase.CONFIG_PLAIN);
|
||||
surfaceTexture.setDefaultBufferSize(width, height);
|
||||
eglBase.createSurface(surfaceTexture);
|
||||
assertEquals(eglBase.surfaceWidth(), width);
|
||||
assertEquals(eglBase.surfaceHeight(), height);
|
||||
|
||||
drawer = new GlRectDrawer();
|
||||
|
||||
eglBase.makeCurrent();
|
||||
rgbTexture = GlUtil.generateTexture(GLES20.GL_TEXTURE_2D);
|
||||
}
|
||||
|
||||
public void draw(ByteBuffer rgbPlane) {
|
||||
eglBase.makeCurrent();
|
||||
|
||||
// Upload RGB data to texture.
|
||||
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
|
||||
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, rgbTexture);
|
||||
GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGB, WIDTH, HEIGHT, 0, GLES20.GL_RGB,
|
||||
GLES20.GL_UNSIGNED_BYTE, rgbPlane);
|
||||
// Draw the RGB data onto the SurfaceTexture.
|
||||
drawer.drawRgb(rgbTexture, RendererCommon.identityMatrix(), WIDTH, HEIGHT,
|
||||
0 /* viewportX */, 0 /* viewportY */, WIDTH, HEIGHT);
|
||||
eglBase.swapBuffers();
|
||||
}
|
||||
|
||||
public void release() {
|
||||
eglBase.makeCurrent();
|
||||
drawer.release();
|
||||
GLES20.glDeleteTextures(1, new int[] {rgbTexture}, 0);
|
||||
eglBase.release();
|
||||
}
|
||||
}
|
||||
|
||||
// Create EGL base with a pixel buffer as display output.
|
||||
final EglBase eglBase = EglBase.create(null, EglBase.CONFIG_PIXEL_BUFFER);
|
||||
eglBase.createPbufferSurface(WIDTH, HEIGHT);
|
||||
|
||||
// Create resources for generating OES textures.
|
||||
final SurfaceTextureHelper surfaceTextureHelper = SurfaceTextureHelper.create(
|
||||
"SurfaceTextureHelper test" /* threadName */, eglBase.getEglBaseContext());
|
||||
final StubOesTextureProducer oesProducer = new StubOesTextureProducer(
|
||||
eglBase.getEglBaseContext(), surfaceTextureHelper.getSurfaceTexture(), WIDTH, HEIGHT);
|
||||
final SurfaceTextureHelperTest.MockTextureListener listener =
|
||||
new SurfaceTextureHelperTest.MockTextureListener();
|
||||
surfaceTextureHelper.startListening(listener);
|
||||
|
||||
// Create RGB byte buffer plane with random content.
|
||||
final ByteBuffer rgbPlane = ByteBuffer.allocateDirect(WIDTH * HEIGHT * 3);
|
||||
final Random random = new Random(SEED);
|
||||
random.nextBytes(rgbPlane.array());
|
||||
|
||||
// Draw the frame and block until an OES texture is delivered.
|
||||
oesProducer.draw(rgbPlane);
|
||||
listener.waitForNewFrame();
|
||||
|
||||
// Real test starts here.
|
||||
// Draw the OES texture on the pixel buffer.
|
||||
eglBase.makeCurrent();
|
||||
final GlRectDrawer drawer = new GlRectDrawer();
|
||||
drawer.drawOes(listener.oesTextureId, listener.transformMatrix, WIDTH, HEIGHT,
|
||||
0 /* viewportX */, 0 /* viewportY */, WIDTH, HEIGHT);
|
||||
|
||||
// Download the pixels in the pixel buffer as RGBA. Not all platforms support RGB, e.g. Nexus 9.
|
||||
final ByteBuffer rgbaData = ByteBuffer.allocateDirect(WIDTH * HEIGHT * 4);
|
||||
GLES20.glReadPixels(0, 0, WIDTH, HEIGHT, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, rgbaData);
|
||||
GlUtil.checkNoGLES2Error("glReadPixels");
|
||||
|
||||
// Assert rendered image is pixel perfect to source RGB.
|
||||
assertByteBufferEquals(WIDTH, HEIGHT, stripAlphaChannel(rgbaData), rgbPlane);
|
||||
|
||||
drawer.release();
|
||||
surfaceTextureHelper.returnTextureFrame();
|
||||
oesProducer.release();
|
||||
surfaceTextureHelper.dispose();
|
||||
eglBase.release();
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,233 @@
|
||||
/*
|
||||
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
package org.webrtc;
|
||||
|
||||
import static org.junit.Assert.assertEquals;
|
||||
import static org.junit.Assert.assertNotNull;
|
||||
import static org.junit.Assert.assertTrue;
|
||||
|
||||
import android.annotation.TargetApi;
|
||||
import android.graphics.Matrix;
|
||||
import android.support.test.filters.MediumTest;
|
||||
import android.util.Log;
|
||||
import java.util.concurrent.CountDownLatch;
|
||||
import java.util.concurrent.atomic.AtomicReference;
|
||||
import org.chromium.base.test.BaseJUnit4ClassRunner;
|
||||
import org.junit.Test;
|
||||
import org.junit.runner.RunWith;
|
||||
|
||||
/** Unit tests for {@link HardwareVideoDecoder}. */
|
||||
@TargetApi(16)
|
||||
@RunWith(BaseJUnit4ClassRunner.class)
|
||||
public final class HardwareVideoDecoderTest {
|
||||
private static final String TAG = "HardwareVideoDecoderTest";
|
||||
|
||||
private static final boolean ENABLE_INTEL_VP8_ENCODER = true;
|
||||
private static final boolean ENABLE_H264_HIGH_PROFILE = true;
|
||||
private static final VideoDecoder.Settings SETTINGS =
|
||||
new VideoDecoder.Settings(1 /* core */, 640 /* width */, 480 /* height */);
|
||||
|
||||
@Test
|
||||
@MediumTest
|
||||
public void testInitialize() {
|
||||
HardwareVideoEncoderFactory encoderFactory =
|
||||
new HardwareVideoEncoderFactory(ENABLE_INTEL_VP8_ENCODER, ENABLE_H264_HIGH_PROFILE);
|
||||
VideoCodecInfo[] supportedCodecs = encoderFactory.getSupportedCodecs();
|
||||
if (supportedCodecs.length == 0) {
|
||||
Log.i(TAG, "No hardware encoding support, skipping testInitialize");
|
||||
return;
|
||||
}
|
||||
|
||||
HardwareVideoDecoderFactory decoderFactory = new HardwareVideoDecoderFactory(null);
|
||||
|
||||
VideoDecoder decoder = decoderFactory.createDecoder(supportedCodecs[0].name);
|
||||
assertEquals(decoder.initDecode(SETTINGS, null), VideoCodecStatus.OK);
|
||||
assertEquals(decoder.release(), VideoCodecStatus.OK);
|
||||
}
|
||||
|
||||
@Test
|
||||
@MediumTest
|
||||
public void testInitializeUsingTextures() {
|
||||
HardwareVideoEncoderFactory encoderFactory =
|
||||
new HardwareVideoEncoderFactory(ENABLE_INTEL_VP8_ENCODER, ENABLE_H264_HIGH_PROFILE);
|
||||
VideoCodecInfo[] supportedCodecs = encoderFactory.getSupportedCodecs();
|
||||
if (supportedCodecs.length == 0) {
|
||||
Log.i(TAG, "No hardware encoding support, skipping testInitialize");
|
||||
return;
|
||||
}
|
||||
|
||||
EglBase14 eglBase = new EglBase14(null, EglBase.CONFIG_PLAIN);
|
||||
HardwareVideoDecoderFactory decoderFactory =
|
||||
new HardwareVideoDecoderFactory(eglBase.getEglBaseContext());
|
||||
|
||||
VideoDecoder decoder = decoderFactory.createDecoder(supportedCodecs[0].name);
|
||||
assertEquals(decoder.initDecode(SETTINGS, null), VideoCodecStatus.OK);
|
||||
assertEquals(decoder.release(), VideoCodecStatus.OK);
|
||||
|
||||
eglBase.release();
|
||||
}
|
||||
|
||||
@Test
|
||||
@MediumTest
|
||||
public void testDecode() throws InterruptedException {
|
||||
HardwareVideoEncoderFactory encoderFactory =
|
||||
new HardwareVideoEncoderFactory(ENABLE_INTEL_VP8_ENCODER, ENABLE_H264_HIGH_PROFILE);
|
||||
VideoCodecInfo[] supportedCodecs = encoderFactory.getSupportedCodecs();
|
||||
if (supportedCodecs.length == 0) {
|
||||
Log.i(TAG, "No hardware encoding support, skipping testEncodeYuvBuffer");
|
||||
return;
|
||||
}
|
||||
|
||||
// Set up the decoder.
|
||||
HardwareVideoDecoderFactory decoderFactory = new HardwareVideoDecoderFactory(null);
|
||||
VideoDecoder decoder = decoderFactory.createDecoder(supportedCodecs[0].name);
|
||||
|
||||
final long presentationTimestampUs = 20000;
|
||||
final int rotation = 270;
|
||||
|
||||
final CountDownLatch decodeDone = new CountDownLatch(1);
|
||||
final AtomicReference<VideoFrame> decoded = new AtomicReference<>();
|
||||
VideoDecoder.Callback decodeCallback = new VideoDecoder.Callback() {
|
||||
@Override
|
||||
public void onDecodedFrame(VideoFrame frame, Integer decodeTimeMs, Integer qp) {
|
||||
frame.retain();
|
||||
decoded.set(frame);
|
||||
decodeDone.countDown();
|
||||
}
|
||||
};
|
||||
assertEquals(decoder.initDecode(SETTINGS, decodeCallback), VideoCodecStatus.OK);
|
||||
|
||||
// Set up an encoder to produce a valid encoded frame.
|
||||
VideoEncoder encoder = encoderFactory.createEncoder(supportedCodecs[0]);
|
||||
final CountDownLatch encodeDone = new CountDownLatch(1);
|
||||
final AtomicReference<EncodedImage> encoded = new AtomicReference<>();
|
||||
VideoEncoder.Callback encodeCallback = new VideoEncoder.Callback() {
|
||||
@Override
|
||||
public void onEncodedFrame(EncodedImage image, VideoEncoder.CodecSpecificInfo info) {
|
||||
encoded.set(image);
|
||||
encodeDone.countDown();
|
||||
}
|
||||
};
|
||||
assertEquals(encoder.initEncode(new VideoEncoder.Settings(1, SETTINGS.width, SETTINGS.height,
|
||||
300, 30, true /* automaticResizeOn */),
|
||||
encodeCallback),
|
||||
VideoCodecStatus.OK);
|
||||
|
||||
// First, encode a frame.
|
||||
VideoFrame.I420Buffer buffer = I420BufferImpl.allocate(SETTINGS.width, SETTINGS.height);
|
||||
VideoFrame frame = new VideoFrame(buffer, rotation, presentationTimestampUs * 1000);
|
||||
VideoEncoder.EncodeInfo info = new VideoEncoder.EncodeInfo(
|
||||
new EncodedImage.FrameType[] {EncodedImage.FrameType.VideoFrameKey});
|
||||
|
||||
assertEquals(encoder.encode(frame, info), VideoCodecStatus.OK);
|
||||
|
||||
ThreadUtils.awaitUninterruptibly(encodeDone);
|
||||
|
||||
// Now decode the frame.
|
||||
assertEquals(
|
||||
decoder.decode(encoded.get(), new VideoDecoder.DecodeInfo(false, 0)), VideoCodecStatus.OK);
|
||||
|
||||
ThreadUtils.awaitUninterruptibly(decodeDone);
|
||||
|
||||
frame = decoded.get();
|
||||
assertEquals(frame.getRotation(), rotation);
|
||||
assertEquals(frame.getTimestampNs(), presentationTimestampUs * 1000);
|
||||
assertEquals(frame.getBuffer().getWidth(), SETTINGS.width);
|
||||
assertEquals(frame.getBuffer().getHeight(), SETTINGS.height);
|
||||
|
||||
frame.release();
|
||||
assertEquals(decoder.release(), VideoCodecStatus.OK);
|
||||
assertEquals(encoder.release(), VideoCodecStatus.OK);
|
||||
}
|
||||
|
||||
@Test
|
||||
@MediumTest
|
||||
public void testDecodeUsingTextures() throws InterruptedException {
|
||||
HardwareVideoEncoderFactory encoderFactory =
|
||||
new HardwareVideoEncoderFactory(ENABLE_INTEL_VP8_ENCODER, ENABLE_H264_HIGH_PROFILE);
|
||||
VideoCodecInfo[] supportedCodecs = encoderFactory.getSupportedCodecs();
|
||||
if (supportedCodecs.length == 0) {
|
||||
Log.i(TAG, "No hardware encoding support, skipping testEncodeYuvBuffer");
|
||||
return;
|
||||
}
|
||||
|
||||
// Set up the decoder.
|
||||
EglBase14 eglBase = new EglBase14(null, EglBase.CONFIG_PLAIN);
|
||||
HardwareVideoDecoderFactory decoderFactory =
|
||||
new HardwareVideoDecoderFactory(eglBase.getEglBaseContext());
|
||||
VideoDecoder decoder = decoderFactory.createDecoder(supportedCodecs[0].name);
|
||||
|
||||
final long presentationTimestampUs = 20000;
|
||||
final int rotation = 270;
|
||||
|
||||
final CountDownLatch decodeDone = new CountDownLatch(1);
|
||||
final AtomicReference<VideoFrame> decoded = new AtomicReference<>();
|
||||
VideoDecoder.Callback decodeCallback = new VideoDecoder.Callback() {
|
||||
@Override
|
||||
public void onDecodedFrame(VideoFrame frame, Integer decodeTimeMs, Integer qp) {
|
||||
frame.retain();
|
||||
decoded.set(frame);
|
||||
decodeDone.countDown();
|
||||
}
|
||||
};
|
||||
assertEquals(decoder.initDecode(SETTINGS, decodeCallback), VideoCodecStatus.OK);
|
||||
|
||||
// Set up an encoder to produce a valid encoded frame.
|
||||
VideoEncoder encoder = encoderFactory.createEncoder(supportedCodecs[0]);
|
||||
final CountDownLatch encodeDone = new CountDownLatch(1);
|
||||
final AtomicReference<EncodedImage> encoded = new AtomicReference<>();
|
||||
VideoEncoder.Callback encodeCallback = new VideoEncoder.Callback() {
|
||||
@Override
|
||||
public void onEncodedFrame(EncodedImage image, VideoEncoder.CodecSpecificInfo info) {
|
||||
encoded.set(image);
|
||||
encodeDone.countDown();
|
||||
}
|
||||
};
|
||||
assertEquals(encoder.initEncode(new VideoEncoder.Settings(1, SETTINGS.width, SETTINGS.height,
|
||||
300, 30, true /* automaticResizeOn */),
|
||||
encodeCallback),
|
||||
VideoCodecStatus.OK);
|
||||
|
||||
// First, encode a frame.
|
||||
VideoFrame.I420Buffer buffer = I420BufferImpl.allocate(SETTINGS.width, SETTINGS.height);
|
||||
VideoFrame frame = new VideoFrame(buffer, rotation, presentationTimestampUs * 1000);
|
||||
VideoEncoder.EncodeInfo info = new VideoEncoder.EncodeInfo(
|
||||
new EncodedImage.FrameType[] {EncodedImage.FrameType.VideoFrameKey});
|
||||
|
||||
assertEquals(encoder.encode(frame, info), VideoCodecStatus.OK);
|
||||
|
||||
ThreadUtils.awaitUninterruptibly(encodeDone);
|
||||
|
||||
// Now decode the frame.
|
||||
assertEquals(
|
||||
decoder.decode(encoded.get(), new VideoDecoder.DecodeInfo(false, 0)), VideoCodecStatus.OK);
|
||||
|
||||
ThreadUtils.awaitUninterruptibly(decodeDone);
|
||||
|
||||
frame = decoded.get();
|
||||
assertEquals(frame.getRotation(), rotation);
|
||||
assertEquals(frame.getTimestampNs(), presentationTimestampUs * 1000);
|
||||
|
||||
assertTrue(frame.getBuffer() instanceof VideoFrame.TextureBuffer);
|
||||
VideoFrame.TextureBuffer textureBuffer = (VideoFrame.TextureBuffer) frame.getBuffer();
|
||||
// TODO(mellem): Compare the matrix to whatever we expect to get back?
|
||||
assertNotNull(textureBuffer.getTransformMatrix());
|
||||
assertEquals(textureBuffer.getWidth(), SETTINGS.width);
|
||||
assertEquals(textureBuffer.getHeight(), SETTINGS.height);
|
||||
assertEquals(textureBuffer.getType(), VideoFrame.TextureBuffer.Type.OES);
|
||||
|
||||
assertEquals(decoder.release(), VideoCodecStatus.OK);
|
||||
assertEquals(encoder.release(), VideoCodecStatus.OK);
|
||||
|
||||
frame.release();
|
||||
eglBase.release();
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,212 @@
|
||||
/*
|
||||
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
package org.webrtc;
|
||||
|
||||
import static org.junit.Assert.assertEquals;
|
||||
import static org.junit.Assert.assertTrue;
|
||||
|
||||
import android.annotation.TargetApi;
|
||||
import android.graphics.Matrix;
|
||||
import android.opengl.GLES11Ext;
|
||||
import android.opengl.GLES20;
|
||||
import android.support.test.filters.SmallTest;
|
||||
import android.util.Log;
|
||||
import java.nio.ByteBuffer;
|
||||
import java.util.concurrent.CountDownLatch;
|
||||
import org.chromium.base.test.BaseJUnit4ClassRunner;
|
||||
import org.junit.Test;
|
||||
import org.junit.runner.RunWith;
|
||||
|
||||
@TargetApi(16)
|
||||
@RunWith(BaseJUnit4ClassRunner.class)
|
||||
public class HardwareVideoEncoderTest {
|
||||
final static String TAG = "MediaCodecVideoEncoderTest";
|
||||
|
||||
private static final boolean ENABLE_INTEL_VP8_ENCODER = true;
|
||||
private static final boolean ENABLE_H264_HIGH_PROFILE = true;
|
||||
private static final VideoEncoder.Settings SETTINGS =
|
||||
new VideoEncoder.Settings(1 /* core */, 640 /* width */, 480 /* height */, 300 /* kbps */,
|
||||
30 /* fps */, true /* automaticResizeOn */);
|
||||
|
||||
@Test
|
||||
@SmallTest
|
||||
public void testInitializeUsingYuvBuffer() {
|
||||
HardwareVideoEncoderFactory factory =
|
||||
new HardwareVideoEncoderFactory(ENABLE_INTEL_VP8_ENCODER, ENABLE_H264_HIGH_PROFILE);
|
||||
VideoCodecInfo[] supportedCodecs = factory.getSupportedCodecs();
|
||||
if (supportedCodecs.length == 0) {
|
||||
Log.w(TAG, "No hardware encoding support, skipping testInitializeUsingYuvBuffer");
|
||||
return;
|
||||
}
|
||||
VideoEncoder encoder = factory.createEncoder(supportedCodecs[0]);
|
||||
assertEquals(VideoCodecStatus.OK, encoder.initEncode(SETTINGS, null));
|
||||
assertEquals(VideoCodecStatus.OK, encoder.release());
|
||||
}
|
||||
|
||||
@Test
|
||||
@SmallTest
|
||||
public void testInitializeUsingTextures() {
|
||||
EglBase14 eglBase = new EglBase14(null, EglBase.CONFIG_PLAIN);
|
||||
HardwareVideoEncoderFactory factory = new HardwareVideoEncoderFactory(
|
||||
eglBase.getEglBaseContext(), ENABLE_INTEL_VP8_ENCODER, ENABLE_H264_HIGH_PROFILE);
|
||||
VideoCodecInfo[] supportedCodecs = factory.getSupportedCodecs();
|
||||
if (supportedCodecs.length == 0) {
|
||||
Log.w(TAG, "No hardware encoding support, skipping testInitializeUsingTextures");
|
||||
return;
|
||||
}
|
||||
VideoEncoder encoder = factory.createEncoder(supportedCodecs[0]);
|
||||
assertEquals(VideoCodecStatus.OK, encoder.initEncode(SETTINGS, null));
|
||||
assertEquals(VideoCodecStatus.OK, encoder.release());
|
||||
eglBase.release();
|
||||
}
|
||||
|
||||
@Test
|
||||
@SmallTest
|
||||
public void testEncodeYuvBuffer() throws InterruptedException {
|
||||
HardwareVideoEncoderFactory factory =
|
||||
new HardwareVideoEncoderFactory(ENABLE_INTEL_VP8_ENCODER, ENABLE_H264_HIGH_PROFILE);
|
||||
VideoCodecInfo[] supportedCodecs = factory.getSupportedCodecs();
|
||||
if (supportedCodecs.length == 0) {
|
||||
Log.w(TAG, "No hardware encoding support, skipping testEncodeYuvBuffer");
|
||||
return;
|
||||
}
|
||||
|
||||
VideoEncoder encoder = factory.createEncoder(supportedCodecs[0]);
|
||||
|
||||
final long presentationTimestampNs = 20000;
|
||||
final CountDownLatch encodeDone = new CountDownLatch(1);
|
||||
|
||||
VideoEncoder.Callback callback = new VideoEncoder.Callback() {
|
||||
@Override
|
||||
public void onEncodedFrame(EncodedImage image, VideoEncoder.CodecSpecificInfo info) {
|
||||
assertTrue(image.buffer.capacity() > 0);
|
||||
assertEquals(image.encodedWidth, SETTINGS.width);
|
||||
assertEquals(image.encodedHeight, SETTINGS.height);
|
||||
assertEquals(image.captureTimeNs, presentationTimestampNs);
|
||||
assertEquals(image.frameType, EncodedImage.FrameType.VideoFrameKey);
|
||||
assertEquals(image.rotation, 0);
|
||||
assertTrue(image.completeFrame);
|
||||
|
||||
encodeDone.countDown();
|
||||
}
|
||||
};
|
||||
|
||||
assertEquals(encoder.initEncode(SETTINGS, callback), VideoCodecStatus.OK);
|
||||
|
||||
VideoFrame.I420Buffer buffer = I420BufferImpl.allocate(SETTINGS.width, SETTINGS.height);
|
||||
VideoFrame frame = new VideoFrame(buffer, 0 /* rotation */, presentationTimestampNs);
|
||||
VideoEncoder.EncodeInfo info = new VideoEncoder.EncodeInfo(
|
||||
new EncodedImage.FrameType[] {EncodedImage.FrameType.VideoFrameKey});
|
||||
|
||||
assertEquals(encoder.encode(frame, info), VideoCodecStatus.OK);
|
||||
|
||||
ThreadUtils.awaitUninterruptibly(encodeDone);
|
||||
|
||||
assertEquals(encoder.release(), VideoCodecStatus.OK);
|
||||
}
|
||||
|
||||
@Test
|
||||
@SmallTest
|
||||
public void testEncodeTextures() throws InterruptedException {
|
||||
final EglBase14 eglOesBase = new EglBase14(null, EglBase.CONFIG_PIXEL_BUFFER);
|
||||
HardwareVideoEncoderFactory factory = new HardwareVideoEncoderFactory(
|
||||
eglOesBase.getEglBaseContext(), ENABLE_INTEL_VP8_ENCODER, ENABLE_H264_HIGH_PROFILE);
|
||||
VideoCodecInfo[] supportedCodecs = factory.getSupportedCodecs();
|
||||
if (supportedCodecs.length == 0) {
|
||||
Log.w(TAG, "No hardware encoding support, skipping testEncodeTextures");
|
||||
return;
|
||||
}
|
||||
|
||||
eglOesBase.createDummyPbufferSurface();
|
||||
eglOesBase.makeCurrent();
|
||||
final int oesTextureId = GlUtil.generateTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES);
|
||||
|
||||
VideoEncoder encoder = factory.createEncoder(supportedCodecs[0]);
|
||||
|
||||
final long presentationTimestampNs = 20000;
|
||||
final CountDownLatch encodeDone = new CountDownLatch(1);
|
||||
|
||||
VideoEncoder.Callback callback = new VideoEncoder.Callback() {
|
||||
@Override
|
||||
public void onEncodedFrame(EncodedImage image, VideoEncoder.CodecSpecificInfo info) {
|
||||
assertTrue(image.buffer.capacity() > 0);
|
||||
assertEquals(image.encodedWidth, SETTINGS.width);
|
||||
assertEquals(image.encodedHeight, SETTINGS.height);
|
||||
assertEquals(image.captureTimeNs, presentationTimestampNs);
|
||||
assertEquals(image.frameType, EncodedImage.FrameType.VideoFrameKey);
|
||||
assertEquals(image.rotation, 0);
|
||||
assertTrue(image.completeFrame);
|
||||
|
||||
encodeDone.countDown();
|
||||
}
|
||||
};
|
||||
|
||||
assertEquals(encoder.initEncode(SETTINGS, callback), VideoCodecStatus.OK);
|
||||
|
||||
VideoFrame.TextureBuffer buffer = new VideoFrame.TextureBuffer() {
|
||||
@Override
|
||||
public VideoFrame.TextureBuffer.Type getType() {
|
||||
return VideoFrame.TextureBuffer.Type.OES;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int getTextureId() {
|
||||
return oesTextureId;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Matrix getTransformMatrix() {
|
||||
return new Matrix();
|
||||
}
|
||||
|
||||
@Override
|
||||
public int getWidth() {
|
||||
return SETTINGS.width;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int getHeight() {
|
||||
return SETTINGS.height;
|
||||
}
|
||||
|
||||
@Override
|
||||
public VideoFrame.I420Buffer toI420() {
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void retain() {}
|
||||
|
||||
@Override
|
||||
public void release() {}
|
||||
|
||||
@Override
|
||||
public VideoFrame.Buffer cropAndScale(
|
||||
int cropX, int cropY, int cropWidth, int cropHeight, int scaleWidth, int scaleHeight) {
|
||||
return null;
|
||||
}
|
||||
};
|
||||
VideoFrame frame = new VideoFrame(buffer, 0 /* rotation */, presentationTimestampNs);
|
||||
VideoEncoder.EncodeInfo info = new VideoEncoder.EncodeInfo(
|
||||
new EncodedImage.FrameType[] {EncodedImage.FrameType.VideoFrameKey});
|
||||
|
||||
assertEquals(encoder.encode(frame, info), VideoCodecStatus.OK);
|
||||
GlUtil.checkNoGLES2Error("encodeTexture");
|
||||
|
||||
// It should be Ok to delete the texture after calling encodeTexture.
|
||||
GLES20.glDeleteTextures(1, new int[] {oesTextureId}, 0);
|
||||
|
||||
ThreadUtils.awaitUninterruptibly(encodeDone);
|
||||
|
||||
assertEquals(encoder.release(), VideoCodecStatus.OK);
|
||||
eglOesBase.release();
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,170 @@
|
||||
/*
|
||||
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
package org.webrtc;
|
||||
|
||||
import static org.junit.Assert.assertEquals;
|
||||
import static org.junit.Assert.assertNotNull;
|
||||
import static org.junit.Assert.assertTrue;
|
||||
|
||||
import android.annotation.TargetApi;
|
||||
import android.opengl.GLES11Ext;
|
||||
import android.opengl.GLES20;
|
||||
import android.os.Build;
|
||||
import android.support.test.filters.SmallTest;
|
||||
import android.util.Log;
|
||||
import java.nio.ByteBuffer;
|
||||
import org.chromium.base.test.BaseJUnit4ClassRunner;
|
||||
import org.junit.Test;
|
||||
import org.junit.runner.RunWith;
|
||||
import org.webrtc.MediaCodecVideoEncoder.OutputBufferInfo;
|
||||
|
||||
@TargetApi(Build.VERSION_CODES.ICE_CREAM_SANDWICH_MR1)
|
||||
@RunWith(BaseJUnit4ClassRunner.class)
|
||||
public class MediaCodecVideoEncoderTest {
|
||||
final static String TAG = "MediaCodecVideoEncoderTest";
|
||||
final static int profile = MediaCodecVideoEncoder.H264Profile.CONSTRAINED_BASELINE.getValue();
|
||||
|
||||
@Test
|
||||
@SmallTest
|
||||
public void testInitializeUsingByteBuffer() {
|
||||
if (!MediaCodecVideoEncoder.isVp8HwSupported()) {
|
||||
Log.i(TAG, "Hardware does not support VP8 encoding, skipping testInitReleaseUsingByteBuffer");
|
||||
return;
|
||||
}
|
||||
MediaCodecVideoEncoder encoder = new MediaCodecVideoEncoder();
|
||||
assertTrue(encoder.initEncode(
|
||||
MediaCodecVideoEncoder.VideoCodecType.VIDEO_CODEC_VP8, profile, 640, 480, 300, 30, null));
|
||||
encoder.release();
|
||||
}
|
||||
|
||||
@Test
|
||||
@SmallTest
|
||||
public void testInitializeUsingTextures() {
|
||||
if (!MediaCodecVideoEncoder.isVp8HwSupportedUsingTextures()) {
|
||||
Log.i(TAG, "hardware does not support VP8 encoding, skipping testEncoderUsingTextures");
|
||||
return;
|
||||
}
|
||||
EglBase14 eglBase = new EglBase14(null, EglBase.CONFIG_PLAIN);
|
||||
MediaCodecVideoEncoder encoder = new MediaCodecVideoEncoder();
|
||||
assertTrue(encoder.initEncode(MediaCodecVideoEncoder.VideoCodecType.VIDEO_CODEC_VP8, profile,
|
||||
640, 480, 300, 30, eglBase.getEglBaseContext()));
|
||||
encoder.release();
|
||||
eglBase.release();
|
||||
}
|
||||
|
||||
@Test
|
||||
@SmallTest
|
||||
public void testInitializeUsingByteBufferReInitializeUsingTextures() {
|
||||
if (!MediaCodecVideoEncoder.isVp8HwSupportedUsingTextures()) {
|
||||
Log.i(TAG, "hardware does not support VP8 encoding, skipping testEncoderUsingTextures");
|
||||
return;
|
||||
}
|
||||
MediaCodecVideoEncoder encoder = new MediaCodecVideoEncoder();
|
||||
assertTrue(encoder.initEncode(
|
||||
MediaCodecVideoEncoder.VideoCodecType.VIDEO_CODEC_VP8, profile, 640, 480, 300, 30, null));
|
||||
encoder.release();
|
||||
EglBase14 eglBase = new EglBase14(null, EglBase.CONFIG_PLAIN);
|
||||
assertTrue(encoder.initEncode(MediaCodecVideoEncoder.VideoCodecType.VIDEO_CODEC_VP8, profile,
|
||||
640, 480, 300, 30, eglBase.getEglBaseContext()));
|
||||
encoder.release();
|
||||
eglBase.release();
|
||||
}
|
||||
|
||||
@Test
|
||||
@SmallTest
|
||||
public void testEncoderUsingByteBuffer() throws InterruptedException {
|
||||
if (!MediaCodecVideoEncoder.isVp8HwSupported()) {
|
||||
Log.i(TAG, "Hardware does not support VP8 encoding, skipping testEncoderUsingByteBuffer");
|
||||
return;
|
||||
}
|
||||
|
||||
final int width = 640;
|
||||
final int height = 480;
|
||||
final int min_size = width * height * 3 / 2;
|
||||
final long presentationTimestampUs = 2;
|
||||
|
||||
MediaCodecVideoEncoder encoder = new MediaCodecVideoEncoder();
|
||||
|
||||
assertTrue(encoder.initEncode(MediaCodecVideoEncoder.VideoCodecType.VIDEO_CODEC_VP8, profile,
|
||||
width, height, 300, 30, null));
|
||||
ByteBuffer[] inputBuffers = encoder.getInputBuffers();
|
||||
assertNotNull(inputBuffers);
|
||||
assertTrue(min_size <= inputBuffers[0].capacity());
|
||||
|
||||
int bufferIndex;
|
||||
do {
|
||||
Thread.sleep(10);
|
||||
bufferIndex = encoder.dequeueInputBuffer();
|
||||
} while (bufferIndex == -1); // |-1| is returned when there is no buffer available yet.
|
||||
|
||||
assertTrue(bufferIndex >= 0);
|
||||
assertTrue(bufferIndex < inputBuffers.length);
|
||||
assertTrue(encoder.encodeBuffer(true, bufferIndex, min_size, presentationTimestampUs));
|
||||
|
||||
OutputBufferInfo info;
|
||||
do {
|
||||
info = encoder.dequeueOutputBuffer();
|
||||
Thread.sleep(10);
|
||||
} while (info == null);
|
||||
assertTrue(info.index >= 0);
|
||||
assertEquals(presentationTimestampUs, info.presentationTimestampUs);
|
||||
assertTrue(info.buffer.capacity() > 0);
|
||||
encoder.releaseOutputBuffer(info.index);
|
||||
|
||||
encoder.release();
|
||||
}
|
||||
|
||||
@Test
|
||||
@SmallTest
|
||||
public void testEncoderUsingTextures() throws InterruptedException {
|
||||
if (!MediaCodecVideoEncoder.isVp8HwSupportedUsingTextures()) {
|
||||
Log.i(TAG, "Hardware does not support VP8 encoding, skipping testEncoderUsingTextures");
|
||||
return;
|
||||
}
|
||||
|
||||
final int width = 640;
|
||||
final int height = 480;
|
||||
final long presentationTs = 2;
|
||||
|
||||
final EglBase14 eglOesBase = new EglBase14(null, EglBase.CONFIG_PIXEL_BUFFER);
|
||||
eglOesBase.createDummyPbufferSurface();
|
||||
eglOesBase.makeCurrent();
|
||||
int oesTextureId = GlUtil.generateTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES);
|
||||
|
||||
// TODO(perkj): This test is weak since we don't fill the texture with valid data with correct
|
||||
// width and height and verify the encoded data. Fill the OES texture and figure out a way to
|
||||
// verify that the output makes sense.
|
||||
|
||||
MediaCodecVideoEncoder encoder = new MediaCodecVideoEncoder();
|
||||
|
||||
assertTrue(encoder.initEncode(MediaCodecVideoEncoder.VideoCodecType.VIDEO_CODEC_VP8, profile,
|
||||
width, height, 300, 30, eglOesBase.getEglBaseContext()));
|
||||
assertTrue(
|
||||
encoder.encodeTexture(true, oesTextureId, RendererCommon.identityMatrix(), presentationTs));
|
||||
GlUtil.checkNoGLES2Error("encodeTexture");
|
||||
|
||||
// It should be Ok to delete the texture after calling encodeTexture.
|
||||
GLES20.glDeleteTextures(1, new int[] {oesTextureId}, 0);
|
||||
|
||||
OutputBufferInfo info = encoder.dequeueOutputBuffer();
|
||||
while (info == null) {
|
||||
info = encoder.dequeueOutputBuffer();
|
||||
Thread.sleep(20);
|
||||
}
|
||||
assertTrue(info.index != -1);
|
||||
assertTrue(info.buffer.capacity() > 0);
|
||||
assertEquals(presentationTs, info.presentationTimestampUs);
|
||||
encoder.releaseOutputBuffer(info.index);
|
||||
|
||||
encoder.release();
|
||||
eglOesBase.release();
|
||||
}
|
||||
}
|
||||
@ -0,0 +1,291 @@
|
||||
/*
|
||||
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
package org.webrtc;
|
||||
|
||||
import static org.junit.Assert.assertFalse;
|
||||
import static org.junit.Assert.assertNotNull;
|
||||
import static org.junit.Assert.assertTrue;
|
||||
import static org.webrtc.NetworkMonitorAutoDetect.ConnectionType;
|
||||
import static org.webrtc.NetworkMonitorAutoDetect.ConnectivityManagerDelegate;
|
||||
import static org.webrtc.NetworkMonitorAutoDetect.INVALID_NET_ID;
|
||||
import static org.webrtc.NetworkMonitorAutoDetect.NetworkInformation;
|
||||
import static org.webrtc.NetworkMonitorAutoDetect.NetworkState;
|
||||
|
||||
import android.annotation.SuppressLint;
|
||||
import android.content.Context;
|
||||
import android.content.Intent;
|
||||
import android.net.ConnectivityManager;
|
||||
import android.net.Network;
|
||||
import android.net.wifi.WifiManager;
|
||||
import android.os.Build;
|
||||
import android.os.Handler;
|
||||
import android.os.Looper;
|
||||
import android.support.test.InstrumentationRegistry;
|
||||
import android.support.test.annotation.UiThreadTest;
|
||||
import android.support.test.filters.MediumTest;
|
||||
import android.support.test.filters.SmallTest;
|
||||
import android.support.test.rule.UiThreadTestRule;
|
||||
import org.chromium.base.test.BaseJUnit4ClassRunner;
|
||||
import org.junit.Before;
|
||||
import org.junit.Rule;
|
||||
import org.junit.Test;
|
||||
import org.junit.runner.RunWith;
|
||||
|
||||
/**
|
||||
* Tests for org.webrtc.NetworkMonitor.
|
||||
*
|
||||
* TODO(deadbeef): These tests don't cover the interaction between
|
||||
* NetworkManager.java and androidnetworkmonitor_jni.cc, which is how this
|
||||
* class is used in practice in WebRTC.
|
||||
*/
|
||||
@SuppressLint("NewApi")
|
||||
@RunWith(BaseJUnit4ClassRunner.class)
|
||||
public class NetworkMonitorTest {
|
||||
@Rule public UiThreadTestRule uiThreadTestRule = new UiThreadTestRule();
|
||||
|
||||
/**
|
||||
* Listens for alerts fired by the NetworkMonitor when network status changes.
|
||||
*/
|
||||
private static class NetworkMonitorTestObserver implements NetworkMonitor.NetworkObserver {
|
||||
private boolean receivedNotification = false;
|
||||
|
||||
@Override
|
||||
public void onConnectionTypeChanged(ConnectionType connectionType) {
|
||||
receivedNotification = true;
|
||||
}
|
||||
|
||||
public boolean hasReceivedNotification() {
|
||||
return receivedNotification;
|
||||
}
|
||||
|
||||
public void resetHasReceivedNotification() {
|
||||
receivedNotification = false;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Mocks out calls to the ConnectivityManager.
|
||||
*/
|
||||
private static class MockConnectivityManagerDelegate extends ConnectivityManagerDelegate {
|
||||
private boolean activeNetworkExists;
|
||||
private int networkType;
|
||||
private int networkSubtype;
|
||||
|
||||
@Override
|
||||
public NetworkState getNetworkState() {
|
||||
return new NetworkState(activeNetworkExists, networkType, networkSubtype);
|
||||
}
|
||||
|
||||
// Dummy implementations to avoid NullPointerExceptions in default implementations:
|
||||
|
||||
@Override
|
||||
public long getDefaultNetId() {
|
||||
return INVALID_NET_ID;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Network[] getAllNetworks() {
|
||||
return new Network[0];
|
||||
}
|
||||
|
||||
@Override
|
||||
public NetworkState getNetworkState(Network network) {
|
||||
return new NetworkState(false, -1, -1);
|
||||
}
|
||||
|
||||
public void setActiveNetworkExists(boolean networkExists) {
|
||||
activeNetworkExists = networkExists;
|
||||
}
|
||||
|
||||
public void setNetworkType(int networkType) {
|
||||
this.networkType = networkType;
|
||||
}
|
||||
|
||||
public void setNetworkSubtype(int networkSubtype) {
|
||||
this.networkSubtype = networkSubtype;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Mocks out calls to the WifiManager.
|
||||
*/
|
||||
private static class MockWifiManagerDelegate
|
||||
extends NetworkMonitorAutoDetect.WifiManagerDelegate {
|
||||
private String wifiSSID;
|
||||
|
||||
@Override
|
||||
public String getWifiSSID() {
|
||||
return wifiSSID;
|
||||
}
|
||||
|
||||
public void setWifiSSID(String wifiSSID) {
|
||||
this.wifiSSID = wifiSSID;
|
||||
}
|
||||
}
|
||||
|
||||
// A dummy NetworkMonitorAutoDetect.Observer.
|
||||
private static class TestNetworkMonitorAutoDetectObserver
|
||||
implements NetworkMonitorAutoDetect.Observer {
|
||||
@Override
|
||||
public void onConnectionTypeChanged(ConnectionType newConnectionType) {}
|
||||
|
||||
@Override
|
||||
public void onNetworkConnect(NetworkInformation networkInfo) {}
|
||||
|
||||
@Override
|
||||
public void onNetworkDisconnect(long networkHandle) {}
|
||||
}
|
||||
|
||||
private static final Object lock = new Object();
|
||||
private static Handler uiThreadHandler = null;
|
||||
|
||||
private NetworkMonitorAutoDetect receiver;
|
||||
private MockConnectivityManagerDelegate connectivityDelegate;
|
||||
private MockWifiManagerDelegate wifiDelegate;
|
||||
|
||||
private static Handler getUiThreadHandler() {
|
||||
synchronized (lock) {
|
||||
if (uiThreadHandler == null) {
|
||||
uiThreadHandler = new Handler(Looper.getMainLooper());
|
||||
}
|
||||
return uiThreadHandler;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Helper method to create a network monitor and delegates for testing.
|
||||
*/
|
||||
private void createTestMonitor() {
|
||||
Context context = InstrumentationRegistry.getTargetContext();
|
||||
NetworkMonitor.resetInstanceForTests();
|
||||
NetworkMonitor.createAutoDetectorForTest();
|
||||
receiver = NetworkMonitor.getAutoDetectorForTest();
|
||||
assertNotNull(receiver);
|
||||
|
||||
connectivityDelegate = new MockConnectivityManagerDelegate();
|
||||
connectivityDelegate.setActiveNetworkExists(true);
|
||||
receiver.setConnectivityManagerDelegateForTests(connectivityDelegate);
|
||||
|
||||
wifiDelegate = new MockWifiManagerDelegate();
|
||||
receiver.setWifiManagerDelegateForTests(wifiDelegate);
|
||||
wifiDelegate.setWifiSSID("foo");
|
||||
}
|
||||
|
||||
private NetworkMonitorAutoDetect.ConnectionType getCurrentConnectionType() {
|
||||
final NetworkMonitorAutoDetect.NetworkState networkState = receiver.getCurrentNetworkState();
|
||||
return receiver.getConnectionType(networkState);
|
||||
}
|
||||
|
||||
@Before
|
||||
public void setUp() {
|
||||
ContextUtils.initialize(InstrumentationRegistry.getTargetContext());
|
||||
createTestMonitor();
|
||||
}
|
||||
|
||||
/**
|
||||
* Tests that the receiver registers for connectivity intents during construction.
|
||||
*/
|
||||
@Test
|
||||
@UiThreadTest
|
||||
@SmallTest
|
||||
public void testNetworkMonitorRegistersInConstructor() throws InterruptedException {
|
||||
Context context = InstrumentationRegistry.getTargetContext();
|
||||
|
||||
NetworkMonitorAutoDetect.Observer observer = new TestNetworkMonitorAutoDetectObserver();
|
||||
|
||||
NetworkMonitorAutoDetect receiver = new NetworkMonitorAutoDetect(observer, context);
|
||||
|
||||
assertTrue(receiver.isReceiverRegisteredForTesting());
|
||||
}
|
||||
|
||||
/**
|
||||
* Tests that when there is an intent indicating a change in network connectivity, it sends a
|
||||
* notification to Java observers.
|
||||
*/
|
||||
@Test
|
||||
@UiThreadTest
|
||||
@MediumTest
|
||||
public void testNetworkMonitorJavaObservers() throws InterruptedException {
|
||||
// Initialize the NetworkMonitor with a connection.
|
||||
Intent connectivityIntent = new Intent(ConnectivityManager.CONNECTIVITY_ACTION);
|
||||
receiver.onReceive(InstrumentationRegistry.getTargetContext(), connectivityIntent);
|
||||
|
||||
// We shouldn't be re-notified if the connection hasn't actually changed.
|
||||
NetworkMonitorTestObserver observer = new NetworkMonitorTestObserver();
|
||||
NetworkMonitor.addNetworkObserver(observer);
|
||||
receiver.onReceive(InstrumentationRegistry.getTargetContext(), connectivityIntent);
|
||||
assertFalse(observer.hasReceivedNotification());
|
||||
|
||||
// We shouldn't be notified if we're connected to non-Wifi and the Wifi SSID changes.
|
||||
wifiDelegate.setWifiSSID("bar");
|
||||
receiver.onReceive(InstrumentationRegistry.getTargetContext(), connectivityIntent);
|
||||
assertFalse(observer.hasReceivedNotification());
|
||||
|
||||
// We should be notified when we change to Wifi.
|
||||
connectivityDelegate.setNetworkType(ConnectivityManager.TYPE_WIFI);
|
||||
receiver.onReceive(InstrumentationRegistry.getTargetContext(), connectivityIntent);
|
||||
assertTrue(observer.hasReceivedNotification());
|
||||
observer.resetHasReceivedNotification();
|
||||
|
||||
// We should be notified when the Wifi SSID changes.
|
||||
wifiDelegate.setWifiSSID("foo");
|
||||
receiver.onReceive(InstrumentationRegistry.getTargetContext(), connectivityIntent);
|
||||
assertTrue(observer.hasReceivedNotification());
|
||||
observer.resetHasReceivedNotification();
|
||||
|
||||
// We shouldn't be re-notified if the Wifi SSID hasn't actually changed.
|
||||
receiver.onReceive(InstrumentationRegistry.getTargetContext(), connectivityIntent);
|
||||
assertFalse(observer.hasReceivedNotification());
|
||||
|
||||
// Mimic that connectivity has been lost and ensure that the observer gets the notification.
|
||||
connectivityDelegate.setActiveNetworkExists(false);
|
||||
Intent noConnectivityIntent = new Intent(ConnectivityManager.CONNECTIVITY_ACTION);
|
||||
receiver.onReceive(InstrumentationRegistry.getTargetContext(), noConnectivityIntent);
|
||||
assertTrue(observer.hasReceivedNotification());
|
||||
}
|
||||
|
||||
/**
|
||||
* Tests that ConnectivityManagerDelegate doesn't crash. This test cannot rely on having any
|
||||
* active network connections so it cannot usefully check results, but it can at least check
|
||||
* that the functions don't crash.
|
||||
*/
|
||||
@Test
|
||||
@UiThreadTest
|
||||
@SmallTest
|
||||
public void testConnectivityManagerDelegateDoesNotCrash() {
|
||||
ConnectivityManagerDelegate delegate =
|
||||
new ConnectivityManagerDelegate(InstrumentationRegistry.getTargetContext());
|
||||
delegate.getNetworkState();
|
||||
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
|
||||
Network[] networks = delegate.getAllNetworks();
|
||||
if (networks.length >= 1) {
|
||||
delegate.getNetworkState(networks[0]);
|
||||
delegate.hasInternetCapability(networks[0]);
|
||||
}
|
||||
delegate.getDefaultNetId();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Tests that NetworkMonitorAutoDetect queryable APIs don't crash. This test cannot rely
|
||||
* on having any active network connections so it cannot usefully check results, but it can at
|
||||
* least check that the functions don't crash.
|
||||
*/
|
||||
@Test
|
||||
@UiThreadTest
|
||||
@SmallTest
|
||||
public void testQueryableAPIsDoNotCrash() {
|
||||
NetworkMonitorAutoDetect.Observer observer = new TestNetworkMonitorAutoDetectObserver();
|
||||
NetworkMonitorAutoDetect ncn =
|
||||
new NetworkMonitorAutoDetect(observer, InstrumentationRegistry.getTargetContext());
|
||||
ncn.getDefaultNetId();
|
||||
}
|
||||
}
|
||||
File diff suppressed because it is too large
@ -0,0 +1,200 @@
|
||||
/*
|
||||
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
package org.webrtc;
|
||||
|
||||
import static org.junit.Assert.assertArrayEquals;
|
||||
import static org.junit.Assert.assertEquals;
|
||||
import static org.webrtc.RendererCommon.ScalingType.*;
|
||||
import static org.webrtc.RendererCommon.getDisplaySize;
|
||||
import static org.webrtc.RendererCommon.getLayoutMatrix;
|
||||
import static org.webrtc.RendererCommon.rotateTextureMatrix;
|
||||
|
||||
import android.graphics.Point;
|
||||
import android.support.test.filters.SmallTest;
|
||||
import org.chromium.base.test.BaseJUnit4ClassRunner;
|
||||
import org.junit.Test;
|
||||
import org.junit.runner.RunWith;
|
||||
|
||||
@RunWith(BaseJUnit4ClassRunner.class)
|
||||
public class RendererCommonTest {
|
||||
@Test
|
||||
@SmallTest
|
||||
public void testDisplaySizeNoFrame() {
|
||||
assertEquals(new Point(0, 0), getDisplaySize(SCALE_ASPECT_FIT, 0.0f, 0, 0));
|
||||
assertEquals(new Point(0, 0), getDisplaySize(SCALE_ASPECT_FILL, 0.0f, 0, 0));
|
||||
assertEquals(new Point(0, 0), getDisplaySize(SCALE_ASPECT_BALANCED, 0.0f, 0, 0));
|
||||
}
|
||||
|
||||
@Test
|
||||
@SmallTest
|
||||
public void testDisplaySizeDegenerateAspectRatio() {
|
||||
assertEquals(new Point(1280, 720), getDisplaySize(SCALE_ASPECT_FIT, 0.0f, 1280, 720));
|
||||
assertEquals(new Point(1280, 720), getDisplaySize(SCALE_ASPECT_FILL, 0.0f, 1280, 720));
|
||||
assertEquals(new Point(1280, 720), getDisplaySize(SCALE_ASPECT_BALANCED, 0.0f, 1280, 720));
|
||||
}
|
||||
|
||||
@Test
|
||||
@SmallTest
|
||||
public void testZeroDisplaySize() {
|
||||
assertEquals(new Point(0, 0), getDisplaySize(SCALE_ASPECT_FIT, 16.0f / 9, 0, 0));
|
||||
assertEquals(new Point(0, 0), getDisplaySize(SCALE_ASPECT_FILL, 16.0f / 9, 0, 0));
|
||||
assertEquals(new Point(0, 0), getDisplaySize(SCALE_ASPECT_BALANCED, 16.0f / 9, 0, 0));
|
||||
}
|
||||
|
||||
@Test
|
||||
@SmallTest
|
||||
public void testDisplaySizePerfectFit() {
|
||||
assertEquals(new Point(1280, 720), getDisplaySize(SCALE_ASPECT_FIT, 16.0f / 9, 1280, 720));
|
||||
assertEquals(new Point(1280, 720), getDisplaySize(SCALE_ASPECT_FILL, 16.0f / 9, 1280, 720));
|
||||
assertEquals(new Point(1280, 720), getDisplaySize(SCALE_ASPECT_BALANCED, 16.0f / 9, 1280, 720));
|
||||
assertEquals(new Point(720, 1280), getDisplaySize(SCALE_ASPECT_FIT, 9.0f / 16, 720, 1280));
|
||||
assertEquals(new Point(720, 1280), getDisplaySize(SCALE_ASPECT_FILL, 9.0f / 16, 720, 1280));
|
||||
assertEquals(new Point(720, 1280), getDisplaySize(SCALE_ASPECT_BALANCED, 9.0f / 16, 720, 1280));
|
||||
}
|
||||
|
||||
@Test
|
||||
@SmallTest
|
||||
public void testLandscapeVideoInPortraitDisplay() {
|
||||
assertEquals(new Point(720, 405), getDisplaySize(SCALE_ASPECT_FIT, 16.0f / 9, 720, 1280));
|
||||
assertEquals(new Point(720, 1280), getDisplaySize(SCALE_ASPECT_FILL, 16.0f / 9, 720, 1280));
|
||||
assertEquals(new Point(720, 720), getDisplaySize(SCALE_ASPECT_BALANCED, 16.0f / 9, 720, 1280));
|
||||
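// For SCALE_ASPECT_FIT the 16:9 video is shrunk to fit the 720-wide display, giving a
// height of 720 / (16.0f / 9) = 405; SCALE_ASPECT_FILL covers the whole 720x1280 display,
// and SCALE_ASPECT_BALANCED settles on a square 720x720.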
}
|
||||
|
||||
@Test
|
||||
@SmallTest
|
||||
public void testPortraitVideoInLandscapeDisplay() {
|
||||
assertEquals(new Point(405, 720), getDisplaySize(SCALE_ASPECT_FIT, 9.0f / 16, 1280, 720));
|
||||
assertEquals(new Point(1280, 720), getDisplaySize(SCALE_ASPECT_FILL, 9.0f / 16, 1280, 720));
|
||||
assertEquals(new Point(720, 720), getDisplaySize(SCALE_ASPECT_BALANCED, 9.0f / 16, 1280, 720));
|
||||
}
|
||||
|
||||
@Test
|
||||
@SmallTest
|
||||
public void testFourToThreeVideoInSixteenToNineDisplay() {
|
||||
assertEquals(new Point(960, 720), getDisplaySize(SCALE_ASPECT_FIT, 4.0f / 3, 1280, 720));
|
||||
assertEquals(new Point(1280, 720), getDisplaySize(SCALE_ASPECT_FILL, 4.0f / 3, 1280, 720));
|
||||
assertEquals(new Point(1280, 720), getDisplaySize(SCALE_ASPECT_BALANCED, 4.0f / 3, 1280, 720));
|
||||
}
|
||||
|
||||
// Only keep 2 decimal places to make the float comparison robust.
|
||||
private static double[] round(float[] array) {
|
||||
assertEquals(16, array.length);
|
||||
final double[] doubleArray = new double[16];
|
||||
for (int i = 0; i < 16; ++i) {
|
||||
doubleArray[i] = Math.round(100 * array[i]) / 100.0;
|
||||
}
|
||||
return doubleArray;
|
||||
}
|
||||
|
||||
// Brief summary about matrix transformations:
|
||||
// A coordinate p = [u, v, 0, 1] is transformed by matrix m like this p' = [u', v', 0, 1] = m * p.
|
||||
// OpenGL uses column-major order, so:
|
||||
// u' = u * m[0] + v * m[4] + m[12].
|
||||
// v' = u * m[1] + v * m[5] + m[13].
|
||||
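// Illustrative helper (hypothetical, not referenced by the tests): applies such a
// column-major matrix to a texture coordinate, making the formulas above concrete.
private static float[] mapCoordinate(float[] m, float u, float v) {
  final float uOut = u * m[0] + v * m[4] + m[12];
  final float vOut = u * m[1] + v * m[5] + m[13];
  return new float[] {uOut, vOut};
}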
|
||||
@Test
|
||||
@SmallTest
|
||||
public void testLayoutMatrixDefault() {
|
||||
final float layoutMatrix[] = getLayoutMatrix(false, 1.0f, 1.0f);
|
||||
// Assert:
|
||||
// u' = u.
|
||||
// v' = v.
|
||||
// clang-format off
|
||||
assertArrayEquals(new double[] {
|
||||
1, 0, 0, 0,
|
||||
0, 1, 0, 0,
|
||||
0, 0, 1, 0,
|
||||
0, 0, 0, 1}, round(layoutMatrix), 0.0);
|
||||
// clang-format on
|
||||
}
|
||||
|
||||
@Test
|
||||
@SmallTest
|
||||
public void testLayoutMatrixMirror() {
|
||||
final float layoutMatrix[] = getLayoutMatrix(true, 1.0f, 1.0f);
|
||||
// Assert:
|
||||
// u' = 1 - u.
|
||||
// v' = v.
|
||||
// clang-format off
|
||||
assertArrayEquals(new double[] {
|
||||
-1, 0, 0, 0,
|
||||
0, 1, 0, 0,
|
||||
0, 0, 1, 0,
|
||||
1, 0, 0, 1}, round(layoutMatrix), 0.0);
|
||||
// clang-format on
|
||||
}
|
||||
|
||||
@Test
|
||||
@SmallTest
|
||||
public void testLayoutMatrixScale() {
|
||||
// Video has aspect ratio 2, but layout is square. This will cause only the center part of the
|
||||
// video to be visible, i.e. the u coordinate will go from 0.25 to 0.75 instead of from 0 to 1.
|
||||
final float layoutMatrix[] = getLayoutMatrix(false, 2.0f, 1.0f);
|
||||
// Assert:
|
||||
// u' = 0.25 + 0.5 u.
|
||||
// v' = v.
|
||||
// clang-format off
|
||||
assertArrayEquals(new double[] {
|
||||
0.5, 0, 0, 0,
|
||||
0, 1, 0, 0,
|
||||
0, 0, 1, 0,
|
||||
0.25, 0, 0, 1}, round(layoutMatrix), 0.0);
|
||||
// clang-format on
|
||||
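// Reading the matrix back through the formulas above gives u' = 0.5 * u + 0.25, so u = 0
// maps to 0.25 and u = 1 maps to 0.75, i.e. only the center half of the texture is visible.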
}
|
||||
|
||||
@Test
|
||||
@SmallTest
|
||||
public void testRotateTextureMatrixDefault() {
|
||||
// Test that rotation with 0 degrees returns an identical matrix.
|
||||
// clang-format off
|
||||
final float[] matrix = new float[] {
|
||||
1, 2, 3, 4,
|
||||
5, 6, 7, 8,
|
||||
9, 0, 1, 2,
|
||||
3, 4, 5, 6
|
||||
};
|
||||
// clang-format on
|
||||
final float rotatedMatrix[] = rotateTextureMatrix(matrix, 0);
|
||||
assertArrayEquals(round(matrix), round(rotatedMatrix), 0.0);
|
||||
}
|
||||
|
||||
@Test
|
||||
@SmallTest
|
||||
public void testRotateTextureMatrix90Deg() {
|
||||
final float samplingMatrix[] = rotateTextureMatrix(RendererCommon.identityMatrix(), 90);
|
||||
// Assert:
|
||||
// u' = 1 - v.
|
||||
// v' = u.
|
||||
// clang-format off
|
||||
assertArrayEquals(new double[] {
|
||||
0, 1, 0, 0,
|
||||
-1, 0, 0, 0,
|
||||
0, 0, 1, 0,
|
||||
1, 0, 0, 1}, round(samplingMatrix), 0.0);
|
||||
// clang-format on
|
||||
}
|
||||
|
||||
@Test
|
||||
@SmallTest
|
||||
public void testRotateTextureMatrix180Deg() {
|
||||
final float samplingMatrix[] = rotateTextureMatrix(RendererCommon.identityMatrix(), 180);
|
||||
// Assert:
|
||||
// u' = 1 - u.
|
||||
// v' = 1 - v.
|
||||
// clang-format off
|
||||
assertArrayEquals(new double[] {
|
||||
-1, 0, 0, 0,
|
||||
0, -1, 0, 0,
|
||||
0, 0, 1, 0,
|
||||
1, 1, 0, 1}, round(samplingMatrix), 0.0);
|
||||
// clang-format on
|
||||
}
|
||||
}
|
||||
@ -0,0 +1,506 @@
|
||||
/*
|
||||
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
package org.webrtc;
|
||||
|
||||
import static org.junit.Assert.assertEquals;
|
||||
import static org.junit.Assert.assertFalse;
|
||||
import static org.junit.Assert.fail;
|
||||
|
||||
import android.graphics.SurfaceTexture;
|
||||
import android.opengl.GLES20;
|
||||
import android.os.SystemClock;
|
||||
import android.support.test.filters.MediumTest;
|
||||
import android.support.test.filters.SmallTest;
|
||||
import java.nio.ByteBuffer;
|
||||
import java.util.concurrent.CountDownLatch;
|
||||
import org.chromium.base.test.BaseJUnit4ClassRunner;
|
||||
import org.junit.Test;
|
||||
import org.junit.runner.RunWith;
|
||||
|
||||
@RunWith(BaseJUnit4ClassRunner.class)
|
||||
public class SurfaceTextureHelperTest {
|
||||
/**
|
||||
* Mock texture listener with blocking wait functionality.
|
||||
*/
|
||||
public static final class MockTextureListener
|
||||
implements SurfaceTextureHelper.OnTextureFrameAvailableListener {
|
||||
public int oesTextureId;
|
||||
public float[] transformMatrix;
|
||||
private boolean hasNewFrame = false;
|
||||
// Thread on which frames are expected to be received.
|
||||
private final Thread expectedThread;
|
||||
|
||||
MockTextureListener() {
|
||||
this.expectedThread = null;
|
||||
}
|
||||
|
||||
MockTextureListener(Thread expectedThread) {
|
||||
this.expectedThread = expectedThread;
|
||||
}
|
||||
|
||||
@Override
|
||||
public synchronized void onTextureFrameAvailable(
|
||||
int oesTextureId, float[] transformMatrix, long timestampNs) {
|
||||
if (expectedThread != null && Thread.currentThread() != expectedThread) {
|
||||
throw new IllegalStateException("onTextureFrameAvailable called on wrong thread.");
|
||||
}
|
||||
this.oesTextureId = oesTextureId;
|
||||
this.transformMatrix = transformMatrix;
|
||||
hasNewFrame = true;
|
||||
notifyAll();
|
||||
}
|
||||
|
||||
/**
|
||||
* Wait indefinitely for a new frame.
|
||||
*/
|
||||
public synchronized void waitForNewFrame() throws InterruptedException {
|
||||
while (!hasNewFrame) {
|
||||
wait();
|
||||
}
|
||||
hasNewFrame = false;
|
||||
}
|
||||
|
||||
/**
|
||||
* Wait for a new frame, or until the specified timeout elapses. Returns true if a new frame was
|
||||
* received before the timeout.
|
||||
*/
|
||||
public synchronized boolean waitForNewFrame(final long timeoutMs) throws InterruptedException {
|
||||
final long startTimeMs = SystemClock.elapsedRealtime();
|
||||
long timeRemainingMs = timeoutMs;
|
||||
while (!hasNewFrame && timeRemainingMs > 0) {
|
||||
wait(timeRemainingMs);
|
||||
final long elapsedTimeMs = SystemClock.elapsedRealtime() - startTimeMs;
|
||||
timeRemainingMs = timeoutMs - elapsedTimeMs;
|
||||
}
|
||||
final boolean didReceiveFrame = hasNewFrame;
|
||||
hasNewFrame = false;
|
||||
return didReceiveFrame;
|
||||
}
|
||||
}
|
||||
|
||||
/** Assert that two integers are close, with difference at most
|
||||
* {@code threshold}. */
|
||||
public static void assertClose(int threshold, int expected, int actual) {
|
||||
if (Math.abs(expected - actual) <= threshold)
|
||||
return;
|
||||
fail("Not close enough, threshold " + threshold + ". Expected: " + expected + " Actual: "
|
||||
+ actual);
|
||||
}
|
||||
|
||||
/**
|
||||
* Test normal use by receiving three uniform texture frames. Texture frames are returned as early
|
||||
* as possible. The texture pixel values are inspected by drawing the texture frame to a pixel
|
||||
* buffer and reading it back with glReadPixels().
|
||||
*/
|
||||
@Test
|
||||
@MediumTest
|
||||
public void testThreeConstantColorFrames() throws InterruptedException {
|
||||
final int width = 16;
|
||||
final int height = 16;
|
||||
// Create EGL base with a pixel buffer as display output.
|
||||
final EglBase eglBase = EglBase.create(null, EglBase.CONFIG_PIXEL_BUFFER);
|
||||
eglBase.createPbufferSurface(width, height);
|
||||
final GlRectDrawer drawer = new GlRectDrawer();
|
||||
|
||||
// Create SurfaceTextureHelper and listener.
|
||||
final SurfaceTextureHelper surfaceTextureHelper = SurfaceTextureHelper.create(
|
||||
"SurfaceTextureHelper test" /* threadName */, eglBase.getEglBaseContext());
|
||||
final MockTextureListener listener = new MockTextureListener();
|
||||
surfaceTextureHelper.startListening(listener);
|
||||
surfaceTextureHelper.getSurfaceTexture().setDefaultBufferSize(width, height);
|
||||
|
||||
// Create resources for stubbing an OES texture producer. |eglOesBase| has the SurfaceTexture in
|
||||
// |surfaceTextureHelper| as the target EGLSurface.
|
||||
final EglBase eglOesBase = EglBase.create(eglBase.getEglBaseContext(), EglBase.CONFIG_PLAIN);
|
||||
eglOesBase.createSurface(surfaceTextureHelper.getSurfaceTexture());
|
||||
assertEquals(eglOesBase.surfaceWidth(), width);
|
||||
assertEquals(eglOesBase.surfaceHeight(), height);
|
||||
|
||||
final int red[] = new int[] {79, 144, 185};
|
||||
final int green[] = new int[] {66, 210, 162};
|
||||
final int blue[] = new int[] {161, 117, 158};
|
||||
// Draw three frames.
|
||||
for (int i = 0; i < 3; ++i) {
|
||||
// Draw a constant color frame onto the SurfaceTexture.
|
||||
eglOesBase.makeCurrent();
|
||||
GLES20.glClearColor(red[i] / 255.0f, green[i] / 255.0f, blue[i] / 255.0f, 1.0f);
|
||||
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
|
||||
// swapBuffers() will ultimately trigger onTextureFrameAvailable().
|
||||
eglOesBase.swapBuffers();
|
||||
|
||||
// Wait for an OES texture to arrive and draw it onto the pixel buffer.
|
||||
listener.waitForNewFrame();
|
||||
eglBase.makeCurrent();
|
||||
drawer.drawOes(
|
||||
listener.oesTextureId, listener.transformMatrix, width, height, 0, 0, width, height);
|
||||
|
||||
surfaceTextureHelper.returnTextureFrame();
|
||||
|
||||
// Download the pixels in the pixel buffer as RGBA. Not all platforms support RGB, e.g.
|
||||
// Nexus 9.
|
||||
final ByteBuffer rgbaData = ByteBuffer.allocateDirect(width * height * 4);
|
||||
GLES20.glReadPixels(0, 0, width, height, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, rgbaData);
|
||||
GlUtil.checkNoGLES2Error("glReadPixels");
|
||||
|
||||
// Assert rendered image is expected constant color.
|
||||
while (rgbaData.hasRemaining()) {
|
||||
assertEquals(rgbaData.get() & 0xFF, red[i]);
|
||||
assertEquals(rgbaData.get() & 0xFF, green[i]);
|
||||
assertEquals(rgbaData.get() & 0xFF, blue[i]);
|
||||
assertEquals(rgbaData.get() & 0xFF, 255);
|
||||
}
|
||||
}
|
||||
|
||||
drawer.release();
|
||||
surfaceTextureHelper.dispose();
|
||||
eglBase.release();
|
||||
}
|
||||
|
||||
/**
|
||||
* Test disposing the SurfaceTextureHelper while holding a pending texture frame. The pending
|
||||
* texture frame should still be valid, and this is tested by drawing the texture frame to a pixel
|
||||
* buffer and reading it back with glReadPixels().
|
||||
*/
|
||||
@Test
|
||||
@MediumTest
|
||||
public void testLateReturnFrame() throws InterruptedException {
|
||||
final int width = 16;
|
||||
final int height = 16;
|
||||
// Create EGL base with a pixel buffer as display output.
|
||||
final EglBase eglBase = EglBase.create(null, EglBase.CONFIG_PIXEL_BUFFER);
|
||||
eglBase.createPbufferSurface(width, height);
|
||||
|
||||
// Create SurfaceTextureHelper and listener.
|
||||
final SurfaceTextureHelper surfaceTextureHelper = SurfaceTextureHelper.create(
|
||||
"SurfaceTextureHelper test" /* threadName */, eglBase.getEglBaseContext());
|
||||
final MockTextureListener listener = new MockTextureListener();
|
||||
surfaceTextureHelper.startListening(listener);
|
||||
surfaceTextureHelper.getSurfaceTexture().setDefaultBufferSize(width, height);
|
||||
|
||||
// Create resources for stubbing an OES texture producer. |eglOesBase| has the SurfaceTexture in
|
||||
// |surfaceTextureHelper| as the target EGLSurface.
|
||||
final EglBase eglOesBase = EglBase.create(eglBase.getEglBaseContext(), EglBase.CONFIG_PLAIN);
|
||||
eglOesBase.createSurface(surfaceTextureHelper.getSurfaceTexture());
|
||||
assertEquals(eglOesBase.surfaceWidth(), width);
|
||||
assertEquals(eglOesBase.surfaceHeight(), height);
|
||||
|
||||
final int red = 79;
|
||||
final int green = 66;
|
||||
final int blue = 161;
|
||||
// Draw a constant color frame onto the SurfaceTexture.
|
||||
eglOesBase.makeCurrent();
|
||||
GLES20.glClearColor(red / 255.0f, green / 255.0f, blue / 255.0f, 1.0f);
|
||||
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
|
||||
// swapBuffers() will ultimately trigger onTextureFrameAvailable().
|
||||
eglOesBase.swapBuffers();
|
||||
eglOesBase.release();
|
||||
|
||||
// Wait for OES texture frame.
|
||||
listener.waitForNewFrame();
|
||||
// Disconnect while holding the frame.
|
||||
surfaceTextureHelper.dispose();
|
||||
|
||||
// Draw the pending texture frame onto the pixel buffer.
|
||||
eglBase.makeCurrent();
|
||||
final GlRectDrawer drawer = new GlRectDrawer();
|
||||
drawer.drawOes(
|
||||
listener.oesTextureId, listener.transformMatrix, width, height, 0, 0, width, height);
|
||||
drawer.release();
|
||||
|
||||
// Download the pixels in the pixel buffer as RGBA. Not all platforms support RGB, e.g. Nexus 9.
|
||||
final ByteBuffer rgbaData = ByteBuffer.allocateDirect(width * height * 4);
|
||||
GLES20.glReadPixels(0, 0, width, height, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, rgbaData);
|
||||
GlUtil.checkNoGLES2Error("glReadPixels");
|
||||
eglBase.release();
|
||||
|
||||
// Assert rendered image is expected constant color.
|
||||
while (rgbaData.hasRemaining()) {
|
||||
assertEquals(rgbaData.get() & 0xFF, red);
|
||||
assertEquals(rgbaData.get() & 0xFF, green);
|
||||
assertEquals(rgbaData.get() & 0xFF, blue);
|
||||
assertEquals(rgbaData.get() & 0xFF, 255);
|
||||
}
|
||||
// Late frame return after everything has been disposed and released.
|
||||
surfaceTextureHelper.returnTextureFrame();
|
||||
}
|
||||
|
||||
/**
|
||||
* Test disposing the SurfaceTextureHelper, but keep trying to produce more texture frames. No
|
||||
* frames should be delivered to the listener.
|
||||
*/
|
||||
@Test
|
||||
@MediumTest
|
||||
public void testDispose() throws InterruptedException {
|
||||
// Create SurfaceTextureHelper and listener.
|
||||
final SurfaceTextureHelper surfaceTextureHelper =
|
||||
SurfaceTextureHelper.create("SurfaceTextureHelper test" /* threadName */, null);
|
||||
final MockTextureListener listener = new MockTextureListener();
|
||||
surfaceTextureHelper.startListening(listener);
|
||||
// Create EglBase with the SurfaceTexture as target EGLSurface.
|
||||
final EglBase eglBase = EglBase.create(null, EglBase.CONFIG_PLAIN);
|
||||
eglBase.createSurface(surfaceTextureHelper.getSurfaceTexture());
|
||||
eglBase.makeCurrent();
|
||||
// Assert no frame has been received yet.
|
||||
assertFalse(listener.waitForNewFrame(1));
|
||||
// Draw and wait for one frame.
|
||||
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
|
||||
// swapBuffers() will ultimately trigger onTextureFrameAvailable().
|
||||
eglBase.swapBuffers();
|
||||
listener.waitForNewFrame();
|
||||
surfaceTextureHelper.returnTextureFrame();
|
||||
|
||||
// Dispose - we should not receive any textures after this.
|
||||
surfaceTextureHelper.dispose();
|
||||
|
||||
// Draw one frame.
|
||||
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
|
||||
eglBase.swapBuffers();
|
||||
// swapBuffers() should not trigger onTextureFrameAvailable() because dispose() has been called.
|
||||
// Assert that no OES texture was delivered.
|
||||
assertFalse(listener.waitForNewFrame(500));
|
||||
|
||||
eglBase.release();
|
||||
}
|
||||
|
||||
/**
|
||||
* Test disposing the SurfaceTextureHelper immediately after it has been set up to use a
|
||||
* shared context. No frames should be delivered to the listener.
|
||||
*/
|
||||
@Test
|
||||
@SmallTest
|
||||
public void testDisposeImmediately() {
|
||||
final SurfaceTextureHelper surfaceTextureHelper =
|
||||
SurfaceTextureHelper.create("SurfaceTextureHelper test" /* threadName */, null);
|
||||
surfaceTextureHelper.dispose();
|
||||
}
|
||||
|
||||
/**
|
||||
* Call stopListening(), but keep trying to produce more texture frames. No frames should be
|
||||
* delivered to the listener.
|
||||
*/
|
||||
@Test
|
||||
@MediumTest
|
||||
public void testStopListening() throws InterruptedException {
|
||||
// Create SurfaceTextureHelper and listener.
|
||||
final SurfaceTextureHelper surfaceTextureHelper =
|
||||
SurfaceTextureHelper.create("SurfaceTextureHelper test" /* threadName */, null);
|
||||
final MockTextureListener listener = new MockTextureListener();
|
||||
surfaceTextureHelper.startListening(listener);
|
||||
// Create EglBase with the SurfaceTexture as target EGLSurface.
|
||||
final EglBase eglBase = EglBase.create(null, EglBase.CONFIG_PLAIN);
|
||||
eglBase.createSurface(surfaceTextureHelper.getSurfaceTexture());
|
||||
eglBase.makeCurrent();
|
||||
// Assert no frame has been received yet.
|
||||
assertFalse(listener.waitForNewFrame(1));
|
||||
// Draw and wait for one frame.
|
||||
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
|
||||
// swapBuffers() will ultimately trigger onTextureFrameAvailable().
|
||||
eglBase.swapBuffers();
|
||||
listener.waitForNewFrame();
|
||||
surfaceTextureHelper.returnTextureFrame();
|
||||
|
||||
// Stop listening - we should not receive any textures after this.
|
||||
surfaceTextureHelper.stopListening();
|
||||
|
||||
// Draw one frame.
|
||||
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
|
||||
eglBase.swapBuffers();
|
||||
// swapBuffers() should not trigger onTextureFrameAvailable() because stopListening() has been called.
|
||||
// Assert that no OES texture was delivered.
|
||||
assertFalse(listener.waitForNewFrame(500));
|
||||
|
||||
surfaceTextureHelper.dispose();
|
||||
eglBase.release();
|
||||
}
|
||||
|
||||
/**
|
||||
* Test stopListening() immediately after the SurfaceTextureHelper has been set up.
|
||||
*/
|
||||
@Test
|
||||
@SmallTest
|
||||
public void testStopListeningImmediately() throws InterruptedException {
|
||||
final SurfaceTextureHelper surfaceTextureHelper =
|
||||
SurfaceTextureHelper.create("SurfaceTextureHelper test" /* threadName */, null);
|
||||
final MockTextureListener listener = new MockTextureListener();
|
||||
surfaceTextureHelper.startListening(listener);
|
||||
surfaceTextureHelper.stopListening();
|
||||
surfaceTextureHelper.dispose();
|
||||
}
|
||||
|
||||
/**
|
||||
* Test stopListening() immediately after the SurfaceTextureHelper has been set up on the handler
|
||||
* thread.
|
||||
*/
|
||||
@Test
|
||||
@SmallTest
|
||||
public void testStopListeningImmediatelyOnHandlerThread() throws InterruptedException {
|
||||
final SurfaceTextureHelper surfaceTextureHelper =
|
||||
SurfaceTextureHelper.create("SurfaceTextureHelper test" /* threadName */, null);
|
||||
final MockTextureListener listener = new MockTextureListener();
|
||||
|
||||
final CountDownLatch stopListeningBarrier = new CountDownLatch(1);
|
||||
final CountDownLatch stopListeningBarrierDone = new CountDownLatch(1);
|
||||
// Start by posting to the handler thread to keep it occupied.
|
||||
surfaceTextureHelper.getHandler().post(new Runnable() {
|
||||
@Override
|
||||
public void run() {
|
||||
ThreadUtils.awaitUninterruptibly(stopListeningBarrier);
|
||||
surfaceTextureHelper.stopListening();
|
||||
stopListeningBarrierDone.countDown();
|
||||
}
|
||||
});
|
||||
|
||||
// startListening() is asynchronous and will post to the occupied handler thread.
|
||||
surfaceTextureHelper.startListening(listener);
|
||||
// Wait for stopListening() to be called on the handler thread.
|
||||
stopListeningBarrier.countDown();
|
||||
stopListeningBarrierDone.await();
|
||||
// Wait until handler thread is idle to try to catch late startListening() call.
|
||||
final CountDownLatch barrier = new CountDownLatch(1);
|
||||
surfaceTextureHelper.getHandler().post(new Runnable() {
|
||||
@Override
|
||||
public void run() {
|
||||
barrier.countDown();
|
||||
}
|
||||
});
|
||||
ThreadUtils.awaitUninterruptibly(barrier);
|
||||
// Previous startListening() call should never have taken place and it should be ok to call it
|
||||
// again.
|
||||
surfaceTextureHelper.startListening(listener);
|
||||
|
||||
surfaceTextureHelper.dispose();
|
||||
}
|
||||
|
||||
/**
|
||||
* Test calling startListening() with a new listener after stopListening() has been called.
|
||||
*/
|
||||
@Test
|
||||
@MediumTest
|
||||
public void testRestartListeningWithNewListener() throws InterruptedException {
|
||||
// Create SurfaceTextureHelper and listener.
|
||||
final SurfaceTextureHelper surfaceTextureHelper =
|
||||
SurfaceTextureHelper.create("SurfaceTextureHelper test" /* threadName */, null);
|
||||
final MockTextureListener listener1 = new MockTextureListener();
|
||||
surfaceTextureHelper.startListening(listener1);
|
||||
// Create EglBase with the SurfaceTexture as target EGLSurface.
|
||||
final EglBase eglBase = EglBase.create(null, EglBase.CONFIG_PLAIN);
|
||||
eglBase.createSurface(surfaceTextureHelper.getSurfaceTexture());
|
||||
eglBase.makeCurrent();
|
||||
// Assert no frame has been received yet.
|
||||
assertFalse(listener1.waitForNewFrame(1));
|
||||
// Draw and wait for one frame.
|
||||
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
|
||||
// swapBuffers() will ultimately trigger onTextureFrameAvailable().
|
||||
eglBase.swapBuffers();
|
||||
listener1.waitForNewFrame();
|
||||
surfaceTextureHelper.returnTextureFrame();
|
||||
|
||||
// Stop listening - |listener1| should not receive any textures after this.
|
||||
surfaceTextureHelper.stopListening();
|
||||
|
||||
// Connect different listener.
|
||||
final MockTextureListener listener2 = new MockTextureListener();
|
||||
surfaceTextureHelper.startListening(listener2);
|
||||
// Assert no frame has been received yet.
|
||||
assertFalse(listener2.waitForNewFrame(1));
|
||||
|
||||
// Draw one frame.
|
||||
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
|
||||
eglBase.swapBuffers();
|
||||
|
||||
// Check that |listener2| received the frame, and not |listener1|.
|
||||
listener2.waitForNewFrame();
|
||||
assertFalse(listener1.waitForNewFrame(1));
|
||||
|
||||
surfaceTextureHelper.returnTextureFrame();
|
||||
|
||||
surfaceTextureHelper.dispose();
|
||||
eglBase.release();
|
||||
}
|
||||
|
||||
@Test
|
||||
@MediumTest
|
||||
public void testTexturetoYUV() throws InterruptedException {
|
||||
final int width = 16;
|
||||
final int height = 16;
|
||||
|
||||
final EglBase eglBase = EglBase.create(null, EglBase.CONFIG_PLAIN);
|
||||
|
||||
// Create SurfaceTextureHelper and listener.
|
||||
final SurfaceTextureHelper surfaceTextureHelper = SurfaceTextureHelper.create(
|
||||
"SurfaceTextureHelper test" /* threadName */, eglBase.getEglBaseContext());
|
||||
final MockTextureListener listener = new MockTextureListener();
|
||||
surfaceTextureHelper.startListening(listener);
|
||||
surfaceTextureHelper.getSurfaceTexture().setDefaultBufferSize(width, height);
|
||||
|
||||
// Create resources for stubbing an OES texture producer. |eglBase| has the SurfaceTexture in
|
||||
// |surfaceTextureHelper| as the target EGLSurface.
|
||||
|
||||
eglBase.createSurface(surfaceTextureHelper.getSurfaceTexture());
|
||||
assertEquals(eglBase.surfaceWidth(), width);
|
||||
assertEquals(eglBase.surfaceHeight(), height);
|
||||
|
||||
final int red[] = new int[] {79, 144, 185};
|
||||
final int green[] = new int[] {66, 210, 162};
|
||||
final int blue[] = new int[] {161, 117, 158};
|
||||
|
||||
final int ref_y[] = new int[] {81, 180, 168};
|
||||
final int ref_u[] = new int[] {173, 93, 122};
|
||||
final int ref_v[] = new int[] {127, 103, 140};
|
||||
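// These reference values are consistent with a full-range BT.601 RGB-to-YUV conversion
// (an assumption stated for illustration), e.g. Y = 0.299 * 79 + 0.587 * 66 + 0.114 * 161,
// which is roughly 81.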
|
||||
// Draw three frames.
|
||||
for (int i = 0; i < 3; ++i) {
|
||||
// Draw a constant color frame onto the SurfaceTexture.
|
||||
eglBase.makeCurrent();
|
||||
GLES20.glClearColor(red[i] / 255.0f, green[i] / 255.0f, blue[i] / 255.0f, 1.0f);
|
||||
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
|
||||
// swapBuffers() will ultimately trigger onTextureFrameAvailable().
|
||||
eglBase.swapBuffers();
|
||||
|
||||
// Wait for an OES texture to arrive.
|
||||
listener.waitForNewFrame();
|
||||
|
||||
// Memory layout: Lines are 16 bytes. First 16 lines are
|
||||
// the Y data. These are followed by 8 lines with 8 bytes of U
|
||||
// data on the left and 8 bytes of V data on the right.
|
||||
//
|
||||
// Offset
|
||||
// 0 YYYYYYYY YYYYYYYY
|
||||
// 16 YYYYYYYY YYYYYYYY
|
||||
// ...
|
||||
// 240 YYYYYYYY YYYYYYYY
|
||||
// 256 UUUUUUUU VVVVVVVV
|
||||
// 272 UUUUUUUU VVVVVVVV
|
||||
// ...
|
||||
// 368 UUUUUUUU VVVVVVVV
|
||||
// 384 buffer end
|
||||
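// For width = height = 16 this is 256 bytes of Y followed by 8 lines of 8 U and 8 V bytes,
// i.e. 256 + 128 = 384 = width * height * 3 / 2, matching the buffer allocated below.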
ByteBuffer buffer = ByteBuffer.allocateDirect(width * height * 3 / 2);
|
||||
surfaceTextureHelper.textureToYUV(
|
||||
buffer, width, height, width, listener.oesTextureId, listener.transformMatrix);
|
||||
|
||||
surfaceTextureHelper.returnTextureFrame();
|
||||
|
||||
// Allow off-by-one differences due to different rounding.
|
||||
while (buffer.position() < width * height) {
|
||||
assertClose(1, buffer.get() & 0xff, ref_y[i]);
|
||||
}
|
||||
while (buffer.hasRemaining()) {
|
||||
if (buffer.position() % width < width / 2)
|
||||
assertClose(1, buffer.get() & 0xff, ref_u[i]);
|
||||
else
|
||||
assertClose(1, buffer.get() & 0xff, ref_v[i]);
|
||||
}
|
||||
}
|
||||
|
||||
surfaceTextureHelper.dispose();
|
||||
eglBase.release();
|
||||
}
|
||||
}
|
||||
@ -0,0 +1,235 @@
|
||||
/*
|
||||
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
package org.webrtc;
|
||||
|
||||
import static org.junit.Assert.assertEquals;
|
||||
import static org.junit.Assert.fail;
|
||||
|
||||
import android.annotation.SuppressLint;
|
||||
import android.graphics.Point;
|
||||
import android.support.test.InstrumentationRegistry;
|
||||
import android.support.test.annotation.UiThreadTest;
|
||||
import android.support.test.filters.MediumTest;
|
||||
import android.support.test.rule.UiThreadTestRule;
|
||||
import android.view.View.MeasureSpec;
|
||||
import java.nio.ByteBuffer;
|
||||
import java.util.Arrays;
|
||||
import java.util.List;
|
||||
import org.chromium.base.test.BaseJUnit4ClassRunner;
|
||||
import org.junit.Rule;
|
||||
import org.junit.Test;
|
||||
import org.junit.runner.RunWith;
|
||||
|
||||
@RunWith(BaseJUnit4ClassRunner.class)
|
||||
public class SurfaceViewRendererOnMeasureTest {
|
||||
@Rule public UiThreadTestRule uiThreadTestRule = new UiThreadTestRule();
|
||||
|
||||
/**
|
||||
* List with all possible scaling types.
|
||||
*/
|
||||
private static final List<RendererCommon.ScalingType> scalingTypes = Arrays.asList(
|
||||
RendererCommon.ScalingType.SCALE_ASPECT_FIT, RendererCommon.ScalingType.SCALE_ASPECT_FILL,
|
||||
RendererCommon.ScalingType.SCALE_ASPECT_BALANCED);
|
||||
|
||||
/**
|
||||
* List with MeasureSpec modes.
|
||||
*/
|
||||
private static final List<Integer> measureSpecModes =
|
||||
Arrays.asList(MeasureSpec.EXACTLY, MeasureSpec.AT_MOST);
|
||||
|
||||
/**
|
||||
* Returns a dummy YUV frame.
|
||||
*/
|
||||
static VideoRenderer.I420Frame createFrame(int width, int height, int rotationDegree) {
|
||||
final int[] yuvStrides = new int[] {width, (width + 1) / 2, (width + 1) / 2};
|
||||
final int[] yuvHeights = new int[] {height, (height + 1) / 2, (height + 1) / 2};
|
||||
final ByteBuffer[] yuvPlanes = new ByteBuffer[3];
|
||||
for (int i = 0; i < 3; ++i) {
|
||||
yuvPlanes[i] = ByteBuffer.allocateDirect(yuvStrides[i] * yuvHeights[i]);
|
||||
}
|
||||
return new VideoRenderer.I420Frame(width, height, rotationDegree, yuvStrides, yuvPlanes, 0);
|
||||
}
|
||||
|
||||
/**
|
||||
* Assert onMeasure() with given parameters will result in expected measured size.
|
||||
*/
|
||||
@SuppressLint("WrongCall")
|
||||
private static void assertMeasuredSize(SurfaceViewRenderer surfaceViewRenderer,
|
||||
RendererCommon.ScalingType scalingType, String frameDimensions, int expectedWidth,
|
||||
int expectedHeight, int widthSpec, int heightSpec) {
|
||||
surfaceViewRenderer.setScalingType(scalingType);
|
||||
surfaceViewRenderer.onMeasure(widthSpec, heightSpec);
|
||||
final int measuredWidth = surfaceViewRenderer.getMeasuredWidth();
|
||||
final int measuredHeight = surfaceViewRenderer.getMeasuredHeight();
|
||||
if (measuredWidth != expectedWidth || measuredHeight != expectedHeight) {
|
||||
fail("onMeasure(" + MeasureSpec.toString(widthSpec) + ", " + MeasureSpec.toString(heightSpec)
|
||||
+ ")"
|
||||
+ " with scaling type " + scalingType + " and frame: " + frameDimensions
|
||||
+ " expected measured size " + expectedWidth + "x" + expectedHeight + ", but was "
|
||||
+ measuredWidth + "x" + measuredHeight);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Test how SurfaceViewRenderer.onMeasure() behaves when no frame has been delivered.
|
||||
*/
|
||||
@Test
|
||||
@UiThreadTest
|
||||
@MediumTest
|
||||
public void testNoFrame() {
|
||||
final SurfaceViewRenderer surfaceViewRenderer =
|
||||
new SurfaceViewRenderer(InstrumentationRegistry.getContext());
|
||||
final String frameDimensions = "null";
|
||||
|
||||
// Test behaviour before SurfaceViewRenderer.init() is called.
|
||||
for (RendererCommon.ScalingType scalingType : scalingTypes) {
|
||||
for (int measureSpecMode : measureSpecModes) {
|
||||
final int zeroMeasureSize = MeasureSpec.makeMeasureSpec(0, measureSpecMode);
|
||||
assertMeasuredSize(surfaceViewRenderer, scalingType, frameDimensions, 0, 0, zeroMeasureSize,
|
||||
zeroMeasureSize);
|
||||
assertMeasuredSize(surfaceViewRenderer, scalingType, frameDimensions, 1280, 720,
|
||||
MeasureSpec.makeMeasureSpec(1280, measureSpecMode),
|
||||
MeasureSpec.makeMeasureSpec(720, measureSpecMode));
|
||||
}
|
||||
}
|
||||
|
||||
// Test behaviour after SurfaceViewRenderer.init() is called, but still no frame.
|
||||
surfaceViewRenderer.init((EglBase.Context) null, null);
|
||||
for (RendererCommon.ScalingType scalingType : scalingTypes) {
|
||||
for (int measureSpecMode : measureSpecModes) {
|
||||
final int zeroMeasureSize = MeasureSpec.makeMeasureSpec(0, measureSpecMode);
|
||||
assertMeasuredSize(surfaceViewRenderer, scalingType, frameDimensions, 0, 0, zeroMeasureSize,
|
||||
zeroMeasureSize);
|
||||
assertMeasuredSize(surfaceViewRenderer, scalingType, frameDimensions, 1280, 720,
|
||||
MeasureSpec.makeMeasureSpec(1280, measureSpecMode),
|
||||
MeasureSpec.makeMeasureSpec(720, measureSpecMode));
|
||||
}
|
||||
}
|
||||
|
||||
surfaceViewRenderer.release();
|
||||
}
|
||||
|
||||
/**
|
||||
* Test how SurfaceViewRenderer.onMeasure() behaves with a 1280x720 frame.
|
||||
*/
|
||||
@Test
|
||||
@UiThreadTest
|
||||
@MediumTest
|
||||
public void testFrame1280x720() throws InterruptedException {
|
||||
final SurfaceViewRenderer surfaceViewRenderer =
|
||||
new SurfaceViewRenderer(InstrumentationRegistry.getContext());
|
||||
/**
|
||||
* Mock renderer events with blocking wait functionality for frame size changes.
|
||||
*/
|
||||
class MockRendererEvents implements RendererCommon.RendererEvents {
|
||||
private int frameWidth;
|
||||
private int frameHeight;
|
||||
private int rotation;
|
||||
|
||||
public synchronized void waitForFrameSize(int frameWidth, int frameHeight, int rotation)
|
||||
throws InterruptedException {
|
||||
while (this.frameWidth != frameWidth || this.frameHeight != frameHeight
|
||||
|| this.rotation != rotation) {
|
||||
wait();
|
||||
}
|
||||
}
|
||||
|
||||
public void onFirstFrameRendered() {}
|
||||
|
||||
public synchronized void onFrameResolutionChanged(
|
||||
int frameWidth, int frameHeight, int rotation) {
|
||||
this.frameWidth = frameWidth;
|
||||
this.frameHeight = frameHeight;
|
||||
this.rotation = rotation;
|
||||
notifyAll();
|
||||
}
|
||||
}
|
||||
final MockRendererEvents rendererEvents = new MockRendererEvents();
|
||||
surfaceViewRenderer.init((EglBase.Context) null, rendererEvents);
|
||||
|
||||
// Test different rotation degrees, but same rotated size.
|
||||
for (int rotationDegree : new int[] {0, 90, 180, 270}) {
|
||||
final int rotatedWidth = 1280;
|
||||
final int rotatedHeight = 720;
|
||||
final int unrotatedWidth = (rotationDegree % 180 == 0 ? rotatedWidth : rotatedHeight);
|
||||
final int unrotatedHeight = (rotationDegree % 180 == 0 ? rotatedHeight : rotatedWidth);
|
||||
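// E.g. rotationDegree = 90 yields an unrotated 720x1280 frame, which still reports the
// expected rotated size of 1280x720 in the assertions below.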
final VideoRenderer.I420Frame frame =
|
||||
createFrame(unrotatedWidth, unrotatedHeight, rotationDegree);
|
||||
assertEquals(rotatedWidth, frame.rotatedWidth());
|
||||
assertEquals(rotatedHeight, frame.rotatedHeight());
|
||||
final String frameDimensions =
|
||||
unrotatedWidth + "x" + unrotatedHeight + " with rotation " + rotationDegree;
|
||||
surfaceViewRenderer.renderFrame(frame);
|
||||
rendererEvents.waitForFrameSize(unrotatedWidth, unrotatedHeight, rotationDegree);
|
||||
|
||||
// Test forcing to zero size.
|
||||
for (RendererCommon.ScalingType scalingType : scalingTypes) {
|
||||
for (int measureSpecMode : measureSpecModes) {
|
||||
final int zeroMeasureSize = MeasureSpec.makeMeasureSpec(0, measureSpecMode);
|
||||
assertMeasuredSize(surfaceViewRenderer, scalingType, frameDimensions, 0, 0,
|
||||
zeroMeasureSize, zeroMeasureSize);
|
||||
}
|
||||
}
|
||||
|
||||
// Test perfect fit.
|
||||
for (RendererCommon.ScalingType scalingType : scalingTypes) {
|
||||
for (int measureSpecMode : measureSpecModes) {
|
||||
assertMeasuredSize(surfaceViewRenderer, scalingType, frameDimensions, rotatedWidth,
|
||||
rotatedHeight, MeasureSpec.makeMeasureSpec(rotatedWidth, measureSpecMode),
|
||||
MeasureSpec.makeMeasureSpec(rotatedHeight, measureSpecMode));
|
||||
}
|
||||
}
|
||||
|
||||
// Force spec size with different aspect ratio than frame aspect ratio.
|
||||
for (RendererCommon.ScalingType scalingType : scalingTypes) {
|
||||
assertMeasuredSize(surfaceViewRenderer, scalingType, frameDimensions, 720, 1280,
|
||||
MeasureSpec.makeMeasureSpec(720, MeasureSpec.EXACTLY),
|
||||
MeasureSpec.makeMeasureSpec(1280, MeasureSpec.EXACTLY));
|
||||
}
|
||||
|
||||
final float videoAspectRatio = (float) rotatedWidth / rotatedHeight;
|
||||
{
|
||||
// Relax both width and height constraints.
|
||||
final int widthSpec = MeasureSpec.makeMeasureSpec(720, MeasureSpec.AT_MOST);
|
||||
final int heightSpec = MeasureSpec.makeMeasureSpec(1280, MeasureSpec.AT_MOST);
|
||||
for (RendererCommon.ScalingType scalingType : scalingTypes) {
|
||||
final Point expectedSize =
|
||||
RendererCommon.getDisplaySize(scalingType, videoAspectRatio, 720, 1280);
|
||||
assertMeasuredSize(surfaceViewRenderer, scalingType, frameDimensions, expectedSize.x,
|
||||
expectedSize.y, widthSpec, heightSpec);
|
||||
}
|
||||
}
|
||||
{
|
||||
// Force width to 720, but relax height constraint. This will give the same result as
|
||||
// above, because width is already the limiting factor and will be maxed out.
|
||||
final int widthSpec = MeasureSpec.makeMeasureSpec(720, MeasureSpec.EXACTLY);
|
||||
final int heightSpec = MeasureSpec.makeMeasureSpec(1280, MeasureSpec.AT_MOST);
|
||||
for (RendererCommon.ScalingType scalingType : scalingTypes) {
|
||||
final Point expectedSize =
|
||||
RendererCommon.getDisplaySize(scalingType, videoAspectRatio, 720, 1280);
|
||||
assertMeasuredSize(surfaceViewRenderer, scalingType, frameDimensions, expectedSize.x,
|
||||
expectedSize.y, widthSpec, heightSpec);
|
||||
}
|
||||
}
|
||||
{
|
||||
// Force height, but relax width constraint. This will force a bad layout size.
|
||||
final int widthSpec = MeasureSpec.makeMeasureSpec(720, MeasureSpec.AT_MOST);
|
||||
final int heightSpec = MeasureSpec.makeMeasureSpec(1280, MeasureSpec.EXACTLY);
|
||||
for (RendererCommon.ScalingType scalingType : scalingTypes) {
|
||||
assertMeasuredSize(
|
||||
surfaceViewRenderer, scalingType, frameDimensions, 720, 1280, widthSpec, heightSpec);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
surfaceViewRenderer.release();
|
||||
}
|
||||
}
|
||||
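The assertions above lean on RendererCommon.getDisplaySize() to predict what onMeasure() should produce for each ScalingType. As a rough illustration of the aspect-fit arithmetic involved, under the assumption that a SCALE_ASPECT_FIT frame is letterboxed inside the measure-spec bounds (this sketch is not the RendererCommon implementation):

// Illustration only: aspect-fit math for a rotated 1280x720 frame measured
// inside a 720x1280 AT_MOST spec. RendererCommon.getDisplaySize() is the
// authoritative implementation; the numbers here just show the expected shape.
float videoAspectRatio = 1280f / 720f;                    // ~1.78
int maxWidth = 720;
int maxHeight = 1280;
int fitWidth = maxWidth;                                  // width is the limiting bound
int fitHeight = Math.round(maxWidth / videoAspectRatio);  // 720 / 1.78 ~= 405
// A SCALE_ASPECT_FIT measurement would be expected around 720x405 in this case.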
@ -0,0 +1,85 @@
/*
* Copyright 2016 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/

package org.webrtc;

import static org.junit.Assert.assertEquals;

import android.os.Environment;
import android.support.test.filters.SmallTest;
import java.io.File;
import java.io.IOException;
import java.io.RandomAccessFile;
import java.lang.Thread;
import java.nio.ByteBuffer;
import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.Random;
import org.chromium.base.test.BaseJUnit4ClassRunner;
import org.junit.Test;
import org.junit.runner.RunWith;

@RunWith(BaseJUnit4ClassRunner.class)
public class VideoFileRendererTest {
@Test
@SmallTest
public void testYuvRenderingToFile() throws InterruptedException, IOException {
EglBase eglBase = EglBase.create();
final String videoOutPath = Environment.getExternalStorageDirectory().getPath()
+ "/chromium_tests_root/testvideoout.y4m";
int frameWidth = 4;
int frameHeight = 4;
VideoFileRenderer videoFileRenderer =
new VideoFileRenderer(videoOutPath, frameWidth, frameHeight, eglBase.getEglBaseContext());

String[] frames = {
"THIS IS JUST SOME TEXT x", "THE SECOND FRAME qwerty.", "HERE IS THE THRID FRAME!"};

for (String frameStr : frames) {
int[] planeSizes = {
frameWidth * frameHeight, frameWidth * frameHeight / 4, frameWidth * frameHeight / 4};

byte[] frameBytes = frameStr.getBytes(Charset.forName("US-ASCII"));
ByteBuffer[] yuvPlanes = new ByteBuffer[3];
int pos = 0;
for (int i = 0; i < 3; i++) {
yuvPlanes[i] = ByteBuffer.allocateDirect(planeSizes[i]);
yuvPlanes[i].put(frameBytes, pos, planeSizes[i]);
pos += planeSizes[i];
}

int[] yuvStrides = {frameWidth, frameWidth / 2, frameWidth / 2};

VideoRenderer.I420Frame frame =
new VideoRenderer.I420Frame(frameWidth, frameHeight, 0, yuvStrides, yuvPlanes, 0);

videoFileRenderer.renderFrame(frame);
}
videoFileRenderer.release();

RandomAccessFile writtenFile = new RandomAccessFile(videoOutPath, "r");
try {
int length = (int) writtenFile.length();
byte[] data = new byte[length];
writtenFile.readFully(data);
String fileContent = new String(data, Charset.forName("US-ASCII"));
String expected = "YUV4MPEG2 C420 W4 H4 Ip F30:1 A1:1\n"
+ "FRAME\n"
+ "THIS IS JUST SOME TEXT xFRAME\n"
+ "THE SECOND FRAME qwerty.FRAME\n"
+ "HERE IS THE THRID FRAME!";
assertEquals(expected, fileContent);
} finally {
writtenFile.close();
}

new File(videoOutPath).delete();
}
}
@ -0,0 +1,34 @@
/*
* Copyright 2016 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/

package org.webrtc;

import android.support.test.InstrumentationRegistry;
import android.support.test.filters.SmallTest;
import android.support.test.runner.AndroidJUnit4;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.webrtc.PeerConnectionFactory;

// This test is intended to run on ARM and catch LoadLibrary errors when we load the WebRTC
// JNI. It can't really be setting up calls since ARM emulators are too slow, but instantiating
// a peer connection isn't timing-sensitive, so we can at least do that.
@RunWith(AndroidJUnit4.class)
public class WebRtcJniBootTest {
@Test
@SmallTest
public void testJniLoadsWithoutError() throws InterruptedException {
PeerConnectionFactory.initializeAndroidGlobals(InstrumentationRegistry.getTargetContext(),
false /* videoCodecHwAcceleration */);

PeerConnectionFactory.Options options = new PeerConnectionFactory.Options();
new PeerConnectionFactory(options);
}
}
@ -0,0 +1,5 @@
YUV4MPEG2 C420 W4 H4 Ip F30:1 A1:1
FRAME
THIS IS JUST SOME TEXT xFRAME
THE SECOND FRAME qwerty.FRAME
HERE IS THE THRID FRAME!
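The expected file above is plain Y4M: a single stream header describing the colorspace (C420), geometry, interlacing mode, frame rate and aspect ratio, then a "FRAME" marker line followed by width * height * 3 / 2 raw plane bytes per frame (24 bytes for the 4x4 test frames, which is why each ASCII payload string is exactly 24 characters). A minimal sketch of that framing, assuming nothing beyond what the test asserts (an illustration, not the VideoFileRenderer implementation):

// Hypothetical sketch of the Y4M framing checked by the test above.
// The width/height/fps values mirror the test; the class itself is made up.
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;

class Y4mWriterSketch {
  static void write(String path, int width, int height, byte[][] i420Frames) throws IOException {
    try (FileOutputStream out = new FileOutputStream(path)) {
      // One header line for the whole stream.
      out.write(("YUV4MPEG2 C420 W" + width + " H" + height + " Ip F30:1 A1:1\n")
          .getBytes(StandardCharsets.US_ASCII));
      for (byte[] planes : i420Frames) {
        // Each frame: a FRAME marker, then width * height * 3 / 2 raw Y/U/V bytes.
        out.write("FRAME\n".getBytes(StandardCharsets.US_ASCII));
        out.write(planes);
      }
    }
  }
}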
@ -0,0 +1,60 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/

package org.webrtc;

/** An implementation of CapturerObserver that forwards all calls from Java to the C layer. */
class AndroidVideoTrackSourceObserver implements VideoCapturer.CapturerObserver {
// Pointer to VideoTrackSourceProxy proxying AndroidVideoTrackSource.
private final long nativeSource;

public AndroidVideoTrackSourceObserver(long nativeSource) {
this.nativeSource = nativeSource;
}

@Override
public void onCapturerStarted(boolean success) {
nativeCapturerStarted(nativeSource, success);
}

@Override
public void onCapturerStopped() {
nativeCapturerStopped(nativeSource);
}

@Override
public void onByteBufferFrameCaptured(
byte[] data, int width, int height, int rotation, long timeStamp) {
nativeOnByteBufferFrameCaptured(
nativeSource, data, data.length, width, height, rotation, timeStamp);
}

@Override
public void onTextureFrameCaptured(int width, int height, int oesTextureId,
float[] transformMatrix, int rotation, long timestamp) {
nativeOnTextureFrameCaptured(
nativeSource, width, height, oesTextureId, transformMatrix, rotation, timestamp);
}

@Override
public void onFrameCaptured(VideoFrame frame) {
nativeOnFrameCaptured(nativeSource, frame.getBuffer().getWidth(), frame.getBuffer().getHeight(),
frame.getRotation(), frame.getTimestampNs(), frame.getBuffer());
}

private native void nativeCapturerStarted(long nativeSource, boolean success);
private native void nativeCapturerStopped(long nativeSource);
private native void nativeOnByteBufferFrameCaptured(long nativeSource, byte[] data, int length,
int width, int height, int rotation, long timeStamp);
private native void nativeOnTextureFrameCaptured(long nativeSource, int width, int height,
int oesTextureId, float[] transformMatrix, int rotation, long timestamp);
private native void nativeOnFrameCaptured(long nativeSource, int width, int height, int rotation,
long timestampNs, VideoFrame.Buffer frame);
}
38
sdk/android/src/java/org/webrtc/BaseBitrateAdjuster.java
Normal file
@ -0,0 +1,38 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/

package org.webrtc;

/** BitrateAdjuster that tracks bitrate and framerate but does not adjust them. */
class BaseBitrateAdjuster implements BitrateAdjuster {
protected int targetBitrateBps = 0;
protected int targetFps = 0;

@Override
public void setTargets(int targetBitrateBps, int targetFps) {
this.targetBitrateBps = targetBitrateBps;
this.targetFps = targetFps;
}

@Override
public void reportEncodedFrame(int size) {
// No op.
}

@Override
public int getAdjustedBitrateBps() {
return targetBitrateBps;
}

@Override
public int getAdjustedFramerate() {
return targetFps;
}
}
31
sdk/android/src/java/org/webrtc/BitrateAdjuster.java
Normal file
@ -0,0 +1,31 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/

package org.webrtc;

/** Object that adjusts the bitrate of a hardware codec. */
interface BitrateAdjuster {
/**
* Sets the target bitrate in bits per second and framerate in frames per second.
*/
void setTargets(int targetBitrateBps, int targetFps);

/**
* Reports that a frame of the given size has been encoded.
*/
void reportEncodedFrame(int size);

/** Gets the current bitrate. */
int getAdjustedBitrateBps();

/** Gets the current framerate. */
int getAdjustedFramerate();
}
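The interface above is what the hardware encoder wrappers feed encoded-frame sizes into and read adjusted rates back from. A hedged sketch of that call pattern (the surrounding EncoderLoopSketch class and applyToCodec helper are hypothetical names, not the actual MediaCodec wrapper):

// Illustrative only: how an encoder wrapper might drive a BitrateAdjuster.
class EncoderLoopSketch {
  private final BitrateAdjuster adjuster = new BaseBitrateAdjuster();

  void onRateUpdate(int targetBitrateBps, int targetFps) {
    adjuster.setTargets(targetBitrateBps, targetFps);
    applyToCodec(adjuster.getAdjustedBitrateBps(), adjuster.getAdjustedFramerate());
  }

  void onEncodedFrame(byte[] encoded) {
    // Report the actual output size, then re-read the (possibly) adjusted rates.
    adjuster.reportEncodedFrame(encoded.length);
    applyToCodec(adjuster.getAdjustedBitrateBps(), adjuster.getAdjustedFramerate());
  }

  private void applyToCodec(int bitrateBps, int fps) {
    // Hypothetical: would be mapped onto the codec's bitrate/framerate controls.
  }
}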
369
sdk/android/src/java/org/webrtc/Camera1Session.java
Normal file
@ -0,0 +1,369 @@
|
||||
/*
|
||||
* Copyright 2016 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
package org.webrtc;
|
||||
|
||||
import android.content.Context;
|
||||
import android.media.MediaRecorder;
|
||||
import android.os.Handler;
|
||||
import android.os.SystemClock;
|
||||
import android.view.Surface;
|
||||
import android.view.WindowManager;
|
||||
import java.io.IOException;
|
||||
import java.nio.ByteBuffer;
|
||||
import java.util.List;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
import org.webrtc.CameraEnumerationAndroid.CaptureFormat;
|
||||
|
||||
@SuppressWarnings("deprecation")
|
||||
class Camera1Session implements CameraSession {
|
||||
private static final String TAG = "Camera1Session";
|
||||
private static final int NUMBER_OF_CAPTURE_BUFFERS = 3;
|
||||
|
||||
private static final Histogram camera1StartTimeMsHistogram =
|
||||
Histogram.createCounts("WebRTC.Android.Camera1.StartTimeMs", 1, 10000, 50);
|
||||
private static final Histogram camera1StopTimeMsHistogram =
|
||||
Histogram.createCounts("WebRTC.Android.Camera1.StopTimeMs", 1, 10000, 50);
|
||||
private static final Histogram camera1ResolutionHistogram = Histogram.createEnumeration(
|
||||
"WebRTC.Android.Camera1.Resolution", CameraEnumerationAndroid.COMMON_RESOLUTIONS.size());
|
||||
|
||||
private static enum SessionState { RUNNING, STOPPED }
|
||||
|
||||
private final boolean videoFrameEmitTrialEnabled;
|
||||
|
||||
private final Handler cameraThreadHandler;
|
||||
private final Events events;
|
||||
private final boolean captureToTexture;
|
||||
private final Context applicationContext;
|
||||
private final SurfaceTextureHelper surfaceTextureHelper;
|
||||
private final int cameraId;
|
||||
private final android.hardware.Camera camera;
|
||||
private final android.hardware.Camera.CameraInfo info;
|
||||
private final CaptureFormat captureFormat;
|
||||
// Used only for stats. Only used on the camera thread.
|
||||
private final long constructionTimeNs; // Construction time of this class.
|
||||
|
||||
private SessionState state;
|
||||
private boolean firstFrameReported = false;
|
||||
|
||||
public static void create(final CreateSessionCallback callback, final Events events,
|
||||
final boolean captureToTexture, final Context applicationContext,
|
||||
final SurfaceTextureHelper surfaceTextureHelper, final MediaRecorder mediaRecorder,
|
||||
final int cameraId, final int width, final int height, final int framerate) {
|
||||
final long constructionTimeNs = System.nanoTime();
|
||||
Logging.d(TAG, "Open camera " + cameraId);
|
||||
events.onCameraOpening();
|
||||
|
||||
final android.hardware.Camera camera;
|
||||
try {
|
||||
camera = android.hardware.Camera.open(cameraId);
|
||||
} catch (RuntimeException e) {
|
||||
callback.onFailure(FailureType.ERROR, e.getMessage());
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
camera.setPreviewTexture(surfaceTextureHelper.getSurfaceTexture());
|
||||
} catch (IOException e) {
|
||||
camera.release();
|
||||
callback.onFailure(FailureType.ERROR, e.getMessage());
|
||||
return;
|
||||
}
|
||||
|
||||
final android.hardware.Camera.CameraInfo info = new android.hardware.Camera.CameraInfo();
|
||||
android.hardware.Camera.getCameraInfo(cameraId, info);
|
||||
|
||||
final android.hardware.Camera.Parameters parameters = camera.getParameters();
|
||||
final CaptureFormat captureFormat =
|
||||
findClosestCaptureFormat(parameters, width, height, framerate);
|
||||
final Size pictureSize = findClosestPictureSize(parameters, width, height);
|
||||
|
||||
updateCameraParameters(camera, parameters, captureFormat, pictureSize, captureToTexture);
|
||||
|
||||
if (!captureToTexture) {
|
||||
final int frameSize = captureFormat.frameSize();
|
||||
for (int i = 0; i < NUMBER_OF_CAPTURE_BUFFERS; ++i) {
|
||||
final ByteBuffer buffer = ByteBuffer.allocateDirect(frameSize);
|
||||
camera.addCallbackBuffer(buffer.array());
|
||||
}
|
||||
}
|
||||
|
||||
// Calculate orientation manually and send it as CVO instead.
|
||||
camera.setDisplayOrientation(0 /* degrees */);
|
||||
|
||||
callback.onDone(
|
||||
new Camera1Session(events, captureToTexture, applicationContext, surfaceTextureHelper,
|
||||
mediaRecorder, cameraId, camera, info, captureFormat, constructionTimeNs));
|
||||
}
|
||||
|
||||
private static void updateCameraParameters(android.hardware.Camera camera,
|
||||
android.hardware.Camera.Parameters parameters, CaptureFormat captureFormat, Size pictureSize,
|
||||
boolean captureToTexture) {
|
||||
final List<String> focusModes = parameters.getSupportedFocusModes();
|
||||
|
||||
parameters.setPreviewFpsRange(captureFormat.framerate.min, captureFormat.framerate.max);
|
||||
parameters.setPreviewSize(captureFormat.width, captureFormat.height);
|
||||
parameters.setPictureSize(pictureSize.width, pictureSize.height);
|
||||
if (!captureToTexture) {
|
||||
parameters.setPreviewFormat(captureFormat.imageFormat);
|
||||
}
|
||||
|
||||
if (parameters.isVideoStabilizationSupported()) {
|
||||
parameters.setVideoStabilization(true);
|
||||
}
|
||||
if (focusModes.contains(android.hardware.Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO)) {
|
||||
parameters.setFocusMode(android.hardware.Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
|
||||
}
|
||||
camera.setParameters(parameters);
|
||||
}
|
||||
|
||||
private static CaptureFormat findClosestCaptureFormat(
|
||||
android.hardware.Camera.Parameters parameters, int width, int height, int framerate) {
|
||||
// Find closest supported format for |width| x |height| @ |framerate|.
|
||||
final List<CaptureFormat.FramerateRange> supportedFramerates =
|
||||
Camera1Enumerator.convertFramerates(parameters.getSupportedPreviewFpsRange());
|
||||
Logging.d(TAG, "Available fps ranges: " + supportedFramerates);
|
||||
|
||||
final CaptureFormat.FramerateRange fpsRange =
|
||||
CameraEnumerationAndroid.getClosestSupportedFramerateRange(supportedFramerates, framerate);
|
||||
|
||||
final Size previewSize = CameraEnumerationAndroid.getClosestSupportedSize(
|
||||
Camera1Enumerator.convertSizes(parameters.getSupportedPreviewSizes()), width, height);
|
||||
CameraEnumerationAndroid.reportCameraResolution(camera1ResolutionHistogram, previewSize);
|
||||
|
||||
return new CaptureFormat(previewSize.width, previewSize.height, fpsRange);
|
||||
}
|
||||
|
||||
private static Size findClosestPictureSize(
|
||||
android.hardware.Camera.Parameters parameters, int width, int height) {
|
||||
return CameraEnumerationAndroid.getClosestSupportedSize(
|
||||
Camera1Enumerator.convertSizes(parameters.getSupportedPictureSizes()), width, height);
|
||||
}
|
||||
|
||||
private Camera1Session(Events events, boolean captureToTexture, Context applicationContext,
|
||||
SurfaceTextureHelper surfaceTextureHelper, MediaRecorder mediaRecorder, int cameraId,
|
||||
android.hardware.Camera camera, android.hardware.Camera.CameraInfo info,
|
||||
CaptureFormat captureFormat, long constructionTimeNs) {
|
||||
Logging.d(TAG, "Create new camera1 session on camera " + cameraId);
|
||||
videoFrameEmitTrialEnabled =
|
||||
PeerConnectionFactory.fieldTrialsFindFullName(PeerConnectionFactory.VIDEO_FRAME_EMIT_TRIAL)
|
||||
.equals(PeerConnectionFactory.TRIAL_ENABLED);
|
||||
|
||||
this.cameraThreadHandler = new Handler();
|
||||
this.events = events;
|
||||
this.captureToTexture = captureToTexture;
|
||||
this.applicationContext = applicationContext;
|
||||
this.surfaceTextureHelper = surfaceTextureHelper;
|
||||
this.cameraId = cameraId;
|
||||
this.camera = camera;
|
||||
this.info = info;
|
||||
this.captureFormat = captureFormat;
|
||||
this.constructionTimeNs = constructionTimeNs;
|
||||
|
||||
startCapturing();
|
||||
|
||||
if (mediaRecorder != null) {
|
||||
camera.unlock();
|
||||
mediaRecorder.setCamera(camera);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void stop() {
|
||||
Logging.d(TAG, "Stop camera1 session on camera " + cameraId);
|
||||
checkIsOnCameraThread();
|
||||
if (state != SessionState.STOPPED) {
|
||||
final long stopStartTime = System.nanoTime();
|
||||
stopInternal();
|
||||
final int stopTimeMs = (int) TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - stopStartTime);
|
||||
camera1StopTimeMsHistogram.addSample(stopTimeMs);
|
||||
}
|
||||
}
|
||||
|
||||
private void startCapturing() {
|
||||
Logging.d(TAG, "Start capturing");
|
||||
checkIsOnCameraThread();
|
||||
|
||||
state = SessionState.RUNNING;
|
||||
|
||||
camera.setErrorCallback(new android.hardware.Camera.ErrorCallback() {
|
||||
@Override
|
||||
public void onError(int error, android.hardware.Camera camera) {
|
||||
String errorMessage;
|
||||
if (error == android.hardware.Camera.CAMERA_ERROR_SERVER_DIED) {
|
||||
errorMessage = "Camera server died!";
|
||||
} else {
|
||||
errorMessage = "Camera error: " + error;
|
||||
}
|
||||
Logging.e(TAG, errorMessage);
|
||||
stopInternal();
|
||||
if (error == android.hardware.Camera.CAMERA_ERROR_EVICTED) {
|
||||
events.onCameraDisconnected(Camera1Session.this);
|
||||
} else {
|
||||
events.onCameraError(Camera1Session.this, errorMessage);
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
if (captureToTexture) {
|
||||
listenForTextureFrames();
|
||||
} else {
|
||||
listenForBytebufferFrames();
|
||||
}
|
||||
try {
|
||||
camera.startPreview();
|
||||
} catch (RuntimeException e) {
|
||||
stopInternal();
|
||||
events.onCameraError(this, e.getMessage());
|
||||
}
|
||||
}
|
||||
|
||||
private void stopInternal() {
|
||||
Logging.d(TAG, "Stop internal");
|
||||
checkIsOnCameraThread();
|
||||
if (state == SessionState.STOPPED) {
|
||||
Logging.d(TAG, "Camera is already stopped");
|
||||
return;
|
||||
}
|
||||
|
||||
state = SessionState.STOPPED;
|
||||
surfaceTextureHelper.stopListening();
|
||||
// Note: stopPreview or other driver code might deadlock. Deadlock in
|
||||
// android.hardware.Camera._stopPreview(Native Method) has been observed on
|
||||
// Nexus 5 (hammerhead), OS version LMY48I.
|
||||
camera.stopPreview();
|
||||
camera.release();
|
||||
events.onCameraClosed(this);
|
||||
Logging.d(TAG, "Stop done");
|
||||
}
|
||||
|
||||
private void listenForTextureFrames() {
|
||||
surfaceTextureHelper.startListening(new SurfaceTextureHelper.OnTextureFrameAvailableListener() {
|
||||
@Override
|
||||
public void onTextureFrameAvailable(
|
||||
int oesTextureId, float[] transformMatrix, long timestampNs) {
|
||||
checkIsOnCameraThread();
|
||||
|
||||
if (state != SessionState.RUNNING) {
|
||||
Logging.d(TAG, "Texture frame captured but camera is no longer running.");
|
||||
surfaceTextureHelper.returnTextureFrame();
|
||||
return;
|
||||
}
|
||||
|
||||
if (!firstFrameReported) {
|
||||
final int startTimeMs =
|
||||
(int) TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - constructionTimeNs);
|
||||
camera1StartTimeMsHistogram.addSample(startTimeMs);
|
||||
firstFrameReported = true;
|
||||
}
|
||||
|
||||
int rotation = getFrameOrientation();
|
||||
if (info.facing == android.hardware.Camera.CameraInfo.CAMERA_FACING_FRONT) {
|
||||
// Undo the mirror that the OS "helps" us with.
|
||||
// http://developer.android.com/reference/android/hardware/Camera.html#setDisplayOrientation(int)
|
||||
transformMatrix = RendererCommon.multiplyMatrices(
|
||||
transformMatrix, RendererCommon.horizontalFlipMatrix());
|
||||
}
|
||||
if (videoFrameEmitTrialEnabled) {
|
||||
final VideoFrame.Buffer buffer =
|
||||
surfaceTextureHelper.createTextureBuffer(captureFormat.width, captureFormat.height,
|
||||
RendererCommon.convertMatrixToAndroidGraphicsMatrix(transformMatrix));
|
||||
final VideoFrame frame = new VideoFrame(buffer, rotation, timestampNs);
|
||||
events.onFrameCaptured(Camera1Session.this, frame);
|
||||
frame.release();
|
||||
} else {
|
||||
events.onTextureFrameCaptured(Camera1Session.this, captureFormat.width,
|
||||
captureFormat.height, oesTextureId, transformMatrix, rotation, timestampNs);
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
private void listenForBytebufferFrames() {
|
||||
camera.setPreviewCallbackWithBuffer(new android.hardware.Camera.PreviewCallback() {
|
||||
@Override
|
||||
public void onPreviewFrame(final byte[] data, android.hardware.Camera callbackCamera) {
|
||||
checkIsOnCameraThread();
|
||||
|
||||
if (callbackCamera != camera) {
|
||||
Logging.e(TAG, "Callback from a different camera. This should never happen.");
|
||||
return;
|
||||
}
|
||||
|
||||
if (state != SessionState.RUNNING) {
|
||||
Logging.d(TAG, "Bytebuffer frame captured but camera is no longer running.");
|
||||
return;
|
||||
}
|
||||
|
||||
final long captureTimeNs = TimeUnit.MILLISECONDS.toNanos(SystemClock.elapsedRealtime());
|
||||
|
||||
if (!firstFrameReported) {
|
||||
final int startTimeMs =
|
||||
(int) TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - constructionTimeNs);
|
||||
camera1StartTimeMsHistogram.addSample(startTimeMs);
|
||||
firstFrameReported = true;
|
||||
}
|
||||
|
||||
if (videoFrameEmitTrialEnabled) {
|
||||
VideoFrame.Buffer frameBuffer = new NV21Buffer(data, captureFormat.width,
|
||||
captureFormat.height, () -> cameraThreadHandler.post(() -> {
|
||||
if (state == SessionState.RUNNING) {
|
||||
camera.addCallbackBuffer(data);
|
||||
}
|
||||
}));
|
||||
final VideoFrame frame =
|
||||
new VideoFrame(frameBuffer, getFrameOrientation(), captureTimeNs);
|
||||
events.onFrameCaptured(Camera1Session.this, frame);
|
||||
frame.release();
|
||||
} else {
|
||||
events.onByteBufferFrameCaptured(Camera1Session.this, data, captureFormat.width,
|
||||
captureFormat.height, getFrameOrientation(), captureTimeNs);
|
||||
camera.addCallbackBuffer(data);
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
private int getDeviceOrientation() {
|
||||
int orientation = 0;
|
||||
|
||||
WindowManager wm = (WindowManager) applicationContext.getSystemService(Context.WINDOW_SERVICE);
|
||||
switch (wm.getDefaultDisplay().getRotation()) {
|
||||
case Surface.ROTATION_90:
|
||||
orientation = 90;
|
||||
break;
|
||||
case Surface.ROTATION_180:
|
||||
orientation = 180;
|
||||
break;
|
||||
case Surface.ROTATION_270:
|
||||
orientation = 270;
|
||||
break;
|
||||
case Surface.ROTATION_0:
|
||||
default:
|
||||
orientation = 0;
|
||||
break;
|
||||
}
|
||||
return orientation;
|
||||
}
|
||||
|
||||
private int getFrameOrientation() {
|
||||
int rotation = getDeviceOrientation();
|
||||
if (info.facing == android.hardware.Camera.CameraInfo.CAMERA_FACING_BACK) {
|
||||
rotation = 360 - rotation;
|
||||
}
|
||||
return (info.orientation + rotation) % 360;
|
||||
}
|
||||
|
||||
private void checkIsOnCameraThread() {
|
||||
if (Thread.currentThread() != cameraThreadHandler.getLooper().getThread()) {
|
||||
throw new IllegalStateException("Wrong thread");
|
||||
}
|
||||
}
|
||||
}
|
||||
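As a worked example of the getFrameOrientation() arithmetic above (the concrete values are picked purely for illustration, not taken from any particular device):

// Worked example of the Camera1Session.getFrameOrientation() math; values are illustrative.
int sensorOrientation = 90;   // info.orientation for a typical back camera
int deviceRotation = 270;     // Surface.ROTATION_270 reported by the WindowManager
boolean backFacing = true;
int rotation = backFacing ? 360 - deviceRotation : deviceRotation;   // 90
int frameRotation = (sensorOrientation + rotation) % 360;            // 180
// A front-facing sensor at 270 degrees on an upright device would give (270 + 0) % 360 = 270.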
480
sdk/android/src/java/org/webrtc/Camera2Session.java
Normal file
@ -0,0 +1,480 @@
|
||||
/*
|
||||
* Copyright 2016 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
package org.webrtc;
|
||||
|
||||
import android.annotation.TargetApi;
|
||||
import android.content.Context;
|
||||
import android.graphics.Matrix;
|
||||
import android.graphics.SurfaceTexture;
|
||||
import android.hardware.camera2.CameraAccessException;
|
||||
import android.hardware.camera2.CameraCaptureSession;
|
||||
import android.hardware.camera2.CameraCharacteristics;
|
||||
import android.hardware.camera2.CameraDevice;
|
||||
import android.hardware.camera2.CameraManager;
|
||||
import android.hardware.camera2.CameraMetadata;
|
||||
import android.hardware.camera2.CaptureFailure;
|
||||
import android.hardware.camera2.CaptureRequest;
|
||||
import android.media.MediaRecorder;
|
||||
import android.os.Handler;
|
||||
import android.util.Range;
|
||||
import android.view.Surface;
|
||||
import android.view.WindowManager;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
import org.webrtc.CameraEnumerationAndroid.CaptureFormat;
|
||||
|
||||
@TargetApi(21)
|
||||
class Camera2Session implements CameraSession {
|
||||
private static final String TAG = "Camera2Session";
|
||||
|
||||
private static final Histogram camera2StartTimeMsHistogram =
|
||||
Histogram.createCounts("WebRTC.Android.Camera2.StartTimeMs", 1, 10000, 50);
|
||||
private static final Histogram camera2StopTimeMsHistogram =
|
||||
Histogram.createCounts("WebRTC.Android.Camera2.StopTimeMs", 1, 10000, 50);
|
||||
private static final Histogram camera2ResolutionHistogram = Histogram.createEnumeration(
|
||||
"WebRTC.Android.Camera2.Resolution", CameraEnumerationAndroid.COMMON_RESOLUTIONS.size());
|
||||
|
||||
private static enum SessionState { RUNNING, STOPPED }
|
||||
|
||||
private final boolean videoFrameEmitTrialEnabled;
|
||||
|
||||
private final Handler cameraThreadHandler;
|
||||
private final CreateSessionCallback callback;
|
||||
private final Events events;
|
||||
private final Context applicationContext;
|
||||
private final CameraManager cameraManager;
|
||||
private final SurfaceTextureHelper surfaceTextureHelper;
|
||||
private final Surface mediaRecorderSurface;
|
||||
private final String cameraId;
|
||||
private final int width;
|
||||
private final int height;
|
||||
private final int framerate;
|
||||
|
||||
// Initialized at start
|
||||
private CameraCharacteristics cameraCharacteristics;
|
||||
private int cameraOrientation;
|
||||
private boolean isCameraFrontFacing;
|
||||
private int fpsUnitFactor;
|
||||
private CaptureFormat captureFormat;
|
||||
|
||||
// Initialized when camera opens
|
||||
private CameraDevice cameraDevice;
|
||||
private Surface surface;
|
||||
|
||||
// Initialized when capture session is created
|
||||
private CameraCaptureSession captureSession;
|
||||
|
||||
// State
|
||||
private SessionState state = SessionState.RUNNING;
|
||||
private boolean firstFrameReported = false;
|
||||
|
||||
// Used only for stats. Only used on the camera thread.
|
||||
private final long constructionTimeNs; // Construction time of this class.
|
||||
|
||||
private class CameraStateCallback extends CameraDevice.StateCallback {
|
||||
private String getErrorDescription(int errorCode) {
|
||||
switch (errorCode) {
|
||||
case CameraDevice.StateCallback.ERROR_CAMERA_DEVICE:
|
||||
return "Camera device has encountered a fatal error.";
|
||||
case CameraDevice.StateCallback.ERROR_CAMERA_DISABLED:
|
||||
return "Camera device could not be opened due to a device policy.";
|
||||
case CameraDevice.StateCallback.ERROR_CAMERA_IN_USE:
|
||||
return "Camera device is in use already.";
|
||||
case CameraDevice.StateCallback.ERROR_CAMERA_SERVICE:
|
||||
return "Camera service has encountered a fatal error.";
|
||||
case CameraDevice.StateCallback.ERROR_MAX_CAMERAS_IN_USE:
|
||||
return "Camera device could not be opened because"
|
||||
+ " there are too many other open camera devices.";
|
||||
default:
|
||||
return "Unknown camera error: " + errorCode;
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onDisconnected(CameraDevice camera) {
|
||||
checkIsOnCameraThread();
|
||||
final boolean startFailure = (captureSession == null) && (state != SessionState.STOPPED);
|
||||
state = SessionState.STOPPED;
|
||||
stopInternal();
|
||||
if (startFailure) {
|
||||
callback.onFailure(FailureType.DISCONNECTED, "Camera disconnected / evicted.");
|
||||
} else {
|
||||
events.onCameraDisconnected(Camera2Session.this);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onError(CameraDevice camera, int errorCode) {
|
||||
checkIsOnCameraThread();
|
||||
reportError(getErrorDescription(errorCode));
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onOpened(CameraDevice camera) {
|
||||
checkIsOnCameraThread();
|
||||
|
||||
Logging.d(TAG, "Camera opened.");
|
||||
cameraDevice = camera;
|
||||
|
||||
final SurfaceTexture surfaceTexture = surfaceTextureHelper.getSurfaceTexture();
|
||||
surfaceTexture.setDefaultBufferSize(captureFormat.width, captureFormat.height);
|
||||
surface = new Surface(surfaceTexture);
|
||||
List<Surface> surfaces = new ArrayList<Surface>();
|
||||
surfaces.add(surface);
|
||||
if (mediaRecorderSurface != null) {
|
||||
Logging.d(TAG, "Add MediaRecorder surface to capture session.");
|
||||
surfaces.add(mediaRecorderSurface);
|
||||
}
|
||||
try {
|
||||
camera.createCaptureSession(surfaces, new CaptureSessionCallback(), cameraThreadHandler);
|
||||
} catch (CameraAccessException e) {
|
||||
reportError("Failed to create capture session. " + e);
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onClosed(CameraDevice camera) {
|
||||
checkIsOnCameraThread();
|
||||
|
||||
Logging.d(TAG, "Camera device closed.");
|
||||
events.onCameraClosed(Camera2Session.this);
|
||||
}
|
||||
}
|
||||
|
||||
private class CaptureSessionCallback extends CameraCaptureSession.StateCallback {
|
||||
@Override
|
||||
public void onConfigureFailed(CameraCaptureSession session) {
|
||||
checkIsOnCameraThread();
|
||||
session.close();
|
||||
reportError("Failed to configure capture session.");
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onConfigured(CameraCaptureSession session) {
|
||||
checkIsOnCameraThread();
|
||||
Logging.d(TAG, "Camera capture session configured.");
|
||||
captureSession = session;
|
||||
try {
|
||||
/*
|
||||
* The viable options for video capture requests are:
|
||||
* TEMPLATE_PREVIEW: High frame rate is given priority over the highest-quality
|
||||
* post-processing.
|
||||
* TEMPLATE_RECORD: Stable frame rate is used, and post-processing is set for recording
|
||||
* quality.
|
||||
*/
|
||||
final CaptureRequest.Builder captureRequestBuilder =
|
||||
cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_RECORD);
|
||||
// Set auto exposure fps range.
|
||||
captureRequestBuilder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE,
|
||||
new Range<Integer>(captureFormat.framerate.min / fpsUnitFactor,
|
||||
captureFormat.framerate.max / fpsUnitFactor));
|
||||
captureRequestBuilder.set(
|
||||
CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON);
|
||||
captureRequestBuilder.set(CaptureRequest.CONTROL_AE_LOCK, false);
|
||||
chooseStabilizationMode(captureRequestBuilder);
|
||||
chooseFocusMode(captureRequestBuilder);
|
||||
|
||||
captureRequestBuilder.addTarget(surface);
|
||||
if (mediaRecorderSurface != null) {
|
||||
Logging.d(TAG, "Add MediaRecorder surface to CaptureRequest.Builder");
|
||||
captureRequestBuilder.addTarget(mediaRecorderSurface);
|
||||
}
|
||||
session.setRepeatingRequest(
|
||||
captureRequestBuilder.build(), new CameraCaptureCallback(), cameraThreadHandler);
|
||||
} catch (CameraAccessException e) {
|
||||
reportError("Failed to start capture request. " + e);
|
||||
return;
|
||||
}
|
||||
|
||||
surfaceTextureHelper.startListening(
|
||||
new SurfaceTextureHelper.OnTextureFrameAvailableListener() {
|
||||
@Override
|
||||
public void onTextureFrameAvailable(
|
||||
int oesTextureId, float[] transformMatrix, long timestampNs) {
|
||||
checkIsOnCameraThread();
|
||||
|
||||
if (state != SessionState.RUNNING) {
|
||||
Logging.d(TAG, "Texture frame captured but camera is no longer running.");
|
||||
surfaceTextureHelper.returnTextureFrame();
|
||||
return;
|
||||
}
|
||||
|
||||
if (!firstFrameReported) {
|
||||
firstFrameReported = true;
|
||||
final int startTimeMs =
|
||||
(int) TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - constructionTimeNs);
|
||||
camera2StartTimeMsHistogram.addSample(startTimeMs);
|
||||
}
|
||||
|
||||
int rotation = getFrameOrientation();
|
||||
if (isCameraFrontFacing) {
|
||||
// Undo the mirror that the OS "helps" us with.
|
||||
// http://developer.android.com/reference/android/hardware/Camera.html#setDisplayOrientation(int)
|
||||
transformMatrix = RendererCommon.multiplyMatrices(
|
||||
transformMatrix, RendererCommon.horizontalFlipMatrix());
|
||||
}
|
||||
|
||||
// Undo camera orientation - we report it as rotation instead.
|
||||
transformMatrix =
|
||||
RendererCommon.rotateTextureMatrix(transformMatrix, -cameraOrientation);
|
||||
|
||||
if (videoFrameEmitTrialEnabled) {
|
||||
VideoFrame.Buffer buffer = surfaceTextureHelper.createTextureBuffer(
|
||||
captureFormat.width, captureFormat.height,
|
||||
RendererCommon.convertMatrixToAndroidGraphicsMatrix(transformMatrix));
|
||||
final VideoFrame frame = new VideoFrame(buffer, rotation, timestampNs);
|
||||
events.onFrameCaptured(Camera2Session.this, frame);
|
||||
frame.release();
|
||||
} else {
|
||||
events.onTextureFrameCaptured(Camera2Session.this, captureFormat.width,
|
||||
captureFormat.height, oesTextureId, transformMatrix, rotation, timestampNs);
|
||||
}
|
||||
}
|
||||
});
|
||||
Logging.d(TAG, "Camera device successfully started.");
|
||||
callback.onDone(Camera2Session.this);
|
||||
}
|
||||
|
||||
// Prefers optical stabilization over software stabilization if available. Only enables one of
|
||||
// the stabilization modes at a time because having both enabled can cause strange results.
|
||||
private void chooseStabilizationMode(CaptureRequest.Builder captureRequestBuilder) {
|
||||
final int[] availableOpticalStabilization = cameraCharacteristics.get(
|
||||
CameraCharacteristics.LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION);
|
||||
if (availableOpticalStabilization != null) {
|
||||
for (int mode : availableOpticalStabilization) {
|
||||
if (mode == CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE_ON) {
|
||||
captureRequestBuilder.set(CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE,
|
||||
CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE_ON);
|
||||
captureRequestBuilder.set(CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE,
|
||||
CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE_OFF);
|
||||
Logging.d(TAG, "Using optical stabilization.");
|
||||
return;
|
||||
}
|
||||
}
|
||||
}
|
||||
// If no optical mode is available, try software.
|
||||
final int[] availableVideoStabilization = cameraCharacteristics.get(
|
||||
CameraCharacteristics.CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES);
|
||||
for (int mode : availableVideoStabilization) {
|
||||
if (mode == CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE_ON) {
|
||||
captureRequestBuilder.set(CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE,
|
||||
CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE_ON);
|
||||
captureRequestBuilder.set(CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE,
|
||||
CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE_OFF);
|
||||
Logging.d(TAG, "Using video stabilization.");
|
||||
return;
|
||||
}
|
||||
}
|
||||
Logging.d(TAG, "Stabilization not available.");
|
||||
}
|
||||
|
||||
private void chooseFocusMode(CaptureRequest.Builder captureRequestBuilder) {
|
||||
final int[] availableFocusModes =
|
||||
cameraCharacteristics.get(CameraCharacteristics.CONTROL_AF_AVAILABLE_MODES);
|
||||
for (int mode : availableFocusModes) {
|
||||
if (mode == CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_VIDEO) {
|
||||
captureRequestBuilder.set(
|
||||
CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_VIDEO);
|
||||
Logging.d(TAG, "Using continuous video auto-focus.");
|
||||
return;
|
||||
}
|
||||
}
|
||||
Logging.d(TAG, "Auto-focus is not available.");
|
||||
}
|
||||
}
|
||||
|
||||
private class CameraCaptureCallback extends CameraCaptureSession.CaptureCallback {
|
||||
@Override
|
||||
public void onCaptureFailed(
|
||||
CameraCaptureSession session, CaptureRequest request, CaptureFailure failure) {
|
||||
Logging.d(TAG, "Capture failed: " + failure);
|
||||
}
|
||||
}
|
||||
|
||||
public static void create(CreateSessionCallback callback, Events events,
|
||||
Context applicationContext, CameraManager cameraManager,
|
||||
SurfaceTextureHelper surfaceTextureHelper, MediaRecorder mediaRecorder, String cameraId,
|
||||
int width, int height, int framerate) {
|
||||
new Camera2Session(callback, events, applicationContext, cameraManager, surfaceTextureHelper,
|
||||
mediaRecorder, cameraId, width, height, framerate);
|
||||
}
|
||||
|
||||
private Camera2Session(CreateSessionCallback callback, Events events, Context applicationContext,
|
||||
CameraManager cameraManager, SurfaceTextureHelper surfaceTextureHelper,
|
||||
MediaRecorder mediaRecorder, String cameraId, int width, int height, int framerate) {
|
||||
Logging.d(TAG, "Create new camera2 session on camera " + cameraId);
|
||||
videoFrameEmitTrialEnabled =
|
||||
PeerConnectionFactory.fieldTrialsFindFullName(PeerConnectionFactory.VIDEO_FRAME_EMIT_TRIAL)
|
||||
.equals(PeerConnectionFactory.TRIAL_ENABLED);
|
||||
|
||||
constructionTimeNs = System.nanoTime();
|
||||
|
||||
this.cameraThreadHandler = new Handler();
|
||||
this.callback = callback;
|
||||
this.events = events;
|
||||
this.applicationContext = applicationContext;
|
||||
this.cameraManager = cameraManager;
|
||||
this.surfaceTextureHelper = surfaceTextureHelper;
|
||||
this.mediaRecorderSurface = (mediaRecorder != null) ? mediaRecorder.getSurface() : null;
|
||||
this.cameraId = cameraId;
|
||||
this.width = width;
|
||||
this.height = height;
|
||||
this.framerate = framerate;
|
||||
|
||||
start();
|
||||
}
|
||||
|
||||
private void start() {
|
||||
checkIsOnCameraThread();
|
||||
Logging.d(TAG, "start");
|
||||
|
||||
try {
|
||||
cameraCharacteristics = cameraManager.getCameraCharacteristics(cameraId);
|
||||
} catch (final CameraAccessException e) {
|
||||
reportError("getCameraCharacteristics(): " + e.getMessage());
|
||||
return;
|
||||
}
|
||||
cameraOrientation = cameraCharacteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
|
||||
isCameraFrontFacing = cameraCharacteristics.get(CameraCharacteristics.LENS_FACING)
|
||||
== CameraMetadata.LENS_FACING_FRONT;
|
||||
|
||||
findCaptureFormat();
|
||||
openCamera();
|
||||
}
|
||||
|
||||
private void findCaptureFormat() {
|
||||
checkIsOnCameraThread();
|
||||
|
||||
Range<Integer>[] fpsRanges =
|
||||
cameraCharacteristics.get(CameraCharacteristics.CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES);
|
||||
fpsUnitFactor = Camera2Enumerator.getFpsUnitFactor(fpsRanges);
|
||||
List<CaptureFormat.FramerateRange> framerateRanges =
|
||||
Camera2Enumerator.convertFramerates(fpsRanges, fpsUnitFactor);
|
||||
List<Size> sizes = Camera2Enumerator.getSupportedSizes(cameraCharacteristics);
|
||||
Logging.d(TAG, "Available preview sizes: " + sizes);
|
||||
Logging.d(TAG, "Available fps ranges: " + framerateRanges);
|
||||
|
||||
if (framerateRanges.isEmpty() || sizes.isEmpty()) {
|
||||
reportError("No supported capture formats.");
|
||||
return;
|
||||
}
|
||||
|
||||
final CaptureFormat.FramerateRange bestFpsRange =
|
||||
CameraEnumerationAndroid.getClosestSupportedFramerateRange(framerateRanges, framerate);
|
||||
|
||||
final Size bestSize = CameraEnumerationAndroid.getClosestSupportedSize(sizes, width, height);
|
||||
CameraEnumerationAndroid.reportCameraResolution(camera2ResolutionHistogram, bestSize);
|
||||
|
||||
captureFormat = new CaptureFormat(bestSize.width, bestSize.height, bestFpsRange);
|
||||
Logging.d(TAG, "Using capture format: " + captureFormat);
|
||||
}
|
||||
|
||||
private void openCamera() {
|
||||
checkIsOnCameraThread();
|
||||
|
||||
Logging.d(TAG, "Opening camera " + cameraId);
|
||||
events.onCameraOpening();
|
||||
|
||||
try {
|
||||
cameraManager.openCamera(cameraId, new CameraStateCallback(), cameraThreadHandler);
|
||||
} catch (CameraAccessException e) {
|
||||
reportError("Failed to open camera: " + e);
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void stop() {
|
||||
Logging.d(TAG, "Stop camera2 session on camera " + cameraId);
|
||||
checkIsOnCameraThread();
|
||||
if (state != SessionState.STOPPED) {
|
||||
final long stopStartTime = System.nanoTime();
|
||||
state = SessionState.STOPPED;
|
||||
stopInternal();
|
||||
final int stopTimeMs = (int) TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - stopStartTime);
|
||||
camera2StopTimeMsHistogram.addSample(stopTimeMs);
|
||||
}
|
||||
}
|
||||
|
||||
private void stopInternal() {
|
||||
Logging.d(TAG, "Stop internal");
|
||||
checkIsOnCameraThread();
|
||||
|
||||
surfaceTextureHelper.stopListening();
|
||||
|
||||
if (captureSession != null) {
|
||||
captureSession.close();
|
||||
captureSession = null;
|
||||
}
|
||||
if (surface != null) {
|
||||
surface.release();
|
||||
surface = null;
|
||||
}
|
||||
if (cameraDevice != null) {
|
||||
cameraDevice.close();
|
||||
cameraDevice = null;
|
||||
}
|
||||
|
||||
Logging.d(TAG, "Stop done");
|
||||
}
|
||||
|
||||
private void reportError(String error) {
|
||||
checkIsOnCameraThread();
|
||||
Logging.e(TAG, "Error: " + error);
|
||||
|
||||
final boolean startFailure = (captureSession == null) && (state != SessionState.STOPPED);
|
||||
state = SessionState.STOPPED;
|
||||
stopInternal();
|
||||
if (startFailure) {
|
||||
callback.onFailure(FailureType.ERROR, error);
|
||||
} else {
|
||||
events.onCameraError(this, error);
|
||||
}
|
||||
}
|
||||
|
||||
private int getDeviceOrientation() {
|
||||
int orientation = 0;
|
||||
|
||||
WindowManager wm = (WindowManager) applicationContext.getSystemService(Context.WINDOW_SERVICE);
|
||||
switch (wm.getDefaultDisplay().getRotation()) {
|
||||
case Surface.ROTATION_90:
|
||||
orientation = 90;
|
||||
break;
|
||||
case Surface.ROTATION_180:
|
||||
orientation = 180;
|
||||
break;
|
||||
case Surface.ROTATION_270:
|
||||
orientation = 270;
|
||||
break;
|
||||
case Surface.ROTATION_0:
|
||||
default:
|
||||
orientation = 0;
|
||||
break;
|
||||
}
|
||||
return orientation;
|
||||
}
|
||||
|
||||
private int getFrameOrientation() {
|
||||
int rotation = getDeviceOrientation();
|
||||
if (!isCameraFrontFacing) {
|
||||
rotation = 360 - rotation;
|
||||
}
|
||||
return (cameraOrientation + rotation) % 360;
|
||||
}
|
||||
|
||||
private void checkIsOnCameraThread() {
|
||||
if (Thread.currentThread() != cameraThreadHandler.getLooper().getThread()) {
|
||||
throw new IllegalStateException("Wrong thread");
|
||||
}
|
||||
}
|
||||
}
|
||||
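One detail in the capture-request setup above worth spelling out: the framerate range stored in captureFormat and the units the HAL expects in CONTROL_AE_TARGET_FPS_RANGE do not necessarily match, which is why both bounds are divided by fpsUnitFactor before being handed to the CaptureRequest. A hedged numeric sketch, assuming the internal range is kept in 1/1000 fps while this particular device expects whole fps (so fpsUnitFactor is 1000; all numbers are made up):

// Hedged sketch of the fpsUnitFactor conversion used when building the capture request.
int fpsUnitFactor = 1000;      // assumed result of Camera2Enumerator.getFpsUnitFactor()
int internalMinFps = 15000;    // 15 fps stored in 1/1000 fps units
int internalMaxFps = 30000;    // 30 fps stored in 1/1000 fps units
android.util.Range<Integer> aeTargetFpsRange = new android.util.Range<Integer>(
    internalMinFps / fpsUnitFactor, internalMaxFps / fpsUnitFactor);   // Range(15, 30)
// This is the shape of the value set on CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE above.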
590
sdk/android/src/java/org/webrtc/CameraCapturer.java
Normal file
@ -0,0 +1,590 @@
|
||||
/*
|
||||
* Copyright 2016 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
package org.webrtc;
|
||||
|
||||
import android.content.Context;
|
||||
import android.media.MediaRecorder;
|
||||
import android.os.Handler;
|
||||
import android.os.Looper;
|
||||
import java.util.Arrays;
|
||||
|
||||
@SuppressWarnings("deprecation")
|
||||
abstract class CameraCapturer implements CameraVideoCapturer {
|
||||
enum SwitchState {
|
||||
IDLE, // No switch requested.
|
||||
PENDING, // Waiting for previous capture session to open.
|
||||
IN_PROGRESS, // Waiting for new switched capture session to start.
|
||||
}
|
||||
|
||||
enum MediaRecorderState {
|
||||
IDLE, // No media recording update (add or remove) requested.
|
||||
IDLE_TO_ACTIVE, // Waiting for new capture session with added MediaRecorder surface to start.
|
||||
ACTIVE_TO_IDLE, // Waiting for new capture session with removed MediaRecorder surface to start.
|
||||
ACTIVE, // MediaRecorder was successfully added to camera pipeline.
|
||||
}
|
||||
|
||||
private static final String TAG = "CameraCapturer";
|
||||
private final static int MAX_OPEN_CAMERA_ATTEMPTS = 3;
|
||||
private final static int OPEN_CAMERA_DELAY_MS = 500;
|
||||
private final static int OPEN_CAMERA_TIMEOUT = 10000;
|
||||
|
||||
private final CameraEnumerator cameraEnumerator;
|
||||
private final CameraEventsHandler eventsHandler;
|
||||
private final Handler uiThreadHandler;
|
||||
|
||||
private final CameraSession.CreateSessionCallback createSessionCallback =
|
||||
new CameraSession.CreateSessionCallback() {
|
||||
@Override
|
||||
public void onDone(CameraSession session) {
|
||||
checkIsOnCameraThread();
|
||||
Logging.d(TAG,
|
||||
"Create session done. Switch state: " + switchState
|
||||
+ ". MediaRecorder state: " + mediaRecorderState);
|
||||
uiThreadHandler.removeCallbacks(openCameraTimeoutRunnable);
|
||||
synchronized (stateLock) {
|
||||
capturerObserver.onCapturerStarted(true /* success */);
|
||||
sessionOpening = false;
|
||||
currentSession = session;
|
||||
cameraStatistics = new CameraStatistics(surfaceHelper, eventsHandler);
|
||||
firstFrameObserved = false;
|
||||
stateLock.notifyAll();
|
||||
|
||||
if (switchState == SwitchState.IN_PROGRESS) {
|
||||
if (switchEventsHandler != null) {
|
||||
switchEventsHandler.onCameraSwitchDone(cameraEnumerator.isFrontFacing(cameraName));
|
||||
switchEventsHandler = null;
|
||||
}
|
||||
switchState = SwitchState.IDLE;
|
||||
} else if (switchState == SwitchState.PENDING) {
|
||||
switchState = SwitchState.IDLE;
|
||||
switchCameraInternal(switchEventsHandler);
|
||||
}
|
||||
|
||||
if (mediaRecorderState == MediaRecorderState.IDLE_TO_ACTIVE
|
||||
|| mediaRecorderState == MediaRecorderState.ACTIVE_TO_IDLE) {
|
||||
if (mediaRecorderEventsHandler != null) {
|
||||
mediaRecorderEventsHandler.onMediaRecorderSuccess();
|
||||
mediaRecorderEventsHandler = null;
|
||||
}
|
||||
if (mediaRecorderState == MediaRecorderState.IDLE_TO_ACTIVE) {
|
||||
mediaRecorderState = MediaRecorderState.ACTIVE;
|
||||
} else {
|
||||
mediaRecorderState = MediaRecorderState.IDLE;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onFailure(CameraSession.FailureType failureType, String error) {
|
||||
checkIsOnCameraThread();
|
||||
uiThreadHandler.removeCallbacks(openCameraTimeoutRunnable);
|
||||
synchronized (stateLock) {
|
||||
capturerObserver.onCapturerStarted(false /* success */);
|
||||
openAttemptsRemaining--;
|
||||
|
||||
if (openAttemptsRemaining <= 0) {
|
||||
Logging.w(TAG, "Opening camera failed, passing: " + error);
|
||||
sessionOpening = false;
|
||||
stateLock.notifyAll();
|
||||
|
||||
if (switchState != SwitchState.IDLE) {
|
||||
if (switchEventsHandler != null) {
|
||||
switchEventsHandler.onCameraSwitchError(error);
|
||||
switchEventsHandler = null;
|
||||
}
|
||||
switchState = SwitchState.IDLE;
|
||||
}
|
||||
|
||||
if (mediaRecorderState != MediaRecorderState.IDLE) {
|
||||
if (mediaRecorderEventsHandler != null) {
|
||||
mediaRecorderEventsHandler.onMediaRecorderError(error);
|
||||
mediaRecorderEventsHandler = null;
|
||||
}
|
||||
mediaRecorderState = MediaRecorderState.IDLE;
|
||||
}
|
||||
|
||||
if (failureType == CameraSession.FailureType.DISCONNECTED) {
|
||||
eventsHandler.onCameraDisconnected();
|
||||
} else {
|
||||
eventsHandler.onCameraError(error);
|
||||
}
|
||||
} else {
|
||||
Logging.w(TAG, "Opening camera failed, retry: " + error);
|
||||
createSessionInternal(OPEN_CAMERA_DELAY_MS, null /* mediaRecorder */);
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
private final CameraSession.Events cameraSessionEventsHandler = new CameraSession.Events() {
|
||||
@Override
|
||||
public void onCameraOpening() {
|
||||
checkIsOnCameraThread();
|
||||
synchronized (stateLock) {
|
||||
if (currentSession != null) {
|
||||
Logging.w(TAG, "onCameraOpening while session was open.");
|
||||
return;
|
||||
}
|
||||
eventsHandler.onCameraOpening(cameraName);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onCameraError(CameraSession session, String error) {
|
||||
checkIsOnCameraThread();
|
||||
synchronized (stateLock) {
|
||||
if (session != currentSession) {
|
||||
Logging.w(TAG, "onCameraError from another session: " + error);
|
||||
return;
|
||||
}
|
||||
eventsHandler.onCameraError(error);
|
||||
stopCapture();
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onCameraDisconnected(CameraSession session) {
|
||||
checkIsOnCameraThread();
|
||||
synchronized (stateLock) {
|
||||
if (session != currentSession) {
|
||||
Logging.w(TAG, "onCameraDisconnected from another session.");
|
||||
return;
|
||||
}
|
||||
eventsHandler.onCameraDisconnected();
|
||||
stopCapture();
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onCameraClosed(CameraSession session) {
|
||||
checkIsOnCameraThread();
|
||||
synchronized (stateLock) {
|
||||
if (session != currentSession && currentSession != null) {
|
||||
Logging.d(TAG, "onCameraClosed from another session.");
|
||||
return;
|
||||
}
|
||||
eventsHandler.onCameraClosed();
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onFrameCaptured(CameraSession session, VideoFrame frame) {
|
||||
checkIsOnCameraThread();
|
||||
synchronized (stateLock) {
|
||||
if (session != currentSession) {
|
||||
Logging.w(TAG, "onTextureFrameCaptured from another session.");
|
||||
return;
|
||||
}
|
||||
if (!firstFrameObserved) {
|
||||
eventsHandler.onFirstFrameAvailable();
|
||||
firstFrameObserved = true;
|
||||
}
|
||||
cameraStatistics.addFrame();
|
||||
capturerObserver.onFrameCaptured(frame);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onByteBufferFrameCaptured(
|
||||
CameraSession session, byte[] data, int width, int height, int rotation, long timestamp) {
|
||||
checkIsOnCameraThread();
|
||||
synchronized (stateLock) {
|
||||
if (session != currentSession) {
|
||||
Logging.w(TAG, "onByteBufferFrameCaptured from another session.");
|
||||
return;
|
||||
}
|
||||
if (!firstFrameObserved) {
|
||||
eventsHandler.onFirstFrameAvailable();
|
||||
firstFrameObserved = true;
|
||||
}
|
||||
cameraStatistics.addFrame();
|
||||
capturerObserver.onByteBufferFrameCaptured(data, width, height, rotation, timestamp);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onTextureFrameCaptured(CameraSession session, int width, int height,
|
||||
int oesTextureId, float[] transformMatrix, int rotation, long timestamp) {
|
||||
checkIsOnCameraThread();
|
||||
synchronized (stateLock) {
|
||||
if (session != currentSession) {
|
||||
Logging.w(TAG, "onTextureFrameCaptured from another session.");
|
||||
surfaceHelper.returnTextureFrame();
|
||||
return;
|
||||
}
|
||||
if (!firstFrameObserved) {
|
||||
eventsHandler.onFirstFrameAvailable();
|
||||
firstFrameObserved = true;
|
||||
}
|
||||
cameraStatistics.addFrame();
|
||||
capturerObserver.onTextureFrameCaptured(
|
||||
width, height, oesTextureId, transformMatrix, rotation, timestamp);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
private final Runnable openCameraTimeoutRunnable = new Runnable() {
|
||||
@Override
|
||||
public void run() {
|
||||
eventsHandler.onCameraError("Camera failed to start within timeout.");
|
||||
}
|
||||
};
|
||||
|
||||
// Initialized on initialize
|
||||
// -------------------------
|
||||
private Handler cameraThreadHandler;
|
||||
private Context applicationContext;
|
||||
private CapturerObserver capturerObserver;
|
||||
private SurfaceTextureHelper surfaceHelper;
|
||||
|
||||
private final Object stateLock = new Object();
|
||||
private boolean sessionOpening; /* guarded by stateLock */
|
||||
private CameraSession currentSession; /* guarded by stateLock */
|
||||
private String cameraName; /* guarded by stateLock */
|
||||
private int width; /* guarded by stateLock */
|
||||
private int height; /* guarded by stateLock */
|
||||
private int framerate; /* guarded by stateLock */
|
||||
private int openAttemptsRemaining; /* guarded by stateLock */
|
||||
private SwitchState switchState = SwitchState.IDLE; /* guarded by stateLock */
|
||||
private CameraSwitchHandler switchEventsHandler; /* guarded by stateLock */
|
||||
// Valid from onDone call until stopCapture, otherwise null.
|
||||
private CameraStatistics cameraStatistics; /* guarded by stateLock */
|
||||
private boolean firstFrameObserved; /* guarded by stateLock */
|
||||
|
||||
// Variables used on camera thread - do not require stateLock synchronization.
|
||||
private MediaRecorderState mediaRecorderState = MediaRecorderState.IDLE;
|
||||
private MediaRecorderHandler mediaRecorderEventsHandler;
|
||||
|
||||
public CameraCapturer(
|
||||
String cameraName, CameraEventsHandler eventsHandler, CameraEnumerator cameraEnumerator) {
|
||||
if (eventsHandler == null) {
|
||||
eventsHandler = new CameraEventsHandler() {
|
||||
@Override
|
||||
public void onCameraError(String errorDescription) {}
|
||||
@Override
|
||||
public void onCameraDisconnected() {}
|
||||
@Override
|
||||
public void onCameraFreezed(String errorDescription) {}
|
||||
@Override
|
||||
public void onCameraOpening(String cameraName) {}
|
||||
@Override
|
||||
public void onFirstFrameAvailable() {}
|
||||
@Override
|
||||
public void onCameraClosed() {}
|
||||
};
|
||||
}
|
||||
|
||||
this.eventsHandler = eventsHandler;
|
||||
this.cameraEnumerator = cameraEnumerator;
|
||||
this.cameraName = cameraName;
|
||||
uiThreadHandler = new Handler(Looper.getMainLooper());
|
||||
|
||||
final String[] deviceNames = cameraEnumerator.getDeviceNames();
|
||||
|
||||
if (deviceNames.length == 0) {
|
||||
throw new RuntimeException("No cameras attached.");
|
||||
}
|
||||
if (!Arrays.asList(deviceNames).contains(this.cameraName)) {
|
||||
throw new IllegalArgumentException(
|
||||
"Camera name " + this.cameraName + " does not match any known camera device.");
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void initialize(SurfaceTextureHelper surfaceTextureHelper, Context applicationContext,
|
||||
CapturerObserver capturerObserver) {
|
||||
this.applicationContext = applicationContext;
|
||||
this.capturerObserver = capturerObserver;
|
||||
this.surfaceHelper = surfaceTextureHelper;
|
||||
this.cameraThreadHandler =
|
||||
surfaceTextureHelper == null ? null : surfaceTextureHelper.getHandler();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void startCapture(int width, int height, int framerate) {
|
||||
Logging.d(TAG, "startCapture: " + width + "x" + height + "@" + framerate);
|
||||
if (applicationContext == null) {
|
||||
throw new RuntimeException("CameraCapturer must be initialized before calling startCapture.");
|
||||
}
|
||||
|
||||
synchronized (stateLock) {
|
||||
if (sessionOpening || currentSession != null) {
|
||||
Logging.w(TAG, "Session already open");
|
||||
return;
|
||||
}
|
||||
|
||||
this.width = width;
|
||||
this.height = height;
|
||||
this.framerate = framerate;
|
||||
|
||||
sessionOpening = true;
|
||||
openAttemptsRemaining = MAX_OPEN_CAMERA_ATTEMPTS;
|
||||
createSessionInternal(0, null /* mediaRecorder */);
|
||||
}
|
||||
}
|
||||
|
||||
private void createSessionInternal(int delayMs, final MediaRecorder mediaRecorder) {
|
||||
uiThreadHandler.postDelayed(openCameraTimeoutRunnable, delayMs + OPEN_CAMERA_TIMEOUT);
|
||||
cameraThreadHandler.postDelayed(new Runnable() {
|
||||
@Override
|
||||
public void run() {
|
||||
createCameraSession(createSessionCallback, cameraSessionEventsHandler, applicationContext,
|
||||
surfaceHelper, mediaRecorder, cameraName, width, height, framerate);
|
||||
}
|
||||
}, delayMs);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void stopCapture() {
|
||||
Logging.d(TAG, "Stop capture");
|
||||
|
||||
synchronized (stateLock) {
|
||||
while (sessionOpening) {
|
||||
Logging.d(TAG, "Stop capture: Waiting for session to open");
|
||||
ThreadUtils.waitUninterruptibly(stateLock);
|
||||
}
|
||||
|
||||
if (currentSession != null) {
|
||||
Logging.d(TAG, "Stop capture: Nulling session");
|
||||
cameraStatistics.release();
|
||||
cameraStatistics = null;
|
||||
final CameraSession oldSession = currentSession;
|
||||
cameraThreadHandler.post(new Runnable() {
|
||||
@Override
|
||||
public void run() {
|
||||
oldSession.stop();
|
||||
}
|
||||
});
|
||||
currentSession = null;
|
||||
capturerObserver.onCapturerStopped();
|
||||
} else {
|
||||
Logging.d(TAG, "Stop capture: No session open");
|
||||
}
|
||||
}
|
||||
|
||||
Logging.d(TAG, "Stop capture done");
|
||||
}
|
||||
|
||||
@Override
|
||||
public void changeCaptureFormat(int width, int height, int framerate) {
|
||||
Logging.d(TAG, "changeCaptureFormat: " + width + "x" + height + "@" + framerate);
|
||||
synchronized (stateLock) {
|
||||
stopCapture();
|
||||
startCapture(width, height, framerate);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void dispose() {
|
||||
Logging.d(TAG, "dispose");
|
||||
stopCapture();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void switchCamera(final CameraSwitchHandler switchEventsHandler) {
|
||||
Logging.d(TAG, "switchCamera");
|
||||
cameraThreadHandler.post(new Runnable() {
|
||||
@Override
|
||||
public void run() {
|
||||
switchCameraInternal(switchEventsHandler);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
@Override
|
||||
public void addMediaRecorderToCamera(
|
||||
final MediaRecorder mediaRecorder, final MediaRecorderHandler mediaRecoderEventsHandler) {
|
||||
Logging.d(TAG, "addMediaRecorderToCamera");
|
||||
cameraThreadHandler.post(new Runnable() {
|
||||
@Override
|
||||
public void run() {
|
||||
updateMediaRecorderInternal(mediaRecorder, mediaRecoderEventsHandler);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
@Override
|
||||
public void removeMediaRecorderFromCamera(final MediaRecorderHandler mediaRecoderEventsHandler) {
|
||||
Logging.d(TAG, "removeMediaRecorderFromCamera");
|
||||
cameraThreadHandler.post(new Runnable() {
|
||||
@Override
|
||||
public void run() {
|
||||
updateMediaRecorderInternal(null /* mediaRecorder */, mediaRecoderEventsHandler);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isScreencast() {
|
||||
return false;
|
||||
}
|
||||
|
||||
public void printStackTrace() {
|
||||
Thread cameraThread = null;
|
||||
if (cameraThreadHandler != null) {
|
||||
cameraThread = cameraThreadHandler.getLooper().getThread();
|
||||
}
|
||||
if (cameraThread != null) {
|
||||
StackTraceElement[] cameraStackTrace = cameraThread.getStackTrace();
|
||||
if (cameraStackTrace.length > 0) {
|
||||
Logging.d(TAG, "CameraCapturer stack trace:");
|
||||
for (StackTraceElement traceElem : cameraStackTrace) {
|
||||
Logging.d(TAG, traceElem.toString());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private void reportCameraSwitchError(String error, CameraSwitchHandler switchEventsHandler) {
|
||||
Logging.e(TAG, error);
|
||||
if (switchEventsHandler != null) {
|
||||
switchEventsHandler.onCameraSwitchError(error);
|
||||
}
|
||||
}
|
||||
|
||||
private void switchCameraInternal(final CameraSwitchHandler switchEventsHandler) {
|
||||
Logging.d(TAG, "switchCamera internal");
|
||||
|
||||
final String[] deviceNames = cameraEnumerator.getDeviceNames();
|
||||
|
||||
if (deviceNames.length < 2) {
|
||||
if (switchEventsHandler != null) {
|
||||
switchEventsHandler.onCameraSwitchError("No camera to switch to.");
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
synchronized (stateLock) {
|
||||
if (switchState != SwitchState.IDLE) {
|
||||
reportCameraSwitchError("Camera switch already in progress.", switchEventsHandler);
|
||||
return;
|
||||
}
|
||||
if (mediaRecorderState != MediaRecorderState.IDLE) {
|
||||
reportCameraSwitchError("switchCamera: media recording is active", switchEventsHandler);
|
||||
return;
|
||||
}
|
||||
if (!sessionOpening && currentSession == null) {
|
||||
reportCameraSwitchError("switchCamera: camera is not running.", switchEventsHandler);
|
||||
return;
|
||||
}
|
||||
|
||||
this.switchEventsHandler = switchEventsHandler;
|
||||
if (sessionOpening) {
|
||||
switchState = SwitchState.PENDING;
|
||||
return;
|
||||
} else {
|
||||
switchState = SwitchState.IN_PROGRESS;
|
||||
}
|
||||
|
||||
Logging.d(TAG, "switchCamera: Stopping session");
|
||||
cameraStatistics.release();
|
||||
cameraStatistics = null;
|
||||
final CameraSession oldSession = currentSession;
|
||||
cameraThreadHandler.post(new Runnable() {
|
||||
@Override
|
||||
public void run() {
|
||||
oldSession.stop();
|
||||
}
|
||||
});
|
||||
currentSession = null;
|
||||
|
||||
int cameraNameIndex = Arrays.asList(deviceNames).indexOf(cameraName);
|
||||
cameraName = deviceNames[(cameraNameIndex + 1) % deviceNames.length];
|
||||
|
||||
sessionOpening = true;
|
||||
openAttemptsRemaining = 1;
|
||||
createSessionInternal(0, null /* mediaRecorder */);
|
||||
}
|
||||
Logging.d(TAG, "switchCamera done");
|
||||
}
|
||||
|
||||
private void reportUpdateMediaRecorderError(
|
||||
String error, MediaRecorderHandler mediaRecoderEventsHandler) {
|
||||
checkIsOnCameraThread();
|
||||
Logging.e(TAG, error);
|
||||
if (mediaRecoderEventsHandler != null) {
|
||||
mediaRecoderEventsHandler.onMediaRecorderError(error);
|
||||
}
|
||||
}
|
||||
|
||||
private void updateMediaRecorderInternal(
|
||||
MediaRecorder mediaRecorder, MediaRecorderHandler mediaRecoderEventsHandler) {
|
||||
checkIsOnCameraThread();
|
||||
boolean addMediaRecorder = (mediaRecorder != null);
|
||||
Logging.d(TAG,
|
||||
"updateMediaRecoderInternal internal. State: " + mediaRecorderState
|
||||
+ ". Switch state: " + switchState + ". Add MediaRecorder: " + addMediaRecorder);
|
||||
|
||||
synchronized (stateLock) {
|
||||
if ((addMediaRecorder && mediaRecorderState != MediaRecorderState.IDLE)
|
||||
|| (!addMediaRecorder && mediaRecorderState != MediaRecorderState.ACTIVE)) {
|
||||
reportUpdateMediaRecorderError(
|
||||
"Incorrect state for MediaRecorder update.", mediaRecoderEventsHandler);
|
||||
return;
|
||||
}
|
||||
if (switchState != SwitchState.IDLE) {
|
||||
reportUpdateMediaRecorderError(
|
||||
"MediaRecorder update while camera is switching.", mediaRecoderEventsHandler);
|
||||
return;
|
||||
}
|
||||
if (currentSession == null) {
|
||||
reportUpdateMediaRecorderError(
|
||||
"MediaRecorder update while camera is closed.", mediaRecoderEventsHandler);
|
||||
return;
|
||||
}
|
||||
if (sessionOpening) {
|
||||
reportUpdateMediaRecorderError(
|
||||
"MediaRecorder update while camera is still opening.", mediaRecoderEventsHandler);
|
||||
return;
|
||||
}
|
||||
|
||||
this.mediaRecorderEventsHandler = mediaRecoderEventsHandler;
|
||||
mediaRecorderState =
|
||||
addMediaRecorder ? MediaRecorderState.IDLE_TO_ACTIVE : MediaRecorderState.ACTIVE_TO_IDLE;
|
||||
|
||||
Logging.d(TAG, "updateMediaRecoder: Stopping session");
|
||||
cameraStatistics.release();
|
||||
cameraStatistics = null;
|
||||
final CameraSession oldSession = currentSession;
|
||||
cameraThreadHandler.post(new Runnable() {
|
||||
@Override
|
||||
public void run() {
|
||||
oldSession.stop();
|
||||
}
|
||||
});
|
||||
currentSession = null;
|
||||
|
||||
sessionOpening = true;
|
||||
openAttemptsRemaining = 1;
|
||||
createSessionInternal(0, mediaRecorder);
|
||||
}
|
||||
Logging.d(TAG, "updateMediaRecoderInternal done");
|
||||
}
|
||||
|
||||
private void checkIsOnCameraThread() {
|
||||
if (Thread.currentThread() != cameraThreadHandler.getLooper().getThread()) {
|
||||
Logging.e(TAG, "Check is on camera thread failed.");
|
||||
throw new RuntimeException("Not on camera thread.");
|
||||
}
|
||||
}
|
||||
|
||||
protected String getCameraName() {
|
||||
synchronized (stateLock) {
|
||||
return cameraName;
|
||||
}
|
||||
}
|
||||
|
||||
protected abstract void createCameraSession(
|
||||
CameraSession.CreateSessionCallback createSessionCallback, CameraSession.Events events,
|
||||
Context applicationContext, SurfaceTextureHelper surfaceTextureHelper,
|
||||
MediaRecorder mediaRecorder, String cameraName, int width, int height, int framerate);
|
||||
}
|
||||
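The capturer above is driven entirely through the handful of public methods in this file. The sketch below shows one plausible call sequence; it is illustrative only. It assumes a concrete CameraCapturer subclass and the surfaceHelper, appContext and observer objects already exist (none of them are defined in this diff), and it uses the CameraSwitchHandler callback interface as declared on CameraVideoCapturer in this SDK.

// Illustrative lifecycle only; the capturer, helper, context and observer are assumed inputs.
void runCaptureLifecycleSketch(CameraCapturer capturer, Context appContext,
    SurfaceTextureHelper surfaceHelper, CapturerObserver observer) {
  // Must happen before startCapture(); the camera thread is taken from the helper.
  capturer.initialize(surfaceHelper, appContext, observer);

  // Opens the camera asynchronously; the first frame triggers onFirstFrameAvailable().
  capturer.startCapture(1280, 720, 30);

  // Safe from any thread; the actual switch is posted to the camera thread.
  capturer.switchCamera(new CameraVideoCapturer.CameraSwitchHandler() {
    @Override
    public void onCameraSwitchDone(boolean isFrontCamera) { /* update UI if needed */ }
    @Override
    public void onCameraSwitchError(String errorDescription) { /* log or retry */ }
  });

  // Blocks until any in-flight session open has finished, then closes the session.
  capturer.stopCapture();
  capturer.dispose();
}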
42
sdk/android/src/java/org/webrtc/CameraSession.java
Normal file
@ -0,0 +1,42 @@
|
||||
/*
|
||||
* Copyright 2016 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
package org.webrtc;
|
||||
|
||||
interface CameraSession {
|
||||
enum FailureType { ERROR, DISCONNECTED }
|
||||
|
||||
// Callbacks are fired on the camera thread.
|
||||
public interface CreateSessionCallback {
|
||||
void onDone(CameraSession session);
|
||||
void onFailure(FailureType failureType, String error);
|
||||
}
|
||||
|
||||
// Events are fired on the camera thread.
|
||||
public interface Events {
|
||||
void onCameraOpening();
|
||||
void onCameraError(CameraSession session, String error);
|
||||
void onCameraDisconnected(CameraSession session);
|
||||
void onCameraClosed(CameraSession session);
|
||||
void onFrameCaptured(CameraSession session, VideoFrame frame);
|
||||
|
||||
// The old way of passing frames. Will be removed eventually.
|
||||
void onByteBufferFrameCaptured(
|
||||
CameraSession session, byte[] data, int width, int height, int rotation, long timestamp);
|
||||
void onTextureFrameCaptured(CameraSession session, int width, int height, int oesTextureId,
|
||||
float[] transformMatrix, int rotation, long timestamp);
|
||||
}
|
||||
|
||||
/**
|
||||
* Stops the capture. Waits until no more calls to the capture observer will be made.
|
||||
|
||||
*/
|
||||
void stop();
|
||||
}
|
||||
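Both callback interfaces above are invoked on the camera thread, so implementations should be cheap and must not block. A minimal sketch of an Events listener that only logs; the class name and log tag are made up for illustration, and the class would have to live in the org.webrtc package since CameraSession is package-private.

// Illustration only: a do-nothing-but-log CameraSession.Events implementation.
class LoggingCameraSessionEvents implements CameraSession.Events {
  private static final String TAG = "CameraSessionEvents";

  @Override
  public void onCameraOpening() {
    Logging.d(TAG, "Camera opening");
  }

  @Override
  public void onCameraError(CameraSession session, String error) {
    Logging.e(TAG, "Camera error: " + error);
  }

  @Override
  public void onCameraDisconnected(CameraSession session) {
    Logging.w(TAG, "Camera disconnected");
  }

  @Override
  public void onCameraClosed(CameraSession session) {
    Logging.d(TAG, "Camera closed");
  }

  @Override
  public void onFrameCaptured(CameraSession session, VideoFrame frame) {
    // Called on the camera thread; forward or release the frame promptly.
    Logging.d(TAG, "Frame " + frame.getRotatedWidth() + "x" + frame.getRotatedHeight());
  }

  @Override
  public void onByteBufferFrameCaptured(
      CameraSession session, byte[] data, int width, int height, int rotation, long timestamp) {}

  @Override
  public void onTextureFrameCaptured(CameraSession session, int width, int height,
      int oesTextureId, float[] transformMatrix, int rotation, long timestamp) {}
}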
95
sdk/android/src/java/org/webrtc/DynamicBitrateAdjuster.java
Normal file
@ -0,0 +1,95 @@
|
||||
/*
|
||||
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
package org.webrtc;
|
||||
|
||||
/**
|
||||
* BitrateAdjuster that tracks the bandwidth produced by an encoder and dynamically adjusts the
|
||||
* bitrate. Used for hardware codecs that pay attention to framerate but still deviate from the
|
||||
* target bitrate by unacceptable margins.
|
||||
*/
|
||||
class DynamicBitrateAdjuster extends BaseBitrateAdjuster {
|
||||
// Change the bitrate at most once every three seconds.
|
||||
private static final double BITRATE_ADJUSTMENT_SEC = 3.0;
|
||||
// Maximum bitrate adjustment scale - no more than 4 times.
|
||||
private static final double BITRATE_ADJUSTMENT_MAX_SCALE = 4;
|
||||
// Amount of adjustment steps to reach maximum scale.
|
||||
private static final int BITRATE_ADJUSTMENT_STEPS = 20;
|
||||
|
||||
private static final double BITS_PER_BYTE = 8.0;
|
||||
|
||||
// How far the codec has deviated above (or below) the target bitrate (tracked in bytes).
|
||||
private double deviationBytes = 0;
|
||||
private double timeSinceLastAdjustmentMs = 0;
|
||||
private int bitrateAdjustmentScaleExp = 0;
|
||||
|
||||
@Override
|
||||
public void setTargets(int targetBitrateBps, int targetFps) {
|
||||
if (this.targetBitrateBps > 0 && targetBitrateBps < this.targetBitrateBps) {
|
||||
// Rescale the accumulator level if the accumulator max decreases
|
||||
deviationBytes = deviationBytes * targetBitrateBps / this.targetBitrateBps;
|
||||
}
|
||||
super.setTargets(targetBitrateBps, targetFps);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void reportEncodedFrame(int size) {
|
||||
if (targetFps == 0) {
|
||||
return;
|
||||
}
|
||||
|
||||
// Accumulate the difference between actual and expected frame sizes.
|
||||
double expectedBytesPerFrame = (targetBitrateBps / BITS_PER_BYTE) / targetFps;
|
||||
deviationBytes += (size - expectedBytesPerFrame);
|
||||
timeSinceLastAdjustmentMs += 1000.0 / targetFps;
|
||||
|
||||
// Adjust the bitrate when the encoder accumulates one second's worth of data in excess or
|
||||
// shortfall of the target.
|
||||
double deviationThresholdBytes = targetBitrateBps / BITS_PER_BYTE;
|
||||
|
||||
// Cap the deviation, i.e., don't let it grow beyond some level to avoid using too old data for
|
||||
// bitrate adjustment. This also prevents taking more than 3 "steps" in a given 3-second cycle.
|
||||
double deviationCap = BITRATE_ADJUSTMENT_SEC * deviationThresholdBytes;
|
||||
deviationBytes = Math.min(deviationBytes, deviationCap);
|
||||
deviationBytes = Math.max(deviationBytes, -deviationCap);
|
||||
|
||||
// Do bitrate adjustment every 3 seconds if actual encoder bitrate deviates too much
|
||||
// from the target value.
|
||||
if (timeSinceLastAdjustmentMs <= 1000 * BITRATE_ADJUSTMENT_SEC) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (deviationBytes > deviationThresholdBytes) {
|
||||
// Encoder generates too high bitrate - need to reduce the scale.
|
||||
int bitrateAdjustmentInc = (int) (deviationBytes / deviationThresholdBytes + 0.5);
|
||||
bitrateAdjustmentScaleExp -= bitrateAdjustmentInc;
|
||||
// Don't let the adjustment scale drop below -BITRATE_ADJUSTMENT_STEPS.
|
||||
// This sets a minimum exponent of -1 (bitrateAdjustmentScaleExp / BITRATE_ADJUSTMENT_STEPS).
|
||||
bitrateAdjustmentScaleExp = Math.max(bitrateAdjustmentScaleExp, -BITRATE_ADJUSTMENT_STEPS);
|
||||
deviationBytes = deviationThresholdBytes;
|
||||
} else if (deviationBytes < -deviationThresholdBytes) {
|
||||
// Encoder generates too low bitrate - need to increase the scale.
|
||||
int bitrateAdjustmentInc = (int) (-deviationBytes / deviationThresholdBytes + 0.5);
|
||||
bitrateAdjustmentScaleExp += bitrateAdjustmentInc;
|
||||
// Don't let the adjustment scale exceed BITRATE_ADJUSTMENT_STEPS.
|
||||
// This sets a maximum exponent of 1 (bitrateAdjustmentScaleExp / BITRATE_ADJUSTMENT_STEPS).
|
||||
bitrateAdjustmentScaleExp = Math.min(bitrateAdjustmentScaleExp, BITRATE_ADJUSTMENT_STEPS);
|
||||
deviationBytes = -deviationThresholdBytes;
|
||||
}
|
||||
timeSinceLastAdjustmentMs = 0;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int getAdjustedBitrateBps() {
|
||||
return (int) (targetBitrateBps
|
||||
* Math.pow(BITRATE_ADJUSTMENT_MAX_SCALE,
|
||||
(double) bitrateAdjustmentScaleExp / BITRATE_ADJUSTMENT_STEPS));
|
||||
}
|
||||
}
|
||||
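To make the adjustment math above concrete, here is a small worked example with assumed numbers (a 1 Mbps target). The helper simply mirrors the formula in getAdjustedBitrateBps() with the same constants and is not part of the class.

// Illustration only: max scale 4, 20 adjustment steps, as in the class above.
static int adjustedBitrateForExample(int targetBitrateBps, int scaleExp) {
  return (int) (targetBitrateBps * Math.pow(4, scaleExp / 20.0));
}
// For a 1 Mbps target, one second of data is 1000000 / 8 = 125000 bytes, which is the
// deviation threshold. Overshooting by 250000 bytes in a 3-second window gives
// bitrateAdjustmentInc = (int) (250000.0 / 125000 + 0.5) = 2, lowering the exponent by 2.
// adjustedBitrateForExample(1000000, -10) -> about 500000 bps (4^-0.5 halves the target)
// adjustedBitrateForExample(1000000,  20) -> 4000000 bps (upper bound, 4x)
// adjustedBitrateForExample(1000000, -20) ->  250000 bps (lower bound, 0.25x)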
313
sdk/android/src/java/org/webrtc/EglBase10.java
Normal file
@ -0,0 +1,313 @@
|
||||
/*
|
||||
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
package org.webrtc;
|
||||
|
||||
import android.graphics.Canvas;
|
||||
import android.graphics.Rect;
|
||||
import android.graphics.SurfaceTexture;
|
||||
import android.view.Surface;
|
||||
import android.view.SurfaceHolder;
|
||||
|
||||
import javax.microedition.khronos.egl.EGL10;
|
||||
import javax.microedition.khronos.egl.EGLConfig;
|
||||
import javax.microedition.khronos.egl.EGLContext;
|
||||
import javax.microedition.khronos.egl.EGLDisplay;
|
||||
import javax.microedition.khronos.egl.EGLSurface;
|
||||
|
||||
/**
|
||||
* Holds EGL state and utility methods for handling an egl 1.0 EGLContext, an EGLDisplay,
|
||||
* and an EGLSurface.
|
||||
*/
|
||||
class EglBase10 extends EglBase {
|
||||
// This constant is taken from EGL14.EGL_CONTEXT_CLIENT_VERSION.
|
||||
private static final int EGL_CONTEXT_CLIENT_VERSION = 0x3098;
|
||||
|
||||
private final EGL10 egl;
|
||||
private EGLContext eglContext;
|
||||
private EGLConfig eglConfig;
|
||||
private EGLDisplay eglDisplay;
|
||||
private EGLSurface eglSurface = EGL10.EGL_NO_SURFACE;
|
||||
|
||||
// EGL wrapper for an actual EGLContext.
|
||||
public static class Context extends EglBase.Context {
|
||||
private final EGLContext eglContext;
|
||||
|
||||
public Context(EGLContext eglContext) {
|
||||
this.eglContext = eglContext;
|
||||
}
|
||||
}
|
||||
|
||||
// Create a new context with the specified config type, sharing data with sharedContext.
|
||||
public EglBase10(Context sharedContext, int[] configAttributes) {
|
||||
this.egl = (EGL10) EGLContext.getEGL();
|
||||
eglDisplay = getEglDisplay();
|
||||
eglConfig = getEglConfig(eglDisplay, configAttributes);
|
||||
eglContext = createEglContext(sharedContext, eglDisplay, eglConfig);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void createSurface(Surface surface) {
|
||||
/**
|
||||
* We have to wrap Surface in a SurfaceHolder because for some reason eglCreateWindowSurface
|
||||
* couldn't actually take a Surface object until API 17. Older versions fortunately just call
|
||||
* SurfaceHolder.getSurface(), so we'll do that. No other methods are relevant.
|
||||
*/
|
||||
class FakeSurfaceHolder implements SurfaceHolder {
|
||||
private final Surface surface;
|
||||
|
||||
FakeSurfaceHolder(Surface surface) {
|
||||
this.surface = surface;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void addCallback(Callback callback) {}
|
||||
|
||||
@Override
|
||||
public void removeCallback(Callback callback) {}
|
||||
|
||||
@Override
|
||||
public boolean isCreating() {
|
||||
return false;
|
||||
}
|
||||
|
||||
@Deprecated
|
||||
@Override
|
||||
public void setType(int i) {}
|
||||
|
||||
@Override
|
||||
public void setFixedSize(int i, int i2) {}
|
||||
|
||||
@Override
|
||||
public void setSizeFromLayout() {}
|
||||
|
||||
@Override
|
||||
public void setFormat(int i) {}
|
||||
|
||||
@Override
|
||||
public void setKeepScreenOn(boolean b) {}
|
||||
|
||||
@Override
|
||||
public Canvas lockCanvas() {
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Canvas lockCanvas(Rect rect) {
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void unlockCanvasAndPost(Canvas canvas) {}
|
||||
|
||||
@Override
|
||||
public Rect getSurfaceFrame() {
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Surface getSurface() {
|
||||
return surface;
|
||||
}
|
||||
}
|
||||
|
||||
createSurfaceInternal(new FakeSurfaceHolder(surface));
|
||||
}
|
||||
|
||||
// Create EGLSurface from the Android SurfaceTexture.
|
||||
@Override
|
||||
public void createSurface(SurfaceTexture surfaceTexture) {
|
||||
createSurfaceInternal(surfaceTexture);
|
||||
}
|
||||
|
||||
// Create EGLSurface from either a SurfaceHolder or a SurfaceTexture.
|
||||
private void createSurfaceInternal(Object nativeWindow) {
|
||||
if (!(nativeWindow instanceof SurfaceHolder) && !(nativeWindow instanceof SurfaceTexture)) {
|
||||
throw new IllegalStateException("Input must be either a SurfaceHolder or SurfaceTexture");
|
||||
}
|
||||
checkIsNotReleased();
|
||||
if (eglSurface != EGL10.EGL_NO_SURFACE) {
|
||||
throw new RuntimeException("Already has an EGLSurface");
|
||||
}
|
||||
int[] surfaceAttribs = {EGL10.EGL_NONE};
|
||||
eglSurface = egl.eglCreateWindowSurface(eglDisplay, eglConfig, nativeWindow, surfaceAttribs);
|
||||
if (eglSurface == EGL10.EGL_NO_SURFACE) {
|
||||
throw new RuntimeException(
|
||||
"Failed to create window surface: 0x" + Integer.toHexString(egl.eglGetError()));
|
||||
}
|
||||
}
|
||||
|
||||
// Create dummy 1x1 pixel buffer surface so the context can be made current.
|
||||
@Override
|
||||
public void createDummyPbufferSurface() {
|
||||
createPbufferSurface(1, 1);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void createPbufferSurface(int width, int height) {
|
||||
checkIsNotReleased();
|
||||
if (eglSurface != EGL10.EGL_NO_SURFACE) {
|
||||
throw new RuntimeException("Already has an EGLSurface");
|
||||
}
|
||||
int[] surfaceAttribs = {EGL10.EGL_WIDTH, width, EGL10.EGL_HEIGHT, height, EGL10.EGL_NONE};
|
||||
eglSurface = egl.eglCreatePbufferSurface(eglDisplay, eglConfig, surfaceAttribs);
|
||||
if (eglSurface == EGL10.EGL_NO_SURFACE) {
|
||||
throw new RuntimeException("Failed to create pixel buffer surface with size " + width + "x"
|
||||
+ height + ": 0x" + Integer.toHexString(egl.eglGetError()));
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public org.webrtc.EglBase.Context getEglBaseContext() {
|
||||
return new EglBase10.Context(eglContext);
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean hasSurface() {
|
||||
return eglSurface != EGL10.EGL_NO_SURFACE;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int surfaceWidth() {
|
||||
final int[] widthArray = new int[1];
|
||||
egl.eglQuerySurface(eglDisplay, eglSurface, EGL10.EGL_WIDTH, widthArray);
|
||||
return widthArray[0];
|
||||
}
|
||||
|
||||
@Override
|
||||
public int surfaceHeight() {
|
||||
final int[] heightArray = new int[1];
|
||||
egl.eglQuerySurface(eglDisplay, eglSurface, EGL10.EGL_HEIGHT, heightArray);
|
||||
return heightArray[0];
|
||||
}
|
||||
|
||||
@Override
|
||||
public void releaseSurface() {
|
||||
if (eglSurface != EGL10.EGL_NO_SURFACE) {
|
||||
egl.eglDestroySurface(eglDisplay, eglSurface);
|
||||
eglSurface = EGL10.EGL_NO_SURFACE;
|
||||
}
|
||||
}
|
||||
|
||||
private void checkIsNotReleased() {
|
||||
if (eglDisplay == EGL10.EGL_NO_DISPLAY || eglContext == EGL10.EGL_NO_CONTEXT
|
||||
|| eglConfig == null) {
|
||||
throw new RuntimeException("This object has been released");
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void release() {
|
||||
checkIsNotReleased();
|
||||
releaseSurface();
|
||||
detachCurrent();
|
||||
egl.eglDestroyContext(eglDisplay, eglContext);
|
||||
egl.eglTerminate(eglDisplay);
|
||||
eglContext = EGL10.EGL_NO_CONTEXT;
|
||||
eglDisplay = EGL10.EGL_NO_DISPLAY;
|
||||
eglConfig = null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void makeCurrent() {
|
||||
checkIsNotReleased();
|
||||
if (eglSurface == EGL10.EGL_NO_SURFACE) {
|
||||
throw new RuntimeException("No EGLSurface - can't make current");
|
||||
}
|
||||
synchronized (EglBase.lock) {
|
||||
if (!egl.eglMakeCurrent(eglDisplay, eglSurface, eglSurface, eglContext)) {
|
||||
throw new RuntimeException(
|
||||
"eglMakeCurrent failed: 0x" + Integer.toHexString(egl.eglGetError()));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Detach the current EGL context, so that it can be made current on another thread.
|
||||
@Override
|
||||
public void detachCurrent() {
|
||||
synchronized (EglBase.lock) {
|
||||
if (!egl.eglMakeCurrent(
|
||||
eglDisplay, EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_CONTEXT)) {
|
||||
throw new RuntimeException(
|
||||
"eglDetachCurrent failed: 0x" + Integer.toHexString(egl.eglGetError()));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void swapBuffers() {
|
||||
checkIsNotReleased();
|
||||
if (eglSurface == EGL10.EGL_NO_SURFACE) {
|
||||
throw new RuntimeException("No EGLSurface - can't swap buffers");
|
||||
}
|
||||
synchronized (EglBase.lock) {
|
||||
egl.eglSwapBuffers(eglDisplay, eglSurface);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void swapBuffers(long timeStampNs) {
|
||||
// Setting presentation time is not supported for EGL 1.0.
|
||||
swapBuffers();
|
||||
}
|
||||
|
||||
// Return an EGLDisplay, or die trying.
|
||||
private EGLDisplay getEglDisplay() {
|
||||
EGLDisplay eglDisplay = egl.eglGetDisplay(EGL10.EGL_DEFAULT_DISPLAY);
|
||||
if (eglDisplay == EGL10.EGL_NO_DISPLAY) {
|
||||
throw new RuntimeException(
|
||||
"Unable to get EGL10 display: 0x" + Integer.toHexString(egl.eglGetError()));
|
||||
}
|
||||
int[] version = new int[2];
|
||||
if (!egl.eglInitialize(eglDisplay, version)) {
|
||||
throw new RuntimeException(
|
||||
"Unable to initialize EGL10: 0x" + Integer.toHexString(egl.eglGetError()));
|
||||
}
|
||||
return eglDisplay;
|
||||
}
|
||||
|
||||
// Return an EGLConfig, or die trying.
|
||||
private EGLConfig getEglConfig(EGLDisplay eglDisplay, int[] configAttributes) {
|
||||
EGLConfig[] configs = new EGLConfig[1];
|
||||
int[] numConfigs = new int[1];
|
||||
if (!egl.eglChooseConfig(eglDisplay, configAttributes, configs, configs.length, numConfigs)) {
|
||||
throw new RuntimeException(
|
||||
"eglChooseConfig failed: 0x" + Integer.toHexString(egl.eglGetError()));
|
||||
}
|
||||
if (numConfigs[0] <= 0) {
|
||||
throw new RuntimeException("Unable to find any matching EGL config");
|
||||
}
|
||||
final EGLConfig eglConfig = configs[0];
|
||||
if (eglConfig == null) {
|
||||
throw new RuntimeException("eglChooseConfig returned null");
|
||||
}
|
||||
return eglConfig;
|
||||
}
|
||||
|
||||
// Return an EGLContext, or die trying.
|
||||
private EGLContext createEglContext(
|
||||
Context sharedContext, EGLDisplay eglDisplay, EGLConfig eglConfig) {
|
||||
if (sharedContext != null && sharedContext.eglContext == EGL10.EGL_NO_CONTEXT) {
|
||||
throw new RuntimeException("Invalid sharedContext");
|
||||
}
|
||||
int[] contextAttributes = {EGL_CONTEXT_CLIENT_VERSION, 2, EGL10.EGL_NONE};
|
||||
EGLContext rootContext =
|
||||
sharedContext == null ? EGL10.EGL_NO_CONTEXT : sharedContext.eglContext;
|
||||
final EGLContext eglContext;
|
||||
synchronized (EglBase.lock) {
|
||||
eglContext = egl.eglCreateContext(eglDisplay, eglConfig, rootContext, contextAttributes);
|
||||
}
|
||||
if (eglContext == EGL10.EGL_NO_CONTEXT) {
|
||||
throw new RuntimeException(
|
||||
"Failed to create EGL context: 0x" + Integer.toHexString(egl.eglGetError()));
|
||||
}
|
||||
return eglContext;
|
||||
}
|
||||
}
|
||||
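EglBase10 is package-private, so applications normally reach it through the EglBase factory rather than directly; inside the org.webrtc package, though, a minimal off-screen setup looks roughly like the sketch below. The config attribute list is an assumption; any valid EGL10 attribute array will do.

// Illustration only: create and use an off-screen EGL 1.0 context.
static void offscreenEgl10Sketch() {
  // 4 is EGL_OPENGL_ES2_BIT, which the EGL10 interface does not name.
  int[] configAttributes = {EGL10.EGL_RED_SIZE, 8, EGL10.EGL_GREEN_SIZE, 8,
      EGL10.EGL_BLUE_SIZE, 8, EGL10.EGL_RENDERABLE_TYPE, 4, EGL10.EGL_NONE};
  EglBase10 eglBase = new EglBase10(null /* sharedContext */, configAttributes);
  eglBase.createDummyPbufferSurface(); // 1x1 pbuffer so the context can be made current
  eglBase.makeCurrent();
  // ... issue GLES calls on this thread ...
  eglBase.release(); // detaches the context and destroys the surface and context
}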
266
sdk/android/src/java/org/webrtc/EglBase14.java
Normal file
@ -0,0 +1,266 @@
|
||||
/*
|
||||
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
package org.webrtc;
|
||||
|
||||
import android.annotation.TargetApi;
|
||||
import android.graphics.SurfaceTexture;
|
||||
import android.opengl.EGL14;
|
||||
import android.opengl.EGLConfig;
|
||||
import android.opengl.EGLContext;
|
||||
import android.opengl.EGLDisplay;
|
||||
import android.opengl.EGLExt;
|
||||
import android.opengl.EGLSurface;
|
||||
import android.view.Surface;
|
||||
|
||||
/**
|
||||
* Holds EGL state and utility methods for handling an EGL14 EGLContext, an EGLDisplay,
|
||||
* and an EGLSurface.
|
||||
*/
|
||||
@TargetApi(18)
|
||||
class EglBase14 extends EglBase {
|
||||
private static final String TAG = "EglBase14";
|
||||
private static final int EGLExt_SDK_VERSION = android.os.Build.VERSION_CODES.JELLY_BEAN_MR2;
|
||||
private static final int CURRENT_SDK_VERSION = android.os.Build.VERSION.SDK_INT;
|
||||
private EGLContext eglContext;
|
||||
private EGLConfig eglConfig;
|
||||
private EGLDisplay eglDisplay;
|
||||
private EGLSurface eglSurface = EGL14.EGL_NO_SURFACE;
|
||||
|
||||
// EGL 1.4 is supported from API 17, but EGLExt, which is used for setting the presentation
|
||||
// time stamp on a surface, is only supported from API 18, so we require 18.
|
||||
public static boolean isEGL14Supported() {
|
||||
Logging.d(TAG,
|
||||
"SDK version: " + CURRENT_SDK_VERSION
|
||||
+ ". isEGL14Supported: " + (CURRENT_SDK_VERSION >= EGLExt_SDK_VERSION));
|
||||
return (CURRENT_SDK_VERSION >= EGLExt_SDK_VERSION);
|
||||
}
|
||||
|
||||
public static class Context extends EglBase.Context {
|
||||
private final android.opengl.EGLContext egl14Context;
|
||||
|
||||
public Context(android.opengl.EGLContext eglContext) {
|
||||
this.egl14Context = eglContext;
|
||||
}
|
||||
}
|
||||
|
||||
// Create a new context with the specified config type, sharing data with sharedContext.
|
||||
// |sharedContext| may be null.
|
||||
public EglBase14(EglBase14.Context sharedContext, int[] configAttributes) {
|
||||
eglDisplay = getEglDisplay();
|
||||
eglConfig = getEglConfig(eglDisplay, configAttributes);
|
||||
eglContext = createEglContext(sharedContext, eglDisplay, eglConfig);
|
||||
}
|
||||
|
||||
// Create EGLSurface from the Android Surface.
|
||||
@Override
|
||||
public void createSurface(Surface surface) {
|
||||
createSurfaceInternal(surface);
|
||||
}
|
||||
|
||||
// Create EGLSurface from the Android SurfaceTexture.
|
||||
@Override
|
||||
public void createSurface(SurfaceTexture surfaceTexture) {
|
||||
createSurfaceInternal(surfaceTexture);
|
||||
}
|
||||
|
||||
// Create EGLSurface from either Surface or SurfaceTexture.
|
||||
private void createSurfaceInternal(Object surface) {
|
||||
if (!(surface instanceof Surface) && !(surface instanceof SurfaceTexture)) {
|
||||
throw new IllegalStateException("Input must be either a Surface or SurfaceTexture");
|
||||
}
|
||||
checkIsNotReleased();
|
||||
if (eglSurface != EGL14.EGL_NO_SURFACE) {
|
||||
throw new RuntimeException("Already has an EGLSurface");
|
||||
}
|
||||
int[] surfaceAttribs = {EGL14.EGL_NONE};
|
||||
eglSurface = EGL14.eglCreateWindowSurface(eglDisplay, eglConfig, surface, surfaceAttribs, 0);
|
||||
if (eglSurface == EGL14.EGL_NO_SURFACE) {
|
||||
throw new RuntimeException(
|
||||
"Failed to create window surface: 0x" + Integer.toHexString(EGL14.eglGetError()));
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void createDummyPbufferSurface() {
|
||||
createPbufferSurface(1, 1);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void createPbufferSurface(int width, int height) {
|
||||
checkIsNotReleased();
|
||||
if (eglSurface != EGL14.EGL_NO_SURFACE) {
|
||||
throw new RuntimeException("Already has an EGLSurface");
|
||||
}
|
||||
int[] surfaceAttribs = {EGL14.EGL_WIDTH, width, EGL14.EGL_HEIGHT, height, EGL14.EGL_NONE};
|
||||
eglSurface = EGL14.eglCreatePbufferSurface(eglDisplay, eglConfig, surfaceAttribs, 0);
|
||||
if (eglSurface == EGL14.EGL_NO_SURFACE) {
|
||||
throw new RuntimeException("Failed to create pixel buffer surface with size " + width + "x"
|
||||
+ height + ": 0x" + Integer.toHexString(EGL14.eglGetError()));
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public Context getEglBaseContext() {
|
||||
return new EglBase14.Context(eglContext);
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean hasSurface() {
|
||||
return eglSurface != EGL14.EGL_NO_SURFACE;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int surfaceWidth() {
|
||||
final int[] widthArray = new int[1];
|
||||
EGL14.eglQuerySurface(eglDisplay, eglSurface, EGL14.EGL_WIDTH, widthArray, 0);
|
||||
return widthArray[0];
|
||||
}
|
||||
|
||||
@Override
|
||||
public int surfaceHeight() {
|
||||
final int[] heightArray = new int[1];
|
||||
EGL14.eglQuerySurface(eglDisplay, eglSurface, EGL14.EGL_HEIGHT, heightArray, 0);
|
||||
return heightArray[0];
|
||||
}
|
||||
|
||||
@Override
|
||||
public void releaseSurface() {
|
||||
if (eglSurface != EGL14.EGL_NO_SURFACE) {
|
||||
EGL14.eglDestroySurface(eglDisplay, eglSurface);
|
||||
eglSurface = EGL14.EGL_NO_SURFACE;
|
||||
}
|
||||
}
|
||||
|
||||
private void checkIsNotReleased() {
|
||||
if (eglDisplay == EGL14.EGL_NO_DISPLAY || eglContext == EGL14.EGL_NO_CONTEXT
|
||||
|| eglConfig == null) {
|
||||
throw new RuntimeException("This object has been released");
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void release() {
|
||||
checkIsNotReleased();
|
||||
releaseSurface();
|
||||
detachCurrent();
|
||||
EGL14.eglDestroyContext(eglDisplay, eglContext);
|
||||
EGL14.eglReleaseThread();
|
||||
EGL14.eglTerminate(eglDisplay);
|
||||
eglContext = EGL14.EGL_NO_CONTEXT;
|
||||
eglDisplay = EGL14.EGL_NO_DISPLAY;
|
||||
eglConfig = null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void makeCurrent() {
|
||||
checkIsNotReleased();
|
||||
if (eglSurface == EGL14.EGL_NO_SURFACE) {
|
||||
throw new RuntimeException("No EGLSurface - can't make current");
|
||||
}
|
||||
synchronized (EglBase.lock) {
|
||||
if (!EGL14.eglMakeCurrent(eglDisplay, eglSurface, eglSurface, eglContext)) {
|
||||
throw new RuntimeException(
|
||||
"eglMakeCurrent failed: 0x" + Integer.toHexString(EGL14.eglGetError()));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Detach the current EGL context, so that it can be made current on another thread.
|
||||
@Override
|
||||
public void detachCurrent() {
|
||||
synchronized (EglBase.lock) {
|
||||
if (!EGL14.eglMakeCurrent(
|
||||
eglDisplay, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_CONTEXT)) {
|
||||
throw new RuntimeException(
|
||||
"eglDetachCurrent failed: 0x" + Integer.toHexString(EGL14.eglGetError()));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void swapBuffers() {
|
||||
checkIsNotReleased();
|
||||
if (eglSurface == EGL14.EGL_NO_SURFACE) {
|
||||
throw new RuntimeException("No EGLSurface - can't swap buffers");
|
||||
}
|
||||
synchronized (EglBase.lock) {
|
||||
EGL14.eglSwapBuffers(eglDisplay, eglSurface);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void swapBuffers(long timeStampNs) {
|
||||
checkIsNotReleased();
|
||||
if (eglSurface == EGL14.EGL_NO_SURFACE) {
|
||||
throw new RuntimeException("No EGLSurface - can't swap buffers");
|
||||
}
|
||||
synchronized (EglBase.lock) {
|
||||
// See
|
||||
// https://android.googlesource.com/platform/frameworks/native/+/tools_r22.2/opengl/specs/EGL_ANDROID_presentation_time.txt
|
||||
EGLExt.eglPresentationTimeANDROID(eglDisplay, eglSurface, timeStampNs);
|
||||
EGL14.eglSwapBuffers(eglDisplay, eglSurface);
|
||||
}
|
||||
}
|
||||
|
||||
// Return an EGLDisplay, or die trying.
|
||||
private static EGLDisplay getEglDisplay() {
|
||||
EGLDisplay eglDisplay = EGL14.eglGetDisplay(EGL14.EGL_DEFAULT_DISPLAY);
|
||||
if (eglDisplay == EGL14.EGL_NO_DISPLAY) {
|
||||
throw new RuntimeException(
|
||||
"Unable to get EGL14 display: 0x" + Integer.toHexString(EGL14.eglGetError()));
|
||||
}
|
||||
int[] version = new int[2];
|
||||
if (!EGL14.eglInitialize(eglDisplay, version, 0, version, 1)) {
|
||||
throw new RuntimeException(
|
||||
"Unable to initialize EGL14: 0x" + Integer.toHexString(EGL14.eglGetError()));
|
||||
}
|
||||
return eglDisplay;
|
||||
}
|
||||
|
||||
// Return an EGLConfig, or die trying.
|
||||
private static EGLConfig getEglConfig(EGLDisplay eglDisplay, int[] configAttributes) {
|
||||
EGLConfig[] configs = new EGLConfig[1];
|
||||
int[] numConfigs = new int[1];
|
||||
if (!EGL14.eglChooseConfig(
|
||||
eglDisplay, configAttributes, 0, configs, 0, configs.length, numConfigs, 0)) {
|
||||
throw new RuntimeException(
|
||||
"eglChooseConfig failed: 0x" + Integer.toHexString(EGL14.eglGetError()));
|
||||
}
|
||||
if (numConfigs[0] <= 0) {
|
||||
throw new RuntimeException("Unable to find any matching EGL config");
|
||||
}
|
||||
final EGLConfig eglConfig = configs[0];
|
||||
if (eglConfig == null) {
|
||||
throw new RuntimeException("eglChooseConfig returned null");
|
||||
}
|
||||
return eglConfig;
|
||||
}
|
||||
|
||||
// Return an EGLContext, or die trying.
|
||||
private static EGLContext createEglContext(
|
||||
EglBase14.Context sharedContext, EGLDisplay eglDisplay, EGLConfig eglConfig) {
|
||||
if (sharedContext != null && sharedContext.egl14Context == EGL14.EGL_NO_CONTEXT) {
|
||||
throw new RuntimeException("Invalid sharedContext");
|
||||
}
|
||||
int[] contextAttributes = {EGL14.EGL_CONTEXT_CLIENT_VERSION, 2, EGL14.EGL_NONE};
|
||||
EGLContext rootContext =
|
||||
sharedContext == null ? EGL14.EGL_NO_CONTEXT : sharedContext.egl14Context;
|
||||
final EGLContext eglContext;
|
||||
synchronized (EglBase.lock) {
|
||||
eglContext = EGL14.eglCreateContext(eglDisplay, eglConfig, rootContext, contextAttributes, 0);
|
||||
}
|
||||
if (eglContext == EGL14.EGL_NO_CONTEXT) {
|
||||
throw new RuntimeException(
|
||||
"Failed to create EGL context: 0x" + Integer.toHexString(EGL14.eglGetError()));
|
||||
}
|
||||
return eglContext;
|
||||
}
|
||||
}
|
||||
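The main behavioural difference from EglBase10 above is that swapBuffers(long) can forward a presentation timestamp to the compositor. A rough sketch, assuming an existing android.view.Surface (for example a MediaCodec input surface) and a frame timestamp in nanoseconds; both are placeholders, not defined in this diff.

// Illustration only: outputSurface and frameTimestampNs are assumed inputs.
static void presentationTimeSketch(Surface outputSurface, long frameTimestampNs) {
  if (!EglBase14.isEGL14Supported()) {
    return;
  }
  int[] configAttributes = {EGL14.EGL_RED_SIZE, 8, EGL14.EGL_GREEN_SIZE, 8, EGL14.EGL_BLUE_SIZE,
      8, EGL14.EGL_RENDERABLE_TYPE, EGL14.EGL_OPENGL_ES2_BIT, EGL14.EGL_NONE};
  EglBase14 eglBase = new EglBase14(null /* sharedContext */, configAttributes);
  eglBase.createSurface(outputSurface); // e.g. a MediaCodec input surface
  eglBase.makeCurrent();
  // ... draw the frame with GLES here ...
  eglBase.swapBuffers(frameTimestampNs); // forwarded to eglPresentationTimeANDROID
  eglBase.release();
}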
Some files were not shown because too many files have changed in this diff.