This implementation greatly simplifies the Android video capturing stack. The
old stack will be removed soon in a separate CL. Constraints will not be
supported in the new implementation; instead, apps request a format directly
and the closest supported format is selected.

Changes needed from apps (see the sketch after this list):
1. Use the new createVideoSource overload without constraints.
2. Call startCapture manually.
3. Don't call videoSource.stop()/restart(); use startCapture()/stopCapture()
   instead.
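
For illustration, a minimal sketch of the migration on the app side. The
factory and capturer instances are assumed to already exist, and the
1280x720 at 30 fps format is an example, not a requirement:

  // Old stack: constraints-based creation; capture started automatically.
  //   VideoSource videoSource =
  //       factory.createVideoSource(capturer, videoConstraints);
  //   videoSource.stop();
  //   videoSource.restart();

  // New stack: request a format directly and control capture explicitly.
  VideoSource videoSource = factory.createVideoSource(capturer);
  capturer.startCapture(1280, 720, 30); // Closest supported format is used.

  // Pause and resume through the capturer, not the source. Note that
  // stopCapture() may throw InterruptedException.
  try {
    capturer.stopCapture();
  } catch (InterruptedException e) {
    Thread.currentThread().interrupt();
  }
  capturer.startCapture(1280, 720, 30);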

R=magjed@webrtc.org
TBR=kjellander@webrtc.org

Review URL: https://codereview.webrtc.org/2127893002 .

Cr-Commit-Position: refs/heads/master@{#13504}
Author: Sami Kalliomaki
Date:   2016-07-20 16:13:08 +02:00
Parent: 70ffead256
Commit: 16032126ed
15 changed files with 664 additions and 73 deletions

View File

@@ -156,6 +156,7 @@ if (is_android && !build_with_chromium) {
"android/jni/androidnetworkmonitor_jni.h",
"android/jni/androidvideocapturer_jni.cc",
"android/jni/androidvideocapturer_jni.h",
"android/jni/androidvideotracksource_jni.cc",
"android/jni/classreferenceholder.cc",
"android/jni/classreferenceholder.h",
"android/jni/jni_helpers.cc",
@@ -167,6 +168,8 @@ if (is_android && !build_with_chromium) {
"android/jni/surfacetexturehelper_jni.h",
"androidvideocapturer.cc",
"androidvideocapturer.h",
"androidvideotracksource.cc",
"androidvideotracksource.h",
]
configs += [

View File

@@ -6,6 +6,9 @@ tkchin@webrtc.org
tommi@webrtc.org
deadbeef@webrtc.org
per-file androidvideotracksource.*=sakal@webrtc.org
per-file androidvideotracksource.*=magjed@webrtc.org
# These are for the common case of adding or renaming files. If you're doing
# structural changes, please get a review from a reviewer in this file.
per-file *.gyp=*

View File

@@ -575,6 +575,7 @@ public class Camera2Capturer implements
if (eventsHandler != null) {
eventsHandler.onCameraClosed();
}
capturerObserver.onCapturerStopped();
}
}

View File

@@ -111,8 +111,9 @@ public class PeerConnectionFactory {
nativeCreateLocalMediaStream(nativeFactory, label));
}
// The VideoSource takes ownership of |capturer|, so capturer.release() should not be called
// manually after this.
// The VideoSource takes ownership of |capturer|, so capturer.dispose() should not be called
// manually after this. Video capturer is automatically started so there is no need to call
// startCapture after this method.
public VideoSource createVideoSource(
VideoCapturer capturer, MediaConstraints constraints) {
final EglBase.Context eglContext =
@@ -121,6 +122,17 @@
eglContext, capturer, constraints));
}
public VideoSource createVideoSource(VideoCapturer capturer) {
final EglBase.Context eglContext =
localEglbase == null ? null : localEglbase.getEglBaseContext();
long nativeAndroidVideoTrackSource = nativeCreateVideoSource2(nativeFactory, eglContext);
VideoCapturer.CapturerObserver capturerObserver
= new VideoCapturer.AndroidVideoTrackSourceObserver(nativeAndroidVideoTrackSource);
nativeInitializeVideoCapturer(nativeFactory, capturer, nativeAndroidVideoTrackSource,
capturerObserver);
return new VideoSource(nativeAndroidVideoTrackSource);
}
public VideoTrack createVideoTrack(String id, VideoSource source) {
return new VideoTrack(nativeCreateVideoTrack(
nativeFactory, id, source.nativeSource));
@@ -239,6 +251,13 @@ public class PeerConnectionFactory {
long nativeFactory, EglBase.Context eglContext, VideoCapturer videoCapturer,
MediaConstraints constraints);
private static native long nativeCreateVideoSource2(
long nativeFactory, EglBase.Context eglContext);
private static native void nativeInitializeVideoCapturer(
long native_factory, VideoCapturer j_video_capturer, long native_source,
VideoCapturer.CapturerObserver j_frame_observer);
private static native long nativeCreateVideoTrack(
long nativeFactory, String id, long nativeVideoSource);

View File

@@ -21,6 +21,7 @@ public interface VideoCapturer {
// Notify whether the camera has been started successfully or not.
// Called on a Java thread owned by VideoCapturer.
void onCapturerStarted(boolean success);
void onCapturerStopped();
// Delivers a captured frame. Called on a Java thread owned by VideoCapturer.
void onByteBufferFrameCaptured(byte[] data, int width, int height, int rotation,
@@ -52,6 +53,9 @@
nativeCapturerStarted(nativeCapturer, success);
}
@Override
public void onCapturerStopped() {}
@Override
public void onByteBufferFrameCaptured(byte[] data, int width, int height,
int rotation, long timeStamp) {
@@ -82,6 +86,57 @@
int width, int height, int framerate);
}
// An implementation of CapturerObserver that forwards all calls from
// Java to the C layer.
static class AndroidVideoTrackSourceObserver implements CapturerObserver {
// Pointer to VideoTrackSourceProxy proxying AndroidVideoTrackSource.
private final long nativeSource;
public AndroidVideoTrackSourceObserver(long nativeSource) {
this.nativeSource = nativeSource;
}
@Override
public void onCapturerStarted(boolean success) {
nativeCapturerStarted(nativeSource, success);
}
@Override
public void onCapturerStopped() {
nativeCapturerStopped(nativeSource);
}
@Override
public void onByteBufferFrameCaptured(byte[] data, int width, int height,
int rotation, long timeStamp) {
nativeOnByteBufferFrameCaptured(nativeSource, data, data.length, width, height, rotation,
timeStamp);
}
@Override
public void onTextureFrameCaptured(
int width, int height, int oesTextureId, float[] transformMatrix, int rotation,
long timestamp) {
nativeOnTextureFrameCaptured(nativeSource, width, height, oesTextureId, transformMatrix,
rotation, timestamp);
}
@Override
public void onOutputFormatRequest(int width, int height, int framerate) {
nativeOnOutputFormatRequest(nativeSource, width, height, framerate);
}
private native void nativeCapturerStarted(long nativeSource,
boolean success);
private native void nativeCapturerStopped(long nativeSource);
private native void nativeOnByteBufferFrameCaptured(long nativeSource,
byte[] data, int length, int width, int height, int rotation, long timeStamp);
private native void nativeOnTextureFrameCaptured(long nativeSource, int width, int height,
int oesTextureId, float[] transformMatrix, int rotation, long timestamp);
private native void nativeOnOutputFormatRequest(long nativeSource,
int width, int height, int framerate);
}
/**
* Returns a list with all the formats this VideoCapturer supports.
*/
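
The CapturerObserver interface above gained an onCapturerStopped() callback.
For reference, a minimal no-op implementation (a sketch, e.g. for tests; the
class name is ours, not part of this change):

  class NoOpCapturerObserver implements VideoCapturer.CapturerObserver {
    @Override public void onCapturerStarted(boolean success) {}
    @Override public void onCapturerStopped() {}
    @Override public void onByteBufferFrameCaptured(byte[] data, int width, int height,
        int rotation, long timeStamp) {}
    @Override public void onTextureFrameCaptured(int width, int height, int oesTextureId,
        float[] transformMatrix, int rotation, long timestamp) {}
    @Override public void onOutputFormatRequest(int width, int height, int framerate) {}
  }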

View File

@@ -23,9 +23,9 @@ import java.nio.ByteBuffer;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
// Android specific implementation of VideoCapturer.
// An instance of this class can be created by an application using
@@ -484,6 +484,7 @@ public class VideoCapturerAndroid implements
eventsHandler.onCameraError("Camera stop timeout");
}
}
frameObserver.onCapturerStopped();
Logging.d(TAG, "stopCapture done");
}

View File

@@ -36,11 +36,6 @@ public class VideoSource extends MediaSource {
restart(nativeSource);
}
@Override
public void dispose() {
super.dispose();
}
private static native void stop(long nativeSource);
private static native void restart(long nativeSource);
}

View File

@@ -2,6 +2,8 @@ per-file androidvideocapturer*=magjed@webrtc.org
per-file androidmediaencoder*=magjed@webrtc.org
per-file androidmediadecoder*=magjed@webrtc.org
per-file androidmediacodeccommon.h=magjed@webrtc.org
per-file androidvideotracksource_jni.cc=magjed@webrtc.org
per-file androidvideotracksource_jni.cc=sakal@webrtc.org
per-file surfacetexturehelper*=magjed@webrtc.org
per-file native_handle_impl*=magjed@webrtc.org
# Video related parts of peerconnection only.

View File

@@ -0,0 +1,88 @@
/*
* Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "webrtc/api/android/jni/classreferenceholder.h"
#include "webrtc/api/androidvideotracksource.h"
#include "webrtc/api/videosourceproxy.h"
// Identifiers are over 80 characters long so this is needed to fit them on one
// line.
#define JOW_OBSERVER_METHOD(rettype, name) \
JOW(rettype, VideoCapturer_00024AndroidVideoTrackSourceObserver_##name)
namespace webrtc_jni {
static webrtc::AndroidVideoTrackSource* AndroidVideoTrackSourceFromJavaProxy(
jlong j_proxy) {
auto proxy_source = reinterpret_cast<webrtc::VideoTrackSourceProxy*>(j_proxy);
return reinterpret_cast<webrtc::AndroidVideoTrackSource*>(
proxy_source->internal());
}
JOW_OBSERVER_METHOD(void, nativeOnByteBufferFrameCaptured)
(JNIEnv* jni,
jclass,
jlong j_source,
jbyteArray j_frame,
jint length,
jint width,
jint height,
jint rotation,
jlong timestamp) {
webrtc::AndroidVideoTrackSource* source =
AndroidVideoTrackSourceFromJavaProxy(j_source);
jbyte* bytes = jni->GetByteArrayElements(j_frame, nullptr);
source->OnByteBufferFrameCaptured(bytes, length, width, height, rotation,
timestamp);
jni->ReleaseByteArrayElements(j_frame, bytes, JNI_ABORT);
}
JOW_OBSERVER_METHOD(void, nativeOnTextureFrameCaptured)
(JNIEnv* jni,
jclass,
jlong j_source,
jint j_width,
jint j_height,
jint j_oes_texture_id,
jfloatArray j_transform_matrix,
jint j_rotation,
jlong j_timestamp) {
webrtc::AndroidVideoTrackSource* source =
AndroidVideoTrackSourceFromJavaProxy(j_source);
source->OnTextureFrameCaptured(
j_width, j_height, j_rotation, j_timestamp,
NativeHandleImpl(jni, j_oes_texture_id, j_transform_matrix));
}
JOW_OBSERVER_METHOD(void, nativeCapturerStarted)
(JNIEnv* jni, jclass, jlong j_source, jboolean j_success) {
LOG(LS_INFO) << "AndroidVideoTrackSourceObserver_nativeCapturerStarted";
webrtc::AndroidVideoTrackSource* source =
AndroidVideoTrackSourceFromJavaProxy(j_source);
source->SetState(webrtc::AndroidVideoTrackSource::SourceState::kLive);
}
JOW_OBSERVER_METHOD(void, nativeCapturerStopped)
(JNIEnv* jni, jclass, jlong j_source) {
LOG(LS_INFO) << "AndroidVideoTrackSourceObserver_nativeCapturerStopped";
webrtc::AndroidVideoTrackSource* source =
AndroidVideoTrackSourceFromJavaProxy(j_source);
source->SetState(webrtc::AndroidVideoTrackSource::SourceState::kEnded);
}
JOW_OBSERVER_METHOD(void, nativeOnOutputFormatRequest)
(JNIEnv* jni, jclass, jlong j_source, jint j_width, jint j_height, jint j_fps) {
LOG(LS_INFO) << "AndroidVideoTrackSourceObserver_nativeOnOutputFormatRequest";
webrtc::AndroidVideoTrackSource* source =
AndroidVideoTrackSourceFromJavaProxy(j_source);
source->OnOutputFormatRequest(j_width, j_height, j_fps);
}
} // namespace webrtc_jni
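
A note on the symbol names above: JNI escapes the '$' of Java inner-class
names as "_00024", which is why the JOW_OBSERVER_METHOD macro pastes
VideoCapturer_00024AndroidVideoTrackSourceObserver into each identifier. For
example, the native method nativeCapturerStarted declared in
org.webrtc.VideoCapturer$AndroidVideoTrackSourceObserver resolves to a C
symbol along these lines (illustrative, written out without the macros):

  // Java_<package>_<outer class>_00024<inner class>_<method>
  Java_org_webrtc_VideoCapturer_00024AndroidVideoTrackSourceObserver_nativeCapturerStarted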

View File

@@ -44,6 +44,7 @@
#include <utility>
#include "webrtc/api/androidvideocapturer.h"
#include "webrtc/api/androidvideotracksource.h"
#include "webrtc/api/android/jni/androidmediadecoder_jni.h"
#include "webrtc/api/android/jni/androidmediaencoder_jni.h"
#include "webrtc/api/android/jni/androidnetworkmonitor_jni.h"
@@ -55,6 +56,7 @@
#include "webrtc/api/peerconnectioninterface.h"
#include "webrtc/api/rtpreceiverinterface.h"
#include "webrtc/api/rtpsenderinterface.h"
#include "webrtc/api/videosourceproxy.h"
#include "webrtc/api/webrtcsdp.h"
#include "webrtc/base/bind.h"
#include "webrtc/base/checks.h"
@@ -116,6 +118,7 @@ static char *field_trials_init_string = NULL;
// Set in PeerConnectionFactory_initializeAndroidGlobals().
static bool factory_static_initialized = false;
static bool video_hw_acceleration_enabled = true;
static jobject j_application_context = nullptr;
// Return the (singleton) Java Enum object corresponding to |index|;
// |state_class_fragment| is something like "MediaSource$State".
@@ -931,7 +934,7 @@ JOW(void, PeerConnection_freeObserver)(JNIEnv*, jclass, jlong j_p) {
}
JOW(void, MediaSource_free)(JNIEnv*, jclass, jlong j_p) {
CHECK_RELEASE(reinterpret_cast<MediaSourceInterface*>(j_p));
reinterpret_cast<rtc::RefCountInterface*>(j_p)->Release();
}
JOW(void, VideoRenderer_freeWrappedVideoRenderer)(JNIEnv*, jclass, jlong j_p) {
@@ -985,14 +988,20 @@ JOW(jlong, PeerConnectionFactory_nativeCreateObserver)(
return (jlong)new PCOJava(jni, j_observer);
}
JOW(jboolean, PeerConnectionFactory_initializeAndroidGlobals)(
JNIEnv* jni, jclass, jobject context,
jboolean initialize_audio, jboolean initialize_video,
JOW(jboolean, PeerConnectionFactory_initializeAndroidGlobals)
(JNIEnv* jni,
jclass,
jobject context,
jboolean initialize_audio,
jboolean initialize_video,
jboolean video_hw_acceleration) {
bool failure = false;
video_hw_acceleration_enabled = video_hw_acceleration;
AndroidNetworkMonitor::SetAndroidContext(jni, context);
if (!factory_static_initialized) {
RTC_DCHECK(j_application_context == nullptr);
j_application_context = NewGlobalRef(jni, context);
if (initialize_video) {
failure |= AndroidVideoCapturerJni::SetAndroidObjects(jni, context);
}
@@ -1075,6 +1084,8 @@ class OwnedFactoryAndThreads {
}
PeerConnectionFactoryInterface* factory() { return factory_; }
Thread* signaling_thread() { return signaling_thread_.get(); }
Thread* worker_thread() { return worker_thread_.get(); }
WebRtcVideoEncoderFactory* encoder_factory() { return encoder_factory_; }
WebRtcVideoDecoderFactory* decoder_factory() { return decoder_factory_; }
rtc::NetworkMonitorFactory* network_monitor_factory() {
@@ -1270,6 +1281,49 @@ JOW(jlong, PeerConnectionFactory_nativeCreateVideoSource)(
return (jlong)source.release();
}
JOW(jlong, PeerConnectionFactory_nativeCreateVideoSource2)
(JNIEnv* jni, jclass, jlong native_factory, jobject j_egl_context) {
OwnedFactoryAndThreads* factory =
reinterpret_cast<OwnedFactoryAndThreads*>(native_factory);
rtc::scoped_refptr<webrtc::AndroidVideoTrackSource> source(
new rtc::RefCountedObject<webrtc::AndroidVideoTrackSource>(
factory->signaling_thread(), jni, j_egl_context));
rtc::scoped_refptr<webrtc::VideoTrackSourceProxy> proxy_source =
webrtc::VideoTrackSourceProxy::Create(factory->signaling_thread(),
factory->worker_thread(), source);
return (jlong)proxy_source.release();
}
JOW(void, PeerConnectionFactory_nativeInitializeVideoCapturer)
(JNIEnv* jni,
jclass,
jlong native_factory,
jobject j_video_capturer,
jlong native_source,
jobject j_frame_observer) {
LOG(LS_INFO) << "PeerConnectionFactory_nativeInitializeVideoCapturer";
rtc::scoped_refptr<PeerConnectionFactoryInterface> factory(
factoryFromJava(native_factory));
auto proxy_source =
reinterpret_cast<webrtc::VideoTrackSourceProxy*>(native_source);
auto source = reinterpret_cast<webrtc::AndroidVideoTrackSource*>(
proxy_source->internal());
rtc::scoped_refptr<SurfaceTextureHelper> surface_texture_helper =
source->surface_texture_helper();
jni->CallVoidMethod(
j_video_capturer,
GetMethodID(jni, FindClass(jni, "org/webrtc/VideoCapturer"), "initialize",
"(Lorg/webrtc/SurfaceTextureHelper;Landroid/content/"
"Context;Lorg/webrtc/VideoCapturer$CapturerObserver;)V"),
surface_texture_helper
? surface_texture_helper->GetJavaSurfaceTextureHelper()
: nullptr,
j_application_context, j_frame_observer);
CHECK_EXCEPTION(jni) << "error during VideoCapturer.initialize()";
}
JOW(jlong, PeerConnectionFactory_nativeCreateVideoTrack)(
JNIEnv* jni, jclass, jlong native_factory, jstring id,
jlong native_source) {

View File

@@ -104,6 +104,11 @@ class CameraVideoCapturerTestFixtures {
}
}
@Override
public void onCapturerStopped() {
Logging.d(TAG, "onCapturerStopped");
}
@Override
public void onByteBufferFrameCaptured(byte[] frame, int width, int height, int rotation,
long timeStamp) {

View File

@@ -0,0 +1,260 @@
/*
* Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "webrtc/api/androidvideotracksource.h"
#include <utility>
namespace webrtc {
AndroidVideoTrackSource::AndroidVideoTrackSource(rtc::Thread* signaling_thread,
JNIEnv* jni,
jobject j_egl_context)
: signaling_thread_(signaling_thread),
surface_texture_helper_(webrtc_jni::SurfaceTextureHelper::create(
jni,
"Camera SurfaceTextureHelper",
j_egl_context)) {
LOG(LS_INFO) << "AndroidVideoTrackSource ctor";
worker_thread_checker_.DetachFromThread();
camera_thread_checker_.DetachFromThread();
}
bool AndroidVideoTrackSource::GetStats(AndroidVideoTrackSource::Stats* stats) {
rtc::CritScope lock(&stats_crit_);
if (!stats_) {
return false;
}
*stats = *stats_;
return true;
}
void AndroidVideoTrackSource::SetState(SourceState state) {
if (rtc::Thread::Current() != signaling_thread_) {
invoker_.AsyncInvoke<void>(
RTC_FROM_HERE, signaling_thread_,
rtc::Bind(&AndroidVideoTrackSource::SetState, this, state));
return;
}
if (state_ != state) {
state_ = state;
FireOnChanged();
}
}
void AndroidVideoTrackSource::AddOrUpdateSink(
rtc::VideoSinkInterface<cricket::VideoFrame>* sink,
const rtc::VideoSinkWants& wants) {
RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
broadcaster_.AddOrUpdateSink(sink, wants);
OnSinkWantsChanged(broadcaster_.wants());
}
void AndroidVideoTrackSource::RemoveSink(
rtc::VideoSinkInterface<cricket::VideoFrame>* sink) {
RTC_DCHECK(worker_thread_checker_.CalledOnValidThread());
broadcaster_.RemoveSink(sink);
OnSinkWantsChanged(broadcaster_.wants());
}
void AndroidVideoTrackSource::OnSinkWantsChanged(
const rtc::VideoSinkWants& wants) {
{
rtc::CritScope lock(&apply_rotation_crit_);
apply_rotation_ = wants.rotation_applied;
}
video_adapter_.OnResolutionRequest(wants.max_pixel_count,
wants.max_pixel_count_step_up);
}
void AndroidVideoTrackSource::OnByteBufferFrameCaptured(const void* frame_data,
int length,
int width,
int height,
int rotation,
int64_t timestamp_ns) {
RTC_DCHECK(camera_thread_checker_.CalledOnValidThread());
RTC_DCHECK(rotation == 0 || rotation == 90 || rotation == 180 ||
rotation == 270);
int adapted_width;
int adapted_height;
int crop_width;
int crop_height;
int crop_x;
int crop_y;
int64_t translated_camera_time_us;
if (!AdaptFrame(width, height, timestamp_ns / rtc::kNumNanosecsPerMicrosec,
&adapted_width, &adapted_height, &crop_width, &crop_height,
&crop_x, &crop_y, &translated_camera_time_us)) {
return;
}
int rotated_width = crop_width;
int rotated_height = crop_height;
rtc::CritScope lock(&apply_rotation_crit_);
if (apply_rotation_ && (rotation == 90 || rotation == 270)) {
std::swap(adapted_width, adapted_height);
std::swap(rotated_width, rotated_height);
}
rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer =
pre_scale_pool_.CreateBuffer(rotated_width, rotated_height);
const uint8_t* y_plane = static_cast<const uint8_t*>(frame_data);
const uint8_t* uv_plane = y_plane + width * height;
int uv_width = (width + 1) / 2;
RTC_CHECK_GE(length, width * height + 2 * uv_width * ((height + 1) / 2));
// Can only crop at even pixels.
crop_x &= ~1;
crop_y &= ~1;
libyuv::NV12ToI420Rotate(
y_plane + width * crop_y + crop_x, width,
uv_plane + uv_width * crop_y + crop_x, width, buffer->MutableDataY(),
buffer->StrideY(),
// Swap U and V, since we have NV21, not NV12.
buffer->MutableDataV(), buffer->StrideV(), buffer->MutableDataU(),
buffer->StrideU(), crop_width, crop_height,
static_cast<libyuv::RotationMode>(apply_rotation_ ? rotation : 0));
if (adapted_width != buffer->width() || adapted_height != buffer->height()) {
rtc::scoped_refptr<webrtc::I420Buffer> scaled_buffer(
post_scale_pool_.CreateBuffer(adapted_width, adapted_height));
scaled_buffer->ScaleFrom(buffer);
buffer = scaled_buffer;
}
OnFrame(cricket::WebRtcVideoFrame(
buffer,
apply_rotation_ ? webrtc::kVideoRotation_0
: static_cast<webrtc::VideoRotation>(rotation),
translated_camera_time_us),
width, height);
}
void AndroidVideoTrackSource::OnTextureFrameCaptured(
int width,
int height,
int rotation,
int64_t timestamp_ns,
const webrtc_jni::NativeHandleImpl& handle) {
RTC_DCHECK(camera_thread_checker_.CalledOnValidThread());
RTC_DCHECK(rotation == 0 || rotation == 90 || rotation == 180 ||
rotation == 270);
int adapted_width;
int adapted_height;
int crop_width;
int crop_height;
int crop_x;
int crop_y;
int64_t translated_camera_time_us;
if (!AdaptFrame(width, height, timestamp_ns / rtc::kNumNanosecsPerMicrosec,
&adapted_width, &adapted_height, &crop_width, &crop_height,
&crop_x, &crop_y, &translated_camera_time_us)) {
surface_texture_helper_->ReturnTextureFrame();
return;
}
webrtc_jni::Matrix matrix = handle.sampling_matrix;
matrix.Crop(crop_width / static_cast<float>(width),
crop_height / static_cast<float>(height),
crop_x / static_cast<float>(width),
crop_y / static_cast<float>(height));
rtc::CritScope lock(&apply_rotation_crit_);
if (apply_rotation_) {
if (rotation == webrtc::kVideoRotation_90 ||
rotation == webrtc::kVideoRotation_270) {
std::swap(adapted_width, adapted_height);
}
matrix.Rotate(static_cast<webrtc::VideoRotation>(rotation));
}
OnFrame(cricket::WebRtcVideoFrame(
surface_texture_helper_->CreateTextureFrame(
adapted_width, adapted_height,
webrtc_jni::NativeHandleImpl(handle.oes_texture_id, matrix)),
apply_rotation_ ? webrtc::kVideoRotation_0
: static_cast<webrtc::VideoRotation>(rotation),
translated_camera_time_us),
width, height);
}
void AndroidVideoTrackSource::OnFrame(const cricket::VideoFrame& frame,
int width,
int height) {
{
rtc::CritScope lock(&stats_crit_);
stats_ = rtc::Optional<AndroidVideoTrackSource::Stats>({width, height});
}
broadcaster_.OnFrame(frame);
}
void AndroidVideoTrackSource::OnOutputFormatRequest(int width,
int height,
int fps) {
RTC_DCHECK(camera_thread_checker_.CalledOnValidThread());
cricket::VideoFormat format(width, height,
cricket::VideoFormat::FpsToInterval(fps), 0);
video_adapter_.OnOutputFormatRequest(format);
}
bool AndroidVideoTrackSource::AdaptFrame(int width,
int height,
int64_t camera_time_us,
int* out_width,
int* out_height,
int* crop_width,
int* crop_height,
int* crop_x,
int* crop_y,
int64_t* translated_camera_time_us) {
RTC_DCHECK(camera_thread_checker_.CalledOnValidThread());
int64_t system_time_us = rtc::TimeMicros();
int64_t offset_us =
timestamp_aligner_.UpdateOffset(camera_time_us, system_time_us);
if (!broadcaster_.frame_wanted()) {
return false;
}
if (!video_adapter_.AdaptFrameResolution(
width, height, camera_time_us * rtc::kNumNanosecsPerMicrosec,
crop_width, crop_height, out_width, out_height)) {
// VideoAdapter dropped the frame.
return false;
}
*crop_x = (width - *crop_width) / 2;
*crop_y = (height - *crop_height) / 2;
*translated_camera_time_us = timestamp_aligner_.ClipTimestamp(
camera_time_us + offset_us, system_time_us);
return true;
}
} // namespace webrtc
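
For reference, the RTC_CHECK_GE in OnByteBufferFrameCaptured above follows
from Android's NV21 layout: a full-resolution Y plane followed by a single
interleaved V/U plane, subsampled 2x in each dimension. A minimal sketch of
the arithmetic in Java (the method name is ours, not part of this change):

  static int nv21FrameSize(int width, int height) {
    int chromaWidth = (width + 1) / 2;    // Chroma is subsampled 2x horizontally
    int chromaHeight = (height + 1) / 2;  // and 2x vertically.
    return width * height                 // Y plane.
        + 2 * chromaWidth * chromaHeight; // Interleaved VU plane.
  }
  // e.g. nv21FrameSize(1280, 720) == 1280 * 720 + 2 * 640 * 360 == 1382400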

View File

@@ -0,0 +1,124 @@
/*
* Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_API_ANDROIDVIDEOTRACKSOURCE_H_
#define WEBRTC_API_ANDROIDVIDEOTRACKSOURCE_H_
#include "webrtc/api/android/jni/native_handle_impl.h"
#include "webrtc/api/android/jni/surfacetexturehelper_jni.h"
#include "webrtc/api/mediastreaminterface.h"
#include "webrtc/api/notifier.h"
#include "webrtc/base/asyncinvoker.h"
#include "webrtc/base/checks.h"
#include "webrtc/base/thread_checker.h"
#include "webrtc/base/timestampaligner.h"
#include "webrtc/common_video/include/i420_buffer_pool.h"
#include "webrtc/media/base/videoadapter.h"
#include "webrtc/media/base/videobroadcaster.h"
#include "webrtc/media/base/videosinkinterface.h"
#include "third_party/libyuv/include/libyuv/convert.h"
namespace webrtc {
class AndroidVideoTrackSource : public Notifier<VideoTrackSourceInterface> {
public:
AndroidVideoTrackSource(rtc::Thread* signaling_thread,
JNIEnv* jni,
jobject j_egl_context);
// Not used on Android.
// TODO(sakal/magjed): Try to remove this from the interface.
void Stop() override { RTC_NOTREACHED(); }
// Not used on Android.
// TODO(sakal/magjed): Try to remove this from the interface.
void Restart() override { RTC_NOTREACHED(); }
// Currently, none of the Android implementations are screencast.
bool is_screencast() const override { return false; }
// Indicates that the encoder should denoise video before encoding it.
// If it is not set, the default configuration is used which is different
// depending on video codec.
rtc::Optional<bool> needs_denoising() const override {
return rtc::Optional<bool>(false);
}
// Returns false if no stats are available, e.g, for a remote
// source, or a source which has not seen its first frame yet.
// Should avoid blocking.
bool GetStats(Stats* stats) override;
// Called by the native capture observer
void SetState(SourceState state);
SourceState state() const override { return state_; }
bool remote() const override { return false; }
void AddOrUpdateSink(rtc::VideoSinkInterface<cricket::VideoFrame>* sink,
const rtc::VideoSinkWants& wants) override;
void RemoveSink(rtc::VideoSinkInterface<cricket::VideoFrame>* sink) override;
void OnByteBufferFrameCaptured(const void* frame_data,
int length,
int width,
int height,
int rotation,
int64_t timestamp_ns);
void OnTextureFrameCaptured(int width,
int height,
int rotation,
int64_t timestamp_ns,
const webrtc_jni::NativeHandleImpl& handle);
void OnOutputFormatRequest(int width, int height, int fps);
rtc::scoped_refptr<webrtc_jni::SurfaceTextureHelper>
surface_texture_helper() {
return surface_texture_helper_;
}
private:
rtc::Thread* signaling_thread_;
rtc::AsyncInvoker invoker_;
rtc::ThreadChecker worker_thread_checker_;
rtc::ThreadChecker camera_thread_checker_;
rtc::CriticalSection stats_crit_;
rtc::Optional<Stats> stats_ GUARDED_BY(stats_crit_);
SourceState state_;
rtc::VideoBroadcaster broadcaster_;
rtc::TimestampAligner timestamp_aligner_;
cricket::VideoAdapter video_adapter_;
rtc::CriticalSection apply_rotation_crit_;
bool apply_rotation_ GUARDED_BY(apply_rotation_crit_);
webrtc::I420BufferPool pre_scale_pool_;
webrtc::I420BufferPool post_scale_pool_;
rtc::scoped_refptr<webrtc_jni::SurfaceTextureHelper> surface_texture_helper_;
void OnFrame(const cricket::VideoFrame& frame, int width, int height);
void OnSinkWantsChanged(const rtc::VideoSinkWants& wants);
bool AdaptFrame(int width,
int height,
int64_t camera_time_us,
int* out_width,
int* out_height,
int* crop_width,
int* crop_height,
int* crop_x,
int* crop_y,
int64_t* translated_camera_time_us);
};
} // namespace webrtc
#endif // WEBRTC_API_ANDROIDVIDEOTRACKSOURCE_H_

View File

@@ -45,6 +45,7 @@
'android/jni/androidnetworkmonitor_jni.h',
'android/jni/androidvideocapturer_jni.cc',
'android/jni/androidvideocapturer_jni.h',
'android/jni/androidvideotracksource_jni.cc',
'android/jni/classreferenceholder.cc',
'android/jni/classreferenceholder.h',
'android/jni/jni_helpers.cc',
@@ -56,6 +57,8 @@
'android/jni/surfacetexturehelper_jni.h',
'androidvideocapturer.cc',
'androidvideocapturer.h',
'androidvideotracksource.cc',
'androidvideotracksource.h',
],
'include_dirs': [
'<(libyuv_dir)/include',

View File

@@ -107,14 +107,16 @@ public class PeerConnectionClient {
private boolean videoCallEnabled;
private boolean preferIsac;
private String preferredVideoCodec;
private boolean videoSourceStopped;
private boolean videoCapturerStopped;
private boolean isError;
private Timer statsTimer;
private VideoRenderer.Callbacks localRender;
private VideoRenderer.Callbacks remoteRender;
private SignalingParameters signalingParameters;
private MediaConstraints pcConstraints;
private MediaConstraints videoConstraints;
private int videoWidth;
private int videoHeight;
private int videoFps;
private MediaConstraints audioConstraints;
private ParcelFileDescriptor aecDumpFileDescriptor;
private MediaConstraints sdpMediaConstraints;
@@ -260,7 +262,7 @@ public class PeerConnectionClient {
factory = null;
peerConnection = null;
preferIsac = false;
videoSourceStopped = false;
videoCapturerStopped = false;
isError = false;
queuedRemoteCandidates = null;
localSdp = null; // either offer or answer SDP
@@ -399,42 +401,24 @@ }
}
// Create video constraints if video call is enabled.
if (videoCallEnabled) {
videoConstraints = new MediaConstraints();
int videoWidth = peerConnectionParameters.videoWidth;
int videoHeight = peerConnectionParameters.videoHeight;
videoWidth = peerConnectionParameters.videoWidth;
videoHeight = peerConnectionParameters.videoHeight;
videoFps = peerConnectionParameters.videoFps;
// If VP8 HW video encoder is supported and video resolution is not
// specified force it to HD.
if ((videoWidth == 0 || videoHeight == 0)
&& peerConnectionParameters.videoCodecHwAcceleration
&& MediaCodecVideoEncoder.isVp8HwSupported()) {
// If video resolution is not specified, default to HD.
if (videoWidth == 0 || videoHeight == 0) {
videoWidth = HD_VIDEO_WIDTH;
videoHeight = HD_VIDEO_HEIGHT;
}
// Add video resolution constraints.
if (videoWidth > 0 && videoHeight > 0) {
videoWidth = Math.min(videoWidth, MAX_VIDEO_WIDTH);
videoHeight = Math.min(videoHeight, MAX_VIDEO_HEIGHT);
videoConstraints.mandatory.add(new KeyValuePair(
MIN_VIDEO_WIDTH_CONSTRAINT, Integer.toString(videoWidth)));
videoConstraints.mandatory.add(new KeyValuePair(
MAX_VIDEO_WIDTH_CONSTRAINT, Integer.toString(videoWidth)));
videoConstraints.mandatory.add(new KeyValuePair(
MIN_VIDEO_HEIGHT_CONSTRAINT, Integer.toString(videoHeight)));
videoConstraints.mandatory.add(new KeyValuePair(
MAX_VIDEO_HEIGHT_CONSTRAINT, Integer.toString(videoHeight)));
// If fps is not specified, default to 30.
if (videoFps == 0) {
videoFps = 30;
}
// Add fps constraints.
int videoFps = peerConnectionParameters.videoFps;
if (videoFps > 0) {
videoWidth = Math.min(videoWidth, MAX_VIDEO_WIDTH);
videoHeight = Math.min(videoHeight, MAX_VIDEO_HEIGHT);
videoFps = Math.min(videoFps, MAX_VIDEO_FPS);
videoConstraints.mandatory.add(new KeyValuePair(
MIN_VIDEO_FPS_CONSTRAINT, Integer.toString(videoFps)));
videoConstraints.mandatory.add(new KeyValuePair(
MAX_VIDEO_FPS_CONSTRAINT, Integer.toString(videoFps)));
}
}
// Create audio constraints.
@@ -502,9 +486,6 @@
Log.d(TAG, "Create peer connection.");
Log.d(TAG, "PCConstraints: " + pcConstraints.toString());
if (videoConstraints != null) {
Log.d(TAG, "VideoConstraints: " + videoConstraints.toString());
}
queuedRemoteCandidates = new LinkedList<IceCandidate>();
if (videoCallEnabled) {
@@ -592,6 +573,16 @@
audioSource.dispose();
audioSource = null;
}
Log.d(TAG, "Stopping capture.");
if (videoCapturer != null) {
try {
videoCapturer.stopCapture();
} catch (InterruptedException e) {
throw new RuntimeException(e);
}
videoCapturer.dispose();
videoCapturer = null;
}
Log.d(TAG, "Closing video source.");
if (videoSource != null) {
videoSource.dispose();
@@ -613,24 +604,8 @@
if (!videoCallEnabled) {
return false;
}
int minWidth = 0;
int minHeight = 0;
for (KeyValuePair keyValuePair : videoConstraints.mandatory) {
if (keyValuePair.getKey().equals("minWidth")) {
try {
minWidth = Integer.parseInt(keyValuePair.getValue());
} catch (NumberFormatException e) {
Log.e(TAG, "Can not parse video width from video constraints");
}
} else if (keyValuePair.getKey().equals("minHeight")) {
try {
minHeight = Integer.parseInt(keyValuePair.getValue());
} catch (NumberFormatException e) {
Log.e(TAG, "Can not parse video height from video constraints");
}
}
}
return minWidth * minHeight >= 1280 * 720;
return videoWidth * videoHeight >= 1280 * 720;
}
private void getStats() {
@@ -791,10 +766,12 @@
executor.execute(new Runnable() {
@Override
public void run() {
if (videoSource != null && !videoSourceStopped) {
if (videoCapturer != null && !videoCapturerStopped) {
Log.d(TAG, "Stop video source.");
videoSource.stop();
videoSourceStopped = true;
try {
videoCapturer.stopCapture();
} catch (InterruptedException e) {}
videoCapturerStopped = true;
}
}
});
@@ -804,10 +781,10 @@
executor.execute(new Runnable() {
@Override
public void run() {
if (videoSource != null && videoSourceStopped) {
if (videoCapturer != null && videoCapturerStopped) {
Log.d(TAG, "Restart video source.");
videoSource.restart();
videoSourceStopped = false;
videoCapturer.startCapture(videoWidth, videoHeight, videoFps);
videoCapturerStopped = false;
}
}
});
@@ -834,7 +811,8 @@ }
}
private VideoTrack createVideoTrack(VideoCapturer capturer) {
videoSource = factory.createVideoSource(capturer, videoConstraints);
videoSource = factory.createVideoSource(capturer);
capturer.startCapture(videoWidth, videoHeight, videoFps);
localVideoTrack = factory.createVideoTrack(VIDEO_TRACK_ID, videoSource);
localVideoTrack.setEnabled(renderVideo);