Create Android screen capturer.

Review-Url: https://codereview.webrtc.org/2276593003
Cr-Commit-Position: refs/heads/master@{#14010}
Author: arsany
Date: 2016-08-31 18:50:52 -07:00
Committed by: Commit bot
Commit: b75f2541c9 (parent: be5163ca09)
8 changed files with 260 additions and 9 deletions

CameraCapturer.java

@@ -247,6 +247,11 @@ public abstract class CameraCapturer implements CameraVideoCapturer {
     });
   }
 
+  @Override
+  public boolean isScreencast() {
+    return false;
+  }
+
   private void switchCameraInternal(final CameraSwitchHandler switchEventsHandler) {
     Logging.d(TAG, "switchCamera internal");

PeerConnectionFactory.java

@@ -114,7 +114,8 @@ public class PeerConnectionFactory {
   public VideoSource createVideoSource(VideoCapturer capturer) {
     final EglBase.Context eglContext =
         localEglbase == null ? null : localEglbase.getEglBaseContext();
-    long nativeAndroidVideoTrackSource = nativeCreateVideoSource(nativeFactory, eglContext);
+    long nativeAndroidVideoTrackSource = nativeCreateVideoSource(
+        nativeFactory, eglContext, capturer.isScreencast());
     VideoCapturer.CapturerObserver capturerObserver
         = new VideoCapturer.AndroidVideoTrackSourceObserver(nativeAndroidVideoTrackSource);
     nativeInitializeVideoCapturer(nativeFactory, capturer, nativeAndroidVideoTrackSource,
@@ -237,7 +238,7 @@ public class PeerConnectionFactory {
       long nativeFactory, String label);
   private static native long nativeCreateVideoSource(
-      long nativeFactory, EglBase.Context eglContext);
+      long nativeFactory, EglBase.Context eglContext, boolean is_screencast);
   private static native void nativeInitializeVideoCapturer(
       long native_factory, VideoCapturer j_video_capturer, long native_source,

ScreenCapturerAndroid.java (new file)

@@ -0,0 +1,231 @@
/*
 * Copyright 2016 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

package org.webrtc;

import android.annotation.TargetApi;
import android.app.Activity;
import android.content.Context;
import android.content.Intent;
import android.hardware.display.DisplayManager;
import android.hardware.display.VirtualDisplay;
import android.media.projection.MediaProjection;
import android.media.projection.MediaProjectionManager;
import android.view.Surface;

import java.util.ArrayList;
import java.util.List;

/**
 * An implementation of VideoCapturer to capture the screen content as a video stream.
 * Capturing is done by {@code MediaProjection} on a {@code SurfaceTexture}. We interact with this
 * {@code SurfaceTexture} using a {@code SurfaceTextureHelper}.
 * The {@code SurfaceTextureHelper} is created by the native code and passed to this capturer in
 * {@code VideoCapturer.initialize()}. On receiving a new frame, this capturer passes it
 * as a texture to the native code via {@code CapturerObserver.onTextureFrameCaptured()}. This takes
 * place on the HandlerThread of the given {@code SurfaceTextureHelper}. When done with each frame,
 * the native code returns the buffer to the {@code SurfaceTextureHelper} to be used for new
 * frames. At any time, at most one frame is being processed.
 */
@TargetApi(21)
public class ScreenCapturerAndroid implements
    VideoCapturer, SurfaceTextureHelper.OnTextureFrameAvailableListener {

  private static final int DISPLAY_FLAGS = DisplayManager.VIRTUAL_DISPLAY_FLAG_PUBLIC
      | DisplayManager.VIRTUAL_DISPLAY_FLAG_PRESENTATION;
  // DPI for the VirtualDisplay; does not seem to matter for us.
  private static final int VIRTUAL_DISPLAY_DPI = 400;

  private final Intent mediaProjectionPermissionResultData;
  private final MediaProjection.Callback mediaProjectionCallback;

  private int width;
  private int height;
  private VirtualDisplay virtualDisplay;
  private SurfaceTextureHelper surfaceTextureHelper;
  private CapturerObserver capturerObserver;
  private long numCapturedFrames = 0;
  private MediaProjection mediaProjection;
  private boolean isDisposed = false;
  private MediaProjectionManager mediaProjectionManager;

  /**
   * Constructs a new screen capturer.
   *
   * @param mediaProjectionPermissionResultData the result data of the MediaProjection permission
   *     activity; the calling app must validate that the result code is Activity.RESULT_OK before
   *     calling this method.
   * @param mediaProjectionCallback MediaProjection callback to implement application-specific
   *     logic in events such as when the user revokes a previously granted capture permission.
   */
  public ScreenCapturerAndroid(
      Intent mediaProjectionPermissionResultData,
      MediaProjection.Callback mediaProjectionCallback) {
    this.mediaProjectionPermissionResultData = mediaProjectionPermissionResultData;
    this.mediaProjectionCallback = mediaProjectionCallback;
  }

  private void checkNotDisposed() {
    if (isDisposed) {
      throw new RuntimeException("capturer is disposed.");
    }
  }

  @Override
  public synchronized void initialize(
      final SurfaceTextureHelper surfaceTextureHelper,
      final Context applicationContext,
      final VideoCapturer.CapturerObserver capturerObserver) {
    checkNotDisposed();

    if (capturerObserver == null) {
      throw new RuntimeException("capturerObserver not set.");
    }
    this.capturerObserver = capturerObserver;

    if (surfaceTextureHelper == null) {
      throw new RuntimeException("surfaceTextureHelper not set.");
    }
    this.surfaceTextureHelper = surfaceTextureHelper;

    mediaProjectionManager = (MediaProjectionManager)
        applicationContext.getSystemService(Context.MEDIA_PROJECTION_SERVICE);
  }

  /**
   * This method is called neither by the native code nor by the Java app. It can be removed
   * later, once it is removed from the VideoCapturer interface.
   */
  @Override
  public List<CameraEnumerationAndroid.CaptureFormat> getSupportedFormats() {
    return null;
  }

  @Override
  public synchronized void startCapture(final int width, final int height,
      final int ignoredFramerate) {
    checkNotDisposed();

    this.width = width;
    this.height = height;

    mediaProjection = mediaProjectionManager.getMediaProjection(
        Activity.RESULT_OK, mediaProjectionPermissionResultData);

    // Let the MediaProjection callback use the SurfaceTextureHelper thread.
    mediaProjection.registerCallback(mediaProjectionCallback, surfaceTextureHelper.getHandler());

    createVirtualDisplay();
    capturerObserver.onCapturerStarted(true);
    surfaceTextureHelper.startListening(ScreenCapturerAndroid.this);
  }

  @Override
  public synchronized void stopCapture() {
    checkNotDisposed();
    ThreadUtils.invokeAtFrontUninterruptibly(surfaceTextureHelper.getHandler(), new Runnable() {
      @Override
      public void run() {
        surfaceTextureHelper.stopListening();
        capturerObserver.onCapturerStopped();

        if (virtualDisplay != null) {
          virtualDisplay.release();
          virtualDisplay = null;
        }

        if (mediaProjection != null) {
          // Unregister the callback before stopping, otherwise the callback recursively
          // calls this method.
          mediaProjection.unregisterCallback(mediaProjectionCallback);
          mediaProjection.stop();
          mediaProjection = null;
        }
      }
    });
  }

  @Override
  public synchronized void dispose() {
    isDisposed = true;
  }

  @Override
  public synchronized void onOutputFormatRequest(
      final int width, final int height, final int framerate) {
    checkNotDisposed();
    surfaceTextureHelper.getHandler().post(new Runnable() {
      @Override
      public void run() {
        capturerObserver.onOutputFormatRequest(width, height, framerate);
      }
    });
  }

  /**
   * Changes the output video format. This method can be used to scale the output
   * video, or to change the orientation when the captured screen is rotated, for example.
   *
   * @param width new output video width
   * @param height new output video height
   * @param ignoredFramerate ignored
   */
  @Override
  public synchronized void changeCaptureFormat(
      final int width, final int height, final int ignoredFramerate) {
    checkNotDisposed();

    this.width = width;
    this.height = height;

    if (virtualDisplay == null) {
      // The capturer is stopped; the virtual display will be created in startCapture().
      return;
    }

    // Create a new virtual display on the surfaceTextureHelper thread to avoid interference
    // with frame processing, which happens on the same thread (we serialize events by running
    // them on the same thread).
    ThreadUtils.invokeAtFrontUninterruptibly(surfaceTextureHelper.getHandler(), new Runnable() {
      @Override
      public void run() {
        virtualDisplay.release();
        createVirtualDisplay();
      }
    });
  }

  private void createVirtualDisplay() {
    surfaceTextureHelper.getSurfaceTexture().setDefaultBufferSize(width, height);
    virtualDisplay = mediaProjection.createVirtualDisplay(
        "WebRTC_ScreenCapture", width, height, VIRTUAL_DISPLAY_DPI,
        DISPLAY_FLAGS, new Surface(surfaceTextureHelper.getSurfaceTexture()),
        null /* callback */, null /* callback handler */);
  }

  // This is called on the internal looper thread of {@code SurfaceTextureHelper}.
  @Override
  public void onTextureFrameAvailable(int oesTextureId, float[] transformMatrix, long timestampNs) {
    numCapturedFrames++;
    capturerObserver.onTextureFrameCaptured(width, height, oesTextureId, transformMatrix,
        0 /* rotation */, timestampNs);
  }

  @Override
  public boolean isScreencast() {
    return true;
  }

  public long getNumCapturedFrames() {
    return numCapturedFrames;
  }
}
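For context, a minimal usage sketch (not part of this commit): the Activity, the request code, and the `factory` field below are hypothetical, with `factory` assumed to be an already-initialized PeerConnectionFactory.

// Hypothetical usage sketch, not part of this commit.
import android.app.Activity;
import android.content.Context;
import android.content.Intent;
import android.media.projection.MediaProjection;
import android.media.projection.MediaProjectionManager;

import org.webrtc.PeerConnectionFactory;
import org.webrtc.ScreenCapturerAndroid;
import org.webrtc.VideoCapturer;
import org.webrtc.VideoSource;

public class ScreenShareActivity extends Activity {
  private static final int SCREEN_CAPTURE_REQUEST_CODE = 1;  // Arbitrary request code.
  private PeerConnectionFactory factory;  // Assumed to be created and initialized elsewhere.

  private void startScreenShare() {
    // Ask the user for permission to capture the screen. The result arrives in
    // onActivityResult() as the Intent that ScreenCapturerAndroid's constructor expects.
    MediaProjectionManager projectionManager = (MediaProjectionManager)
        getSystemService(Context.MEDIA_PROJECTION_SERVICE);
    startActivityForResult(
        projectionManager.createScreenCaptureIntent(), SCREEN_CAPTURE_REQUEST_CODE);
  }

  @Override
  protected void onActivityResult(int requestCode, int resultCode, Intent data) {
    if (requestCode != SCREEN_CAPTURE_REQUEST_CODE || resultCode != Activity.RESULT_OK) {
      return;  // Only create the capturer with a granted permission result.
    }
    VideoCapturer capturer = new ScreenCapturerAndroid(data, new MediaProjection.Callback() {
      @Override
      public void onStop() {
        // The user revoked the capture permission; tear down the stream here.
      }
    });
    // createVideoSource() forwards capturer.isScreencast() to the native
    // AndroidVideoTrackSource, so the track source is flagged as a screencast.
    VideoSource videoSource = factory.createVideoSource(capturer);
    capturer.startCapture(/* width */ 1280, /* height */ 720, /* framerate, ignored */ 0);
    // ... create a VideoTrack from videoSource and attach it to a PeerConnection.
  }
}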

VideoCapturer.java

@@ -125,4 +125,9 @@ public interface VideoCapturer {
    * Perform any final cleanup here. No more capturing will be done after this call.
    */
   void dispose();
+
+  /**
+   * @return true if-and-only-if this is a screen capturer.
+   */
+  boolean isScreencast();
 }
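Since isScreencast() is a new method on the interface, any third-party VideoCapturer implementation must now provide it. A hypothetical minimal stub, limited to the interface methods visible in this commit:

// Hypothetical stub; shows only the VideoCapturer methods visible in this commit.
class StubVideoCapturer implements VideoCapturer {
  @Override
  public void initialize(SurfaceTextureHelper surfaceTextureHelper, Context applicationContext,
      VideoCapturer.CapturerObserver capturerObserver) {}

  @Override
  public List<CameraEnumerationAndroid.CaptureFormat> getSupportedFormats() {
    return null;  // Not used, mirroring ScreenCapturerAndroid above.
  }

  @Override
  public void startCapture(int width, int height, int framerate) {}

  @Override
  public void stopCapture() {}

  @Override
  public void onOutputFormatRequest(int width, int height, int framerate) {}

  @Override
  public void changeCaptureFormat(int width, int height, int framerate) {}

  @Override
  public void dispose() {}

  @Override
  public boolean isScreencast() {
    return false;  // Not a screen capturer, so the source is treated as regular video.
  }
}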

VideoCapturerAndroid.java

@@ -640,4 +640,9 @@ public class VideoCapturerAndroid implements
     frameObserver.onTextureFrameCaptured(captureFormat.width, captureFormat.height, oesTextureId,
         transformMatrix, rotation, timestampNs);
   }
+
+  @Override
+  public boolean isScreencast() {
+    return false;
+  }
 }

peerconnection_jni.cc

@@ -1258,13 +1258,14 @@ JOW(jlong, PeerConnectionFactory_nativeCreateLocalMediaStream)(
 }
 
 JOW(jlong, PeerConnectionFactory_nativeCreateVideoSource)
-(JNIEnv* jni, jclass, jlong native_factory, jobject j_egl_context) {
+(JNIEnv* jni, jclass, jlong native_factory, jobject j_egl_context,
+ jboolean is_screencast) {
   OwnedFactoryAndThreads* factory =
       reinterpret_cast<OwnedFactoryAndThreads*>(native_factory);
   rtc::scoped_refptr<webrtc::AndroidVideoTrackSource> source(
       new rtc::RefCountedObject<webrtc::AndroidVideoTrackSource>(
-          factory->signaling_thread(), jni, j_egl_context));
+          factory->signaling_thread(), jni, j_egl_context, is_screencast));
   rtc::scoped_refptr<webrtc::VideoTrackSourceProxy> proxy_source =
       webrtc::VideoTrackSourceProxy::Create(factory->signaling_thread(),
                                             factory->worker_thread(), source);

androidvideotracksource.cc

@@ -16,12 +16,14 @@ namespace webrtc {
 
 AndroidVideoTrackSource::AndroidVideoTrackSource(rtc::Thread* signaling_thread,
                                                  JNIEnv* jni,
-                                                 jobject j_egl_context)
+                                                 jobject j_egl_context,
+                                                 bool is_screencast)
     : signaling_thread_(signaling_thread),
       surface_texture_helper_(webrtc_jni::SurfaceTextureHelper::create(
           jni,
           "Camera SurfaceTextureHelper",
-          j_egl_context)) {
+          j_egl_context)),
+      is_screencast_(is_screencast) {
   LOG(LS_INFO) << "AndroidVideoTrackSource ctor";
   worker_thread_checker_.DetachFromThread();
   camera_thread_checker_.DetachFromThread();

androidvideotracksource.h

@@ -31,7 +31,8 @@ class AndroidVideoTrackSource : public Notifier<VideoTrackSourceInterface> {
  public:
   AndroidVideoTrackSource(rtc::Thread* signaling_thread,
                           JNIEnv* jni,
-                          jobject j_egl_context);
+                          jobject j_egl_context,
+                          bool is_screencast = false);
 
   // Not used on Android.
   // TODO(sakal/magjed): Try to remove this from the interface.
@@ -40,8 +41,7 @@ class AndroidVideoTrackSource : public Notifier<VideoTrackSourceInterface> {
   // TODO(sakal/magjed): Try to remove this from the interface.
   void Restart() override { RTC_NOTREACHED(); }
 
-  // Currently, none of the Android implementations are screencast.
-  bool is_screencast() const override { return false; }
+  bool is_screencast() const override { return is_screencast_; }
 
   // Indicates that the encoder should denoise video before encoding it.
   // If it is not set, the default configuration is used which is different
@@ -102,6 +102,7 @@ class AndroidVideoTrackSource : public Notifier<VideoTrackSourceInterface> {
   webrtc::I420BufferPool pre_scale_pool_;
   webrtc::I420BufferPool post_scale_pool_;
   rtc::scoped_refptr<webrtc_jni::SurfaceTextureHelper> surface_texture_helper_;
+  const bool is_screencast_;
 
   void OnFrame(const cricket::VideoFrame& frame, int width, int height);