platform-external-webrtc/sdk/android/api/org/webrtc/ScreenCapturerAndroid.java
Magnus Jedvert 1a759c6354 Android: Only use Java VideoFrames internally
This CL removes internal support for anything other than Android frames
that are wrapped as Java VideoFrames. This allows for a big internal
cleanup: we can remove the internal class AndroidTextureBuffer and
all logic related to it. Also, the C++ AndroidVideoTrackSource no
longer needs to hold on to a C++ SurfaceTextureHelper, and we can
remove all JNI code related to SurfaceTextureHelper. Once these
methods are removed, VideoSource can implement the
CapturerObserver interface directly and there is no longer any need for
AndroidVideoTrackSourceObserver. Clients can then initialize
VideoCapturers themselves outside the PeerConnectionFactory, and a new
method is added to PeerConnectionFactory that allows clients to create
standalone VideoSources that can be connected to a VideoCapturer outside
the factory.
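
A hedged sketch of the client-side wiring this CL describes (the factory
method name, the initialize() signature, and all variable names are
assumptions inferred from the message above, not confirmed by this file):

    // Capture thread shared between the capturer and its frame consumers.
    SurfaceTextureHelper helper =
        SurfaceTextureHelper.create("ScreenCaptureThread", eglBase.getEglBaseContext());
    // Standalone VideoSource created by the factory; per this CL it implements
    // CapturerObserver directly, so no AndroidVideoTrackSourceObserver is needed.
    VideoSource videoSource = peerConnectionFactory.createVideoSource(/* isScreencast= */ true);
    VideoCapturer capturer = new ScreenCapturerAndroid(permissionResultData, projectionCallback);
    // The client wires the capturer to the source outside the factory.
    capturer.initialize(helper, applicationContext, videoSource);
    capturer.startCapture(/* width= */ 1280, /* height= */ 720, /* framerate= */ 30);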

Bug: webrtc:9181
Change-Id: Ie292ea9214f382d44dce9120725c62602a646ed8
Reviewed-on: https://webrtc-review.googlesource.com/71666
Commit-Queue: Magnus Jedvert <magjed@webrtc.org>
Reviewed-by: Sami Kalliomäki <sakal@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#23004}
2018-04-24 13:51:11 +00:00

/*
 * Copyright 2016 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

package org.webrtc;

import android.annotation.TargetApi;
import android.app.Activity;
import android.content.Context;
import android.content.Intent;
import android.hardware.display.DisplayManager;
import android.hardware.display.VirtualDisplay;
import android.media.projection.MediaProjection;
import android.media.projection.MediaProjectionManager;
import android.view.Surface;
import javax.annotation.Nullable;

/**
 * An implementation of VideoCapturer to capture the screen content as a video stream.
 * Capturing is done by {@code MediaProjection} on a {@code SurfaceTexture}. We interact with this
 * {@code SurfaceTexture} using a {@code SurfaceTextureHelper}.
 * The {@code SurfaceTextureHelper} is created by the native code and passed to this capturer in
 * {@code VideoCapturer.initialize()}. On receiving a new frame, this capturer passes it
 * as a texture to the native code via {@code CapturerObserver.onFrameCaptured()}. This takes
 * place on the HandlerThread of the given {@code SurfaceTextureHelper}. When done with each frame,
 * the native code returns the buffer to the {@code SurfaceTextureHelper} to be used for new
 * frames. At any time, at most one frame is being processed.
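 *
 * <p>A minimal lifecycle sketch (the surrounding variables are hypothetical; error handling
 * omitted):
 * <pre>{@code
 * ScreenCapturerAndroid capturer =
 *     new ScreenCapturerAndroid(permissionResultData, new MediaProjection.Callback() {});
 * capturer.initialize(surfaceTextureHelper, applicationContext, capturerObserver);
 * capturer.startCapture(width, height, 0); // The framerate parameter is ignored.
 * // ... later ...
 * capturer.stopCapture();
 * capturer.dispose();
 * }</pre>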
 */
@TargetApi(21)
public class ScreenCapturerAndroid
    implements VideoCapturer, SurfaceTextureHelper.OnTextureFrameAvailableListener {
  private static final int DISPLAY_FLAGS =
      DisplayManager.VIRTUAL_DISPLAY_FLAG_PUBLIC | DisplayManager.VIRTUAL_DISPLAY_FLAG_PRESENTATION;
  // DPI for VirtualDisplay, does not seem to matter for us.
  private static final int VIRTUAL_DISPLAY_DPI = 400;

  private final Intent mediaProjectionPermissionResultData;
  private final MediaProjection.Callback mediaProjectionCallback;

  private int width;
  private int height;
  @Nullable private VirtualDisplay virtualDisplay;
  @Nullable private SurfaceTextureHelper surfaceTextureHelper;
  @Nullable private CapturerObserver capturerObserver;
  private long numCapturedFrames = 0;
  @Nullable private MediaProjection mediaProjection;
  private boolean isDisposed = false;
  @Nullable private MediaProjectionManager mediaProjectionManager;

  /**
   * Constructs a new Screen Capturer.
   *
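   * <p>A hedged sketch of obtaining the permission result in the host Activity (the request code
   * and variable names are hypothetical):
   * <pre>{@code
   * MediaProjectionManager manager =
   *     (MediaProjectionManager) getSystemService(Context.MEDIA_PROJECTION_SERVICE);
   * startActivityForResult(manager.createScreenCaptureIntent(), CAPTURE_PERMISSION_REQUEST_CODE);
   *
   * // In onActivityResult(int requestCode, int resultCode, Intent data):
   * if (requestCode == CAPTURE_PERMISSION_REQUEST_CODE && resultCode == Activity.RESULT_OK) {
   *   VideoCapturer capturer = new ScreenCapturerAndroid(data, new MediaProjection.Callback() {
   *     public void onStop() {
   *       // The user has revoked the capture permission.
   *     }
   *   });
   * }
   * }</pre>
   *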
   * @param mediaProjectionPermissionResultData the result data of MediaProjection permission
   *     activity; the calling app must validate that result code is Activity.RESULT_OK before
   *     calling this method.
   * @param mediaProjectionCallback MediaProjection callback to implement application specific
   *     logic in events such as when the user revokes a previously granted capture permission.
   */
  public ScreenCapturerAndroid(Intent mediaProjectionPermissionResultData,
      MediaProjection.Callback mediaProjectionCallback) {
    this.mediaProjectionPermissionResultData = mediaProjectionPermissionResultData;
    this.mediaProjectionCallback = mediaProjectionCallback;
  }

  private void checkNotDisposed() {
    if (isDisposed) {
      throw new RuntimeException("capturer is disposed.");
    }
  }

  @Override
  // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
  @SuppressWarnings("NoSynchronizedMethodCheck")
  public synchronized void initialize(final SurfaceTextureHelper surfaceTextureHelper,
      final Context applicationContext, final VideoCapturer.CapturerObserver capturerObserver) {
    checkNotDisposed();
    if (capturerObserver == null) {
      throw new RuntimeException("capturerObserver not set.");
    }
    this.capturerObserver = capturerObserver;
    if (surfaceTextureHelper == null) {
      throw new RuntimeException("surfaceTextureHelper not set.");
    }
    this.surfaceTextureHelper = surfaceTextureHelper;
    mediaProjectionManager = (MediaProjectionManager) applicationContext.getSystemService(
        Context.MEDIA_PROJECTION_SERVICE);
  }

  @Override
  // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
  @SuppressWarnings("NoSynchronizedMethodCheck")
  public synchronized void startCapture(
      final int width, final int height, final int ignoredFramerate) {
    checkNotDisposed();

    this.width = width;
    this.height = height;

    mediaProjection = mediaProjectionManager.getMediaProjection(
        Activity.RESULT_OK, mediaProjectionPermissionResultData);

    // Let MediaProjection callback use the SurfaceTextureHelper thread.
    mediaProjection.registerCallback(mediaProjectionCallback, surfaceTextureHelper.getHandler());

    createVirtualDisplay();
    capturerObserver.onCapturerStarted(true);
    surfaceTextureHelper.startListening(ScreenCapturerAndroid.this);
  }

  @Override
  // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
  @SuppressWarnings("NoSynchronizedMethodCheck")
  public synchronized void stopCapture() {
    checkNotDisposed();
    ThreadUtils.invokeAtFrontUninterruptibly(surfaceTextureHelper.getHandler(), new Runnable() {
      @Override
      public void run() {
        surfaceTextureHelper.stopListening();
        capturerObserver.onCapturerStopped();

        if (virtualDisplay != null) {
          virtualDisplay.release();
          virtualDisplay = null;
        }

        if (mediaProjection != null) {
          // Unregister the callback before stopping, otherwise the callback recursively
          // calls this method.
          mediaProjection.unregisterCallback(mediaProjectionCallback);
          mediaProjection.stop();
          mediaProjection = null;
        }
      }
    });
  }

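  /**
   * Marks this capturer as disposed. This only sets a flag; it does not stop an ongoing capture.
   * After this call, initialize, startCapture, stopCapture, and changeCaptureFormat all throw.
   */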
  @Override
  // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
  @SuppressWarnings("NoSynchronizedMethodCheck")
  public synchronized void dispose() {
    isDisposed = true;
  }

  /**
   * Changes output video format. This method can be used to scale the output
   * video, or to change orientation when the captured screen is rotated for example.
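   *
   * <p>For example, to follow a 90-degree screen rotation, a client might swap the current
   * dimensions (a sketch; the surrounding variables are hypothetical):
   * <pre>{@code
   * capturer.changeCaptureFormat(currentHeight, currentWidth, 0); // Framerate is ignored.
   * }</pre>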
   *
   * @param width new output video width
   * @param height new output video height
   * @param ignoredFramerate ignored
   */
  @Override
  // TODO(bugs.webrtc.org/8491): Remove NoSynchronizedMethodCheck suppression.
  @SuppressWarnings("NoSynchronizedMethodCheck")
  public synchronized void changeCaptureFormat(
      final int width, final int height, final int ignoredFramerate) {
    checkNotDisposed();

    this.width = width;
    this.height = height;

    if (virtualDisplay == null) {
      // Capturer is stopped; the virtual display will be created in startCapture().
      return;
    }

    // Create a new virtual display on the surfaceTextureHelper thread to avoid interference
    // with frame processing, which happens on the same thread (we serialize events by running
    // them on the same thread).
    ThreadUtils.invokeAtFrontUninterruptibly(surfaceTextureHelper.getHandler(), new Runnable() {
      @Override
      public void run() {
        virtualDisplay.release();
        createVirtualDisplay();
      }
    });
  }

  private void createVirtualDisplay() {
    surfaceTextureHelper.getSurfaceTexture().setDefaultBufferSize(width, height);
    virtualDisplay = mediaProjection.createVirtualDisplay("WebRTC_ScreenCapture", width, height,
        VIRTUAL_DISPLAY_DPI, DISPLAY_FLAGS, new Surface(surfaceTextureHelper.getSurfaceTexture()),
        null /* callback */, null /* callback handler */);
  }

  // This is called on the internal looper thread of {@code SurfaceTextureHelper}.
  @Override
  public void onTextureFrameAvailable(int oesTextureId, float[] transformMatrix, long timestampNs) {
    numCapturedFrames++;
    final VideoFrame.Buffer buffer = surfaceTextureHelper.createTextureBuffer(
        width, height, RendererCommon.convertMatrixToAndroidGraphicsMatrix(transformMatrix));
    final VideoFrame frame = new VideoFrame(buffer, 0 /* rotation */, timestampNs);
    capturerObserver.onFrameCaptured(frame);
    frame.release();
  }

  @Override
  public boolean isScreencast() {
    return true;
  }

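  /** Returns the number of frames this capturer has delivered to its observer so far. */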
  public long getNumCapturedFrames() {
    return numCapturedFrames;
  }
}