VideoCapturer: Update interface
This CL changes the VideoCapturer interface by adding a SurfaceTextureHelper argument to VideoCapturer.startCapture(). This removes the need for the VideoCapturer to create the SurfaceTextureHelper itself, and it also means that it is no longer necessary to pass an EGLContext to VideoCapturerAndroid.create(). The SurfaceTextureHelper is now created in AndroidVideoCapturerJni, and the EGLContext is passed from PeerConnectionFactory in nativeCreateVideoSource().

Another change in this CL is that the C++ SurfaceTextureHelper now creates the Java SurfaceTextureHelper itself instead of receiving it as a constructor argument.

BUG=webrtc:5519

Review URL: https://codereview.webrtc.org/1783793002

Cr-Commit-Position: refs/heads/master@{#11977}
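For orientation, here is a minimal sketch of the updated Java call sequence, modeled on the test fixtures touched by this CL. The wrapper class and method name are hypothetical; the org.webrtc types, the captureToTexture flag, and the new startCapture() signature are the ones introduced by this change, and the caller now owns the SurfaceTextureHelper.

import android.content.Context;
import org.webrtc.CameraEnumerationAndroid;
import org.webrtc.SurfaceTextureHelper;
import org.webrtc.VideoCapturer;
import org.webrtc.VideoCapturerAndroid;

// Hypothetical example, not part of this CL.
class CaptureStartExample {
  static SurfaceTextureHelper startCapturer(
      Context appContext, VideoCapturer.CapturerObserver observer) {
    // The EGL context is no longer passed to create(); only a captureToTexture flag.
    // create() may return null if no camera matches the given name.
    VideoCapturerAndroid capturer =
        VideoCapturerAndroid.create("", null /* eventsHandler */, true /* captureToTexture */);
    // The caller creates the SurfaceTextureHelper and hands it to startCapture().
    SurfaceTextureHelper surfaceTextureHelper =
        SurfaceTextureHelper.create(null /* sharedContext */);
    CameraEnumerationAndroid.CaptureFormat format = capturer.getSupportedFormats().get(0);
    capturer.startCapture(format.width, format.height, format.maxFramerate,
        surfaceTextureHelper, appContext, observer);
    // After capturer.stopCapture() and capturer.dispose(), the caller must also
    // call surfaceTextureHelper.dispose().
    return surfaceTextureHelper;
  }
}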
@@ -67,10 +67,8 @@ public class VideoCapturerAndroidTest extends ActivityTestCase {

   @SmallTest
   public void testCreateAndReleaseUsingTextures() {
-    EglBase eglBase = EglBase.create();
     VideoCapturerAndroidTestFixtures.release(
-        VideoCapturerAndroid.create("", null, eglBase.getEglBaseContext()));
-    eglBase.release();
+        VideoCapturerAndroid.create("", null, true /* captureToTexture */));
   }

   @SmallTest
@@ -91,7 +89,7 @@ public class VideoCapturerAndroidTest extends ActivityTestCase {
   }

   @SmallTest
-  public void testStartVideoCapturerUsingTextures() throws InterruptedException {
+  public void testStartVideoCapturerUsingTexturesDeprecated() throws InterruptedException {
     EglBase eglBase = EglBase.create();
     VideoCapturerAndroid capturer =
         VideoCapturerAndroid.create("", null, eglBase.getEglBaseContext());
@@ -99,6 +97,13 @@ public class VideoCapturerAndroidTest extends ActivityTestCase {
     eglBase.release();
   }

+  @SmallTest
+  public void testStartVideoCapturerUsingTextures() throws InterruptedException {
+    VideoCapturerAndroid capturer =
+        VideoCapturerAndroid.create("", null, true /* captureToTexture */);
+    VideoCapturerAndroidTestFixtures.startCapturerAndRender(capturer);
+  }
+
   @SmallTest
   // This test that the camera can be started and that the frames are forwarded
   // to a Java video renderer using the front facing video capturer.
@@ -136,11 +141,9 @@ public class VideoCapturerAndroidTest extends ActivityTestCase {

   @SmallTest
   public void testSwitchVideoCapturerUsingTextures() throws InterruptedException {
-    EglBase eglBase = EglBase.create();
     VideoCapturerAndroid capturer =
-        VideoCapturerAndroid.create("", null, eglBase.getEglBaseContext());
+        VideoCapturerAndroid.create("", null, true /* captureToTexture */);
     VideoCapturerAndroidTestFixtures.switchCamera(capturer);
-    eglBase.release();
   }

   @MediumTest
@@ -154,14 +157,12 @@ public class VideoCapturerAndroidTest extends ActivityTestCase {

   @MediumTest
   public void testCameraEventsUsingTextures() throws InterruptedException {
-    EglBase eglBase = EglBase.create();
     VideoCapturerAndroidTestFixtures.CameraEvents cameraEvents =
         VideoCapturerAndroidTestFixtures.createCameraEvents();
     VideoCapturerAndroid capturer =
-        VideoCapturerAndroid.create("", cameraEvents, eglBase.getEglBaseContext());
+        VideoCapturerAndroid.create("", cameraEvents, true /* captureToTexture */);
     VideoCapturerAndroidTestFixtures.cameraEventsInvoked(
         capturer, cameraEvents, getInstrumentation().getContext());
-    eglBase.release();
   }

   @MediumTest
@@ -176,14 +177,12 @@ public class VideoCapturerAndroidTest extends ActivityTestCase {

   @MediumTest
   public void testCameraCallsAfterStopUsingTextures() throws InterruptedException {
-    EglBase eglBase = EglBase.create();
     final String deviceName = CameraEnumerationAndroid.getDeviceName(0);
     final VideoCapturerAndroid capturer = VideoCapturerAndroid.create(deviceName, null,
-        eglBase.getEglBaseContext());
+        true /* captureToTexture */);

     VideoCapturerAndroidTestFixtures.cameraCallsAfterStop(capturer,
         getInstrumentation().getContext());
-    eglBase.release();
   }

   @SmallTest
@@ -196,11 +195,9 @@ public class VideoCapturerAndroidTest extends ActivityTestCase {

   @SmallTest
   public void testStopRestartVideoSourceUsingTextures() throws InterruptedException {
-    EglBase eglBase = EglBase.create();
     VideoCapturerAndroid capturer =
-        VideoCapturerAndroid.create("", null, eglBase.getEglBaseContext());
+        VideoCapturerAndroid.create("", null, true /* captureToTexture */);
     VideoCapturerAndroidTestFixtures.stopRestartVideoSource(capturer);
-    eglBase.release();
   }

   @SmallTest
@@ -216,13 +213,11 @@ public class VideoCapturerAndroidTest extends ActivityTestCase {

   @SmallTest
   public void testStartStopWithDifferentResolutionsUsingTextures() throws InterruptedException {
-    EglBase eglBase = EglBase.create();
     String deviceName = CameraEnumerationAndroid.getDeviceName(0);
     VideoCapturerAndroid capturer =
-        VideoCapturerAndroid.create(deviceName, null, eglBase.getEglBaseContext());
+        VideoCapturerAndroid.create(deviceName, null, true /* captureToTexture */);
     VideoCapturerAndroidTestFixtures.startStopWithDifferentResolutions(capturer,
         getInstrumentation().getContext());
-    eglBase.release();
   }

   @SmallTest
@@ -273,13 +268,11 @@ public class VideoCapturerAndroidTest extends ActivityTestCase {

   @SmallTest
   public void testReturnBufferLateUsingTextures() throws InterruptedException {
-    EglBase eglBase = EglBase.create();
     String deviceName = CameraEnumerationAndroid.getDeviceName(0);
     VideoCapturerAndroid capturer =
-        VideoCapturerAndroid.create(deviceName, null, eglBase.getEglBaseContext());
+        VideoCapturerAndroid.create(deviceName, null, true /* captureToTexture */);
     VideoCapturerAndroidTestFixtures.returnBufferLate(capturer,
         getInstrumentation().getContext());
-    eglBase.release();
   }

   @MediumTest
@@ -293,25 +286,21 @@ public class VideoCapturerAndroidTest extends ActivityTestCase {

   @MediumTest
   public void testReturnBufferLateEndToEndUsingTextures() throws InterruptedException {
-    EglBase eglBase = EglBase.create();
     final VideoCapturerAndroid capturer =
-        VideoCapturerAndroid.create("", null, eglBase.getEglBaseContext());
+        VideoCapturerAndroid.create("", null, true /* captureToTexture */);
     VideoCapturerAndroidTestFixtures.returnBufferLateEndToEnd(capturer);
-    eglBase.release();
   }

   @MediumTest
   // This test that CameraEventsHandler.onError is triggered if video buffers are not returned to
   // the capturer.
   public void testCameraFreezedEventOnBufferStarvationUsingTextures() throws InterruptedException {
-    EglBase eglBase = EglBase.create();
     VideoCapturerAndroidTestFixtures.CameraEvents cameraEvents =
         VideoCapturerAndroidTestFixtures.createCameraEvents();
     VideoCapturerAndroid capturer = VideoCapturerAndroid.create("", cameraEvents,
-        eglBase.getEglBaseContext());
+        true /* captureToTexture */);
     VideoCapturerAndroidTestFixtures.cameraFreezedEventOnBufferStarvationUsingTextures(capturer,
         cameraEvents, getInstrumentation().getContext());
-    eglBase.release();
   }

   @MediumTest
@@ -326,10 +315,8 @@ public class VideoCapturerAndroidTest extends ActivityTestCase {
   // This test that frames forwarded to a renderer is scaled if onOutputFormatRequest is
   // called. This test both Java and C++ parts of of the stack.
   public void testScaleCameraOutputUsingTextures() throws InterruptedException {
-    EglBase eglBase = EglBase.create();
     VideoCapturerAndroid capturer =
-        VideoCapturerAndroid.create("", null, eglBase.getEglBaseContext());
+        VideoCapturerAndroid.create("", null, true /* captureToTexture */);
     VideoCapturerAndroidTestFixtures.scaleCameraOutput(capturer);
-    eglBase.release();
   }
 }
@@ -286,17 +286,20 @@ public class VideoCapturerAndroidTestFixtures {
     final List<CaptureFormat> formats = capturer.getSupportedFormats();
     final CameraEnumerationAndroid.CaptureFormat format = formats.get(0);

+    final SurfaceTextureHelper surfaceTextureHelper =
+        SurfaceTextureHelper.create(null /* sharedContext */);
     final FakeCapturerObserver observer = new FakeCapturerObserver();
     capturer.startCapture(format.width, format.height, format.maxFramerate,
-        appContext, observer);
+        surfaceTextureHelper, appContext, observer);
     // Make sure camera is started and first frame is received and then stop it.
     assertTrue(observer.WaitForCapturerToStart());
     observer.WaitForNextCapturedFrame();
     capturer.stopCapture();
     if (capturer.isCapturingToTexture()) {
-      capturer.surfaceHelper.returnTextureFrame();
+      surfaceTextureHelper.returnTextureFrame();
     }
     release(capturer);
+    surfaceTextureHelper.dispose();

     assertTrue(events.onCameraOpeningCalled);
     assertTrue(events.onFirstFrameAvailableCalled);
@@ -307,14 +310,16 @@ public class VideoCapturerAndroidTestFixtures {
     final List<CaptureFormat> formats = capturer.getSupportedFormats();
     final CameraEnumerationAndroid.CaptureFormat format = formats.get(0);

+    final SurfaceTextureHelper surfaceTextureHelper =
+        SurfaceTextureHelper.create(null /* sharedContext */);
     final FakeCapturerObserver observer = new FakeCapturerObserver();
     capturer.startCapture(format.width, format.height, format.maxFramerate,
-        appContext, observer);
+        surfaceTextureHelper, appContext, observer);
     // Make sure camera is started and then stop it.
     assertTrue(observer.WaitForCapturerToStart());
     capturer.stopCapture();
     if (capturer.isCapturingToTexture()) {
-      capturer.surfaceHelper.returnTextureFrame();
+      surfaceTextureHelper.returnTextureFrame();
     }

     // We can't change |capturer| at this point, but we should not crash.
@@ -323,6 +328,7 @@ public class VideoCapturerAndroidTestFixtures {
     capturer.changeCaptureFormat(640, 480, 15);

     release(capturer);
+    surfaceTextureHelper.dispose();
   }

   static public void stopRestartVideoSource(VideoCapturerAndroid capturer)
@@ -350,13 +356,15 @@ public class VideoCapturerAndroidTestFixtures {

   static public void startStopWithDifferentResolutions(VideoCapturerAndroid capturer,
       Context appContext) throws InterruptedException {
+    final SurfaceTextureHelper surfaceTextureHelper =
+        SurfaceTextureHelper.create(null /* sharedContext */);
     FakeCapturerObserver observer = new FakeCapturerObserver();
     List<CaptureFormat> formats = capturer.getSupportedFormats();

     for(int i = 0; i < 3 ; ++i) {
       CameraEnumerationAndroid.CaptureFormat format = formats.get(i);
       capturer.startCapture(format.width, format.height, format.maxFramerate,
-          appContext, observer);
+          surfaceTextureHelper, appContext, observer);
       assertTrue(observer.WaitForCapturerToStart());
       observer.WaitForNextCapturedFrame();

@@ -378,10 +386,11 @@ public class VideoCapturerAndroidTestFixtures {
       }
       capturer.stopCapture();
       if (capturer.isCapturingToTexture()) {
-        capturer.surfaceHelper.returnTextureFrame();
+        surfaceTextureHelper.returnTextureFrame();
       }
     }
     release(capturer);
+    surfaceTextureHelper.dispose();
   }

   static void waitUntilIdle(VideoCapturerAndroid capturer) throws InterruptedException {
@@ -400,9 +409,11 @@ public class VideoCapturerAndroidTestFixtures {
     final CameraEnumerationAndroid.CaptureFormat format = formats.get(0);
     Camera camera = Camera.open(capturer.getCurrentCameraId());

+    final SurfaceTextureHelper surfaceTextureHelper =
+        SurfaceTextureHelper.create(null /* sharedContext */);
     final FakeCapturerObserver observer = new FakeCapturerObserver();
     capturer.startCapture(format.width, format.height, format.maxFramerate,
-        appContext, observer);
+        surfaceTextureHelper, appContext, observer);

     if (android.os.Build.VERSION.SDK_INT > android.os.Build.VERSION_CODES.LOLLIPOP_MR1) {
       // The first opened camera client will be evicted.
@@ -414,6 +425,7 @@ public class VideoCapturerAndroidTestFixtures {

     release(capturer);
     camera.release();
+    surfaceTextureHelper.dispose();
   }

   static public void startWhileCameraIsAlreadyOpenAndCloseCamera(
@@ -422,9 +434,11 @@ public class VideoCapturerAndroidTestFixtures {
     final CameraEnumerationAndroid.CaptureFormat format = formats.get(0);
     Camera camera = Camera.open(capturer.getCurrentCameraId());

+    final SurfaceTextureHelper surfaceTextureHelper =
+        SurfaceTextureHelper.create(null /* sharedContext */);
     final FakeCapturerObserver observer = new FakeCapturerObserver();
     capturer.startCapture(format.width, format.height, format.maxFramerate,
-        appContext, observer);
+        surfaceTextureHelper, appContext, observer);
     waitUntilIdle(capturer);

     camera.release();
@@ -434,9 +448,10 @@ public class VideoCapturerAndroidTestFixtures {
     observer.WaitForNextCapturedFrame();
     capturer.stopCapture();
     if (capturer.isCapturingToTexture()) {
-      capturer.surfaceHelper.returnTextureFrame();
+      surfaceTextureHelper.returnTextureFrame();
     }
     release(capturer);
+    surfaceTextureHelper.dispose();
   }

   static public void startWhileCameraIsAlreadyOpenAndStop(
@@ -445,22 +460,27 @@ public class VideoCapturerAndroidTestFixtures {
     final CameraEnumerationAndroid.CaptureFormat format = formats.get(0);
     Camera camera = Camera.open(capturer.getCurrentCameraId());

+    final SurfaceTextureHelper surfaceTextureHelper =
+        SurfaceTextureHelper.create(null /* sharedContext */);
     final FakeCapturerObserver observer = new FakeCapturerObserver();
     capturer.startCapture(format.width, format.height, format.maxFramerate,
-        appContext, observer);
+        surfaceTextureHelper, appContext, observer);
     capturer.stopCapture();
     release(capturer);
     camera.release();
+    surfaceTextureHelper.dispose();
   }

   static public void returnBufferLate(VideoCapturerAndroid capturer,
       Context appContext) throws InterruptedException {
+    final SurfaceTextureHelper surfaceTextureHelper =
+        SurfaceTextureHelper.create(null /* sharedContext */);
     FakeCapturerObserver observer = new FakeCapturerObserver();

     List<CaptureFormat> formats = capturer.getSupportedFormats();
     CameraEnumerationAndroid.CaptureFormat format = formats.get(0);
     capturer.startCapture(format.width, format.height, format.maxFramerate,
-        appContext, observer);
+        surfaceTextureHelper, appContext, observer);
     assertTrue(observer.WaitForCapturerToStart());

     observer.WaitForNextCapturedFrame();
@@ -470,10 +490,10 @@ public class VideoCapturerAndroidTestFixtures {

     format = formats.get(1);
     capturer.startCapture(format.width, format.height, format.maxFramerate,
-        appContext, observer);
+        surfaceTextureHelper, appContext, observer);
     observer.WaitForCapturerToStart();
     if (capturer.isCapturingToTexture()) {
-      capturer.surfaceHelper.returnTextureFrame();
+      surfaceTextureHelper.returnTextureFrame();
     }

     observer.WaitForNextCapturedFrame();
@@ -482,10 +502,11 @@ public class VideoCapturerAndroidTestFixtures {
     listOftimestamps = observer.getCopyAndResetListOftimeStamps();
     assertTrue(listOftimestamps.size() >= 1);
     if (capturer.isCapturingToTexture()) {
-      capturer.surfaceHelper.returnTextureFrame();
+      surfaceTextureHelper.returnTextureFrame();
     }

     release(capturer);
+    surfaceTextureHelper.dispose();
   }

   static public void returnBufferLateEndToEnd(VideoCapturerAndroid capturer)
@@ -529,9 +550,11 @@ public class VideoCapturerAndroidTestFixtures {
     final List<CaptureFormat> formats = capturer.getSupportedFormats();
     final CameraEnumerationAndroid.CaptureFormat format = formats.get(0);

+    final SurfaceTextureHelper surfaceTextureHelper =
+        SurfaceTextureHelper.create(null /* sharedContext */);
     final FakeCapturerObserver observer = new FakeCapturerObserver();
     capturer.startCapture(format.width, format.height, format.maxFramerate,
-        appContext, observer);
+        surfaceTextureHelper, appContext, observer);
     // Make sure camera is started.
     assertTrue(observer.WaitForCapturerToStart());
     // Since we don't return the buffer, we should get a starvation message if we are
@@ -541,10 +564,11 @@ public class VideoCapturerAndroidTestFixtures {

     capturer.stopCapture();
     if (capturer.isCapturingToTexture()) {
-      capturer.surfaceHelper.returnTextureFrame();
+      surfaceTextureHelper.returnTextureFrame();
     }

     release(capturer);
+    surfaceTextureHelper.dispose();
   }

   static public void scaleCameraOutput(VideoCapturerAndroid capturer) throws InterruptedException {
@@ -81,7 +81,7 @@ public class VideoCapturerAndroid implements
   private static final int NUMBER_OF_CAPTURE_BUFFERS = 3;
   private final Set<byte[]> queuedBuffers = new HashSet<byte[]>();
   private final boolean isCapturingToTexture;
-  final SurfaceTextureHelper surfaceHelper; // Package visible for testing purposes.
+  private SurfaceTextureHelper surfaceHelper;
   // The camera API can output one old frame after the camera has been switched or the resolution
   // has been changed. This flag is used for dropping the first frame after camera restart.
   private boolean dropNextFrame = false;
@@ -187,16 +187,22 @@ public class VideoCapturerAndroid implements

   public static VideoCapturerAndroid create(String name,
       CameraEventsHandler eventsHandler) {
-    return VideoCapturerAndroid.create(name, eventsHandler, null);
+    return VideoCapturerAndroid.create(name, eventsHandler, false /* captureToTexture */);
   }

+  // Deprecated. Use create() function below instead.
+  public static VideoCapturerAndroid create(String name,
+      CameraEventsHandler eventsHandler, EglBase.Context sharedEglContext) {
+    return create(name, eventsHandler, (sharedEglContext != null) /* captureToTexture */);
+  }
+
   public static VideoCapturerAndroid create(String name,
-      CameraEventsHandler eventsHandler, EglBase.Context sharedEglContext) {
+      CameraEventsHandler eventsHandler, boolean captureToTexture) {
     final int cameraId = lookupDeviceName(name);
     if (cameraId == -1) {
       return null;
     }
-    return new VideoCapturerAndroid(cameraId, eventsHandler, sharedEglContext);
+    return new VideoCapturerAndroid(cameraId, eventsHandler, captureToTexture);
   }

   public void printStackTrace() {
@@ -297,18 +303,12 @@ public class VideoCapturerAndroid implements
     return isCapturingToTexture;
   }

-  @Override
-  public SurfaceTextureHelper getSurfaceTextureHelper() {
-    return surfaceHelper;
-  }
-
   private VideoCapturerAndroid(int cameraId, CameraEventsHandler eventsHandler,
-      EglBase.Context sharedContext) {
+      boolean captureToTexture) {
     this.id = cameraId;
     this.eventsHandler = eventsHandler;
-    isCapturingToTexture = (sharedContext != null);
+    isCapturingToTexture = captureToTexture;
     cameraStatistics = new CameraStatistics();
-    surfaceHelper = SurfaceTextureHelper.create(sharedContext);
     Logging.d(TAG, "VideoCapturerAndroid isCapturingToTexture : " + isCapturingToTexture);
   }

@@ -361,7 +361,6 @@ public class VideoCapturerAndroid implements
         throw new IllegalStateException("dispose() called while camera is running");
       }
     }
-    surfaceHelper.dispose();
     isDisposed = true;
   }

@@ -375,8 +374,12 @@ public class VideoCapturerAndroid implements
   @Override
   public void startCapture(
       final int width, final int height, final int framerate,
-      final Context applicationContext, final CapturerObserver frameObserver) {
+      final SurfaceTextureHelper surfaceTextureHelper, final Context applicationContext,
+      final CapturerObserver frameObserver) {
     Logging.d(TAG, "startCapture requested: " + width + "x" + height + "@" + framerate);
+    if (surfaceTextureHelper == null) {
+      throw new IllegalArgumentException("surfaceTextureHelper not set.");
+    }
     if (applicationContext == null) {
       throw new IllegalArgumentException("applicationContext not set.");
     }
@@ -387,7 +390,8 @@ public class VideoCapturerAndroid implements
     if (this.cameraThreadHandler != null) {
       throw new RuntimeException("Camera has already been started.");
     }
-    this.cameraThreadHandler = surfaceHelper.getHandler();
+    this.cameraThreadHandler = surfaceTextureHelper.getHandler();
+    this.surfaceHelper = surfaceTextureHelper;
     final boolean didPost = maybePostOnCameraThread(new Runnable() {
       @Override
       public void run() {
@@ -579,6 +583,7 @@ public class VideoCapturerAndroid implements
       // Remove all pending Runnables posted from |this|.
       cameraThreadHandler.removeCallbacksAndMessages(this /* token */);
       cameraThreadHandler = null;
+      surfaceHelper = null;
     }
     barrier.countDown();
   }

@@ -343,19 +343,9 @@ int32_t MediaCodecVideoDecoder::InitDecodeOnCodecThread() {

   ResetVariables();

-  jobject java_surface_texture_helper_ = nullptr;
   if (use_surface_) {
-    java_surface_texture_helper_ = jni->CallStaticObjectMethod(
-        FindClass(jni, "org/webrtc/SurfaceTextureHelper"),
-        GetStaticMethodID(jni,
-                          FindClass(jni, "org/webrtc/SurfaceTextureHelper"),
-                          "create",
-                          "(Lorg/webrtc/EglBase$Context;)"
-                          "Lorg/webrtc/SurfaceTextureHelper;"),
-        render_egl_context_);
-    RTC_CHECK(java_surface_texture_helper_ != nullptr);
     surface_texture_helper_ = new rtc::RefCountedObject<SurfaceTextureHelper>(
-        jni, java_surface_texture_helper_);
+        jni, render_egl_context_);
   }

   jobject j_video_codec_enum = JavaEnumFromIndexAndClassName(
@@ -366,7 +356,8 @@ int32_t MediaCodecVideoDecoder::InitDecodeOnCodecThread() {
       j_video_codec_enum,
       codec_.width,
       codec_.height,
-      java_surface_texture_helper_);
+      use_surface_ ? surface_texture_helper_->GetJavaSurfaceTextureHelper()
+                   : nullptr);

   if (CheckException(jni) || !success) {
     ALOGE << "Codec initialization error - fallback to SW codec.";
@@ -33,7 +33,7 @@ int AndroidVideoCapturerJni::SetAndroidObjects(JNIEnv* jni,
 AndroidVideoCapturerJni::AndroidVideoCapturerJni(
     JNIEnv* jni,
     jobject j_video_capturer,
-    jobject j_surface_texture_helper)
+    jobject j_egl_context)
     : j_video_capturer_(jni, j_video_capturer),
       j_video_capturer_class_(
           jni, FindClass(jni, "org/webrtc/VideoCapturer")),
@@ -42,7 +42,7 @@ AndroidVideoCapturerJni::AndroidVideoCapturerJni(
           FindClass(jni,
                     "org/webrtc/VideoCapturer$NativeObserver")),
       surface_texture_helper_(new rtc::RefCountedObject<SurfaceTextureHelper>(
-          jni, j_surface_texture_helper)),
+          jni, j_egl_context)),
       capturer_(nullptr) {
   LOG(LS_INFO) << "AndroidVideoCapturerJni ctor";
   thread_checker_.DetachFromThread();
@@ -54,6 +54,11 @@ AndroidVideoCapturerJni::~AndroidVideoCapturerJni() {
       *j_video_capturer_,
       GetMethodID(jni(), *j_video_capturer_class_, "dispose", "()V"));
   CHECK_EXCEPTION(jni()) << "error during VideoCapturer.dispose()";
+  jni()->CallVoidMethod(
+      surface_texture_helper_->GetJavaSurfaceTextureHelper(),
+      GetMethodID(jni(), FindClass(jni(), "org/webrtc/SurfaceTextureHelper"),
+                  "dispose", "()V"));
+  CHECK_EXCEPTION(jni()) << "error during SurfaceTextureHelper.dispose()";
 }

 void AndroidVideoCapturerJni::Start(int width, int height, int framerate,
@@ -75,11 +80,12 @@ void AndroidVideoCapturerJni::Start(int width, int height, int framerate,

   jmethodID m = GetMethodID(
       jni(), *j_video_capturer_class_, "startCapture",
-      "(IIILandroid/content/Context;"
+      "(IIILorg/webrtc/SurfaceTextureHelper;Landroid/content/Context;"
       "Lorg/webrtc/VideoCapturer$CapturerObserver;)V");
   jni()->CallVoidMethod(*j_video_capturer_,
                         m, width, height,
                         framerate,
+                        surface_texture_helper_->GetJavaSurfaceTextureHelper(),
                         application_context_,
                         j_frame_observer);
   CHECK_EXCEPTION(jni()) << "error during VideoCapturer.startCapture";

@@ -34,7 +34,7 @@ class AndroidVideoCapturerJni : public webrtc::AndroidVideoCapturerDelegate {

   AndroidVideoCapturerJni(JNIEnv* jni,
                           jobject j_video_capturer,
-                          jobject j_surface_texture_helper);
+                          jobject j_egl_context);

   void Start(int width, int height, int framerate,
              webrtc::AndroidVideoCapturer* capturer) override;
@@ -1212,17 +1212,12 @@ JOW(jlong, PeerConnectionFactory_nativeCreateLocalMediaStream)(
 }

 JOW(jlong, PeerConnectionFactory_nativeCreateVideoSource)(
-    JNIEnv* jni, jclass, jlong native_factory, jobject j_video_capturer,
-    jobject j_constraints) {
+    JNIEnv* jni, jclass, jlong native_factory, jobject j_egl_context,
+    jobject j_video_capturer, jobject j_constraints) {
   // Create a cricket::VideoCapturer from |j_video_capturer|.
-  jobject j_surface_texture_helper = jni->CallObjectMethod(
-      j_video_capturer,
-      GetMethodID(jni, FindClass(jni, "org/webrtc/VideoCapturer"),
-                  "getSurfaceTextureHelper",
-                  "()Lorg/webrtc/SurfaceTextureHelper;"));
   rtc::scoped_refptr<webrtc::AndroidVideoCapturerDelegate> delegate =
       new rtc::RefCountedObject<AndroidVideoCapturerJni>(
-          jni, j_video_capturer, j_surface_texture_helper);
+          jni, j_video_capturer, j_egl_context);
   rtc::scoped_ptr<cricket::VideoCapturer> capturer(
       new webrtc::AndroidVideoCapturer(delegate));
   // Create a webrtc::VideoTrackSourceInterface from the cricket::VideoCapturer,
@@ -18,8 +18,15 @@
 namespace webrtc_jni {

 SurfaceTextureHelper::SurfaceTextureHelper(
-    JNIEnv* jni, jobject surface_texture_helper)
-    : j_surface_texture_helper_(jni, surface_texture_helper),
+    JNIEnv* jni, jobject j_egl_context)
+    : j_surface_texture_helper_(jni, jni->CallStaticObjectMethod(
+          FindClass(jni, "org/webrtc/SurfaceTextureHelper"),
+          GetStaticMethodID(jni,
+                            FindClass(jni, "org/webrtc/SurfaceTextureHelper"),
+                            "create",
+                            "(Lorg/webrtc/EglBase$Context;)"
+                            "Lorg/webrtc/SurfaceTextureHelper;"),
+          j_egl_context)),
       j_return_texture_method_(
           GetMethodID(jni,
                       FindClass(jni, "org/webrtc/SurfaceTextureHelper"),
@@ -31,6 +38,10 @@ SurfaceTextureHelper::SurfaceTextureHelper(
 SurfaceTextureHelper::~SurfaceTextureHelper() {
 }

+jobject SurfaceTextureHelper::GetJavaSurfaceTextureHelper() const {
+  return *j_surface_texture_helper_;
+}
+
 void SurfaceTextureHelper::ReturnTextureFrame() const {
   JNIEnv* jni = AttachCurrentThreadIfNeeded();
   jni->CallVoidMethod(*j_surface_texture_helper_, j_return_texture_method_);
@@ -31,14 +31,16 @@ namespace webrtc_jni {
 // destroyed while a VideoFrameBuffer is in use.
 // This class is the C++ counterpart of the java class SurfaceTextureHelper.
 // Usage:
-// 1. Create an java instance of SurfaceTextureHelper.
-// 2. Create an instance of this class.
+// 1. Create an instance of this class.
+// 2. Get the Java SurfaceTextureHelper with GetJavaSurfaceTextureHelper().
 // 3. Register a listener to the Java SurfaceListener and start producing
 // new buffers.
 // 4. Call CreateTextureFrame to wrap the Java texture in a VideoFrameBuffer.
 class SurfaceTextureHelper : public rtc::RefCountInterface {
  public:
-  SurfaceTextureHelper(JNIEnv* jni, jobject surface_texture_helper);
+  SurfaceTextureHelper(JNIEnv* jni, jobject j_egl_context);
+
+  jobject GetJavaSurfaceTextureHelper() const;

   rtc::scoped_refptr<webrtc::VideoFrameBuffer> CreateTextureFrame(
       int width,
@@ -114,8 +114,10 @@ public class PeerConnectionFactory {
   // manually after this.
   public VideoSource createVideoSource(
       VideoCapturer capturer, MediaConstraints constraints) {
-    return new VideoSource(nativeCreateVideoSource(
-        nativeFactory, capturer, constraints));
+    final EglBase.Context eglContext =
+        localEglbase == null ? null : localEglbase.getEglBaseContext();
+    return new VideoSource(nativeCreateVideoSource(nativeFactory,
+        eglContext, capturer, constraints));
   }

   public VideoTrack createVideoTrack(String id, VideoSource source) {
@@ -239,7 +241,8 @@ public class PeerConnectionFactory {
       long nativeFactory, String label);

   private static native long nativeCreateVideoSource(
-      long nativeFactory, VideoCapturer videoCapturer, MediaConstraints constraints);
+      long nativeFactory, EglBase.Context eglContext, VideoCapturer videoCapturer,
+      MediaConstraints constraints);

   private static native long nativeCreateVideoTrack(
       long nativeFactory, String id, long nativeVideoSource);
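Because the EGL context now travels through PeerConnectionFactory rather than the capturer, the application-side wiring keeps its previous shape. A hedged sketch follows; the wrapper class and the "video0" track id are placeholders, and the factory is assumed to already have its local EglBase configured so that createVideoSource() can forward the context to the native layer as shown above.

import org.webrtc.MediaConstraints;
import org.webrtc.PeerConnectionFactory;
import org.webrtc.VideoCapturer;
import org.webrtc.VideoSource;
import org.webrtc.VideoTrack;

// Hypothetical wiring helper, not part of this CL.
class VideoSourceWiring {
  static VideoTrack createTrack(PeerConnectionFactory factory, VideoCapturer capturer) {
    // createVideoSource() now passes the factory's EglBase.Context down to
    // nativeCreateVideoSource(), where the SurfaceTextureHelper is created.
    VideoSource source = factory.createVideoSource(capturer, new MediaConstraints());
    return factory.createVideoTrack("video0", source);
  }
}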
@@ -83,16 +83,29 @@ public interface VideoCapturer {
         int width, int height, int framerate);
   }

+
+  /**
+   * Returns a list with all the formats this VideoCapturer supports.
+   */
   List<CameraEnumerationAndroid.CaptureFormat> getSupportedFormats();

-  SurfaceTextureHelper getSurfaceTextureHelper();
-
+  /**
+   * Start capturing frames in a format that is as close as possible to |width| x |height| and
+   * |framerate|. If the VideoCapturer wants to deliver texture frames, it should do this by
+   * rendering on the SurfaceTexture in |surfaceTextureHelper|, register itself as a listener,
+   * and forward the texture frames to CapturerObserver.onTextureFrameCaptured().
+   */
   void startCapture(
-      final int width, final int height, final int framerate,
-      final Context applicationContext, final CapturerObserver frameObserver);
+      int width, int height, int framerate, SurfaceTextureHelper surfaceTextureHelper,
+      Context applicationContext, CapturerObserver frameObserver);

-  // Blocks until capture is stopped.
+  /**
+   * Stop capturing. This function should block until capture is actually stopped.
+   */
   void stopCapture() throws InterruptedException;

+  /**
+   * Perform any final cleanup here. No more capturing will be done after this call.
+   */
   void dispose();
 }
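To make the new contract concrete, here is a hypothetical skeleton of a VideoCapturer implementation, restricted to the members visible in the hunk above; the real interface may declare additional members, and frame delivery is only indicated in comments.

import android.content.Context;
import java.util.ArrayList;
import java.util.List;
import org.webrtc.CameraEnumerationAndroid;
import org.webrtc.SurfaceTextureHelper;
import org.webrtc.VideoCapturer;

// Hypothetical sketch, not part of this CL.
class SketchTextureCapturer implements VideoCapturer {
  private SurfaceTextureHelper surfaceTextureHelper;

  @Override
  public List<CameraEnumerationAndroid.CaptureFormat> getSupportedFormats() {
    // A real capturer reports the formats it can actually produce.
    return new ArrayList<CameraEnumerationAndroid.CaptureFormat>();
  }

  @Override
  public void startCapture(int width, int height, int framerate,
      SurfaceTextureHelper surfaceTextureHelper, Context applicationContext,
      CapturerObserver frameObserver) {
    // The helper is supplied by the caller. A texture capturer renders into its
    // SurfaceTexture, registers itself as the listener, and forwards frames
    // through frameObserver.onTextureFrameCaptured() as the javadoc above describes.
    this.surfaceTextureHelper = surfaceTextureHelper;
  }

  @Override
  public void stopCapture() throws InterruptedException {
    // Must block until capture has actually stopped (no-op in this sketch).
  }

  @Override
  public void dispose() {
    // Final cleanup; the SurfaceTextureHelper is owned and disposed by the caller.
  }
}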