Reland of Support adding and removing MediaRecorder to camera 2 session. (patchset #1 id:1 of https://codereview.webrtc.org/2844233002/ )

Reason for revert:
Revert "Revert of Support adding and removing MediaRecorder to camera 2 session. (patchset #5 id:80001 of https://codereview.webrtc.org/2833773003/ )"

Will fix the external bots by cherry-picking this CL and updating the external functions that depend on the CameraVideoCapturer interface.

Original issue's description:
> Revert of Support adding and removing MediaRecorder to camera 2 session. (patchset #5 id:80001 of https://codereview.webrtc.org/2833773003/ )
>
> Reason for revert:
> Breaks external bot
>
> Original issue's description:
> > Support adding and removing MediaRecorder to camera 2 session.
> >
> > Camera 1 API is not supported.
> >
> > BUG=b/36684011
> >
> > Review-Url: https://codereview.webrtc.org/2833773003
> > Cr-Commit-Position: refs/heads/master@{#17901}
> > Committed: 2fc04769fa
>
> TBR=sakal@webrtc.org,glaznev@webrtc.org
> # Skipping CQ checks because original CL landed less than 1 day ago.
> NOPRESUBMIT=true
> NOTREECHECKS=true
> NOTRY=true
> BUG=b/36684011
>
> Review-Url: https://codereview.webrtc.org/2844233002
> Cr-Commit-Position: refs/heads/master@{#17905}
> Committed: 6702739862

TBR=sakal@webrtc.org,magjed@webrtc.org
# Skipping CQ checks because original CL landed less than 1 day ago.
NOPRESUBMIT=true
NOTREECHECKS=true
NOTRY=true
BUG=b/36684011

Review-Url: https://codereview.webrtc.org/2844393002
Cr-Commit-Position: refs/heads/master@{#17915}
Author: glaznev
Date: 2017-04-27 13:38:29 -07:00
Committed by: Commit bot
Parent: f0736b16f4
Commit: 37adc5e81e
7 changed files with 302 additions and 35 deletions

Camera1Capturer.java

@@ -10,11 +10,8 @@
 package org.webrtc;
-import org.webrtc.CameraEnumerationAndroid.CaptureFormat;
 import android.content.Context;
+import android.media.MediaRecorder;
-import java.util.List;

 public class Camera1Capturer extends CameraCapturer {
   private final boolean captureToTexture;
@@ -29,8 +26,11 @@ public class Camera1Capturer extends CameraCapturer {
   @Override
   protected void createCameraSession(CameraSession.CreateSessionCallback createSessionCallback,
       CameraSession.Events events, Context applicationContext,
-      SurfaceTextureHelper surfaceTextureHelper, String cameraName, int width, int height,
-      int framerate) {
+      SurfaceTextureHelper surfaceTextureHelper, MediaRecorder mediaRecorder, String cameraName,
+      int width, int height, int framerate) {
+    if (mediaRecorder != null) {
+      throw new RuntimeException("MediaRecoder is not supported for camera 1.");
+    }
     Camera1Session.create(createSessionCallback, events, captureToTexture, applicationContext,
         surfaceTextureHelper, Camera1Enumerator.getCameraIndex(cameraName), width, height,
         framerate);

Camera2Capturer.java

@@ -13,6 +13,7 @@ package org.webrtc;
 import android.annotation.TargetApi;
 import android.content.Context;
 import android.hardware.camera2.CameraManager;
+import android.media.MediaRecorder;

 @TargetApi(21)
 public class Camera2Capturer extends CameraCapturer {
@@ -29,9 +30,9 @@ public class Camera2Capturer extends CameraCapturer {
   @Override
   protected void createCameraSession(CameraSession.CreateSessionCallback createSessionCallback,
       CameraSession.Events events, Context applicationContext,
-      SurfaceTextureHelper surfaceTextureHelper, String cameraName, int width, int height,
-      int framerate) {
+      SurfaceTextureHelper surfaceTextureHelper, MediaRecorder mediaRecoder, String cameraName,
+      int width, int height, int framerate) {
     Camera2Session.create(createSessionCallback, events, applicationContext, cameraManager,
-        surfaceTextureHelper, cameraName, width, height, framerate);
+        surfaceTextureHelper, mediaRecoder, cameraName, width, height, framerate);
   }
 }

CameraVideoCapturer.java

@@ -10,6 +10,8 @@
 package org.webrtc;

+import android.media.MediaRecorder;
+
 /**
  * Base interface for camera1 and camera2 implementations. Extends VideoCapturer with a
  * switchCamera() function. Also provides subinterfaces for handling camera events, and a helper
@@ -59,6 +61,32 @@ public interface CameraVideoCapturer extends VideoCapturer {
    */
   void switchCamera(CameraSwitchHandler switchEventsHandler);

+  /**
+   * MediaRecorder add/remove handler - one of these functions are invoked with the result of
+   * addMediaRecorderToCamera() or removeMediaRecorderFromCamera calls.
+   * The callback may be called on an arbitrary thread.
+   */
+  public interface MediaRecorderHandler {
+    // Invoked on success.
+    void onMediaRecorderSuccess();
+
+    // Invoked on failure, e.g. camera is stopped or any exception happens.
+    void onMediaRecorderError(String errorDescription);
+  }
+
+  /**
+   * Add MediaRecorder to camera pipeline. This can only be called while the camera is running.
+   * Once MediaRecorder is added to camera pipeline camera switch is not allowed.
+   * This function can be called from any thread.
+   */
+  void addMediaRecorderToCamera(MediaRecorder mediaRecorder, MediaRecorderHandler resultHandler);
+
+  /**
+   * Remove MediaRecorder from camera pipeline. This can only be called while the camera is running.
+   * This function can be called from any thread.
+   */
+  void removeMediaRecorderFromCamera(MediaRecorderHandler resultHandler);
+
  /**
   * Helper class to log framerate and detect if the camera freezes. It will run periodic callbacks
   * on the SurfaceTextureHelper thread passed in the ctor, and should only be operated from that
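To make the new interface concrete, here is a minimal usage sketch (not part of this CL; the helper class and method below are hypothetical). It assumes `capturer` is an already running CameraVideoCapturer and `mediaRecorder` has been configured with MediaRecorder.VideoSource.SURFACE and prepare()d, as the test fixture later in this diff does. Removal is symmetric: stop the recorder first, then call removeMediaRecorderFromCamera().

import android.media.MediaRecorder;
import org.webrtc.CameraVideoCapturer;
import org.webrtc.Logging;

// Hypothetical helper, for illustration only.
class MediaRecorderAttachSketch {
  private static final String TAG = "MediaRecorderAttachSketch";

  // `capturer` must already be running; `mediaRecorder` must be prepared with a SURFACE
  // video source so that its surface can be attached to the camera pipeline.
  static void attachAndStart(CameraVideoCapturer capturer, final MediaRecorder mediaRecorder) {
    capturer.addMediaRecorderToCamera(
        mediaRecorder, new CameraVideoCapturer.MediaRecorderHandler() {
          @Override
          public void onMediaRecorderSuccess() {
            // The capture session has been restarted with the recorder surface attached,
            // so recording can start now. Note: may be invoked on an arbitrary thread.
            mediaRecorder.start();
          }

          @Override
          public void onMediaRecorderError(String errorDescription) {
            Logging.e(TAG, "Attaching MediaRecorder failed: " + errorDescription);
          }
        });
  }
}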

Camera2CapturerTest.java

@@ -25,6 +25,7 @@ import android.support.test.InstrumentationRegistry;
 import android.support.test.filters.LargeTest;
 import android.support.test.filters.MediumTest;
 import android.support.test.filters.SmallTest;
+import java.io.IOException;
 import java.util.concurrent.CountDownLatch;
 import org.chromium.base.test.BaseJUnit4ClassRunner;
 import org.junit.After;
@@ -244,6 +245,12 @@ public class Camera2CapturerTest {
     fixtures.cameraEventsInvoked();
   }

+  @Test
+  @MediumTest
+  public void testUpdateMediaRecorder() throws InterruptedException, IOException {
+    fixtures.updateMediaRecorder(true /* useSurfaceCapture */);
+  }
+
   // Test what happens when attempting to call e.g. switchCamera() after camera has been stopped.
   @Test
   @MediumTest

CameraVideoCapturerTestFixtures.java

@@ -15,7 +15,13 @@ import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;

+import android.annotation.TargetApi;
 import android.content.Context;
+import android.media.CamcorderProfile;
+import android.media.MediaRecorder;
+import android.os.Environment;
+import java.io.File;
+import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.concurrent.CountDownLatch;
@@ -502,6 +508,95 @@ class CameraVideoCapturerTestFixtures {
     disposeVideoTrackWithRenderer(videoTrackWithRenderer);
   }

+  @TargetApi(21)
+  public void updateMediaRecorder(boolean useSurfaceCapture)
+      throws InterruptedException, IOException {
+    final CapturerInstance capturerInstance = createCapturer(false /* initialize */);
+    final VideoTrackWithRenderer videoTrackWithRenderer =
+        createVideoTrackWithRenderer(capturerInstance.capturer);
+    // Wait for the camera to start so we can add and remove MediaRecorder.
+    assertTrue(videoTrackWithRenderer.rendererCallbacks.waitForNextFrameToRender() > 0);
+
+    final String videoOutPath = Environment.getExternalStorageDirectory().getPath()
+        + "/chromium_tests_root/testmediarecorder.mp4";
+    File outputFile = new File(videoOutPath);
+
+    // Create MediaRecorder object
+    MediaRecorder mediaRecorder = new MediaRecorder();
+    mediaRecorder.setVideoSource(
+        useSurfaceCapture ? MediaRecorder.VideoSource.SURFACE : MediaRecorder.VideoSource.CAMERA);
+    CamcorderProfile profile = CamcorderProfile.get(CamcorderProfile.QUALITY_480P);
+    profile.videoCodec = MediaRecorder.VideoEncoder.H264;
+    profile.videoBitRate = 2500000;
+    profile.videoFrameWidth = 640;
+    profile.videoFrameHeight = 480;
+    mediaRecorder.setOutputFormat(MediaRecorder.OutputFormat.MPEG_4);
+    mediaRecorder.setVideoFrameRate(profile.videoFrameRate);
+    mediaRecorder.setVideoSize(profile.videoFrameWidth, profile.videoFrameHeight);
+    mediaRecorder.setVideoEncodingBitRate(profile.videoBitRate);
+    mediaRecorder.setVideoEncoder(profile.videoCodec);
+    mediaRecorder.setOutputFile(outputFile.getPath());
+    mediaRecorder.prepare();
+
+    // Add MediaRecorder to camera pipeline.
+    final boolean[] addMediaRecorderSuccessful = new boolean[1];
+    final CountDownLatch addBarrier = new CountDownLatch(1);
+    CameraVideoCapturer.MediaRecorderHandler addMediaRecorderHandler =
+        new CameraVideoCapturer.MediaRecorderHandler() {
+          @Override
+          public void onMediaRecorderSuccess() {
+            addMediaRecorderSuccessful[0] = true;
+            addBarrier.countDown();
+          }
+          @Override
+          public void onMediaRecorderError(String errorDescription) {
+            addMediaRecorderSuccessful[0] = false;
+            addBarrier.countDown();
+          }
+        };
+    capturerInstance.capturer.addMediaRecorderToCamera(mediaRecorder, addMediaRecorderHandler);
+    // Wait until MediaRecoder has been added.
+    addBarrier.await();
+    // Check result.
+    assertTrue(addMediaRecorderSuccessful[0]);
+
+    // Start MediaRecorder and wait for a few frames to capture.
+    mediaRecorder.start();
+    for (int i = 0; i < 5; i++) {
+      assertTrue(videoTrackWithRenderer.rendererCallbacks.waitForNextFrameToRender() > 0);
+    }
+    mediaRecorder.stop();
+
+    // Remove MediaRecorder from camera pipeline.
+    final boolean[] removeMediaRecorderSuccessful = new boolean[1];
+    final CountDownLatch removeBarrier = new CountDownLatch(1);
+    CameraVideoCapturer.MediaRecorderHandler removeMediaRecorderHandler =
+        new CameraVideoCapturer.MediaRecorderHandler() {
+          @Override
+          public void onMediaRecorderSuccess() {
+            removeMediaRecorderSuccessful[0] = true;
+            removeBarrier.countDown();
+          }
+          @Override
+          public void onMediaRecorderError(String errorDescription) {
+            removeMediaRecorderSuccessful[0] = false;
+            removeBarrier.countDown();
+          }
+        };
+    capturerInstance.capturer.removeMediaRecorderFromCamera(removeMediaRecorderHandler);
+    // Wait until MediaRecoder has been removed.
+    removeBarrier.await();
+    // Check result.
+    assertTrue(removeMediaRecorderSuccessful[0]);
+
+    // Ensure that frames are received after removing MediaRecorder.
+    assertTrue(videoTrackWithRenderer.rendererCallbacks.waitForNextFrameToRender() > 0);
+
+    // Check that recorded file contains some data.
+    assertTrue(outputFile.length() > 0);
+
+    disposeCapturer(capturerInstance);
+    disposeVideoTrackWithRenderer(videoTrackWithRenderer);
+  }
+
   public void cameraEventsInvoked() throws InterruptedException {
     final CapturerInstance capturerInstance = createCapturer(true /* initialize */);
     startCapture(capturerInstance);

Camera2Session.java

@@ -21,11 +21,12 @@ import android.hardware.camera2.CameraManager;
 import android.hardware.camera2.CameraMetadata;
 import android.hardware.camera2.CaptureFailure;
 import android.hardware.camera2.CaptureRequest;
+import android.media.MediaRecorder;
 import android.os.Handler;
 import android.util.Range;
 import android.view.Surface;
 import android.view.WindowManager;
-import java.util.Arrays;
+import java.util.ArrayList;
 import java.util.List;
 import java.util.concurrent.TimeUnit;
 import org.webrtc.CameraEnumerationAndroid.CaptureFormat;
@@ -49,6 +50,7 @@ class Camera2Session implements CameraSession {
   private final Context applicationContext;
   private final CameraManager cameraManager;
   private final SurfaceTextureHelper surfaceTextureHelper;
+  private final Surface mediaRecorderSurface;
   private final String cameraId;
   private final int width;
   private final int height;
@@ -123,9 +125,14 @@ class Camera2Session implements CameraSession {
       final SurfaceTexture surfaceTexture = surfaceTextureHelper.getSurfaceTexture();
       surfaceTexture.setDefaultBufferSize(captureFormat.width, captureFormat.height);
       surface = new Surface(surfaceTexture);
+      List<Surface> surfaces = new ArrayList<Surface>();
+      surfaces.add(surface);
+      if (mediaRecorderSurface != null) {
+        Logging.d(TAG, "Add MediaRecorder surface to capture session.");
+        surfaces.add(mediaRecorderSurface);
+      }
       try {
-        camera.createCaptureSession(
-            Arrays.asList(surface), new CaptureSessionCallback(), cameraThreadHandler);
+        camera.createCaptureSession(surfaces, new CaptureSessionCallback(), cameraThreadHandler);
       } catch (CameraAccessException e) {
         reportError("Failed to create capture session. " + e);
         return;
@@ -175,6 +182,10 @@ class Camera2Session implements CameraSession {
         chooseFocusMode(captureRequestBuilder);

         captureRequestBuilder.addTarget(surface);
+        if (mediaRecorderSurface != null) {
+          Logging.d(TAG, "Add MediaRecorder surface to CaptureRequest.Builder");
+          captureRequestBuilder.addTarget(mediaRecorderSurface);
+        }
         session.setRepeatingRequest(
             captureRequestBuilder.build(), new CameraCaptureCallback(), cameraThreadHandler);
       } catch (CameraAccessException e) {
@@ -280,15 +291,15 @@ class Camera2Session implements CameraSession {
   public static void create(CreateSessionCallback callback, Events events,
       Context applicationContext, CameraManager cameraManager,
-      SurfaceTextureHelper surfaceTextureHelper, String cameraId, int width, int height,
-      int framerate) {
+      SurfaceTextureHelper surfaceTextureHelper, MediaRecorder mediaRecorder, String cameraId,
+      int width, int height, int framerate) {
     new Camera2Session(callback, events, applicationContext, cameraManager, surfaceTextureHelper,
-        cameraId, width, height, framerate);
+        mediaRecorder, cameraId, width, height, framerate);
   }

   private Camera2Session(CreateSessionCallback callback, Events events, Context applicationContext,
-      CameraManager cameraManager, SurfaceTextureHelper surfaceTextureHelper, String cameraId,
-      int width, int height, int framerate) {
+      CameraManager cameraManager, SurfaceTextureHelper surfaceTextureHelper,
+      MediaRecorder mediaRecorder, String cameraId, int width, int height, int framerate) {
     Logging.d(TAG, "Create new camera2 session on camera " + cameraId);

     constructionTimeNs = System.nanoTime();
@@ -299,6 +310,7 @@ class Camera2Session implements CameraSession {
     this.applicationContext = applicationContext;
     this.cameraManager = cameraManager;
     this.surfaceTextureHelper = surfaceTextureHelper;
+    this.mediaRecorderSurface = (mediaRecorder != null) ? mediaRecorder.getSurface() : null;
     this.cameraId = cameraId;
     this.width = width;
     this.height = height;
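For reference, here is a standalone sketch of the Camera2 pattern that the hunks above apply (not part of this CL; `RecordingSessionSketch` and its parameters are hypothetical). The surface returned by MediaRecorder.getSurface() is registered as a second output of the capture session and added as a second target of the repeating request, so the recorder is fed the same frames as the preview surface.

import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CaptureRequest;
import android.media.MediaRecorder;
import android.os.Handler;
import android.view.Surface;
import java.util.ArrayList;
import java.util.List;

// Illustrative sketch only, under stated assumptions.
class RecordingSessionSketch {
  // `camera` must already be opened, `previewSurface` backed by a SurfaceTexture, and
  // `recorder` prepared with MediaRecorder.VideoSource.SURFACE so getSurface() is valid.
  static void startPreviewAndRecording(final CameraDevice camera, final Surface previewSurface,
      MediaRecorder recorder, final Handler cameraHandler) throws CameraAccessException {
    final Surface recorderSurface = recorder.getSurface();
    List<Surface> surfaces = new ArrayList<Surface>();
    surfaces.add(previewSurface);
    surfaces.add(recorderSurface); // Same approach as Camera2Session: one session, two outputs.
    camera.createCaptureSession(surfaces, new CameraCaptureSession.StateCallback() {
      @Override
      public void onConfigured(CameraCaptureSession session) {
        try {
          CaptureRequest.Builder builder =
              camera.createCaptureRequest(CameraDevice.TEMPLATE_RECORD);
          builder.addTarget(previewSurface);
          builder.addTarget(recorderSurface);
          session.setRepeatingRequest(builder.build(), null /* callback */, cameraHandler);
        } catch (CameraAccessException e) {
          // Error handling elided in this sketch.
        }
      }

      @Override
      public void onConfigureFailed(CameraCaptureSession session) {}
    }, cameraHandler);
  }
}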

CameraCapturer.java

@@ -11,6 +11,7 @@
 package org.webrtc;

 import android.content.Context;
+import android.media.MediaRecorder;
 import android.os.Handler;
 import android.os.Looper;
 import java.util.Arrays;
@@ -23,6 +24,13 @@ abstract class CameraCapturer implements CameraVideoCapturer {
     IN_PROGRESS, // Waiting for new switched capture session to start.
   }

+  enum MediaRecorderState {
+    IDLE, // No media recording update (add or remove) requested.
+    IDLE_TO_ACTIVE, // Waiting for new capture session with added MediaRecorder surface to start.
+    ACTIVE_TO_IDLE, // Waiting for new capture session with removed MediaRecorder surface to start.
+    ACTIVE, // MediaRecorder was successfully added to camera pipeline.
+  }
+
   private static final String TAG = "CameraCapturer";
   private final static int MAX_OPEN_CAMERA_ATTEMPTS = 3;
   private final static int OPEN_CAMERA_DELAY_MS = 500;
@@ -37,7 +45,9 @@ abstract class CameraCapturer implements CameraVideoCapturer {
     @Override
     public void onDone(CameraSession session) {
       checkIsOnCameraThread();
-      Logging.d(TAG, "Create session done");
+      Logging.d(TAG,
+          "Create session done. Switch state: " + switchState
+              + ". MediaRecorder state: " + mediaRecorderState);
       uiThreadHandler.removeCallbacks(openCameraTimeoutRunnable);
       synchronized (stateLock) {
         capturerObserver.onCapturerStarted(true /* success */);
@@ -57,6 +67,19 @@ abstract class CameraCapturer implements CameraVideoCapturer {
           switchState = SwitchState.IDLE;
           switchCameraInternal(switchEventsHandler);
         }
+
+        if (mediaRecorderState == MediaRecorderState.IDLE_TO_ACTIVE
+            || mediaRecorderState == MediaRecorderState.ACTIVE_TO_IDLE) {
+          if (mediaRecorderEventsHandler != null) {
+            mediaRecorderEventsHandler.onMediaRecorderSuccess();
+            mediaRecorderEventsHandler = null;
+          }
+          if (mediaRecorderState == MediaRecorderState.IDLE_TO_ACTIVE) {
+            mediaRecorderState = MediaRecorderState.ACTIVE;
+          } else {
+            mediaRecorderState = MediaRecorderState.IDLE;
+          }
+        }
       }
     }
@@ -81,6 +104,14 @@ abstract class CameraCapturer implements CameraVideoCapturer {
           switchState = SwitchState.IDLE;
         }

+        if (mediaRecorderState != MediaRecorderState.IDLE) {
+          if (mediaRecorderEventsHandler != null) {
+            mediaRecorderEventsHandler.onMediaRecorderError(error);
+            mediaRecorderEventsHandler = null;
+          }
+          mediaRecorderState = MediaRecorderState.IDLE;
+        }
+
         if (failureType == CameraSession.FailureType.DISCONNECTED) {
           eventsHandler.onCameraDisconnected();
         } else {
@@ -88,8 +119,7 @@ abstract class CameraCapturer implements CameraVideoCapturer {
         }
       } else {
         Logging.w(TAG, "Opening camera failed, retry: " + error);
-
-        createSessionInternal(OPEN_CAMERA_DELAY_MS);
+        createSessionInternal(OPEN_CAMERA_DELAY_MS, null /* mediaRecorder */);
       }
     }
   }
@@ -213,6 +243,10 @@ abstract class CameraCapturer implements CameraVideoCapturer {
   private CameraStatistics cameraStatistics; /* guarded by stateLock */
   private boolean firstFrameObserved; /* guarded by stateLock */

+  // Variables used on camera thread - do not require stateLock synchronization.
+  private MediaRecorderState mediaRecorderState = MediaRecorderState.IDLE;
+  private MediaRecorderHandler mediaRecorderEventsHandler;
+
   public CameraCapturer(
       String cameraName, CameraEventsHandler eventsHandler, CameraEnumerator cameraEnumerator) {
     if (eventsHandler == null) {
@@ -277,17 +311,17 @@ abstract class CameraCapturer implements CameraVideoCapturer {
       sessionOpening = true;
       openAttemptsRemaining = MAX_OPEN_CAMERA_ATTEMPTS;
-      createSessionInternal(0);
+      createSessionInternal(0, null /* mediaRecorder */);
     }
   }

-  private void createSessionInternal(int delayMs) {
+  private void createSessionInternal(int delayMs, final MediaRecorder mediaRecorder) {
     uiThreadHandler.postDelayed(openCameraTimeoutRunnable, delayMs + OPEN_CAMERA_TIMEOUT);
     cameraThreadHandler.postDelayed(new Runnable() {
       @Override
       public void run() {
         createCameraSession(createSessionCallback, cameraSessionEventsHandler, applicationContext,
-            surfaceHelper, cameraName, width, height, framerate);
+            surfaceHelper, mediaRecorder, cameraName, width, height, framerate);
       }
     }, delayMs);
   }
@@ -349,6 +383,29 @@ abstract class CameraCapturer implements CameraVideoCapturer {
     });
   }

+  @Override
+  public void addMediaRecorderToCamera(
+      final MediaRecorder mediaRecorder, final MediaRecorderHandler mediaRecoderEventsHandler) {
+    Logging.d(TAG, "addMediaRecorderToCamera");
+    cameraThreadHandler.post(new Runnable() {
+      @Override
+      public void run() {
+        updateMediaRecorderInternal(mediaRecorder, mediaRecoderEventsHandler);
+      }
+    });
+  }
+
+  @Override
+  public void removeMediaRecorderFromCamera(final MediaRecorderHandler mediaRecoderEventsHandler) {
+    Logging.d(TAG, "removeMediaRecorderFromCamera");
+    cameraThreadHandler.post(new Runnable() {
+      @Override
+      public void run() {
+        updateMediaRecorderInternal(null /* mediaRecorder */, mediaRecoderEventsHandler);
+      }
+    });
+  }
+
   @Override
   public boolean isScreencast() {
     return false;
@@ -370,6 +427,13 @@ abstract class CameraCapturer implements CameraVideoCapturer {
     }
   }

+  private void reportCameraSwitchError(String error, CameraSwitchHandler switchEventsHandler) {
+    Logging.e(TAG, error);
+    if (switchEventsHandler != null) {
+      switchEventsHandler.onCameraSwitchError(error);
+    }
+  }
+
   private void switchCameraInternal(final CameraSwitchHandler switchEventsHandler) {
     Logging.d(TAG, "switchCamera internal");

@@ -384,18 +448,15 @@ abstract class CameraCapturer implements CameraVideoCapturer {
     synchronized (stateLock) {
       if (switchState != SwitchState.IDLE) {
-        Logging.d(TAG, "switchCamera switchInProgress");
-        if (switchEventsHandler != null) {
-          switchEventsHandler.onCameraSwitchError("Camera switch already in progress.");
-        }
+        reportCameraSwitchError("Camera switch already in progress.", switchEventsHandler);
         return;
       }
+      if (mediaRecorderState != MediaRecorderState.IDLE) {
+        reportCameraSwitchError("switchCamera: media recording is active", switchEventsHandler);
+        return;
+      }
       if (!sessionOpening && currentSession == null) {
-        Logging.d(TAG, "switchCamera: No session open");
-        if (switchEventsHandler != null) {
-          switchEventsHandler.onCameraSwitchError("Camera is not running.");
-        }
+        reportCameraSwitchError("switchCamera: camera is not running.", switchEventsHandler);
         return;
       }
@@ -424,11 +485,74 @@ abstract class CameraCapturer implements CameraVideoCapturer {
       sessionOpening = true;
       openAttemptsRemaining = 1;
-      createSessionInternal(0);
+      createSessionInternal(0, null /* mediaRecorder */);
     }
     Logging.d(TAG, "switchCamera done");
   }

+  private void reportUpdateMediaRecorderError(
+      String error, MediaRecorderHandler mediaRecoderEventsHandler) {
+    checkIsOnCameraThread();
+    Logging.e(TAG, error);
+    if (mediaRecoderEventsHandler != null) {
+      mediaRecoderEventsHandler.onMediaRecorderError(error);
+    }
+  }
+
+  private void updateMediaRecorderInternal(
+      MediaRecorder mediaRecorder, MediaRecorderHandler mediaRecoderEventsHandler) {
+    checkIsOnCameraThread();
+    boolean addMediaRecorder = (mediaRecorder != null);
+    Logging.d(TAG,
+        "updateMediaRecoderInternal internal. State: " + mediaRecorderState
+            + ". Switch state: " + switchState + ". Add MediaRecorder: " + addMediaRecorder);
+
+    synchronized (stateLock) {
+      if ((addMediaRecorder && mediaRecorderState != MediaRecorderState.IDLE)
+          || (!addMediaRecorder && mediaRecorderState != MediaRecorderState.ACTIVE)) {
+        reportUpdateMediaRecorderError(
+            "Incorrect state for MediaRecorder update.", mediaRecoderEventsHandler);
+        return;
+      }
+      if (switchState != SwitchState.IDLE) {
+        reportUpdateMediaRecorderError(
+            "MediaRecorder update while camera is switching.", mediaRecoderEventsHandler);
+        return;
+      }
+      if (currentSession == null) {
+        reportUpdateMediaRecorderError(
+            "MediaRecorder update while camera is closed.", mediaRecoderEventsHandler);
+        return;
+      }
+      if (sessionOpening) {
+        reportUpdateMediaRecorderError(
+            "MediaRecorder update while camera is still opening.", mediaRecoderEventsHandler);
+        return;
+      }
+
+      this.mediaRecorderEventsHandler = mediaRecoderEventsHandler;
+      mediaRecorderState =
+          addMediaRecorder ? MediaRecorderState.IDLE_TO_ACTIVE : MediaRecorderState.ACTIVE_TO_IDLE;
+
+      Logging.d(TAG, "updateMediaRecoder: Stopping session");
+      cameraStatistics.release();
+      cameraStatistics = null;
+      final CameraSession oldSession = currentSession;
+      cameraThreadHandler.post(new Runnable() {
+        @Override
+        public void run() {
+          oldSession.stop();
+        }
+      });
+      currentSession = null;
+
+      sessionOpening = true;
+      openAttemptsRemaining = 1;
+      createSessionInternal(0, mediaRecorder);
+    }
+    Logging.d(TAG, "updateMediaRecoderInternal done");
+  }
+
   private void checkIsOnCameraThread() {
     if (Thread.currentThread() != cameraThreadHandler.getLooper().getThread()) {
       Logging.e(TAG, "Check is on camera thread failed.");
@@ -444,6 +568,6 @@ abstract class CameraCapturer implements CameraVideoCapturer {
   abstract protected void createCameraSession(
       CameraSession.CreateSessionCallback createSessionCallback, CameraSession.Events events,
-      Context applicationContext, SurfaceTextureHelper surfaceTextureHelper, String cameraName,
-      int width, int height, int framerate);
+      Context applicationContext, SurfaceTextureHelper surfaceTextureHelper,
+      MediaRecorder mediaRecoder, String cameraName, int width, int height, int framerate);
 }