Android: Remove video recording functionality from the camera classes

There was an attempt to add MediaRecorder functionality to the camera
classes, but it was never finished and never worked properly. This CL
removes that code. If offline video recording is needed in the future,
it should be added as a VideoSink instead of inside the camera classes
(a sketch of that approach follows the change summary below).

Bug: webrtc:9144
Change-Id: I74b70d4b128aa212d84e70da01e5e19133c5af24
Reviewed-on: https://webrtc-review.googlesource.com/69642
Reviewed-by: Sami Kalliomäki <sakal@webrtc.org>
Commit-Queue: Magnus Jedvert <magjed@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#23050}
Author: Magnus Jedvert
Date: 2018-04-12 16:23:34 +02:00
Committed by: Commit Bot
Parent: 6a8f30e5a3
Commit: 5ebb82ba9c
10 changed files with 41 additions and 304 deletions
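
For reference, the direction suggested in the commit message keeps recording entirely outside the camera classes: attach a VideoSink to the VideoTrack and feed its frames to whatever recording backend is used. A minimal sketch follows; VideoSink, VideoFrame and VideoTrack.addSink()/removeSink() are real org.webrtc APIs, while RecordingSink and its encoder hookup are hypothetical placeholders, not part of this change.

    // Hypothetical sketch: record by observing the track, not by touching the camera session.
    class RecordingSink implements VideoSink {
      private volatile boolean recording;

      @Override
      public void onFrame(VideoFrame frame) {
        if (!recording) {
          return;
        }
        // Hand the frame to an encoder/muxer here (e.g. MediaCodec + MediaMuxer).
        // frame.getBuffer(), frame.getRotation() and frame.getTimestampNs() carry
        // everything needed to encode and timestamp it.
      }

      void start() { recording = true; }
      void stop() { recording = false; }
    }

    // Usage: the capturer and camera session classes stay untouched.
    // RecordingSink recordingSink = new RecordingSink();
    // videoTrack.addSink(recordingSink);
    // recordingSink.start();
    // ... record ...
    // recordingSink.stop();
    // videoTrack.removeSink(recordingSink);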

Camera1Capturer.java

@@ -11,7 +11,6 @@
 package org.webrtc;
 
 import android.content.Context;
-import android.media.MediaRecorder;
 import javax.annotation.Nullable;
 
 public class Camera1Capturer extends CameraCapturer {
@@ -27,10 +26,10 @@ public class Camera1Capturer extends CameraCapturer {
   @Override
   protected void createCameraSession(CameraSession.CreateSessionCallback createSessionCallback,
       CameraSession.Events events, Context applicationContext,
-      SurfaceTextureHelper surfaceTextureHelper, @Nullable MediaRecorder mediaRecorder,
-      String cameraName, int width, int height, int framerate) {
-    Camera1Session.create(createSessionCallback, events,
-        captureToTexture || (mediaRecorder != null), applicationContext, surfaceTextureHelper,
-        mediaRecorder, Camera1Enumerator.getCameraIndex(cameraName), width, height, framerate);
+      SurfaceTextureHelper surfaceTextureHelper, String cameraName, int width, int height,
+      int framerate) {
+    Camera1Session.create(createSessionCallback, events, captureToTexture, applicationContext,
+        surfaceTextureHelper, Camera1Enumerator.getCameraIndex(cameraName), width, height,
+        framerate);
   }
 }

Camera2Capturer.java

@@ -13,7 +13,6 @@ package org.webrtc;
 import android.annotation.TargetApi;
 import android.content.Context;
 import android.hardware.camera2.CameraManager;
-import android.media.MediaRecorder;
 import javax.annotation.Nullable;
 
 @TargetApi(21)
@@ -31,9 +30,9 @@ public class Camera2Capturer extends CameraCapturer {
   @Override
   protected void createCameraSession(CameraSession.CreateSessionCallback createSessionCallback,
       CameraSession.Events events, Context applicationContext,
-      SurfaceTextureHelper surfaceTextureHelper, MediaRecorder mediaRecoder, String cameraName,
-      int width, int height, int framerate) {
+      SurfaceTextureHelper surfaceTextureHelper, String cameraName, int width, int height,
+      int framerate) {
     Camera2Session.create(createSessionCallback, events, applicationContext, cameraManager,
-        surfaceTextureHelper, mediaRecoder, cameraName, width, height, framerate);
+        surfaceTextureHelper, cameraName, width, height, framerate);
   }
 }

CameraVideoCapturer.java

@@ -67,6 +67,7 @@ public interface CameraVideoCapturer extends VideoCapturer {
    * addMediaRecorderToCamera() or removeMediaRecorderFromCamera calls.
    * The callback may be called on an arbitrary thread.
    */
+  @Deprecated
   public interface MediaRecorderHandler {
     // Invoked on success.
     void onMediaRecorderSuccess();
@@ -80,13 +81,20 @@ public interface CameraVideoCapturer extends VideoCapturer {
    * Once MediaRecorder is added to camera pipeline camera switch is not allowed.
    * This function can be called from any thread.
    */
-  void addMediaRecorderToCamera(MediaRecorder mediaRecorder, MediaRecorderHandler resultHandler);
+  @Deprecated
+  default void addMediaRecorderToCamera(
+      MediaRecorder mediaRecorder, MediaRecorderHandler resultHandler) {
+    throw new UnsupportedOperationException("Deprecated and not implemented.");
+  }
 
   /**
    * Remove MediaRecorder from camera pipeline. This can only be called while the camera is running.
    * This function can be called from any thread.
   */
-  void removeMediaRecorderFromCamera(MediaRecorderHandler resultHandler);
+  @Deprecated
+  default void removeMediaRecorderFromCamera(MediaRecorderHandler resultHandler) {
+    throw new UnsupportedOperationException("Deprecated and not implemented.");
+  }
 
   /**
   * Helper class to log framerate and detect if the camera freezes. It will run periodic callbacks
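
The interface change above keeps the two methods source-compatible for existing implementers and callers while making their status explicit: any CameraVideoCapturer that does not override them now throws from the new default implementations. A small caller-side helper illustrates the effect; tryLegacyRecording and its parameters are hypothetical names used only for illustration.

    // Hypothetical illustration of the new default behavior: unless a concrete capturer
    // overrides addMediaRecorderToCamera(), the call now fails at runtime.
    static boolean tryLegacyRecording(CameraVideoCapturer capturer, MediaRecorder recorder,
        CameraVideoCapturer.MediaRecorderHandler handler) {
      try {
        capturer.addMediaRecorderToCamera(recorder, handler);
        return true;
      } catch (UnsupportedOperationException e) {
        // Thrown by the default implementation: "Deprecated and not implemented."
        return false;
      }
    }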

Camera1CapturerUsingByteBufferTest.java

@@ -121,12 +121,6 @@ public class Camera1CapturerUsingByteBufferTest {
     fixtures.cameraEventsInvoked();
   }
 
-  @Test
-  @MediumTest
-  public void testUpdateMediaRecorder() throws InterruptedException, IOException {
-    fixtures.updateMediaRecorder(false /* useSurfaceCapture */);
-  }
-
   // Test what happens when attempting to call e.g. switchCamera() after camera has been stopped.
   @Test
   @MediumTest

Camera1CapturerUsingTextureTest.java

@@ -116,12 +116,6 @@ public class Camera1CapturerUsingTextureTest {
     fixtures.cameraEventsInvoked();
   }
 
-  @Test
-  @MediumTest
-  public void testUpdateMediaRecorder() throws InterruptedException, IOException {
-    fixtures.updateMediaRecorder(false /* useSurfaceCapture */);
-  }
-
   // Test what happens when attempting to call e.g. switchCamera() after camera has been stopped.
   @Test
   @MediumTest

Camera2CapturerTest.java

@@ -246,12 +246,6 @@ public class Camera2CapturerTest {
     fixtures.cameraEventsInvoked();
   }
 
-  @Test
-  @MediumTest
-  public void testUpdateMediaRecorder() throws InterruptedException, IOException {
-    fixtures.updateMediaRecorder(true /* useSurfaceCapture */);
-  }
-
   // Test what happens when attempting to call e.g. switchCamera() after camera has been stopped.
   @Test
   @MediumTest

CameraVideoCapturerTestFixtures.java

@@ -18,7 +18,6 @@ import static org.junit.Assert.fail;
 import android.annotation.TargetApi;
 import android.content.Context;
 import android.media.CamcorderProfile;
-import android.media.MediaRecorder;
 import android.os.Environment;
 import java.io.File;
 import java.io.IOException;
@@ -511,111 +510,6 @@ class CameraVideoCapturerTestFixtures {
     disposeVideoTrackWithRenderer(videoTrackWithRenderer);
   }
 
-  @TargetApi(21)
-  private static void prepareMediaRecorderForTests(
-      MediaRecorder mediaRecorder, File outputFile, boolean useSurfaceCapture) throws IOException {
-    mediaRecorder.setVideoSource(
-        useSurfaceCapture ? MediaRecorder.VideoSource.SURFACE : MediaRecorder.VideoSource.CAMERA);
-
-    CamcorderProfile profile = CamcorderProfile.get(CamcorderProfile.QUALITY_480P);
-    profile.videoCodec = MediaRecorder.VideoEncoder.H264;
-    profile.videoBitRate = 2500000;
-    profile.videoFrameWidth = 640;
-    profile.videoFrameHeight = 480;
-
-    mediaRecorder.setOutputFormat(MediaRecorder.OutputFormat.MPEG_4);
-    mediaRecorder.setVideoFrameRate(profile.videoFrameRate);
-    mediaRecorder.setVideoSize(profile.videoFrameWidth, profile.videoFrameHeight);
-    mediaRecorder.setVideoEncodingBitRate(profile.videoBitRate);
-    mediaRecorder.setVideoEncoder(profile.videoCodec);
-    mediaRecorder.setOutputFile(outputFile.getPath());
-    mediaRecorder.prepare();
-  }
-
-  @TargetApi(21)
-  public void updateMediaRecorder(boolean useSurfaceCapture)
-      throws InterruptedException, IOException {
-    final CapturerInstance capturerInstance = createCapturer(false /* initialize */);
-    final VideoTrackWithRenderer videoTrackWithRenderer =
-        createVideoTrackWithRenderer(capturerInstance.capturer);
-    // Wait for the camera to start so we can add and remove MediaRecorder.
-    assertTrue(videoTrackWithRenderer.rendererCallbacks.waitForNextFrameToRender() > 0);
-
-    final String videoOutPath = Environment.getExternalStorageDirectory().getPath()
-        + "/chromium_tests_root/testmediarecorder.mp4";
-    File outputFile = new File(videoOutPath);
-
-    // Create MediaRecorder object
-    MediaRecorder mediaRecorder = new MediaRecorder();
-    if (useSurfaceCapture) {
-      // When using using surface capture, media recorder has to be prepared before adding it to the
-      // camera.
-      prepareMediaRecorderForTests(mediaRecorder, outputFile, useSurfaceCapture);
-    }
-
-    // Add MediaRecorder to camera pipeline.
-    final boolean[] addMediaRecorderSuccessful = new boolean[1];
-    final CountDownLatch addBarrier = new CountDownLatch(1);
-    CameraVideoCapturer.MediaRecorderHandler addMediaRecorderHandler =
-        new CameraVideoCapturer.MediaRecorderHandler() {
-          @Override
-          public void onMediaRecorderSuccess() {
-            addMediaRecorderSuccessful[0] = true;
-            addBarrier.countDown();
-          }
-          @Override
-          public void onMediaRecorderError(String errorDescription) {
-            Logging.e(TAG, errorDescription);
-            addMediaRecorderSuccessful[0] = false;
-            addBarrier.countDown();
-          }
-        };
-    capturerInstance.capturer.addMediaRecorderToCamera(mediaRecorder, addMediaRecorderHandler);
-    // Wait until MediaRecoder has been added.
-    addBarrier.await();
-    // Check result.
-    assertTrue(addMediaRecorderSuccessful[0]);
-
-    // Start MediaRecorder and wait for a few frames to capture.
-    if (!useSurfaceCapture) {
-      // When using using camera capture, media recorder has to be prepared after adding it to the
-      // camera.
-      prepareMediaRecorderForTests(mediaRecorder, outputFile, useSurfaceCapture);
-    }
-    mediaRecorder.start();
-    for (int i = 0; i < 5; i++) {
-      assertTrue(videoTrackWithRenderer.rendererCallbacks.waitForNextFrameToRender() > 0);
-    }
-    mediaRecorder.stop();
-
-    // Remove MediaRecorder from camera pipeline.
-    final boolean[] removeMediaRecorderSuccessful = new boolean[1];
-    final CountDownLatch removeBarrier = new CountDownLatch(1);
-    CameraVideoCapturer.MediaRecorderHandler removeMediaRecorderHandler =
-        new CameraVideoCapturer.MediaRecorderHandler() {
-          @Override
-          public void onMediaRecorderSuccess() {
-            removeMediaRecorderSuccessful[0] = true;
-            removeBarrier.countDown();
-          }
-          @Override
-          public void onMediaRecorderError(String errorDescription) {
-            removeMediaRecorderSuccessful[0] = false;
-            removeBarrier.countDown();
-          }
-        };
-    capturerInstance.capturer.removeMediaRecorderFromCamera(removeMediaRecorderHandler);
-    // Wait until MediaRecoder has been removed.
-    removeBarrier.await();
-    // Check result.
-    assertTrue(removeMediaRecorderSuccessful[0]);
-
-    // Ensure that frames are received after removing MediaRecorder.
-    assertTrue(videoTrackWithRenderer.rendererCallbacks.waitForNextFrameToRender() > 0);
-
-    // Check that recorded file contains some data.
-    assertTrue(outputFile.length() > 0);
-
-    disposeCapturer(capturerInstance);
-    disposeVideoTrackWithRenderer(videoTrackWithRenderer);
-  }
-
   public void cameraEventsInvoked() throws InterruptedException {
     final CapturerInstance capturerInstance = createCapturer(true /* initialize */);
     startCapture(capturerInstance);

Camera1Session.java

@@ -11,7 +11,6 @@
 package org.webrtc;
 
 import android.content.Context;
-import android.media.MediaRecorder;
 import android.os.Handler;
 import android.os.SystemClock;
 import android.view.Surface;
@@ -56,8 +55,8 @@ class Camera1Session implements CameraSession {
   @SuppressWarnings("ByteBufferBackingArray")
   public static void create(final CreateSessionCallback callback, final Events events,
       final boolean captureToTexture, final Context applicationContext,
-      final SurfaceTextureHelper surfaceTextureHelper, final MediaRecorder mediaRecorder,
-      final int cameraId, final int width, final int height, final int framerate) {
+      final SurfaceTextureHelper surfaceTextureHelper, final int cameraId, final int width,
+      final int height, final int framerate) {
     final long constructionTimeNs = System.nanoTime();
     Logging.d(TAG, "Open camera " + cameraId);
     events.onCameraOpening();
@@ -105,9 +104,8 @@
     // Calculate orientation manually and send it as CVO insted.
     camera.setDisplayOrientation(0 /* degrees */);
 
-    callback.onDone(
-        new Camera1Session(events, captureToTexture, applicationContext, surfaceTextureHelper,
-            mediaRecorder, cameraId, camera, info, captureFormat, constructionTimeNs));
+    callback.onDone(new Camera1Session(events, captureToTexture, applicationContext,
+        surfaceTextureHelper, cameraId, camera, info, captureFormat, constructionTimeNs));
   }
 
   private static void updateCameraParameters(android.hardware.Camera camera,
@@ -155,9 +153,9 @@
   }
 
   private Camera1Session(Events events, boolean captureToTexture, Context applicationContext,
-      SurfaceTextureHelper surfaceTextureHelper, @Nullable MediaRecorder mediaRecorder,
-      int cameraId, android.hardware.Camera camera, android.hardware.Camera.CameraInfo info,
-      CaptureFormat captureFormat, long constructionTimeNs) {
+      SurfaceTextureHelper surfaceTextureHelper, int cameraId, android.hardware.Camera camera,
+      android.hardware.Camera.CameraInfo info, CaptureFormat captureFormat,
+      long constructionTimeNs) {
     Logging.d(TAG, "Create new camera1 session on camera " + cameraId);
 
     this.cameraThreadHandler = new Handler();
@@ -172,11 +170,6 @@
     this.constructionTimeNs = constructionTimeNs;
 
     startCapturing();
-
-    if (mediaRecorder != null) {
-      camera.unlock();
-      mediaRecorder.setCamera(camera);
-    }
   }
 
   @Override

Camera2Session.java

@@ -22,13 +22,12 @@ import android.hardware.camera2.CameraManager;
 import android.hardware.camera2.CameraMetadata;
 import android.hardware.camera2.CaptureFailure;
 import android.hardware.camera2.CaptureRequest;
-import android.media.MediaRecorder;
 import android.os.Handler;
 import javax.annotation.Nullable;
 import android.util.Range;
 import android.view.Surface;
 import android.view.WindowManager;
-import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.List;
 import java.util.concurrent.TimeUnit;
 import org.webrtc.CameraEnumerationAndroid.CaptureFormat;
@@ -52,7 +51,6 @@ class Camera2Session implements CameraSession {
   private final Context applicationContext;
   private final CameraManager cameraManager;
   private final SurfaceTextureHelper surfaceTextureHelper;
-  @Nullable private final Surface mediaRecorderSurface;
   private final String cameraId;
   private final int width;
   private final int height;
@@ -127,14 +125,9 @@
       final SurfaceTexture surfaceTexture = surfaceTextureHelper.getSurfaceTexture();
       surfaceTexture.setDefaultBufferSize(captureFormat.width, captureFormat.height);
       surface = new Surface(surfaceTexture);
-      List<Surface> surfaces = new ArrayList<Surface>();
-      surfaces.add(surface);
-      if (mediaRecorderSurface != null) {
-        Logging.d(TAG, "Add MediaRecorder surface to capture session.");
-        surfaces.add(mediaRecorderSurface);
-      }
       try {
-        camera.createCaptureSession(surfaces, new CaptureSessionCallback(), cameraThreadHandler);
+        camera.createCaptureSession(
+            Arrays.asList(surface), new CaptureSessionCallback(), cameraThreadHandler);
       } catch (CameraAccessException e) {
         reportError("Failed to create capture session. " + e);
         return;
@@ -184,10 +177,6 @@
         chooseFocusMode(captureRequestBuilder);
 
         captureRequestBuilder.addTarget(surface);
-        if (mediaRecorderSurface != null) {
-          Logging.d(TAG, "Add MediaRecorder surface to CaptureRequest.Builder");
-          captureRequestBuilder.addTarget(mediaRecorderSurface);
-        }
         session.setRepeatingRequest(
             captureRequestBuilder.build(), new CameraCaptureCallback(), cameraThreadHandler);
       } catch (CameraAccessException e) {
@@ -297,16 +286,15 @@
   public static void create(CreateSessionCallback callback, Events events,
       Context applicationContext, CameraManager cameraManager,
-      SurfaceTextureHelper surfaceTextureHelper, MediaRecorder mediaRecorder, String cameraId,
-      int width, int height, int framerate) {
+      SurfaceTextureHelper surfaceTextureHelper, String cameraId, int width, int height,
+      int framerate) {
     new Camera2Session(callback, events, applicationContext, cameraManager, surfaceTextureHelper,
-        mediaRecorder, cameraId, width, height, framerate);
+        cameraId, width, height, framerate);
   }
 
   private Camera2Session(CreateSessionCallback callback, Events events, Context applicationContext,
-      CameraManager cameraManager, SurfaceTextureHelper surfaceTextureHelper,
-      @Nullable MediaRecorder mediaRecorder, String cameraId, int width, int height,
-      int framerate) {
+      CameraManager cameraManager, SurfaceTextureHelper surfaceTextureHelper, String cameraId,
+      int width, int height, int framerate) {
     Logging.d(TAG, "Create new camera2 session on camera " + cameraId);
 
     constructionTimeNs = System.nanoTime();
@@ -317,7 +305,6 @@
     this.applicationContext = applicationContext;
     this.cameraManager = cameraManager;
     this.surfaceTextureHelper = surfaceTextureHelper;
-    this.mediaRecorderSurface = (mediaRecorder != null) ? mediaRecorder.getSurface() : null;
     this.cameraId = cameraId;
     this.width = width;
     this.height = height;

CameraCapturer.java

@@ -11,7 +11,6 @@
 package org.webrtc;
 
 import android.content.Context;
-import android.media.MediaRecorder;
 import android.os.Handler;
 import android.os.Looper;
 import javax.annotation.Nullable;
@@ -25,13 +24,6 @@ abstract class CameraCapturer implements CameraVideoCapturer {
     IN_PROGRESS, // Waiting for new switched capture session to start.
   }
 
-  enum MediaRecorderState {
-    IDLE, // No media recording update (add or remove) requested.
-    IDLE_TO_ACTIVE, // Waiting for new capture session with added MediaRecorder surface to start.
-    ACTIVE_TO_IDLE, // Waiting for new capture session with removed MediaRecorder surface to start.
-    ACTIVE, // MediaRecorder was successfully added to camera pipeline.
-  }
-
   private static final String TAG = "CameraCapturer";
   private final static int MAX_OPEN_CAMERA_ATTEMPTS = 3;
   private final static int OPEN_CAMERA_DELAY_MS = 500;
@@ -47,9 +39,7 @@ abstract class CameraCapturer implements CameraVideoCapturer {
     @Override
     public void onDone(CameraSession session) {
       checkIsOnCameraThread();
-      Logging.d(TAG,
-          "Create session done. Switch state: " + switchState
-              + ". MediaRecorder state: " + mediaRecorderState);
+      Logging.d(TAG, "Create session done. Switch state: " + switchState);
       uiThreadHandler.removeCallbacks(openCameraTimeoutRunnable);
       synchronized (stateLock) {
         capturerObserver.onCapturerStarted(true /* success */);
@@ -69,19 +59,6 @@ abstract class CameraCapturer implements CameraVideoCapturer {
          switchState = SwitchState.IDLE;
          switchCameraInternal(switchEventsHandler);
        }
-
-        if (mediaRecorderState == MediaRecorderState.IDLE_TO_ACTIVE
-            || mediaRecorderState == MediaRecorderState.ACTIVE_TO_IDLE) {
-          if (mediaRecorderEventsHandler != null) {
-            mediaRecorderEventsHandler.onMediaRecorderSuccess();
-            mediaRecorderEventsHandler = null;
-          }
-          if (mediaRecorderState == MediaRecorderState.IDLE_TO_ACTIVE) {
-            mediaRecorderState = MediaRecorderState.ACTIVE;
-          } else {
-            mediaRecorderState = MediaRecorderState.IDLE;
-          }
-        }
       }
     }
@@ -106,14 +83,6 @@ abstract class CameraCapturer implements CameraVideoCapturer {
            switchState = SwitchState.IDLE;
          }
 
-          if (mediaRecorderState != MediaRecorderState.IDLE) {
-            if (mediaRecorderEventsHandler != null) {
-              mediaRecorderEventsHandler.onMediaRecorderError(error);
-              mediaRecorderEventsHandler = null;
-            }
-            mediaRecorderState = MediaRecorderState.IDLE;
-          }
-
          if (failureType == CameraSession.FailureType.DISCONNECTED) {
            eventsHandler.onCameraDisconnected();
          } else {
@@ -121,7 +90,7 @@ abstract class CameraCapturer implements CameraVideoCapturer {
          }
        } else {
          Logging.w(TAG, "Opening camera failed, retry: " + error);
-          createSessionInternal(OPEN_CAMERA_DELAY_MS, null /* mediaRecorder */);
+          createSessionInternal(OPEN_CAMERA_DELAY_MS);
        }
      }
    }
@@ -225,10 +194,6 @@ abstract class CameraCapturer implements CameraVideoCapturer {
   @Nullable private CameraStatistics cameraStatistics; /* guarded by stateLock */
   private boolean firstFrameObserved; /* guarded by stateLock */
 
-  // Variables used on camera thread - do not require stateLock synchronization.
-  private MediaRecorderState mediaRecorderState = MediaRecorderState.IDLE;
-  @Nullable private MediaRecorderHandler mediaRecorderEventsHandler;
-
   public CameraCapturer(String cameraName, @Nullable CameraEventsHandler eventsHandler,
       CameraEnumerator cameraEnumerator) {
     if (eventsHandler == null) {
@@ -293,17 +258,17 @@ abstract class CameraCapturer implements CameraVideoCapturer {
       sessionOpening = true;
       openAttemptsRemaining = MAX_OPEN_CAMERA_ATTEMPTS;
-      createSessionInternal(0, null /* mediaRecorder */);
+      createSessionInternal(0);
     }
   }
 
-  private void createSessionInternal(int delayMs, final MediaRecorder mediaRecorder) {
+  private void createSessionInternal(int delayMs) {
     uiThreadHandler.postDelayed(openCameraTimeoutRunnable, delayMs + OPEN_CAMERA_TIMEOUT);
     cameraThreadHandler.postDelayed(new Runnable() {
       @Override
       public void run() {
         createCameraSession(createSessionCallback, cameraSessionEventsHandler, applicationContext,
-            surfaceHelper, mediaRecorder, cameraName, width, height, framerate);
+            surfaceHelper, cameraName, width, height, framerate);
       }
     }, delayMs);
   }
@@ -371,29 +336,6 @@ abstract class CameraCapturer implements CameraVideoCapturer {
     });
   }
 
-  @Override
-  public void addMediaRecorderToCamera(
-      final MediaRecorder mediaRecorder, final MediaRecorderHandler mediaRecoderEventsHandler) {
-    Logging.d(TAG, "addMediaRecorderToCamera");
-    cameraThreadHandler.post(new Runnable() {
-      @Override
-      public void run() {
-        updateMediaRecorderInternal(mediaRecorder, mediaRecoderEventsHandler);
-      }
-    });
-  }
-
-  @Override
-  public void removeMediaRecorderFromCamera(final MediaRecorderHandler mediaRecoderEventsHandler) {
-    Logging.d(TAG, "removeMediaRecorderFromCamera");
-    cameraThreadHandler.post(new Runnable() {
-      @Override
-      public void run() {
-        updateMediaRecorderInternal(null /* mediaRecorder */, mediaRecoderEventsHandler);
-      }
-    });
-  }
-
   @Override
   public boolean isScreencast() {
     return false;
@@ -440,10 +382,6 @@ abstract class CameraCapturer implements CameraVideoCapturer {
       reportCameraSwitchError("Camera switch already in progress.", switchEventsHandler);
       return;
     }
-    if (mediaRecorderState != MediaRecorderState.IDLE) {
-      reportCameraSwitchError("switchCamera: media recording is active", switchEventsHandler);
-      return;
-    }
     if (!sessionOpening && currentSession == null) {
       reportCameraSwitchError("switchCamera: camera is not running.", switchEventsHandler);
       return;
@@ -474,74 +412,11 @@ abstract class CameraCapturer implements CameraVideoCapturer {
       sessionOpening = true;
       openAttemptsRemaining = 1;
-      createSessionInternal(0, null /* mediaRecorder */);
+      createSessionInternal(0);
     }
     Logging.d(TAG, "switchCamera done");
   }
 
-  private void reportUpdateMediaRecorderError(
-      String error, @Nullable MediaRecorderHandler mediaRecoderEventsHandler) {
-    checkIsOnCameraThread();
-    Logging.e(TAG, error);
-    if (mediaRecoderEventsHandler != null) {
-      mediaRecoderEventsHandler.onMediaRecorderError(error);
-    }
-  }
-
-  private void updateMediaRecorderInternal(
-      @Nullable MediaRecorder mediaRecorder, MediaRecorderHandler mediaRecoderEventsHandler) {
-    checkIsOnCameraThread();
-    boolean addMediaRecorder = (mediaRecorder != null);
-    Logging.d(TAG,
-        "updateMediaRecoderInternal internal. State: " + mediaRecorderState
-            + ". Switch state: " + switchState + ". Add MediaRecorder: " + addMediaRecorder);
-
-    synchronized (stateLock) {
-      if ((addMediaRecorder && mediaRecorderState != MediaRecorderState.IDLE)
-          || (!addMediaRecorder && mediaRecorderState != MediaRecorderState.ACTIVE)) {
-        reportUpdateMediaRecorderError(
-            "Incorrect state for MediaRecorder update.", mediaRecoderEventsHandler);
-        return;
-      }
-      if (switchState != SwitchState.IDLE) {
-        reportUpdateMediaRecorderError(
-            "MediaRecorder update while camera is switching.", mediaRecoderEventsHandler);
-        return;
-      }
-      if (currentSession == null) {
-        reportUpdateMediaRecorderError(
-            "MediaRecorder update while camera is closed.", mediaRecoderEventsHandler);
-        return;
-      }
-      if (sessionOpening) {
-        reportUpdateMediaRecorderError(
-            "MediaRecorder update while camera is still opening.", mediaRecoderEventsHandler);
-        return;
-      }
-
-      this.mediaRecorderEventsHandler = mediaRecoderEventsHandler;
-      mediaRecorderState =
-          addMediaRecorder ? MediaRecorderState.IDLE_TO_ACTIVE : MediaRecorderState.ACTIVE_TO_IDLE;
-
-      Logging.d(TAG, "updateMediaRecoder: Stopping session");
-      cameraStatistics.release();
-      cameraStatistics = null;
-      final CameraSession oldSession = currentSession;
-      cameraThreadHandler.post(new Runnable() {
-        @Override
-        public void run() {
-          oldSession.stop();
-        }
-      });
-      currentSession = null;
-      sessionOpening = true;
-      openAttemptsRemaining = 1;
-      createSessionInternal(0, mediaRecorder);
-    }
-    Logging.d(TAG, "updateMediaRecoderInternal done");
-  }
-
   private void checkIsOnCameraThread() {
     if (Thread.currentThread() != cameraThreadHandler.getLooper().getThread()) {
       Logging.e(TAG, "Check is on camera thread failed.");
@@ -557,6 +432,6 @@ abstract class CameraCapturer implements CameraVideoCapturer {
 
   abstract protected void createCameraSession(
       CameraSession.CreateSessionCallback createSessionCallback, CameraSession.Events events,
-      Context applicationContext, SurfaceTextureHelper surfaceTextureHelper,
-      MediaRecorder mediaRecoder, String cameraName, int width, int height, int framerate);
+      Context applicationContext, SurfaceTextureHelper surfaceTextureHelper, String cameraName,
+      int width, int height, int framerate);
 }