Add support for media recorders in Camera1Capturer.

BUG=b/36684011

Review-Url: https://codereview.webrtc.org/2861893003
Cr-Commit-Position: refs/heads/master@{#18024}
Author: sakal
Date: 2017-05-05 01:48:48 -07:00
Committed by: Commit bot
Commit: bac4c8013f (parent ea12f4c5e8)

5 changed files with 61 additions and 27 deletions
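
For orientation, a rough, hypothetical sketch of how an application could drive the new path. The addMediaRecorderToCamera() entry point, the MediaRecorderHandler interface, and the onMediaRecorderSuccess() name are assumptions inferred from the test fixture changes below (only onMediaRecorderError() is visible in this diff), so treat this as illustrative rather than as the committed API:

import android.media.MediaRecorder;
import org.webrtc.CameraVideoCapturer;
import org.webrtc.Logging;

class MediaRecorderUsageSketch {
  private static final String TAG = "MediaRecorderUsageSketch";

  // Attach a recorder to a running Camera1-based capturer (assumed API shape).
  static void attachRecorder(CameraVideoCapturer capturer, MediaRecorder mediaRecorder) {
    capturer.addMediaRecorderToCamera(mediaRecorder, new CameraVideoCapturer.MediaRecorderHandler() {
      @Override
      public void onMediaRecorderSuccess() {
        // For a CAMERA-source recorder, configuration and prepare() happen only after
        // this point, once the session has unlocked the camera and called setCamera().
        Logging.d(TAG, "MediaRecorder attached to camera.");
      }

      @Override
      public void onMediaRecorderError(String errorDescription) {
        Logging.e(TAG, errorDescription);
      }
    });
  }
}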

Camera1Capturer.java

@@ -28,11 +28,8 @@ public class Camera1Capturer extends CameraCapturer {
CameraSession.Events events, Context applicationContext,
SurfaceTextureHelper surfaceTextureHelper, MediaRecorder mediaRecorder, String cameraName,
int width, int height, int framerate) {
if (mediaRecorder != null) {
throw new RuntimeException("MediaRecoder is not supported for camera 1.");
}
Camera1Session.create(createSessionCallback, events, captureToTexture, applicationContext,
surfaceTextureHelper, Camera1Enumerator.getCameraIndex(cameraName), width, height,
framerate);
Camera1Session.create(createSessionCallback, events,
captureToTexture || (mediaRecorder != null), applicationContext, surfaceTextureHelper,
mediaRecorder, Camera1Enumerator.getCameraIndex(cameraName), width, height, framerate);
}
}

Camera1CapturerUsingByteBufferTest.java

@@ -15,6 +15,7 @@ import android.support.test.InstrumentationRegistry;
import android.support.test.filters.LargeTest;
import android.support.test.filters.MediumTest;
import android.support.test.filters.SmallTest;
import java.io.IOException;
import org.chromium.base.test.BaseJUnit4ClassRunner;
import org.junit.After;
import org.junit.Before;
@@ -120,6 +121,12 @@ public class Camera1CapturerUsingByteBufferTest {
fixtures.cameraEventsInvoked();
}
@Test
@MediumTest
public void testUpdateMediaRecorder() throws InterruptedException, IOException {
fixtures.updateMediaRecorder(false /* useSurfaceCapture */);
}
// Test what happens when attempting to call e.g. switchCamera() after camera has been stopped.
@Test
@MediumTest

Camera1CapturerUsingTextureTest.java

@@ -15,6 +15,7 @@ import android.support.test.InstrumentationRegistry;
import android.support.test.filters.LargeTest;
import android.support.test.filters.MediumTest;
import android.support.test.filters.SmallTest;
import java.io.IOException;
import org.chromium.base.test.BaseJUnit4ClassRunner;
import org.junit.After;
import org.junit.Before;
@@ -115,6 +116,12 @@ public class Camera1CapturerUsingTextureTest {
fixtures.cameraEventsInvoked();
}
@Test
@MediumTest
public void testUpdateMediaRecorder() throws InterruptedException, IOException {
fixtures.updateMediaRecorder(false /* useSurfaceCapture */);
}
// Test what happens when attempting to call e.g. switchCamera() after camera has been stopped.
@Test
@MediumTest

CameraVideoCapturerTestFixtures.java

@@ -509,20 +509,8 @@ class CameraVideoCapturerTestFixtures {
}
@TargetApi(21)
public void updateMediaRecorder(boolean useSurfaceCapture)
throws InterruptedException, IOException {
final CapturerInstance capturerInstance = createCapturer(false /* initialize */);
final VideoTrackWithRenderer videoTrackWithRenderer =
createVideoTrackWithRenderer(capturerInstance.capturer);
// Wait for the camera to start so we can add and remove MediaRecorder.
assertTrue(videoTrackWithRenderer.rendererCallbacks.waitForNextFrameToRender() > 0);
final String videoOutPath = Environment.getExternalStorageDirectory().getPath()
+ "/chromium_tests_root/testmediarecorder.mp4";
File outputFile = new File(videoOutPath);
// Create MediaRecorder object
MediaRecorder mediaRecorder = new MediaRecorder();
private static void prepareMediaRecorderForTests(
MediaRecorder mediaRecorder, File outputFile, boolean useSurfaceCapture) throws IOException {
mediaRecorder.setVideoSource(
useSurfaceCapture ? MediaRecorder.VideoSource.SURFACE : MediaRecorder.VideoSource.CAMERA);
CamcorderProfile profile = CamcorderProfile.get(CamcorderProfile.QUALITY_480P);
@@ -537,6 +525,28 @@
mediaRecorder.setVideoEncoder(profile.videoCodec);
mediaRecorder.setOutputFile(outputFile.getPath());
mediaRecorder.prepare();
}
@TargetApi(21)
public void updateMediaRecorder(boolean useSurfaceCapture)
throws InterruptedException, IOException {
final CapturerInstance capturerInstance = createCapturer(false /* initialize */);
final VideoTrackWithRenderer videoTrackWithRenderer =
createVideoTrackWithRenderer(capturerInstance.capturer);
// Wait for the camera to start so we can add and remove MediaRecorder.
assertTrue(videoTrackWithRenderer.rendererCallbacks.waitForNextFrameToRender() > 0);
final String videoOutPath = Environment.getExternalStorageDirectory().getPath()
+ "/chromium_tests_root/testmediarecorder.mp4";
File outputFile = new File(videoOutPath);
// Create MediaRecorder object
MediaRecorder mediaRecorder = new MediaRecorder();
if (useSurfaceCapture) {
// When using surface capture, media recorder has to be prepared before adding it to the
// camera.
prepareMediaRecorderForTests(mediaRecorder, outputFile, useSurfaceCapture);
}
// Add MediaRecorder to camera pipeline.
final boolean[] addMediaRecorderSuccessful = new boolean[1];
@ -550,6 +560,7 @@ class CameraVideoCapturerTestFixtures {
}
@Override
public void onMediaRecorderError(String errorDescription) {
Logging.e(TAG, errorDescription);
addMediaRecorderSuccessful[0] = false;
addBarrier.countDown();
}
@ -561,6 +572,11 @@ class CameraVideoCapturerTestFixtures {
assertTrue(addMediaRecorderSuccessful[0]);
// Start MediaRecorder and wait for a few frames to capture.
if (!useSurfaceCapture) {
// When using camera capture, media recorder has to be prepared after adding it to the
// camera.
prepareMediaRecorderForTests(mediaRecorder, outputFile, useSurfaceCapture);
}
mediaRecorder.start();
for (int i = 0; i < 5; i++) {
assertTrue(videoTrackWithRenderer.rendererCallbacks.waitForNextFrameToRender() > 0);
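
The two different prepare points above follow Android's MediaRecorder contract: a SURFACE-source recorder only has an input surface after prepare(), so it must be fully configured before the camera pipeline can feed it, while a CAMERA-source recorder needs the camera handed over first. A minimal sketch of the surface-capture side; the format settings and output path are placeholders, not taken from this CL:

import android.media.MediaRecorder;
import android.view.Surface;
import java.io.IOException;

class SurfaceRecorderSketch {
  // Prepare a SURFACE-source recorder up front so its input surface exists before
  // anything starts drawing into it (getSurface() requires API level 21).
  static Surface prepareSurfaceRecorder(MediaRecorder recorder, String outputPath)
      throws IOException {
    recorder.setVideoSource(MediaRecorder.VideoSource.SURFACE);
    recorder.setOutputFormat(MediaRecorder.OutputFormat.MPEG_4);
    recorder.setVideoEncoder(MediaRecorder.VideoEncoder.H264);
    recorder.setVideoSize(640, 480);
    recorder.setOutputFile(outputPath);
    recorder.prepare();
    // getSurface() is only valid for VideoSource.SURFACE and only after prepare().
    return recorder.getSurface();
  }
}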

Camera1Session.java

@ -11,6 +11,7 @@
package org.webrtc;
import android.content.Context;
import android.media.MediaRecorder;
import android.os.Handler;
import android.os.SystemClock;
import android.view.Surface;
@@ -52,8 +53,8 @@ class Camera1Session implements CameraSession {
public static void create(final CreateSessionCallback callback, final Events events,
final boolean captureToTexture, final Context applicationContext,
final SurfaceTextureHelper surfaceTextureHelper, final int cameraId, final int width,
final int height, final int framerate) {
final SurfaceTextureHelper surfaceTextureHelper, final MediaRecorder mediaRecorder,
final int cameraId, final int width, final int height, final int framerate) {
final long constructionTimeNs = System.nanoTime();
Logging.d(TAG, "Open camera " + cameraId);
events.onCameraOpening();
@ -96,8 +97,9 @@ class Camera1Session implements CameraSession {
// Calculate orientation manually and send it as CVO instead.
camera.setDisplayOrientation(0 /* degrees */);
callback.onDone(new Camera1Session(events, captureToTexture, applicationContext,
surfaceTextureHelper, cameraId, camera, info, captureFormat, constructionTimeNs));
callback.onDone(
new Camera1Session(events, captureToTexture, applicationContext, surfaceTextureHelper,
mediaRecorder, cameraId, camera, info, captureFormat, constructionTimeNs));
}
private static void updateCameraParameters(android.hardware.Camera camera,
@ -145,9 +147,9 @@ class Camera1Session implements CameraSession {
}
private Camera1Session(Events events, boolean captureToTexture, Context applicationContext,
SurfaceTextureHelper surfaceTextureHelper, int cameraId, android.hardware.Camera camera,
android.hardware.Camera.CameraInfo info, CaptureFormat captureFormat,
long constructionTimeNs) {
SurfaceTextureHelper surfaceTextureHelper, MediaRecorder mediaRecorder, int cameraId,
android.hardware.Camera camera, android.hardware.Camera.CameraInfo info,
CaptureFormat captureFormat, long constructionTimeNs) {
Logging.d(TAG, "Create new camera1 session on camera " + cameraId);
this.cameraThreadHandler = new Handler();
@ -162,6 +164,11 @@ class Camera1Session implements CameraSession {
this.constructionTimeNs = constructionTimeNs;
startCapturing();
if (mediaRecorder != null) {
camera.unlock();
mediaRecorder.setCamera(camera);
}
}
@Override
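
For reference, the camera.unlock()/setCamera() calls added above are the first half of the Android-documented flow for recording from a Camera; the recorder still has to be configured and prepared afterwards, which is why the test fixture prepares the CAMERA-source recorder only after attaching it. A minimal sketch of that plain Android flow (not WebRTC-specific; video-only, placeholder settings):

import android.media.MediaRecorder;
import java.io.IOException;

class CameraRecorderSketch {
  // Standard CAMERA-source sequence: unlock the camera and hand it to the recorder
  // before configuring and preparing the recorder.
  static MediaRecorder startRecording(int cameraId, String outputPath) throws IOException {
    android.hardware.Camera camera = android.hardware.Camera.open(cameraId);
    camera.unlock();                          // let the media process take over the camera
    MediaRecorder recorder = new MediaRecorder();
    recorder.setCamera(camera);               // must be called before prepare()
    recorder.setVideoSource(MediaRecorder.VideoSource.CAMERA);
    recorder.setOutputFormat(MediaRecorder.OutputFormat.MPEG_4);
    recorder.setVideoEncoder(MediaRecorder.VideoEncoder.H264);
    recorder.setOutputFile(outputPath);
    recorder.prepare();
    recorder.start();
    return recorder;
  }
}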