Use NV21 instead of YV12 and clean up.
BUG=webrtc:5375
Review URL: https://codereview.webrtc.org/1530843002
Cr-Commit-Position: refs/heads/master@{#11079}
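Background for the format switch (a note added here, not part of the CL itself): NV21 is the preview format every Android camera is required to support. It stores a full-resolution Y plane followed by a single half-resolution plane of interleaved V and U samples, with no stride padding at the sizes the camera reports. A minimal sketch of the layout this change assumes; the helper class below is illustrative only:

// Illustration of the NV21 memory layout:
// [ Y: width*height bytes ][ VU interleaved: width*height/2 bytes ]
// Index math for the samples covering pixel (x, y); assumes even
// width and height, which the camera API guarantees for preview sizes.
public class Nv21Layout {
  public static int yIndex(int width, int x, int y) {
    return y * width + x;
  }

  public static int vIndex(int width, int height, int x, int y) {
    // Chroma is subsampled 2x2; V comes first in each VU pair.
    return width * height + (y / 2) * width + (x / 2) * 2;
  }

  public static int uIndex(int width, int height, int x, int y) {
    return vIndex(width, height, x, y) + 1;
  }
}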
@@ -308,12 +308,15 @@ public class VideoCapturerAndroidTest extends ActivityTestCase {
   @MediumTest
   // This test that CameraEventsHandler.onError is triggered if video buffers are not returned to
   // the capturer.
-  public void testCameraFreezedEventOnBufferStarvation() throws InterruptedException {
+  public void testCameraFreezedEventOnBufferStarvationUsingTextures() throws InterruptedException {
+    EglBase eglBase = EglBase.create();
     VideoCapturerAndroidTestFixtures.CameraEvents cameraEvents =
         VideoCapturerAndroidTestFixtures.createCameraEvents();
-    VideoCapturerAndroid capturer = VideoCapturerAndroid.create("", cameraEvents);
-    VideoCapturerAndroidTestFixtures.cameraFreezedEventOnBufferStarvation(capturer,
+    VideoCapturerAndroid capturer = VideoCapturerAndroid.create("", cameraEvents,
+        eglBase.getEglBaseContext());
+    VideoCapturerAndroidTestFixtures.cameraFreezedEventOnBufferStarvationUsingTextures(capturer,
         cameraEvents, getInstrumentation().getContext());
+    eglBase.release();
   }

   @MediumTest
@@ -119,11 +119,11 @@ public class VideoCapturerAndroidTestFixtures {
     }

     @Override
-    public void onByteBufferFrameCaptured(byte[] frame, int length, int width, int height,
-        int rotation, long timeStamp) {
+    public void onByteBufferFrameCaptured(byte[] frame, int width, int height, int rotation,
+        long timeStamp) {
       synchronized (frameLock) {
         ++framesCaptured;
-        frameSize = length;
+        frameSize = frame.length;
         frameWidth = width;
         frameHeight = height;
         timestamps.add(timeStamp);
@@ -311,8 +311,8 @@ public class VideoCapturerAndroidTestFixtures {
     assertTrue(observer.WaitForCapturerToStart());
     observer.WaitForNextCapturedFrame();
     capturer.stopCapture();
-    for (long timeStamp : observer.getCopyAndResetListOftimeStamps()) {
-      capturer.returnBuffer(timeStamp);
+    if (capturer.isCapturingToTexture()) {
+      capturer.surfaceHelper.returnTextureFrame();
     }
     capturer.dispose();

@@ -332,9 +332,10 @@ public class VideoCapturerAndroidTestFixtures {
     // Make sure camera is started and then stop it.
     assertTrue(observer.WaitForCapturerToStart());
     capturer.stopCapture();
-    for (long timeStamp : observer.getCopyAndResetListOftimeStamps()) {
-      capturer.returnBuffer(timeStamp);
+    if (capturer.isCapturingToTexture()) {
+      capturer.surfaceHelper.returnTextureFrame();
     }
+
     // We can't change |capturer| at this point, but we should not crash.
     capturer.switchCamera(null);
     capturer.onOutputFormatRequest(640, 480, 15);
@@ -393,11 +394,11 @@ public class VideoCapturerAndroidTestFixtures {
       if (capturer.isCapturingToTexture()) {
        assertEquals(0, observer.frameSize());
       } else {
-        assertEquals(format.frameSize(), observer.frameSize());
+        assertTrue(format.frameSize() <= observer.frameSize());
       }
       capturer.stopCapture();
-      for (long timestamp : observer.getCopyAndResetListOftimeStamps()) {
-        capturer.returnBuffer(timestamp);
+      if (capturer.isCapturingToTexture()) {
+        capturer.surfaceHelper.returnTextureFrame();
       }
     }
     capturer.dispose();
@@ -455,8 +456,8 @@ public class VideoCapturerAndroidTestFixtures {
     assertTrue(observer.WaitForCapturerToStart());
     observer.WaitForNextCapturedFrame();
     capturer.stopCapture();
-    for (long timeStamp : observer.getCopyAndResetListOftimeStamps()) {
-      capturer.returnBuffer(timeStamp);
+    if (capturer.isCapturingToTexture()) {
+      capturer.surfaceHelper.returnTextureFrame();
     }
     capturer.dispose();
     assertTrue(capturer.isReleased());
@@ -496,9 +497,8 @@ public class VideoCapturerAndroidTestFixtures {
     capturer.startCapture(format.width, format.height, format.maxFramerate,
         appContext, observer);
     observer.WaitForCapturerToStart();
-
-    for (Long timeStamp : listOftimestamps) {
-      capturer.returnBuffer(timeStamp);
+    if (capturer.isCapturingToTexture()) {
+      capturer.surfaceHelper.returnTextureFrame();
     }

     observer.WaitForNextCapturedFrame();
@@ -506,9 +506,10 @@ public class VideoCapturerAndroidTestFixtures {

     listOftimestamps = observer.getCopyAndResetListOftimeStamps();
     assertTrue(listOftimestamps.size() >= 1);
-    for (Long timeStamp : listOftimestamps) {
-      capturer.returnBuffer(timeStamp);
+    if (capturer.isCapturingToTexture()) {
+      capturer.surfaceHelper.returnTextureFrame();
     }
+
     capturer.dispose();
     assertTrue(capturer.isReleased());
   }
@@ -519,6 +520,7 @@ public class VideoCapturerAndroidTestFixtures {
     final VideoSource source = factory.createVideoSource(capturer, new MediaConstraints());
     final VideoTrack track = factory.createVideoTrack("dummy", source);
     final FakeAsyncRenderer renderer = new FakeAsyncRenderer();
+
     track.addRenderer(new VideoRenderer(renderer));
     // Wait for at least one frame that has not been returned.
     assertFalse(renderer.waitForPendingFrames().isEmpty());
@@ -529,9 +531,7 @@ public class VideoCapturerAndroidTestFixtures {
     track.dispose();
     source.dispose();
     factory.dispose();
-
-    // The pending frames should keep the JNI parts and |capturer| alive.
-    assertFalse(capturer.isReleased());
+    assertTrue(capturer.isReleased());

     // Return the frame(s), on a different thread out of spite.
     final List<I420Frame> pendingFrames = renderer.waitForPendingFrames();
@@ -545,13 +545,13 @@ public class VideoCapturerAndroidTestFixtures {
     });
     returnThread.start();
     returnThread.join();

     // Check that frames have successfully returned. This will cause |capturer| to be released.
     assertTrue(capturer.isReleased());
   }

-  static public void cameraFreezedEventOnBufferStarvation(VideoCapturerAndroid capturer,
+  static public void cameraFreezedEventOnBufferStarvationUsingTextures(
+      VideoCapturerAndroid capturer,
       CameraEvents events, Context appContext) throws InterruptedException {
+    assertTrue("Not capturing to textures.", capturer.isCapturingToTexture());

     final List<CaptureFormat> formats = capturer.getSupportedFormats();
     final CameraEnumerationAndroid.CaptureFormat format = formats.get(0);
@@ -560,14 +560,16 @@ public class VideoCapturerAndroidTestFixtures {
         appContext, observer);
     // Make sure camera is started.
     assertTrue(observer.WaitForCapturerToStart());
-    // Since we don't call returnBuffer, we should get a starvation message.
+    // Since we don't return the buffer, we should get a starvation message if we are
+    // capturing to a texture.
     assertEquals("Camera failure. Client must return video buffers.",
         events.WaitForCameraFreezed());

     capturer.stopCapture();
-    for (long timeStamp : observer.getCopyAndResetListOftimeStamps()) {
-      capturer.returnBuffer(timeStamp);
+    if (capturer.isCapturingToTexture()) {
+      capturer.surfaceHelper.returnTextureFrame();
     }
+
     capturer.dispose();
     assertTrue(capturer.isReleased());
   }
@@ -71,7 +71,7 @@ public class CameraEnumerationAndroid {
     // other image formats then this needs to be updated and
     // VideoCapturerAndroid.getSupportedFormats need to return CaptureFormats of
     // all imageFormats.
-    public final int imageFormat = ImageFormat.YV12;
+    public final int imageFormat = ImageFormat.NV21;

     public CaptureFormat(int width, int height, int minFramerate,
         int maxFramerate) {
@@ -87,25 +87,15 @@ public class CameraEnumerationAndroid {
     }

     // Calculates the frame size of the specified image format. Currently only
-    // supporting ImageFormat.YV12. The YV12's stride is the closest rounded up
-    // multiple of 16 of the width and width and height are always even.
-    // Android guarantees this:
-    // http://developer.android.com/reference/android/hardware/Camera.Parameters.html#setPreviewFormat%28int%29
+    // supporting ImageFormat.NV21.
+    // The size is width * height * number of bytes per pixel.
+    // http://developer.android.com/reference/android/hardware/Camera.html#addCallbackBuffer(byte[])
     public static int frameSize(int width, int height, int imageFormat) {
-      if (imageFormat != ImageFormat.YV12) {
+      if (imageFormat != ImageFormat.NV21) {
         throw new UnsupportedOperationException("Don't know how to calculate "
-            + "the frame size of non-YV12 image formats.");
+            + "the frame size of non-NV21 image formats.");
       }
-      int yStride = roundUp(width, 16);
-      int uvStride = roundUp(yStride / 2, 16);
-      int ySize = yStride * height;
-      int uvSize = uvStride * height / 2;
-      return ySize + uvSize * 2;
-    }
-
-    // Rounds up |x| to the closest value that is a multiple of |alignment|.
-    private static int roundUp(int x, int alignment) {
-      return (int)ceil(x / (double)alignment) * alignment;
+      return (width * height * ImageFormat.getBitsPerPixel(imageFormat)) / 8;
     }

     @Override
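To see why the simpler formula is safe, compare the deleted YV12 stride math with the NV21 bits-per-pixel math. The sketch below mirrors the old helper and hardcodes ImageFormat.getBitsPerPixel(ImageFormat.NV21) == 12 so it runs off-device; it is illustrative, not code from this CL:

public class FrameSizeMath {
  // Mirrors the deleted YV12 helper: Y stride rounded up to a multiple of
  // 16, chroma stride rounded up from half the Y stride, two chroma planes
  // of height/2 rows each.
  static int yv12FrameSize(int width, int height) {
    int yStride = roundUp(width, 16);
    int uvStride = roundUp(yStride / 2, 16);
    return yStride * height + 2 * uvStride * (height / 2);
  }

  // The new NV21 path: getBitsPerPixel(NV21) is 12, so the size is simply
  // width * height * 12 / 8, with no stride padding.
  static int nv21FrameSize(int width, int height) {
    return width * height * 12 / 8;
  }

  private static int roundUp(int x, int alignment) {
    return (x + alignment - 1) / alignment * alignment;
  }

  public static void main(String[] args) {
    System.out.println(yv12FrameSize(640, 480)); // 460800
    System.out.println(nv21FrameSize(640, 480)); // 460800, identical here
    System.out.println(yv12FrameSize(176, 144)); // 39168: chroma stride padding
    System.out.println(nv21FrameSize(176, 144)); // 38016: exact 12 bpp
  }
}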
@@ -317,7 +317,7 @@ class SurfaceTextureHelper {
   private OnTextureFrameAvailableListener listener;
   // The possible states of this class.
   private boolean hasPendingTexture = false;
-  private boolean isTextureInUse = false;
+  private volatile boolean isTextureInUse = false;
   private boolean isQuitting = false;

   private SurfaceTextureHelper(EglBase.Context sharedContext,
@@ -392,6 +392,10 @@ class SurfaceTextureHelper {
     });
   }

+  public boolean isTextureInUse() {
+    return isTextureInUse;
+  }
+
   /**
    * Call disconnect() to stop receiving frames. Resources are released when the texture frame has
    * been returned by a call to returnTextureFrame(). You are guaranteed to not receive any more
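The field is made volatile because it is written on the helper's handler thread but, through the new isTextureInUse() accessor, read from the camera observer thread. A toy demonstration of the visibility guarantee (illustrative only, not WebRTC code):

// One thread sets the flag, another polls it. The volatile keyword
// guarantees the polling thread eventually sees the write; with a plain
// boolean the loop below could legally spin forever.
public class VolatileVisibility {
  private static volatile boolean isTextureInUse = false;

  public static void main(String[] args) throws InterruptedException {
    Thread poller = new Thread(new Runnable() {
      @Override public void run() {
        while (!isTextureInUse) { /* spin */ }
        System.out.println("observed texture in use");
      }
    });
    poller.start();
    Thread.sleep(10); // let the poller start spinning
    isTextureInUse = true; // e.g. a texture frame was handed to a consumer
    poller.join();
  }
}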
@@ -77,7 +77,6 @@ public class VideoCapturerAndroid extends VideoCapturer implements
   private final Object cameraIdLock = new Object();
   private int id;
   private android.hardware.Camera.CameraInfo info;
-  private final FramePool videoBuffers;
   private final CameraStatistics cameraStatistics;
   // Remember the requested format in case we want to switch cameras.
   private int requestedWidth;
@@ -90,8 +89,13 @@ public class VideoCapturerAndroid extends VideoCapturer implements
   private CapturerObserver frameObserver = null;
   private final CameraEventsHandler eventsHandler;
   private boolean firstFrameReported;
+  // Arbitrary queue depth. Higher number means more memory allocated & held,
+  // lower number means more sensitivity to processing time in the client (and
+  // potentially stalling the capturer if it runs out of buffers to write to).
+  private static final int NUMBER_OF_CAPTURE_BUFFERS = 3;
+  private final Set<byte[]> queuedBuffers = new HashSet<byte[]>();
   private final boolean isCapturingToTexture;
-  private final SurfaceTextureHelper surfaceHelper;
+  final SurfaceTextureHelper surfaceHelper; // Package visible for testing purposes.
   // The camera API can output one old frame after the camera has been switched or the resolution
   // has been changed. This flag is used for dropping the first frame after camera restart.
   private boolean dropNextFrame = false;
@@ -129,14 +133,14 @@ public class VideoCapturerAndroid extends VideoCapturer implements
       int cameraFps = (cameraFramesCount * 1000 + CAMERA_OBSERVER_PERIOD_MS / 2)
           / CAMERA_OBSERVER_PERIOD_MS;

-      Logging.d(TAG, "Camera fps: " + cameraFps +
-          ". Pending buffers: " + cameraStatistics.pendingFramesTimeStamps());
+      Logging.d(TAG, "Camera fps: " + cameraFps +".");
       if (cameraFramesCount == 0) {
         ++freezePeriodCount;
         if (CAMERA_OBSERVER_PERIOD_MS * freezePeriodCount > CAMERA_FREEZE_REPORT_TIMOUT_MS
             && eventsHandler != null) {
           Logging.e(TAG, "Camera freezed.");
-          if (cameraStatistics.pendingFramesCount() == cameraStatistics.maxPendingFrames) {
+          if (surfaceHelper.isTextureInUse()) {
+            // This can only happen if we are capturing to textures.
             eventsHandler.onCameraFreezed("Camera failure. Client must return video buffers.");
           } else {
             eventsHandler.onCameraFreezed("Camera failure.");
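Aside on the fps log above: adding CAMERA_OBSERVER_PERIOD_MS / 2 before dividing turns truncating integer division into round-to-nearest. A quick demonstration, assuming a 2000 ms observation period purely for illustration:

public class RoundedDivision {
  // Same shape as the fps computation in the observer above.
  static int fps(int framesInPeriod, int periodMs) {
    return (framesInPeriod * 1000 + periodMs / 2) / periodMs;
  }

  public static void main(String[] args) {
    // 29 frames in 2000 ms is 14.5 fps; plain truncation would report 14,
    // round-to-nearest reports 15.
    System.out.println(fps(29, 2000));    // 15
    System.out.println(29 * 1000 / 2000); // 14
  }
}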
@@ -153,27 +157,14 @@ public class VideoCapturerAndroid extends VideoCapturer implements
   private static class CameraStatistics {
     private int frameCount = 0;
     private final ThreadUtils.ThreadChecker threadChecker = new ThreadUtils.ThreadChecker();
-    private final Set<Long> timeStampsNs = new HashSet<Long>();
-    public final int maxPendingFrames;

-    CameraStatistics(int maxPendingFrames) {
-      this.maxPendingFrames = maxPendingFrames;
+    CameraStatistics() {
       threadChecker.detachThread();
     }

-    public void addPendingFrame(long timestamp) {
+    public void addFrame() {
       threadChecker.checkIsOnValidThread();
       ++frameCount;
-      timeStampsNs.add(timestamp);
-    }
-
-    public void frameReturned(long timestamp) {
-      threadChecker.checkIsOnValidThread();
-      if (!timeStampsNs.contains(timestamp)) {
-        throw new IllegalStateException(
-            "CameraStatistics.frameReturned called with unknown timestamp " + timestamp);
-      }
-      timeStampsNs.remove(timestamp);
     }

     public int getAndResetFrameCount() {
@@ -182,21 +173,6 @@ public class VideoCapturerAndroid extends VideoCapturer implements
       frameCount = 0;
       return count;
     }
-
-    // Return number of pending frames that have not been returned.
-    public int pendingFramesCount() {
-      threadChecker.checkIsOnValidThread();
-      return timeStampsNs.size();
-    }
-
-    public String pendingFramesTimeStamps() {
-      threadChecker.checkIsOnValidThread();
-      List<Long> timeStampsMs = new ArrayList<Long>();
-      for (long ts : timeStampsNs) {
-        timeStampsMs.add(TimeUnit.NANOSECONDS.toMillis(ts));
-      }
-      return timeStampsMs.toString();
-    }
   }

   public static interface CameraEventsHandler {
@@ -350,20 +326,18 @@ public class VideoCapturerAndroid extends VideoCapturer implements

   private VideoCapturerAndroid(int cameraId, CameraEventsHandler eventsHandler,
       EglBase.Context sharedContext) {
     Logging.d(TAG, "VideoCapturerAndroid");
     this.id = cameraId;
     this.eventsHandler = eventsHandler;
     cameraThread = new HandlerThread(TAG);
     cameraThread.start();
     cameraThreadHandler = new Handler(cameraThread.getLooper());
-    videoBuffers = new FramePool(cameraThread);
     isCapturingToTexture = (sharedContext != null);
-    cameraStatistics =
-        new CameraStatistics(isCapturingToTexture ? 1 : FramePool.NUMBER_OF_CAPTURE_BUFFERS);
+    cameraStatistics = new CameraStatistics();
     surfaceHelper = SurfaceTextureHelper.create(sharedContext, cameraThreadHandler);
     if (isCapturingToTexture) {
       surfaceHelper.setListener(this);
     }
     Logging.d(TAG, "VideoCapturerAndroid isCapturingToTexture : " + isCapturingToTexture);
   }

   private void checkIsOnCameraThread() {
@@ -403,9 +377,6 @@ public class VideoCapturerAndroid extends VideoCapturer implements
         if (camera != null) {
           throw new IllegalStateException("Release called while camera is running");
         }
-        if (cameraStatistics.pendingFramesCount() != 0) {
-          throw new IllegalStateException("Release called with pending frames left");
-        }
       }
     });
     surfaceHelper.disconnect(cameraThreadHandler);
@@ -582,7 +553,13 @@ public class VideoCapturerAndroid extends VideoCapturer implements

     camera.setParameters(parameters);
     if (!isCapturingToTexture) {
-      videoBuffers.queueCameraBuffers(captureFormat.frameSize(), camera);
+      queuedBuffers.clear();
+      final int frameSize = captureFormat.frameSize();
+      for (int i = 0; i < NUMBER_OF_CAPTURE_BUFFERS; ++i) {
+        final ByteBuffer buffer = ByteBuffer.allocateDirect(frameSize);
+        queuedBuffers.add(buffer.array());
+        camera.addCallbackBuffer(buffer.array());
+      }
       camera.setPreviewCallbackWithBuffer(this);
     }
     camera.startPreview();
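The inlined allocation above leans on two facts worth calling out: on Android, a direct ByteBuffer exposes its backing byte[] via array(), and Java arrays do not override equals/hashCode, so the HashSet<byte[]> tracks the exact arrays handed to Camera.addCallbackBuffer() by reference identity. A desktop-runnable sketch of that identity behavior (it uses a heap buffer, since array() on a direct buffer is only guaranteed on Android):

import java.nio.ByteBuffer;
import java.util.HashSet;
import java.util.Set;

public class CallbackBufferDemo {
  public static void main(String[] args) {
    Set<byte[]> queuedBuffers = new HashSet<byte[]>();
    ByteBuffer buffer = ByteBuffer.allocate(460800); // 640x480 NV21 frame
    queuedBuffers.add(buffer.array());

    byte[] sameArray = buffer.array();          // same object: recognized
    byte[] equalContent = new byte[460800];     // equal bytes: rejected
    System.out.println(queuedBuffers.contains(sameArray));    // true
    System.out.println(queuedBuffers.contains(equalContent)); // false
  }
}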
@@ -619,13 +596,7 @@ public class VideoCapturerAndroid extends VideoCapturer implements
     Logging.d(TAG, "Stop preview.");
     camera.stopPreview();
     camera.setPreviewCallbackWithBuffer(null);
-    if (!isCapturingToTexture()) {
-      videoBuffers.stopReturnBuffersToCamera();
-      Logging.d(TAG, "stopReturnBuffersToCamera called."
-          + (cameraStatistics.pendingFramesCount() == 0?
-                 " All buffers have been returned."
-                 : " Pending buffers: " + cameraStatistics.pendingFramesTimeStamps() + "."));
-    }
+    queuedBuffers.clear();
     captureFormat = null;

     Logging.d(TAG, "Release camera.");
@@ -665,19 +636,6 @@ public class VideoCapturerAndroid extends VideoCapturer implements
     return cameraThreadHandler;
   }

-  public void returnBuffer(final long timeStamp) {
-    cameraThreadHandler.post(new Runnable() {
-      @Override public void run() {
-        cameraStatistics.frameReturned(timeStamp);
-        if (isCapturingToTexture) {
-          surfaceHelper.returnTextureFrame();
-        } else {
-          videoBuffers.returnBuffer(timeStamp);
-        }
-      }
-    });
-  }
-
   private int getDeviceOrientation() {
     int orientation = 0;

@@ -713,7 +671,8 @@ public class VideoCapturerAndroid extends VideoCapturer implements
   @Override
   public void onPreviewFrame(byte[] data, android.hardware.Camera callbackCamera) {
     checkIsOnCameraThread();
-    if (camera == null) {
+    if (camera == null || !queuedBuffers.contains(data)) {
+      // The camera has been stopped or |data| is an old invalid buffer.
       return;
     }
     if (camera != callbackCamera) {
@@ -728,16 +687,10 @@ public class VideoCapturerAndroid extends VideoCapturer implements
       firstFrameReported = true;
     }

-    // Mark the frame owning |data| as used.
-    // Note that since data is directBuffer,
-    // data.length >= videoBuffers.frameSize.
-    if (videoBuffers.reserveByteBuffer(data, captureTimeNs)) {
-      cameraStatistics.addPendingFrame(captureTimeNs);
-      frameObserver.onByteBufferFrameCaptured(data, videoBuffers.frameSize, captureFormat.width,
-          captureFormat.height, getFrameOrientation(), captureTimeNs);
-    } else {
-      Logging.w(TAG, "reserveByteBuffer failed - dropping frame.");
-    }
+    cameraStatistics.addFrame();
+    frameObserver.onByteBufferFrameCaptured(data, captureFormat.width, captureFormat.height,
+        getFrameOrientation(), captureTimeNs);
     camera.addCallbackBuffer(data);
   }

   @Override
@@ -762,121 +715,11 @@ public class VideoCapturerAndroid extends VideoCapturer implements
       transformMatrix =
           RendererCommon.multiplyMatrices(transformMatrix, RendererCommon.horizontalFlipMatrix());
     }
-    cameraStatistics.addPendingFrame(timestampNs);
+
+    cameraStatistics.addFrame();
     frameObserver.onTextureFrameCaptured(captureFormat.width, captureFormat.height, oesTextureId,
         transformMatrix, rotation, timestampNs);
   }

-  // Class used for allocating and bookkeeping video frames. All buffers are
-  // direct allocated so that they can be directly used from native code. This class is
-  // not thread-safe, and enforces single thread use.
-  private static class FramePool {
-    // Thread that all calls should be made on.
-    private final Thread thread;
-    // Arbitrary queue depth. Higher number means more memory allocated & held,
-    // lower number means more sensitivity to processing time in the client (and
-    // potentially stalling the capturer if it runs out of buffers to write to).
-    public static final int NUMBER_OF_CAPTURE_BUFFERS = 3;
-    // This container tracks the buffers added as camera callback buffers. It is needed for finding
-    // the corresponding ByteBuffer given a byte[].
-    private final Map<byte[], ByteBuffer> queuedBuffers = new IdentityHashMap<byte[], ByteBuffer>();
-    // This container tracks the frames that have been sent but not returned. It is needed for
-    // keeping the buffers alive and for finding the corresponding ByteBuffer given a timestamp.
-    private final Map<Long, ByteBuffer> pendingBuffers = new HashMap<Long, ByteBuffer>();
-    private int frameSize = 0;
-    private android.hardware.Camera camera;
-
-    public FramePool(Thread thread) {
-      this.thread = thread;
-    }
-
-    private void checkIsOnValidThread() {
-      if (Thread.currentThread() != thread) {
-        throw new IllegalStateException("Wrong thread");
-      }
-    }
-
-    // Discards previous queued buffers and adds new callback buffers to camera.
-    public void queueCameraBuffers(int frameSize, android.hardware.Camera camera) {
-      checkIsOnValidThread();
-      this.camera = camera;
-      this.frameSize = frameSize;
-
-      queuedBuffers.clear();
-      for (int i = 0; i < NUMBER_OF_CAPTURE_BUFFERS; ++i) {
-        final ByteBuffer buffer = ByteBuffer.allocateDirect(frameSize);
-        camera.addCallbackBuffer(buffer.array());
-        queuedBuffers.put(buffer.array(), buffer);
-      }
-      Logging.d(TAG, "queueCameraBuffers enqueued " + NUMBER_OF_CAPTURE_BUFFERS
-          + " buffers of size " + frameSize + ".");
-    }
-
-    public void stopReturnBuffersToCamera() {
-      checkIsOnValidThread();
-      this.camera = null;
-      queuedBuffers.clear();
-      // Frames in |pendingBuffers| need to be kept alive until they are returned.
-    }
-
-    public boolean reserveByteBuffer(byte[] data, long timeStamp) {
-      checkIsOnValidThread();
-      final ByteBuffer buffer = queuedBuffers.remove(data);
-      if (buffer == null) {
-        // Frames might be posted to |onPreviewFrame| with the previous format while changing
-        // capture format in |startPreviewOnCameraThread|. Drop these old frames.
-        Logging.w(TAG, "Received callback buffer from previous configuration with length: "
-            + (data == null ? "null" : data.length));
-        return false;
-      }
-      if (buffer.capacity() != frameSize) {
-        throw new IllegalStateException("Callback buffer has unexpected frame size");
-      }
-      if (pendingBuffers.containsKey(timeStamp)) {
-        Logging.e(TAG, "Timestamp already present in pending buffers - they need to be unique");
-        return false;
-      }
-      pendingBuffers.put(timeStamp, buffer);
-      if (queuedBuffers.isEmpty()) {
-        Logging.d(TAG, "Camera is running out of capture buffers.");
-      }
-      return true;
-    }
-
-    public void returnBuffer(long timeStamp) {
-      checkIsOnValidThread();
-      final ByteBuffer returnedFrame = pendingBuffers.remove(timeStamp);
-      if (returnedFrame == null) {
-        throw new RuntimeException("unknown data buffer with time stamp "
-            + timeStamp + "returned?!?");
-      }
-
-      if (camera != null && returnedFrame.capacity() == frameSize) {
-        camera.addCallbackBuffer(returnedFrame.array());
-        if (queuedBuffers.isEmpty()) {
-          Logging.d(TAG, "Frame returned when camera is running out of capture"
-              + " buffers for TS " + TimeUnit.NANOSECONDS.toMillis(timeStamp));
-        }
-        queuedBuffers.put(returnedFrame.array(), returnedFrame);
-        return;
-      }
-
-      if (returnedFrame.capacity() != frameSize) {
-        Logging.d(TAG, "returnBuffer with time stamp "
-            + TimeUnit.NANOSECONDS.toMillis(timeStamp)
-            + " called with old frame size, " + returnedFrame.capacity() + ".");
-        // Since this frame has the wrong size, don't requeue it. Frames with the correct size are
-        // created in queueCameraBuffers so this must be an old buffer.
-        return;
-      }
-
-      Logging.d(TAG, "returnBuffer with time stamp "
-          + TimeUnit.NANOSECONDS.toMillis(timeStamp)
-          + " called after camera has been stopped.");
-    }
-  }
-
   // Interface used for providing callbacks to an observer.
   interface CapturerObserver {
     // Notify if the camera have been started successfully or not.
@@ -885,8 +728,8 @@ public class VideoCapturerAndroid extends VideoCapturer implements

     // Delivers a captured frame. Called on a Java thread owned by
     // VideoCapturerAndroid.
-    abstract void onByteBufferFrameCaptured(byte[] data, int length, int width, int height,
-        int rotation, long timeStamp);
+    abstract void onByteBufferFrameCaptured(byte[] data, int width, int height, int rotation,
+        long timeStamp);

     // Delivers a captured frame in a texture with id |oesTextureId|. Called on a Java thread
     // owned by VideoCapturerAndroid.
@@ -915,9 +758,9 @@ public class VideoCapturerAndroid extends VideoCapturer implements
     }

     @Override
-    public void onByteBufferFrameCaptured(byte[] data, int length, int width, int height,
+    public void onByteBufferFrameCaptured(byte[] data, int width, int height,
         int rotation, long timeStamp) {
-      nativeOnByteBufferFrameCaptured(nativeCapturer, data, length, width, height, rotation,
+      nativeOnByteBufferFrameCaptured(nativeCapturer, data, data.length, width, height, rotation,
           timeStamp);
     }

@@ -318,9 +318,19 @@ int32_t MediaCodecVideoDecoder::InitDecodeOnCodecThread() {
  frames_received_ = 0;
  frames_decoded_ = 0;

+  jobject java_surface_texture_helper_ = nullptr;
  if (use_surface_) {
+    java_surface_texture_helper_ = jni->CallStaticObjectMethod(
+        FindClass(jni, "org/webrtc/SurfaceTextureHelper"),
+        GetStaticMethodID(jni,
+                          FindClass(jni, "org/webrtc/SurfaceTextureHelper"),
+                          "create",
+                          "(Lorg/webrtc/EglBase$Context;)"
+                          "Lorg/webrtc/SurfaceTextureHelper;"),
+        render_egl_context_);
+    RTC_CHECK(java_surface_texture_helper_ != nullptr);
    surface_texture_helper_ = new rtc::RefCountedObject<SurfaceTextureHelper>(
-        jni, render_egl_context_);
+        jni, java_surface_texture_helper_);
  }

  jobject j_video_codec_enum = JavaEnumFromIndex(
@@ -331,8 +341,7 @@ int32_t MediaCodecVideoDecoder::InitDecodeOnCodecThread() {
      j_video_codec_enum,
      codec_.width,
      codec_.height,
-      use_surface_ ? surface_texture_helper_->GetJavaSurfaceTextureHelper()
-                   : nullptr);
+      java_surface_texture_helper_);
  if (CheckException(jni) || !success) {
    ALOGE << "Codec initialization error - fallback to SW codec.";
    sw_fallback_required_ = true;
@@ -29,8 +29,9 @@
 #include "talk/app/webrtc/java/jni/androidvideocapturer_jni.h"
 #include "talk/app/webrtc/java/jni/classreferenceholder.h"
 #include "talk/app/webrtc/java/jni/native_handle_impl.h"
+#include "talk/app/webrtc/java/jni/surfacetexturehelper_jni.h"
+#include "third_party/libyuv/include/libyuv/convert.h"
 #include "webrtc/base/bind.h"
-#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"

 namespace webrtc_jni {

@@ -52,13 +53,14 @@ AndroidVideoCapturerJni::AndroidVideoCapturerJni(
    jobject j_video_capturer,
    jobject j_surface_texture_helper)
    : j_video_capturer_(jni, j_video_capturer),
-      j_surface_texture_helper_(jni, j_surface_texture_helper),
      j_video_capturer_class_(
          jni, FindClass(jni, "org/webrtc/VideoCapturerAndroid")),
      j_observer_class_(
          jni,
          FindClass(jni,
                    "org/webrtc/VideoCapturerAndroid$NativeObserver")),
+      surface_texture_helper_(new rtc::RefCountedObject<SurfaceTextureHelper>(
+          jni, j_surface_texture_helper)),
      capturer_(nullptr) {
  LOG(LS_INFO) << "AndroidVideoCapturerJni ctor";
  thread_checker_.DetachFromThread();
@@ -130,13 +132,6 @@ void AndroidVideoCapturerJni::AsyncCapturerInvoke(
  invoker_->AsyncInvoke<void>(rtc::Bind(method, capturer_, args...));
}

-void AndroidVideoCapturerJni::ReturnBuffer(int64_t time_stamp) {
-  jmethodID m = GetMethodID(jni(), *j_video_capturer_class_,
-                            "returnBuffer", "(J)V");
-  jni()->CallVoidMethod(*j_video_capturer_, m, time_stamp);
-  CHECK_EXCEPTION(jni()) << "error during VideoCapturerAndroid.returnBuffer";
-}
-
std::string AndroidVideoCapturerJni::GetSupportedFormats() {
  jmethodID m =
      GetMethodID(jni(), *j_video_capturer_class_,
@@ -161,23 +156,17 @@ void AndroidVideoCapturerJni::OnMemoryBufferFrame(void* video_frame,
                                                  int rotation,
                                                  int64_t timestamp_ns) {
  const uint8_t* y_plane = static_cast<uint8_t*>(video_frame);
-  // Android guarantees that the stride is a multiple of 16.
-  // http://developer.android.com/reference/android/hardware/Camera.Parameters.html#setPreviewFormat%28int%29
-  int y_stride;
-  int uv_stride;
-  webrtc::Calc16ByteAlignedStride(width, &y_stride, &uv_stride);
-  const uint8_t* v_plane = y_plane + y_stride * height;
-  const uint8_t* u_plane =
-      v_plane + uv_stride * webrtc::AlignInt(height, 2) / 2;
+  const uint8_t* vu_plane = y_plane + width * height;

-  // Wrap the Java buffer, and call ReturnBuffer() in the wrapped
-  // VideoFrameBuffer destructor.
-  rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer(
-      new rtc::RefCountedObject<webrtc::WrappedI420Buffer>(
-          width, height, y_plane, y_stride, u_plane, uv_stride, v_plane,
-          uv_stride,
-          rtc::Bind(&AndroidVideoCapturerJni::ReturnBuffer, this,
-                    timestamp_ns)));
+  rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer =
+      buffer_pool_.CreateBuffer(width, height);
+  libyuv::NV21ToI420(
+      y_plane, width,
+      vu_plane, width,
+      buffer->MutableData(webrtc::kYPlane), buffer->stride(webrtc::kYPlane),
+      buffer->MutableData(webrtc::kUPlane), buffer->stride(webrtc::kUPlane),
+      buffer->MutableData(webrtc::kVPlane), buffer->stride(webrtc::kVPlane),
+      width, height);
  AsyncCapturerInvoke("OnIncomingFrame",
                      &webrtc::AndroidVideoCapturer::OnIncomingFrame,
                      buffer, rotation, timestamp_ns);
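For reference, the index math behind the libyuv::NV21ToI420 call above: the Y plane copies through verbatim, and each interleaved VU byte pair splits into the separate V and U planes of I420. A pure-Java restatement assuming tightly packed rows (stride == width); libyuv does the same thing with SIMD, so this is illustrative only:

public class Nv21ToI420 {
  static void convert(byte[] nv21, byte[] y, byte[] u, byte[] v,
      int width, int height) {
    System.arraycopy(nv21, 0, y, 0, width * height); // Y plane is verbatim
    int vuBase = width * height;
    int chromaSize = width * height / 4;
    for (int i = 0; i < chromaSize; ++i) {
      v[i] = nv21[vuBase + 2 * i];     // V comes first in each NV21 pair
      u[i] = nv21[vuBase + 2 * i + 1];
    }
  }
}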
@@ -189,10 +178,8 @@ void AndroidVideoCapturerJni::OnTextureFrame(int width,
                                             int64_t timestamp_ns,
                                             const NativeHandleImpl& handle) {
  rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer(
-      new rtc::RefCountedObject<AndroidTextureBuffer>(
-          width, height, handle, *j_surface_texture_helper_,
-          rtc::Bind(&AndroidVideoCapturerJni::ReturnBuffer, this,
-                    timestamp_ns)));
+      surface_texture_helper_->CreateTextureFrame(width, height, handle));
+
  AsyncCapturerInvoke("OnIncomingFrame",
                      &webrtc::AndroidVideoCapturer::OnIncomingFrame,
                      buffer, rotation, timestamp_ns);
@@ -214,13 +201,6 @@ JOW(void,
    jint width, jint height, jint rotation, jlong timestamp) {
  jboolean is_copy = true;
  jbyte* bytes = jni->GetByteArrayElements(j_frame, &is_copy);
-  // If this is a copy of the original frame, it means that the memory
-  // is not direct memory and thus VideoCapturerAndroid does not guarantee
-  // that the memory is valid when we have released |j_frame|.
-  // TODO(magjed): Move ReleaseByteArrayElements() into ReturnBuffer() and
-  // remove this check.
-  RTC_CHECK(!is_copy)
-      << "NativeObserver_nativeOnFrameCaptured: frame is a copy";
  reinterpret_cast<AndroidVideoCapturerJni*>(j_capturer)
      ->OnMemoryBufferFrame(bytes, length, width, height, rotation, timestamp);
  jni->ReleaseByteArrayElements(j_frame, bytes, JNI_ABORT);
@@ -36,10 +36,12 @@
 #include "webrtc/base/asyncinvoker.h"
 #include "webrtc/base/criticalsection.h"
 #include "webrtc/base/thread_checker.h"
+#include "webrtc/common_video/include/i420_buffer_pool.h"

 namespace webrtc_jni {

 class NativeHandleImpl;
+class SurfaceTextureHelper;

 // AndroidVideoCapturerJni implements AndroidVideoCapturerDelegate.
 // The purpose of the delegate is to hide the JNI specifics from the C++ only
@@ -70,7 +72,6 @@ class AndroidVideoCapturerJni : public webrtc::AndroidVideoCapturerDelegate {
  ~AndroidVideoCapturerJni();

 private:
-  void ReturnBuffer(int64_t time_stamp);
  JNIEnv* jni();

  // To avoid deducing Args from the 3rd parameter of AsyncCapturerInvoke.
@@ -88,10 +89,12 @@ class AndroidVideoCapturerJni : public webrtc::AndroidVideoCapturerDelegate {
      typename Identity<Args>::type... args);

  const ScopedGlobalRef<jobject> j_video_capturer_;
-  const ScopedGlobalRef<jobject> j_surface_texture_helper_;
  const ScopedGlobalRef<jclass> j_video_capturer_class_;
  const ScopedGlobalRef<jclass> j_observer_class_;

+  // Used on the Java thread running the camera.
+  webrtc::I420BufferPool buffer_pool_;
+  rtc::scoped_refptr<SurfaceTextureHelper> surface_texture_helper_;
  rtc::ThreadChecker thread_checker_;

  // |capturer| is a guaranteed to be a valid pointer between a call to
@@ -35,23 +35,12 @@

 namespace webrtc_jni {

-SurfaceTextureHelper::SurfaceTextureHelper(JNIEnv* jni,
-                                           jobject egl_shared_context)
-    : j_surface_texture_helper_class_(
-          jni,
-          FindClass(jni, "org/webrtc/SurfaceTextureHelper")),
-      j_surface_texture_helper_(
-          jni,
-          jni->CallStaticObjectMethod(
-              *j_surface_texture_helper_class_,
-              GetStaticMethodID(jni,
-                                *j_surface_texture_helper_class_,
-                                "create",
-                                "(Lorg/webrtc/EglBase$Context;)"
-                                "Lorg/webrtc/SurfaceTextureHelper;"),
-              egl_shared_context)),
-      j_return_texture_method_(GetMethodID(jni,
-                                           *j_surface_texture_helper_class_,
+SurfaceTextureHelper::SurfaceTextureHelper(
+    JNIEnv* jni, jobject surface_texture_helper)
+    : j_surface_texture_helper_(jni, surface_texture_helper),
+      j_return_texture_method_(
+          GetMethodID(jni,
+                      FindClass(jni, "org/webrtc/SurfaceTextureHelper"),
                       "returnTextureFrame",
                       "()V")) {
  CHECK_EXCEPTION(jni) << "error during initialization of SurfaceTextureHelper";
@@ -49,19 +49,14 @@ namespace webrtc_jni {
 // destroyed while a VideoFrameBuffer is in use.
 // This class is the C++ counterpart of the java class SurfaceTextureHelper.
 // Usage:
-// 1. Create an instance of this class.
-// 2. Call GetJavaSurfaceTextureHelper to get the Java SurfaceTextureHelper.
+// 1. Create an java instance of SurfaceTextureHelper.
+// 2. Create an instance of this class.
 // 3. Register a listener to the Java SurfaceListener and start producing
 // new buffers.
-// 3. Call CreateTextureFrame to wrap the Java texture in a VideoFrameBuffer.
+// 4. Call CreateTextureFrame to wrap the Java texture in a VideoFrameBuffer.
 class SurfaceTextureHelper : public rtc::RefCountInterface {
  public:
-  SurfaceTextureHelper(JNIEnv* jni, jobject shared_egl_context);
-
-  // Returns the Java SurfaceTextureHelper.
-  jobject GetJavaSurfaceTextureHelper() const {
-    return *j_surface_texture_helper_;
-  }
+  SurfaceTextureHelper(JNIEnv* jni, jobject surface_texture_helper);

  rtc::scoped_refptr<webrtc::VideoFrameBuffer> CreateTextureFrame(
      int width,
@@ -75,7 +70,6 @@ class SurfaceTextureHelper : public rtc::RefCountInterface {
  // May be called on arbitrary thread.
  void ReturnTextureFrame() const;

-  const ScopedGlobalRef<jclass> j_surface_texture_helper_class_;
  const ScopedGlobalRef<jobject> j_surface_texture_helper_;
  const jmethodID j_return_texture_method_;
};