Use NV21 instead of YV12 and clean up.
BUG=webrtc:5375
Review URL: https://codereview.webrtc.org/1530843002
Cr-Commit-Position: refs/heads/master@{#11079}
@@ -308,12 +308,15 @@ public class VideoCapturerAndroidTest extends ActivityTestCase {
   @MediumTest
   // This test that CameraEventsHandler.onError is triggered if video buffers are not returned to
   // the capturer.
-  public void testCameraFreezedEventOnBufferStarvation() throws InterruptedException {
+  public void testCameraFreezedEventOnBufferStarvationUsingTextures() throws InterruptedException {
+    EglBase eglBase = EglBase.create();
     VideoCapturerAndroidTestFixtures.CameraEvents cameraEvents =
         VideoCapturerAndroidTestFixtures.createCameraEvents();
-    VideoCapturerAndroid capturer = VideoCapturerAndroid.create("", cameraEvents);
-    VideoCapturerAndroidTestFixtures.cameraFreezedEventOnBufferStarvation(capturer,
+    VideoCapturerAndroid capturer = VideoCapturerAndroid.create("", cameraEvents,
+        eglBase.getEglBaseContext());
+    VideoCapturerAndroidTestFixtures.cameraFreezedEventOnBufferStarvationUsingTextures(capturer,
         cameraEvents, getInstrumentation().getContext());
+    eglBase.release();
   }

   @MediumTest
@@ -119,11 +119,11 @@ public class VideoCapturerAndroidTestFixtures {
     }

     @Override
-    public void onByteBufferFrameCaptured(byte[] frame, int length, int width, int height,
-        int rotation, long timeStamp) {
+    public void onByteBufferFrameCaptured(byte[] frame, int width, int height, int rotation,
+        long timeStamp) {
       synchronized (frameLock) {
         ++framesCaptured;
-        frameSize = length;
+        frameSize = frame.length;
         frameWidth = width;
         frameHeight = height;
         timestamps.add(timeStamp);
@@ -311,8 +311,8 @@ public class VideoCapturerAndroidTestFixtures {
     assertTrue(observer.WaitForCapturerToStart());
     observer.WaitForNextCapturedFrame();
     capturer.stopCapture();
-    for (long timeStamp : observer.getCopyAndResetListOftimeStamps()) {
-      capturer.returnBuffer(timeStamp);
+    if (capturer.isCapturingToTexture()) {
+      capturer.surfaceHelper.returnTextureFrame();
     }
     capturer.dispose();

@@ -332,9 +332,10 @@ public class VideoCapturerAndroidTestFixtures {
     // Make sure camera is started and then stop it.
     assertTrue(observer.WaitForCapturerToStart());
     capturer.stopCapture();
-    for (long timeStamp : observer.getCopyAndResetListOftimeStamps()) {
-      capturer.returnBuffer(timeStamp);
+    if (capturer.isCapturingToTexture()) {
+      capturer.surfaceHelper.returnTextureFrame();
     }
+
     // We can't change |capturer| at this point, but we should not crash.
     capturer.switchCamera(null);
     capturer.onOutputFormatRequest(640, 480, 15);
@@ -393,11 +394,11 @@ public class VideoCapturerAndroidTestFixtures {
       if (capturer.isCapturingToTexture()) {
         assertEquals(0, observer.frameSize());
       } else {
-        assertEquals(format.frameSize(), observer.frameSize());
+        assertTrue(format.frameSize() <= observer.frameSize());
       }
       capturer.stopCapture();
-      for (long timestamp : observer.getCopyAndResetListOftimeStamps()) {
-        capturer.returnBuffer(timestamp);
+      if (capturer.isCapturingToTexture()) {
+        capturer.surfaceHelper.returnTextureFrame();
       }
     }
     capturer.dispose();
@@ -455,8 +456,8 @@ public class VideoCapturerAndroidTestFixtures {
     assertTrue(observer.WaitForCapturerToStart());
     observer.WaitForNextCapturedFrame();
     capturer.stopCapture();
-    for (long timeStamp : observer.getCopyAndResetListOftimeStamps()) {
-      capturer.returnBuffer(timeStamp);
+    if (capturer.isCapturingToTexture()) {
+      capturer.surfaceHelper.returnTextureFrame();
     }
     capturer.dispose();
     assertTrue(capturer.isReleased());
@@ -496,9 +497,8 @@ public class VideoCapturerAndroidTestFixtures {
     capturer.startCapture(format.width, format.height, format.maxFramerate,
        appContext, observer);
     observer.WaitForCapturerToStart();
-
-    for (Long timeStamp : listOftimestamps) {
-      capturer.returnBuffer(timeStamp);
+    if (capturer.isCapturingToTexture()) {
+      capturer.surfaceHelper.returnTextureFrame();
     }

     observer.WaitForNextCapturedFrame();
@@ -506,9 +506,10 @@ public class VideoCapturerAndroidTestFixtures {

     listOftimestamps = observer.getCopyAndResetListOftimeStamps();
     assertTrue(listOftimestamps.size() >= 1);
-    for (Long timeStamp : listOftimestamps) {
-      capturer.returnBuffer(timeStamp);
+    if (capturer.isCapturingToTexture()) {
+      capturer.surfaceHelper.returnTextureFrame();
     }
+
     capturer.dispose();
     assertTrue(capturer.isReleased());
   }
@@ -519,6 +520,7 @@ public class VideoCapturerAndroidTestFixtures {
     final VideoSource source = factory.createVideoSource(capturer, new MediaConstraints());
     final VideoTrack track = factory.createVideoTrack("dummy", source);
     final FakeAsyncRenderer renderer = new FakeAsyncRenderer();
+
     track.addRenderer(new VideoRenderer(renderer));
     // Wait for at least one frame that has not been returned.
     assertFalse(renderer.waitForPendingFrames().isEmpty());
@@ -529,9 +531,7 @@ public class VideoCapturerAndroidTestFixtures {
     track.dispose();
     source.dispose();
     factory.dispose();
-
-    // The pending frames should keep the JNI parts and |capturer| alive.
-    assertFalse(capturer.isReleased());
+    assertTrue(capturer.isReleased());

     // Return the frame(s), on a different thread out of spite.
     final List<I420Frame> pendingFrames = renderer.waitForPendingFrames();
@@ -545,13 +545,13 @@ public class VideoCapturerAndroidTestFixtures {
     });
     returnThread.start();
     returnThread.join();

-    // Check that frames have successfully returned. This will cause |capturer| to be released.
-    assertTrue(capturer.isReleased());
   }

-  static public void cameraFreezedEventOnBufferStarvation(VideoCapturerAndroid capturer,
+  static public void cameraFreezedEventOnBufferStarvationUsingTextures(
+      VideoCapturerAndroid capturer,
       CameraEvents events, Context appContext) throws InterruptedException {
+    assertTrue("Not capturing to textures.", capturer.isCapturingToTexture());

     final List<CaptureFormat> formats = capturer.getSupportedFormats();
     final CameraEnumerationAndroid.CaptureFormat format = formats.get(0);

@@ -560,14 +560,16 @@ public class VideoCapturerAndroidTestFixtures {
         appContext, observer);
     // Make sure camera is started.
     assertTrue(observer.WaitForCapturerToStart());
-    // Since we don't call returnBuffer, we should get a starvation message.
+    // Since we don't return the buffer, we should get a starvation message if we are
+    // capturing to a texture.
     assertEquals("Camera failure. Client must return video buffers.",
         events.WaitForCameraFreezed());

     capturer.stopCapture();
-    for (long timeStamp : observer.getCopyAndResetListOftimeStamps()) {
-      capturer.returnBuffer(timeStamp);
+    if (capturer.isCapturingToTexture()) {
+      capturer.surfaceHelper.returnTextureFrame();
     }

     capturer.dispose();
     assertTrue(capturer.isReleased());
   }
@@ -71,7 +71,7 @@ public class CameraEnumerationAndroid {
     // other image formats then this needs to be updated and
     // VideoCapturerAndroid.getSupportedFormats need to return CaptureFormats of
    // all imageFormats.
-    public final int imageFormat = ImageFormat.YV12;
+    public final int imageFormat = ImageFormat.NV21;

     public CaptureFormat(int width, int height, int minFramerate,
         int maxFramerate) {
@@ -87,25 +87,15 @@ public class CameraEnumerationAndroid {
     }

     // Calculates the frame size of the specified image format. Currently only
-    // supporting ImageFormat.YV12. The YV12's stride is the closest rounded up
-    // multiple of 16 of the width and width and height are always even.
-    // Android guarantees this:
-    // http://developer.android.com/reference/android/hardware/Camera.Parameters.html#setPreviewFormat%28int%29
+    // supporting ImageFormat.NV21.
+    // The size is width * height * number of bytes per pixel.
+    // http://developer.android.com/reference/android/hardware/Camera.html#addCallbackBuffer(byte[])
     public static int frameSize(int width, int height, int imageFormat) {
-      if (imageFormat != ImageFormat.YV12) {
+      if (imageFormat != ImageFormat.NV21) {
         throw new UnsupportedOperationException("Don't know how to calculate "
-            + "the frame size of non-YV12 image formats.");
+            + "the frame size of non-NV21 image formats.");
       }
-      int yStride = roundUp(width, 16);
-      int uvStride = roundUp(yStride / 2, 16);
-      int ySize = yStride * height;
-      int uvSize = uvStride * height / 2;
-      return ySize + uvSize * 2;
-    }
-
-    // Rounds up |x| to the closest value that is a multiple of |alignment|.
-    private static int roundUp(int x, int alignment) {
-      return (int)ceil(x / (double)alignment) * alignment;
+      return (width * height * ImageFormat.getBitsPerPixel(imageFormat)) / 8;
     }

     @Override
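A note on the arithmetic: ImageFormat.getBitsPerPixel(ImageFormat.NV21) is 12, so the new frameSize() reduces to width * height * 3 / 2 with no stride rounding, since Android sizes NV21 callback buffers exactly this way (per the addCallbackBuffer link above). A minimal sketch of the same math (the helper class below is illustrative, not part of the patch):

import android.graphics.ImageFormat;

// Illustrative helper mirroring CaptureFormat.frameSize() after this change.
public final class Nv21Size {
  public static int frameSize(int width, int height) {
    // NV21 carries 12 bits per pixel: a full-size Y plane (8 bpp) plus one
    // interleaved VU plane covering each 2x2 pixel block (4 bpp).
    return (width * height * ImageFormat.getBitsPerPixel(ImageFormat.NV21)) / 8;
  }
}

For a 640x480 frame this gives 640 * 480 * 12 / 8 = 460800 bytes, which is also the size of the direct ByteBuffers allocated further down in VideoCapturerAndroid.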
@@ -317,7 +317,7 @@ class SurfaceTextureHelper {
   private OnTextureFrameAvailableListener listener;
   // The possible states of this class.
   private boolean hasPendingTexture = false;
-  private boolean isTextureInUse = false;
+  private volatile boolean isTextureInUse = false;
   private boolean isQuitting = false;

   private SurfaceTextureHelper(EglBase.Context sharedContext,
@@ -392,6 +392,10 @@ class SurfaceTextureHelper {
     });
   }

+  public boolean isTextureInUse() {
+    return isTextureInUse;
+  }
+
   /**
    * Call disconnect() to stop receiving frames. Resources are released when the texture frame has
    * been returned by a call to returnTextureFrame(). You are guaranteed to not receive any more
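This getter pairs with the volatile qualifier added in the previous hunk: returnTextureFrame() may be called from an arbitrary thread, while the camera-observer code further down polls isTextureInUse() on its own thread, so the write must be guaranteed visible across threads. A minimal illustration of the pattern (names are ours, not from the patch):

// A flag written on one thread and polled on another must be volatile
// (or otherwise synchronized) for the update to be guaranteed visible.
final class TextureUseFlag {
  private volatile boolean isTextureInUse = false;

  void onFrameDelivered() { isTextureInUse = true; }   // Helper thread.
  void onFrameReturned() { isTextureInUse = false; }   // Arbitrary thread.
  boolean isTextureInUse() { return isTextureInUse; }  // Observer thread poll.
}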
@@ -77,7 +77,6 @@ public class VideoCapturerAndroid extends VideoCapturer implements
   private final Object cameraIdLock = new Object();
   private int id;
   private android.hardware.Camera.CameraInfo info;
-  private final FramePool videoBuffers;
   private final CameraStatistics cameraStatistics;
   // Remember the requested format in case we want to switch cameras.
   private int requestedWidth;
@@ -90,8 +89,13 @@ public class VideoCapturerAndroid extends VideoCapturer implements
   private CapturerObserver frameObserver = null;
   private final CameraEventsHandler eventsHandler;
   private boolean firstFrameReported;
+  // Arbitrary queue depth. Higher number means more memory allocated & held,
+  // lower number means more sensitivity to processing time in the client (and
+  // potentially stalling the capturer if it runs out of buffers to write to).
+  private static final int NUMBER_OF_CAPTURE_BUFFERS = 3;
+  private final Set<byte[]> queuedBuffers = new HashSet<byte[]>();
   private final boolean isCapturingToTexture;
-  private final SurfaceTextureHelper surfaceHelper;
+  final SurfaceTextureHelper surfaceHelper; // Package visible for testing purposes.
   // The camera API can output one old frame after the camera has been switched or the resolution
   // has been changed. This flag is used for dropping the first frame after camera restart.
   private boolean dropNextFrame = false;
@@ -129,14 +133,14 @@ public class VideoCapturerAndroid extends VideoCapturer implements
       int cameraFps = (cameraFramesCount * 1000 + CAMERA_OBSERVER_PERIOD_MS / 2)
           / CAMERA_OBSERVER_PERIOD_MS;

-      Logging.d(TAG, "Camera fps: " + cameraFps +
-          ". Pending buffers: " + cameraStatistics.pendingFramesTimeStamps());
+      Logging.d(TAG, "Camera fps: " + cameraFps +".");
       if (cameraFramesCount == 0) {
         ++freezePeriodCount;
         if (CAMERA_OBSERVER_PERIOD_MS * freezePeriodCount > CAMERA_FREEZE_REPORT_TIMOUT_MS
             && eventsHandler != null) {
           Logging.e(TAG, "Camera freezed.");
-          if (cameraStatistics.pendingFramesCount() == cameraStatistics.maxPendingFrames) {
+          if (surfaceHelper.isTextureInUse()) {
+            // This can only happen if we are capturing to textures.
             eventsHandler.onCameraFreezed("Camera failure. Client must return video buffers.");
           } else {
             eventsHandler.onCameraFreezed("Camera failure.");
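The fps expression above rounds to the nearest integer by adding half of the divisor before the integer division. A worked example, assuming an observation period of 2000 ms (the actual CAMERA_OBSERVER_PERIOD_MS value is defined outside this hunk):

public final class FpsRounding {
  public static void main(String[] args) {
    final int CAMERA_OBSERVER_PERIOD_MS = 2000;  // Assumed for illustration.
    final int cameraFramesCount = 29;            // Frames seen in one period.
    // Plain integer division would truncate 29000 / 2000 to 14; adding half
    // the divisor first rounds 14.5 fps to the nearest value, 15.
    final int cameraFps = (cameraFramesCount * 1000 + CAMERA_OBSERVER_PERIOD_MS / 2)
        / CAMERA_OBSERVER_PERIOD_MS;
    System.out.println(cameraFps);  // 15
  }
}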
@@ -153,27 +157,14 @@ public class VideoCapturerAndroid extends VideoCapturer implements
   private static class CameraStatistics {
     private int frameCount = 0;
     private final ThreadUtils.ThreadChecker threadChecker = new ThreadUtils.ThreadChecker();
-    private final Set<Long> timeStampsNs = new HashSet<Long>();
-    public final int maxPendingFrames;

-    CameraStatistics(int maxPendingFrames) {
-      this.maxPendingFrames = maxPendingFrames;
+    CameraStatistics() {
       threadChecker.detachThread();
     }

-    public void addPendingFrame(long timestamp) {
+    public void addFrame() {
       threadChecker.checkIsOnValidThread();
       ++frameCount;
-      timeStampsNs.add(timestamp);
-    }
-
-    public void frameReturned(long timestamp) {
-      threadChecker.checkIsOnValidThread();
-      if (!timeStampsNs.contains(timestamp)) {
-        throw new IllegalStateException(
-            "CameraStatistics.frameReturned called with unknown timestamp " + timestamp);
-      }
-      timeStampsNs.remove(timestamp);
     }

     public int getAndResetFrameCount() {
@@ -182,21 +173,6 @@ public class VideoCapturerAndroid extends VideoCapturer implements
       frameCount = 0;
       return count;
     }
-
-    // Return number of pending frames that have not been returned.
-    public int pendingFramesCount() {
-      threadChecker.checkIsOnValidThread();
-      return timeStampsNs.size();
-    }
-
-    public String pendingFramesTimeStamps() {
-      threadChecker.checkIsOnValidThread();
-      List<Long> timeStampsMs = new ArrayList<Long>();
-      for (long ts : timeStampsNs) {
-        timeStampsMs.add(TimeUnit.NANOSECONDS.toMillis(ts));
-      }
-      return timeStampsMs.toString();
-    }
   }

   public static interface CameraEventsHandler {
@@ -350,20 +326,18 @@ public class VideoCapturerAndroid extends VideoCapturer implements

   private VideoCapturerAndroid(int cameraId, CameraEventsHandler eventsHandler,
       EglBase.Context sharedContext) {
-    Logging.d(TAG, "VideoCapturerAndroid");
     this.id = cameraId;
     this.eventsHandler = eventsHandler;
     cameraThread = new HandlerThread(TAG);
     cameraThread.start();
     cameraThreadHandler = new Handler(cameraThread.getLooper());
-    videoBuffers = new FramePool(cameraThread);
     isCapturingToTexture = (sharedContext != null);
-    cameraStatistics =
-        new CameraStatistics(isCapturingToTexture ? 1 : FramePool.NUMBER_OF_CAPTURE_BUFFERS);
+    cameraStatistics = new CameraStatistics();
     surfaceHelper = SurfaceTextureHelper.create(sharedContext, cameraThreadHandler);
     if (isCapturingToTexture) {
       surfaceHelper.setListener(this);
     }
+    Logging.d(TAG, "VideoCapturerAndroid isCapturingToTexture : " + isCapturingToTexture);
   }

   private void checkIsOnCameraThread() {
@@ -403,9 +377,6 @@ public class VideoCapturerAndroid extends VideoCapturer implements
         if (camera != null) {
           throw new IllegalStateException("Release called while camera is running");
         }
-        if (cameraStatistics.pendingFramesCount() != 0) {
-          throw new IllegalStateException("Release called with pending frames left");
-        }
       }
     });
     surfaceHelper.disconnect(cameraThreadHandler);
@@ -582,7 +553,13 @@ public class VideoCapturerAndroid extends VideoCapturer implements

     camera.setParameters(parameters);
     if (!isCapturingToTexture) {
-      videoBuffers.queueCameraBuffers(captureFormat.frameSize(), camera);
+      queuedBuffers.clear();
+      final int frameSize = captureFormat.frameSize();
+      for (int i = 0; i < NUMBER_OF_CAPTURE_BUFFERS; ++i) {
+        final ByteBuffer buffer = ByteBuffer.allocateDirect(frameSize);
+        queuedBuffers.add(buffer.array());
+        camera.addCallbackBuffer(buffer.array());
+      }
       camera.setPreviewCallbackWithBuffer(this);
     }
     camera.startPreview();
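One subtlety worth spelling out: Java arrays do not override equals()/hashCode(), so a HashSet<byte[]> compares elements by object identity. That is exactly what the new queuedBuffers set relies on when onPreviewFrame later checks queuedBuffers.contains(data): it recognizes the specific buffer objects handed to this camera configuration and drops callbacks delivering stale buffers from an earlier one. A small standalone demonstration of the identity semantics (not from the patch):

import java.util.HashSet;
import java.util.Set;

public final class IdentitySetDemo {
  public static void main(String[] args) {
    Set<byte[]> queuedBuffers = new HashSet<byte[]>();
    byte[] current = new byte[460800];
    byte[] stale = new byte[460800];  // Equal contents, different object.
    queuedBuffers.add(current);
    System.out.println(queuedBuffers.contains(current));  // true
    System.out.println(queuedBuffers.contains(stale));    // false: identity, not contents
  }
}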
@@ -619,13 +596,7 @@ public class VideoCapturerAndroid extends VideoCapturer implements
     Logging.d(TAG, "Stop preview.");
     camera.stopPreview();
     camera.setPreviewCallbackWithBuffer(null);
-    if (!isCapturingToTexture()) {
-      videoBuffers.stopReturnBuffersToCamera();
-      Logging.d(TAG, "stopReturnBuffersToCamera called."
-          + (cameraStatistics.pendingFramesCount() == 0?
-                 " All buffers have been returned."
-                 : " Pending buffers: " + cameraStatistics.pendingFramesTimeStamps() + "."));
-    }
+    queuedBuffers.clear();
     captureFormat = null;

     Logging.d(TAG, "Release camera.");
@@ -665,19 +636,6 @@ public class VideoCapturerAndroid extends VideoCapturer implements
     return cameraThreadHandler;
   }

-  public void returnBuffer(final long timeStamp) {
-    cameraThreadHandler.post(new Runnable() {
-      @Override public void run() {
-        cameraStatistics.frameReturned(timeStamp);
-        if (isCapturingToTexture) {
-          surfaceHelper.returnTextureFrame();
-        } else {
-          videoBuffers.returnBuffer(timeStamp);
-        }
-      }
-    });
-  }
-
   private int getDeviceOrientation() {
     int orientation = 0;

@@ -713,7 +671,8 @@ public class VideoCapturerAndroid extends VideoCapturer implements
   @Override
   public void onPreviewFrame(byte[] data, android.hardware.Camera callbackCamera) {
     checkIsOnCameraThread();
-    if (camera == null) {
+    if (camera == null || !queuedBuffers.contains(data)) {
+      // The camera has been stopped or |data| is an old invalid buffer.
       return;
     }
     if (camera != callbackCamera) {
@@ -728,16 +687,10 @@ public class VideoCapturerAndroid extends VideoCapturer implements
       firstFrameReported = true;
     }

-    // Mark the frame owning |data| as used.
-    // Note that since data is directBuffer,
-    // data.length >= videoBuffers.frameSize.
-    if (videoBuffers.reserveByteBuffer(data, captureTimeNs)) {
-      cameraStatistics.addPendingFrame(captureTimeNs);
-      frameObserver.onByteBufferFrameCaptured(data, videoBuffers.frameSize, captureFormat.width,
-          captureFormat.height, getFrameOrientation(), captureTimeNs);
-    } else {
-      Logging.w(TAG, "reserveByteBuffer failed - dropping frame.");
-    }
+    cameraStatistics.addFrame();
+    frameObserver.onByteBufferFrameCaptured(data, captureFormat.width, captureFormat.height,
+        getFrameOrientation(), captureTimeNs);
+    camera.addCallbackBuffer(data);
   }

   @Override
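Worth noting: with the FramePool gone, onPreviewFrame hands |data| to the observer and immediately re-adds the very same array as a callback buffer. This only works because the native observer now copies the frame out synchronously (see the NV21ToI420 change further down); nothing stays pending on the Java byte-buffer path, which is also why the buffer-starvation freeze message can now only be triggered when capturing to textures.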
@@ -762,121 +715,11 @@ public class VideoCapturerAndroid extends VideoCapturer implements
       transformMatrix =
           RendererCommon.multiplyMatrices(transformMatrix, RendererCommon.horizontalFlipMatrix());
     }
-    cameraStatistics.addPendingFrame(timestampNs);
+    cameraStatistics.addFrame();

     frameObserver.onTextureFrameCaptured(captureFormat.width, captureFormat.height, oesTextureId,
         transformMatrix, rotation, timestampNs);
   }

-  // Class used for allocating and bookkeeping video frames. All buffers are
-  // direct allocated so that they can be directly used from native code. This class is
-  // not thread-safe, and enforces single thread use.
-  private static class FramePool {
-    // Thread that all calls should be made on.
-    private final Thread thread;
-    // Arbitrary queue depth. Higher number means more memory allocated & held,
-    // lower number means more sensitivity to processing time in the client (and
-    // potentially stalling the capturer if it runs out of buffers to write to).
-    public static final int NUMBER_OF_CAPTURE_BUFFERS = 3;
-    // This container tracks the buffers added as camera callback buffers. It is needed for finding
-    // the corresponding ByteBuffer given a byte[].
-    private final Map<byte[], ByteBuffer> queuedBuffers = new IdentityHashMap<byte[], ByteBuffer>();
-    // This container tracks the frames that have been sent but not returned. It is needed for
-    // keeping the buffers alive and for finding the corresponding ByteBuffer given a timestamp.
-    private final Map<Long, ByteBuffer> pendingBuffers = new HashMap<Long, ByteBuffer>();
-    private int frameSize = 0;
-    private android.hardware.Camera camera;
-
-    public FramePool(Thread thread) {
-      this.thread = thread;
-    }
-
-    private void checkIsOnValidThread() {
-      if (Thread.currentThread() != thread) {
-        throw new IllegalStateException("Wrong thread");
-      }
-    }
-
-    // Discards previous queued buffers and adds new callback buffers to camera.
-    public void queueCameraBuffers(int frameSize, android.hardware.Camera camera) {
-      checkIsOnValidThread();
-      this.camera = camera;
-      this.frameSize = frameSize;
-
-      queuedBuffers.clear();
-      for (int i = 0; i < NUMBER_OF_CAPTURE_BUFFERS; ++i) {
-        final ByteBuffer buffer = ByteBuffer.allocateDirect(frameSize);
-        camera.addCallbackBuffer(buffer.array());
-        queuedBuffers.put(buffer.array(), buffer);
-      }
-      Logging.d(TAG, "queueCameraBuffers enqueued " + NUMBER_OF_CAPTURE_BUFFERS
-          + " buffers of size " + frameSize + ".");
-    }
-
-    public void stopReturnBuffersToCamera() {
-      checkIsOnValidThread();
-      this.camera = null;
-      queuedBuffers.clear();
-      // Frames in |pendingBuffers| need to be kept alive until they are returned.
-    }
-
-    public boolean reserveByteBuffer(byte[] data, long timeStamp) {
-      checkIsOnValidThread();
-      final ByteBuffer buffer = queuedBuffers.remove(data);
-      if (buffer == null) {
-        // Frames might be posted to |onPreviewFrame| with the previous format while changing
-        // capture format in |startPreviewOnCameraThread|. Drop these old frames.
-        Logging.w(TAG, "Received callback buffer from previous configuration with length: "
-            + (data == null ? "null" : data.length));
-        return false;
-      }
-      if (buffer.capacity() != frameSize) {
-        throw new IllegalStateException("Callback buffer has unexpected frame size");
-      }
-      if (pendingBuffers.containsKey(timeStamp)) {
-        Logging.e(TAG, "Timestamp already present in pending buffers - they need to be unique");
-        return false;
-      }
-      pendingBuffers.put(timeStamp, buffer);
-      if (queuedBuffers.isEmpty()) {
-        Logging.d(TAG, "Camera is running out of capture buffers.");
-      }
-      return true;
-    }
-
-    public void returnBuffer(long timeStamp) {
-      checkIsOnValidThread();
-      final ByteBuffer returnedFrame = pendingBuffers.remove(timeStamp);
-      if (returnedFrame == null) {
-        throw new RuntimeException("unknown data buffer with time stamp "
-            + timeStamp + "returned?!?");
-      }
-
-      if (camera != null && returnedFrame.capacity() == frameSize) {
-        camera.addCallbackBuffer(returnedFrame.array());
-        if (queuedBuffers.isEmpty()) {
-          Logging.d(TAG, "Frame returned when camera is running out of capture"
-              + " buffers for TS " + TimeUnit.NANOSECONDS.toMillis(timeStamp));
-        }
-        queuedBuffers.put(returnedFrame.array(), returnedFrame);
-        return;
-      }
-
-      if (returnedFrame.capacity() != frameSize) {
-        Logging.d(TAG, "returnBuffer with time stamp "
-            + TimeUnit.NANOSECONDS.toMillis(timeStamp)
-            + " called with old frame size, " + returnedFrame.capacity() + ".");
-        // Since this frame has the wrong size, don't requeue it. Frames with the correct size are
-        // created in queueCameraBuffers so this must be an old buffer.
-        return;
-      }
-
-      Logging.d(TAG, "returnBuffer with time stamp "
-          + TimeUnit.NANOSECONDS.toMillis(timeStamp)
-          + " called after camera has been stopped.");
-    }
-  }
-
   // Interface used for providing callbacks to an observer.
   interface CapturerObserver {
     // Notify if the camera have been started successfully or not.
@@ -885,8 +728,8 @@ public class VideoCapturerAndroid extends VideoCapturer implements

     // Delivers a captured frame. Called on a Java thread owned by
     // VideoCapturerAndroid.
-    abstract void onByteBufferFrameCaptured(byte[] data, int length, int width, int height,
-        int rotation, long timeStamp);
+    abstract void onByteBufferFrameCaptured(byte[] data, int width, int height, int rotation,
+        long timeStamp);

     // Delivers a captured frame in a texture with id |oesTextureId|. Called on a Java thread
     // owned by VideoCapturerAndroid.
@@ -915,9 +758,9 @@ public class VideoCapturerAndroid extends VideoCapturer implements
     }

     @Override
-    public void onByteBufferFrameCaptured(byte[] data, int length, int width, int height,
+    public void onByteBufferFrameCaptured(byte[] data, int width, int height,
         int rotation, long timeStamp) {
-      nativeOnByteBufferFrameCaptured(nativeCapturer, data, length, width, height, rotation,
+      nativeOnByteBufferFrameCaptured(nativeCapturer, data, data.length, width, height, rotation,
           timeStamp);
     }

@@ -318,9 +318,19 @@ int32_t MediaCodecVideoDecoder::InitDecodeOnCodecThread() {
   frames_received_ = 0;
   frames_decoded_ = 0;

+  jobject java_surface_texture_helper_ = nullptr;
   if (use_surface_) {
+    java_surface_texture_helper_ = jni->CallStaticObjectMethod(
+        FindClass(jni, "org/webrtc/SurfaceTextureHelper"),
+        GetStaticMethodID(jni,
+                          FindClass(jni, "org/webrtc/SurfaceTextureHelper"),
+                          "create",
+                          "(Lorg/webrtc/EglBase$Context;)"
+                          "Lorg/webrtc/SurfaceTextureHelper;"),
+        render_egl_context_);
+    RTC_CHECK(java_surface_texture_helper_ != nullptr);
     surface_texture_helper_ = new rtc::RefCountedObject<SurfaceTextureHelper>(
-        jni, render_egl_context_);
+        jni, java_surface_texture_helper_);
   }

   jobject j_video_codec_enum = JavaEnumFromIndex(
@@ -331,8 +341,7 @@ int32_t MediaCodecVideoDecoder::InitDecodeOnCodecThread() {
       j_video_codec_enum,
       codec_.width,
       codec_.height,
-      use_surface_ ? surface_texture_helper_->GetJavaSurfaceTextureHelper()
-                   : nullptr);
+      java_surface_texture_helper_);
   if (CheckException(jni) || !success) {
     ALOGE << "Codec initialization error - fallback to SW codec.";
     sw_fallback_required_ = true;
@@ -29,8 +29,9 @@
 #include "talk/app/webrtc/java/jni/androidvideocapturer_jni.h"
 #include "talk/app/webrtc/java/jni/classreferenceholder.h"
 #include "talk/app/webrtc/java/jni/native_handle_impl.h"
+#include "talk/app/webrtc/java/jni/surfacetexturehelper_jni.h"
+#include "third_party/libyuv/include/libyuv/convert.h"
 #include "webrtc/base/bind.h"
-#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"

 namespace webrtc_jni {

@@ -52,13 +53,14 @@ AndroidVideoCapturerJni::AndroidVideoCapturerJni(
     jobject j_video_capturer,
     jobject j_surface_texture_helper)
     : j_video_capturer_(jni, j_video_capturer),
-      j_surface_texture_helper_(jni, j_surface_texture_helper),
       j_video_capturer_class_(
           jni, FindClass(jni, "org/webrtc/VideoCapturerAndroid")),
       j_observer_class_(
           jni,
           FindClass(jni,
                     "org/webrtc/VideoCapturerAndroid$NativeObserver")),
+      surface_texture_helper_(new rtc::RefCountedObject<SurfaceTextureHelper>(
+          jni, j_surface_texture_helper)),
       capturer_(nullptr) {
   LOG(LS_INFO) << "AndroidVideoCapturerJni ctor";
   thread_checker_.DetachFromThread();
@@ -130,13 +132,6 @@ void AndroidVideoCapturerJni::AsyncCapturerInvoke(
   invoker_->AsyncInvoke<void>(rtc::Bind(method, capturer_, args...));
 }

-void AndroidVideoCapturerJni::ReturnBuffer(int64_t time_stamp) {
-  jmethodID m = GetMethodID(jni(), *j_video_capturer_class_,
-                            "returnBuffer", "(J)V");
-  jni()->CallVoidMethod(*j_video_capturer_, m, time_stamp);
-  CHECK_EXCEPTION(jni()) << "error during VideoCapturerAndroid.returnBuffer";
-}
-
 std::string AndroidVideoCapturerJni::GetSupportedFormats() {
   jmethodID m =
       GetMethodID(jni(), *j_video_capturer_class_,
@@ -161,23 +156,17 @@ void AndroidVideoCapturerJni::OnMemoryBufferFrame(void* video_frame,
                                                   int rotation,
                                                   int64_t timestamp_ns) {
   const uint8_t* y_plane = static_cast<uint8_t*>(video_frame);
-  // Android guarantees that the stride is a multiple of 16.
-  // http://developer.android.com/reference/android/hardware/Camera.Parameters.html#setPreviewFormat%28int%29
-  int y_stride;
-  int uv_stride;
-  webrtc::Calc16ByteAlignedStride(width, &y_stride, &uv_stride);
-  const uint8_t* v_plane = y_plane + y_stride * height;
-  const uint8_t* u_plane =
-      v_plane + uv_stride * webrtc::AlignInt(height, 2) / 2;
+  const uint8_t* vu_plane = y_plane + width * height;

-  // Wrap the Java buffer, and call ReturnBuffer() in the wrapped
-  // VideoFrameBuffer destructor.
-  rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer(
-      new rtc::RefCountedObject<webrtc::WrappedI420Buffer>(
-          width, height, y_plane, y_stride, u_plane, uv_stride, v_plane,
-          uv_stride,
-          rtc::Bind(&AndroidVideoCapturerJni::ReturnBuffer, this,
-                    timestamp_ns)));
+  rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer =
+      buffer_pool_.CreateBuffer(width, height);
+  libyuv::NV21ToI420(
+      y_plane, width,
+      vu_plane, width,
+      buffer->MutableData(webrtc::kYPlane), buffer->stride(webrtc::kYPlane),
+      buffer->MutableData(webrtc::kUPlane), buffer->stride(webrtc::kUPlane),
+      buffer->MutableData(webrtc::kVPlane), buffer->stride(webrtc::kVPlane),
+      width, height);
   AsyncCapturerInvoke("OnIncomingFrame",
                       &webrtc::AndroidVideoCapturer::OnIncomingFrame,
                       buffer, rotation, timestamp_ns);
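Since the capture format is now NV21 rather than YV12, the native path copies each frame into a pooled I420 buffer via libyuv::NV21ToI420 instead of wrapping the Java array in place; that copy is what lets the Java side recycle its callback buffer immediately and is why ReturnBuffer is gone. In NV21 the Y plane is followed by a single interleaved VU plane, as the vu_plane pointer above shows. A plain-Java sketch of the same conversion for a packed frame, for clarity only (the real code uses libyuv):

// Convert a packed NV21 frame (stride == width) to separate I420 planes.
// NV21 layout: width*height Y bytes, then width*height/2 interleaved V,U bytes.
public final class Nv21ToI420 {
  public static void convert(byte[] nv21, int width, int height,
      byte[] y, byte[] u, byte[] v) {
    final int ySize = width * height;
    System.arraycopy(nv21, 0, y, 0, ySize);  // Y plane is copied verbatim.
    int uvIndex = 0;
    for (int i = ySize; i < ySize + ySize / 2; i += 2) {
      v[uvIndex] = nv21[i];      // V comes first in NV21...
      u[uvIndex] = nv21[i + 1];  // ...followed by U, one pair per 2x2 block.
      ++uvIndex;
    }
  }
}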
@@ -189,10 +178,8 @@ void AndroidVideoCapturerJni::OnTextureFrame(int width,
                                              int64_t timestamp_ns,
                                              const NativeHandleImpl& handle) {
   rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer(
-      new rtc::RefCountedObject<AndroidTextureBuffer>(
-          width, height, handle, *j_surface_texture_helper_,
-          rtc::Bind(&AndroidVideoCapturerJni::ReturnBuffer, this,
-                    timestamp_ns)));
+      surface_texture_helper_->CreateTextureFrame(width, height, handle));
   AsyncCapturerInvoke("OnIncomingFrame",
                       &webrtc::AndroidVideoCapturer::OnIncomingFrame,
                       buffer, rotation, timestamp_ns);
@@ -214,13 +201,6 @@ JOW(void,
     jint width, jint height, jint rotation, jlong timestamp) {
   jboolean is_copy = true;
   jbyte* bytes = jni->GetByteArrayElements(j_frame, &is_copy);
-  // If this is a copy of the original frame, it means that the memory
-  // is not direct memory and thus VideoCapturerAndroid does not guarantee
-  // that the memory is valid when we have released |j_frame|.
-  // TODO(magjed): Move ReleaseByteArrayElements() into ReturnBuffer() and
-  // remove this check.
-  RTC_CHECK(!is_copy)
-      << "NativeObserver_nativeOnFrameCaptured: frame is a copy";
   reinterpret_cast<AndroidVideoCapturerJni*>(j_capturer)
       ->OnMemoryBufferFrame(bytes, length, width, height, rotation, timestamp);
   jni->ReleaseByteArrayElements(j_frame, bytes, JNI_ABORT);
@@ -36,10 +36,12 @@
 #include "webrtc/base/asyncinvoker.h"
 #include "webrtc/base/criticalsection.h"
 #include "webrtc/base/thread_checker.h"
+#include "webrtc/common_video/include/i420_buffer_pool.h"

 namespace webrtc_jni {

 class NativeHandleImpl;
+class SurfaceTextureHelper;

 // AndroidVideoCapturerJni implements AndroidVideoCapturerDelegate.
 // The purpose of the delegate is to hide the JNI specifics from the C++ only
@@ -70,7 +72,6 @@ class AndroidVideoCapturerJni : public webrtc::AndroidVideoCapturerDelegate {
   ~AndroidVideoCapturerJni();

  private:
-  void ReturnBuffer(int64_t time_stamp);
   JNIEnv* jni();

   // To avoid deducing Args from the 3rd parameter of AsyncCapturerInvoke.
@@ -88,10 +89,12 @@ class AndroidVideoCapturerJni : public webrtc::AndroidVideoCapturerDelegate {
                            typename Identity<Args>::type... args);

   const ScopedGlobalRef<jobject> j_video_capturer_;
-  const ScopedGlobalRef<jobject> j_surface_texture_helper_;
   const ScopedGlobalRef<jclass> j_video_capturer_class_;
   const ScopedGlobalRef<jclass> j_observer_class_;

+  // Used on the Java thread running the camera.
+  webrtc::I420BufferPool buffer_pool_;
+  rtc::scoped_refptr<SurfaceTextureHelper> surface_texture_helper_;
   rtc::ThreadChecker thread_checker_;

   // |capturer| is a guaranteed to be a valid pointer between a call to
@@ -35,23 +35,12 @@

 namespace webrtc_jni {

-SurfaceTextureHelper::SurfaceTextureHelper(JNIEnv* jni,
-                                           jobject egl_shared_context)
-    : j_surface_texture_helper_class_(
-          jni,
-          FindClass(jni, "org/webrtc/SurfaceTextureHelper")),
-      j_surface_texture_helper_(
-          jni,
-          jni->CallStaticObjectMethod(
-              *j_surface_texture_helper_class_,
-              GetStaticMethodID(jni,
-                                *j_surface_texture_helper_class_,
-                                "create",
-                                "(Lorg/webrtc/EglBase$Context;)"
-                                "Lorg/webrtc/SurfaceTextureHelper;"),
-              egl_shared_context)),
-      j_return_texture_method_(GetMethodID(jni,
-                                           *j_surface_texture_helper_class_,
+SurfaceTextureHelper::SurfaceTextureHelper(
+    JNIEnv* jni, jobject surface_texture_helper)
+    : j_surface_texture_helper_(jni, surface_texture_helper),
+      j_return_texture_method_(
+          GetMethodID(jni,
+                      FindClass(jni, "org/webrtc/SurfaceTextureHelper"),
                       "returnTextureFrame",
                       "()V")) {
   CHECK_EXCEPTION(jni) << "error during initialization of SurfaceTextureHelper";
@@ -49,19 +49,14 @@ namespace webrtc_jni {
 // destroyed while a VideoFrameBuffer is in use.
 // This class is the C++ counterpart of the java class SurfaceTextureHelper.
 // Usage:
-// 1. Create an instance of this class.
-// 2. Call GetJavaSurfaceTextureHelper to get the Java SurfaceTextureHelper.
+// 1. Create an java instance of SurfaceTextureHelper.
+// 2. Create an instance of this class.
 // 3. Register a listener to the Java SurfaceListener and start producing
 // new buffers.
-// 3. Call CreateTextureFrame to wrap the Java texture in a VideoFrameBuffer.
+// 4. Call CreateTextureFrame to wrap the Java texture in a VideoFrameBuffer.
 class SurfaceTextureHelper : public rtc::RefCountInterface {
  public:
-  SurfaceTextureHelper(JNIEnv* jni, jobject shared_egl_context);
+  SurfaceTextureHelper(JNIEnv* jni, jobject surface_texture_helper);

-  // Returns the Java SurfaceTextureHelper.
-  jobject GetJavaSurfaceTextureHelper() const {
-    return *j_surface_texture_helper_;
-  }
-
   rtc::scoped_refptr<webrtc::VideoFrameBuffer> CreateTextureFrame(
       int width,
@@ -75,7 +70,6 @@ class SurfaceTextureHelper : public rtc::RefCountInterface {
   // May be called on arbitrary thread.
   void ReturnTextureFrame() const;

-  const ScopedGlobalRef<jclass> j_surface_texture_helper_class_;
   const ScopedGlobalRef<jobject> j_surface_texture_helper_;
   const jmethodID j_return_texture_method_;
 };