Add a field trial to produce VideoFrames in camera capturers.
This field trial enables producing new VideoFrames in the camera classes. It should be enabled if VideoSinks are used.

BUG=webrtc:7749, webrtc:7760
Review-Url: https://codereview.webrtc.org/2984633002
Cr-Commit-Position: refs/heads/master@{#19467}
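The trial is read through PeerConnectionFactory.fieldTrialsFindFullName() in the sessions below. As a minimal opt-in sketch — the literal trial string here is an assumption; the authoritative values are the PeerConnectionFactory.VIDEO_FRAME_EMIT_TRIAL and TRIAL_ENABLED constants:

    // Must run before the PeerConnectionFactory is created.
    // Field trial strings use the "<Name>/<Group>/" format; "VideoFrameEmit/Enabled/"
    // is assumed to match VIDEO_FRAME_EMIT_TRIAL + "/" + TRIAL_ENABLED + "/".
    PeerConnectionFactory.initializeFieldTrials("VideoFrameEmit/Enabled/");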
webrtc/sdk/android/src/java/org/webrtc/Camera1Session.java

@@ -36,6 +36,8 @@ class Camera1Session implements CameraSession {
 
   private static enum SessionState { RUNNING, STOPPED }
 
+  private final boolean videoFrameEmitTrialEnabled;
+
   private final Handler cameraThreadHandler;
   private final Events events;
   private final boolean captureToTexture;
@@ -85,7 +87,6 @@ class Camera1Session implements CameraSession {
 
     updateCameraParameters(camera, parameters, captureFormat, pictureSize, captureToTexture);
-
     // Initialize the capture buffers.
     if (!captureToTexture) {
       final int frameSize = captureFormat.frameSize();
       for (int i = 0; i < NUMBER_OF_CAPTURE_BUFFERS; ++i) {
@@ -151,6 +152,9 @@ class Camera1Session implements CameraSession {
       android.hardware.Camera camera, android.hardware.Camera.CameraInfo info,
       CaptureFormat captureFormat, long constructionTimeNs) {
     Logging.d(TAG, "Create new camera1 session on camera " + cameraId);
+    videoFrameEmitTrialEnabled =
+        PeerConnectionFactory.fieldTrialsFindFullName(PeerConnectionFactory.VIDEO_FRAME_EMIT_TRIAL)
+            .equals(PeerConnectionFactory.TRIAL_ENABLED);
 
     this.cameraThreadHandler = new Handler();
     this.events = events;
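For context, a sketch of the lookup contract the constructor relies on: fieldTrialsFindFullName() returns the trial's group name, and the empty string when the trial is unset, so the flag is true only for the exact group TRIAL_ENABLED.

    // Sketch only; mirrors the constructor logic above.
    String group = PeerConnectionFactory.fieldTrialsFindFullName(
        PeerConnectionFactory.VIDEO_FRAME_EMIT_TRIAL);
    boolean enabled = group.equals(PeerConnectionFactory.TRIAL_ENABLED);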
@@ -267,8 +271,17 @@ class Camera1Session implements CameraSession {
           transformMatrix = RendererCommon.multiplyMatrices(
               transformMatrix, RendererCommon.horizontalFlipMatrix());
         }
-        events.onTextureFrameCaptured(Camera1Session.this, captureFormat.width,
-            captureFormat.height, oesTextureId, transformMatrix, rotation, timestampNs);
+        if (videoFrameEmitTrialEnabled) {
+          final VideoFrame.Buffer buffer =
+              surfaceTextureHelper.createTextureBuffer(captureFormat.width, captureFormat.height,
+                  RendererCommon.convertMatrixToAndroidGraphicsMatrix(transformMatrix));
+          final VideoFrame frame = new VideoFrame(buffer, rotation, timestampNs);
+          events.onFrameCaptured(Camera1Session.this, frame);
+          frame.release();
+        } else {
+          events.onTextureFrameCaptured(Camera1Session.this, captureFormat.width,
+              captureFormat.height, oesTextureId, transformMatrix, rotation, timestampNs);
+        }
       }
     });
   }
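Note the ownership pattern in the new path: the session releases its own reference right after dispatch, so an Events consumer that keeps the frame beyond the callback must retain it first. A sketch of such a consumer (the queue and its drain are assumptions, not part of this change):

    // Hypothetical consumer of Events.onFrameCaptured(). queueForProcessing()
    // is assumed; whoever drains the queue must call frame.release().
    public void onFrameCaptured(CameraSession session, VideoFrame frame) {
      frame.retain(); // keep the frame alive past this callback
      queueForProcessing(frame);
    }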
@@ -276,7 +289,7 @@ class Camera1Session implements CameraSession {
   private void listenForBytebufferFrames() {
     camera.setPreviewCallbackWithBuffer(new android.hardware.Camera.PreviewCallback() {
       @Override
-      public void onPreviewFrame(byte[] data, android.hardware.Camera callbackCamera) {
+      public void onPreviewFrame(final byte[] data, android.hardware.Camera callbackCamera) {
         checkIsOnCameraThread();
 
         if (callbackCamera != camera) {
@@ -298,9 +311,22 @@ class Camera1Session implements CameraSession {
           firstFrameReported = true;
         }
 
-        events.onByteBufferFrameCaptured(Camera1Session.this, data, captureFormat.width,
-            captureFormat.height, getFrameOrientation(), captureTimeNs);
-        camera.addCallbackBuffer(data);
+        if (videoFrameEmitTrialEnabled) {
+          VideoFrame.Buffer frameBuffer = new NV21Buffer(data, captureFormat.width,
+              captureFormat.height, () -> cameraThreadHandler.post(() -> {
+                if (state == SessionState.RUNNING) {
+                  camera.addCallbackBuffer(data);
+                }
+              }));
+          final VideoFrame frame =
+              new VideoFrame(frameBuffer, getFrameOrientation(), captureTimeNs);
+          events.onFrameCaptured(Camera1Session.this, frame);
+          frame.release();
+        } else {
+          events.onByteBufferFrameCaptured(Camera1Session.this, data, captureFormat.width,
+              captureFormat.height, getFrameOrientation(), captureTimeNs);
+          camera.addCallbackBuffer(data);
+        }
       }
     });
   }
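The release callback above is what returns the byte[] to the camera, so buffer reuse is gated on every consumer dropping its reference. A minimal sketch of that contract (data, width, height, and camera are assumed to be in scope, as in the session):

    VideoFrame.Buffer buffer =
        new NV21Buffer(data, width, height, () -> camera.addCallbackBuffer(data));
    buffer.retain();  // e.g. a sink keeps the frame: refCount 1 -> 2
    buffer.release(); // refCount 2 -> 1, callback does not run yet
    buffer.release(); // refCount 1 -> 0, byte[] handed back to the camera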
webrtc/sdk/android/src/java/org/webrtc/Camera2Session.java

@@ -12,6 +12,7 @@ package org.webrtc;
 
 import android.annotation.TargetApi;
 import android.content.Context;
+import android.graphics.Matrix;
 import android.graphics.SurfaceTexture;
 import android.hardware.camera2.CameraAccessException;
 import android.hardware.camera2.CameraCaptureSession;
@@ -44,6 +45,8 @@ class Camera2Session implements CameraSession {
 
   private static enum SessionState { RUNNING, STOPPED }
 
+  private final boolean videoFrameEmitTrialEnabled;
+
   private final Handler cameraThreadHandler;
   private final CreateSessionCallback callback;
   private final Events events;
@@ -225,8 +228,17 @@ class Camera2Session implements CameraSession {
           transformMatrix =
               RendererCommon.rotateTextureMatrix(transformMatrix, -cameraOrientation);
 
-          events.onTextureFrameCaptured(Camera2Session.this, captureFormat.width,
-              captureFormat.height, oesTextureId, transformMatrix, rotation, timestampNs);
+          if (videoFrameEmitTrialEnabled) {
+            VideoFrame.Buffer buffer = surfaceTextureHelper.createTextureBuffer(
+                captureFormat.width, captureFormat.height,
+                RendererCommon.convertMatrixToAndroidGraphicsMatrix(transformMatrix));
+            final VideoFrame frame = new VideoFrame(buffer, rotation, timestampNs);
+            events.onFrameCaptured(Camera2Session.this, frame);
+            frame.release();
+          } else {
+            events.onTextureFrameCaptured(Camera2Session.this, captureFormat.width,
+                captureFormat.height, oesTextureId, transformMatrix, rotation, timestampNs);
+          }
         }
       });
       Logging.d(TAG, "Camera device successfully started.");
@@ -301,6 +313,9 @@ class Camera2Session implements CameraSession {
       CameraManager cameraManager, SurfaceTextureHelper surfaceTextureHelper,
       MediaRecorder mediaRecorder, String cameraId, int width, int height, int framerate) {
     Logging.d(TAG, "Create new camera2 session on camera " + cameraId);
+    videoFrameEmitTrialEnabled =
+        PeerConnectionFactory.fieldTrialsFindFullName(PeerConnectionFactory.VIDEO_FRAME_EMIT_TRIAL)
+            .equals(PeerConnectionFactory.TRIAL_ENABLED);
 
     constructionTimeNs = System.nanoTime();
 
webrtc/sdk/android/src/java/org/webrtc/CameraCapturer.java

@@ -176,6 +176,23 @@ abstract class CameraCapturer implements CameraVideoCapturer {
       }
     }
 
+    @Override
+    public void onFrameCaptured(CameraSession session, VideoFrame frame) {
+      checkIsOnCameraThread();
+      synchronized (stateLock) {
+        if (session != currentSession) {
+          Logging.w(TAG, "onTextureFrameCaptured from another session.");
+          return;
+        }
+        if (!firstFrameObserved) {
+          eventsHandler.onFirstFrameAvailable();
+          firstFrameObserved = true;
+        }
+        cameraStatistics.addFrame();
+        capturerObserver.onFrameCaptured(frame);
+      }
+    }
+
     @Override
     public void onByteBufferFrameCaptured(
         CameraSession session, byte[] data, int width, int height, int rotation, long timestamp) {
webrtc/sdk/android/src/java/org/webrtc/CameraSession.java

@@ -25,6 +25,9 @@ interface CameraSession {
     void onCameraError(CameraSession session, String error);
     void onCameraDisconnected(CameraSession session);
    void onCameraClosed(CameraSession session);
+    void onFrameCaptured(CameraSession session, VideoFrame frame);
+
+    // The old way of passing frames. Will be removed eventually.
     void onByteBufferFrameCaptured(
         CameraSession session, byte[] data, int width, int height, int rotation, long timestamp);
     void onTextureFrameCaptured(CameraSession session, int width, int height, int oesTextureId,
webrtc/sdk/android/src/java/org/webrtc/NV21Buffer.java (new file, 77 lines)
@@ -0,0 +1,77 @@
/*
 * Copyright 2017 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

package org.webrtc;

import java.nio.ByteBuffer;

public class NV21Buffer implements VideoFrame.Buffer {
  private final byte[] data;
  private final int width;
  private final int height;
  private final Runnable releaseCallback;
  private final Object refCountLock = new Object();

  private int refCount = 1;

  public NV21Buffer(byte[] data, int width, int height, Runnable releaseCallback) {
    this.data = data;
    this.width = width;
    this.height = height;
    this.releaseCallback = releaseCallback;
  }

  @Override
  public int getWidth() {
    return width;
  }

  @Override
  public int getHeight() {
    return height;
  }

  @Override
  public VideoFrame.I420Buffer toI420() {
    // Cropping converts the frame to I420. Just crop and scale to the whole image.
    return (VideoFrame.I420Buffer) cropAndScale(0 /* cropX */, 0 /* cropY */, width /* cropWidth */,
        height /* cropHeight */, width /* scaleWidth */, height /* scaleHeight */);
  }

  @Override
  public void retain() {
    synchronized (refCountLock) {
      ++refCount;
    }
  }

  @Override
  public void release() {
    synchronized (refCountLock) {
      if (--refCount == 0 && releaseCallback != null) {
        releaseCallback.run();
      }
    }
  }

  @Override
  public VideoFrame.Buffer cropAndScale(
      int cropX, int cropY, int cropWidth, int cropHeight, int scaleWidth, int scaleHeight) {
    I420BufferImpl newBuffer = I420BufferImpl.allocate(scaleWidth, scaleHeight);
    nativeCropAndScale(cropX, cropY, cropWidth, cropHeight, scaleWidth, scaleHeight, data, width,
        height, newBuffer.getDataY(), newBuffer.getStrideY(), newBuffer.getDataU(),
        newBuffer.getStrideU(), newBuffer.getDataV(), newBuffer.getStrideV());
    return newBuffer;
  }

  private static native void nativeCropAndScale(int cropX, int cropY, int cropWidth, int cropHeight,
      int scaleWidth, int scaleHeight, byte[] src, int srcWidth, int srcHeight, ByteBuffer dstY,
      int dstStrideY, ByteBuffer dstU, int dstStrideU, ByteBuffer dstV, int dstStrideV);
}
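A quick usage sketch of the new class (nv21Data, width, and height are placeholders; nv21Data.length is expected to be width * height * 3 / 2): wrapping raw preview bytes and converting to I420 on demand.

    NV21Buffer nv21 = new NV21Buffer(nv21Data, width, height,
        () -> Logging.d("NV21Demo", "buffer may be reused now"));
    VideoFrame.I420Buffer i420 = nv21.toI420(); // native crop/scale over the full image
    // ... consume i420.getDataY() / getDataU() / getDataV() ...
    i420.release();
    nv21.release(); // refCount 1 -> 0, the release callback fires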
webrtc/sdk/android/src/jni/androidmediaencoder_jni.cc

@@ -1479,6 +1479,7 @@ void MediaCodecVideoEncoderFactory::DestroyVideoEncoder(
 
 JOW(void, MediaCodecVideoEncoder_nativeFillBuffer)
 (JNIEnv* jni,
  jclass,
  jlong native_encoder,
  jint input_buffer,
  jobject j_buffer_y,
webrtc/sdk/android/src/jni/nv21buffer_jni.cc (new file, 73 lines)
@@ -0,0 +1,73 @@
/*
 * Copyright 2017 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

#include <jni.h>
#include <vector>

#include "third_party/libyuv/include/libyuv/convert.h"
#include "third_party/libyuv/include/libyuv/scale.h"

#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
#include "webrtc/rtc_base/checks.h"

namespace webrtc_jni {

extern "C" JNIEXPORT void JNICALL
Java_org_webrtc_NV21Buffer_nativeCropAndScale(JNIEnv* jni,
                                              jclass,
                                              jint crop_x,
                                              jint crop_y,
                                              jint crop_width,
                                              jint crop_height,
                                              jint scale_width,
                                              jint scale_height,
                                              jbyteArray j_src,
                                              jint src_width,
                                              jint src_height,
                                              jobject j_dst_y,
                                              jint dst_stride_y,
                                              jobject j_dst_u,
                                              jint dst_stride_u,
                                              jobject j_dst_v,
                                              jint dst_stride_v) {
  const int src_stride_y = src_width;
  const int src_stride_uv = src_width;
  const int crop_chroma_x = crop_x / 2;
  const int crop_chroma_y = crop_y / 2;
  const int crop_chroma_width = (crop_width + 1) / 2;
  const int crop_chroma_height = (crop_height + 1) / 2;
  const int tmp_stride_u = crop_chroma_width;
  const int tmp_stride_v = crop_chroma_width;
  const int tmp_size = crop_chroma_height * (tmp_stride_u + tmp_stride_v);

  jboolean was_copy;
  jbyte* src_bytes = jni->GetByteArrayElements(j_src, &was_copy);
  RTC_DCHECK(!was_copy);
  uint8_t const* src_y = reinterpret_cast<uint8_t const*>(src_bytes);
  uint8_t const* src_uv = src_y + src_height * src_stride_y;

  uint8_t* dst_y = static_cast<uint8_t*>(jni->GetDirectBufferAddress(j_dst_y));
  uint8_t* dst_u = static_cast<uint8_t*>(jni->GetDirectBufferAddress(j_dst_u));
  uint8_t* dst_v = static_cast<uint8_t*>(jni->GetDirectBufferAddress(j_dst_v));

  // Crop using pointer arithmetic.
  src_y += crop_x + crop_y * src_stride_y;
  src_uv += crop_chroma_x + crop_chroma_y * src_stride_uv;

  webrtc::NV12ToI420Scaler scaler;
  // U- and V-planes are swapped because this is NV21 not NV12.
  scaler.NV12ToI420Scale(src_y, src_stride_y, src_uv, src_stride_uv, crop_width,
                         crop_height, dst_y, dst_stride_y, dst_v, dst_stride_v,
                         dst_u, dst_stride_u, scale_width, scale_height);

  jni->ReleaseByteArrayElements(j_src, src_bytes, JNI_ABORT);
}

}  // namespace webrtc_jni
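For reference, the layout assumption behind the swapped U/V arguments: NV21 stores a full-resolution Y plane followed by interleaved V,U pairs (NV12 interleaves U,V first), and chroma dimensions round up for odd sizes, hence the (crop_width + 1) / 2 above. A small sketch of the indexing in plain Java (data, width, and height are placeholders):

    int ySize = width * height;   // one Y byte per pixel
    int vuOffset = ySize;         // chroma plane starts right after Y
    byte v00 = data[vuOffset];    // V comes first in NV21 (U first in NV12)
    byte u00 = data[vuOffset + 1]; // U sample for the same 2x2 pixel block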