Add a field trial to produce VideoFrames in camera capturers.

This field trial enables producing new VideoFrames in the camera classes. It should be enabled if VideoSinks are used.

BUG=webrtc:7749, webrtc:7760
Review-Url: https://codereview.webrtc.org/2984633002
Cr-Commit-Position: refs/heads/master@{#19467}
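For context, a minimal sketch of how an application opts in to this trial before creating any capturer; the "Name/Value/" string format and the initializeFieldTrials() call mirror the test changes below:

// Sketch: enable the VideoFrameEmit trial before creating camera capturers.
// Each trial entry is terminated by '/', as used throughout this CL.
String fieldTrials = PeerConnectionFactory.VIDEO_FRAME_EMIT_TRIAL + "/"
    + PeerConnectionFactory.TRIAL_ENABLED + "/";
PeerConnectionFactory.initializeFieldTrials(fieldTrials);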
@@ -26,8 +26,8 @@ import java.util.LinkedList;
import java.util.List;
import java.util.Timer;
import java.util.TimerTask;
import java.util.concurrent.Executors;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.appspot.apprtc.AppRTCClient.SignalingParameters;
@@ -56,9 +56,9 @@ import org.webrtc.VideoSource;
import org.webrtc.VideoTrack;
import org.webrtc.voiceengine.WebRtcAudioManager;
import org.webrtc.voiceengine.WebRtcAudioRecord;
import org.webrtc.voiceengine.WebRtcAudioTrack;
import org.webrtc.voiceengine.WebRtcAudioRecord.AudioRecordStartErrorCode;
import org.webrtc.voiceengine.WebRtcAudioRecord.WebRtcAudioRecordErrorCallback;
import org.webrtc.voiceengine.WebRtcAudioTrack;
import org.webrtc.voiceengine.WebRtcAudioTrack.WebRtcAudioTrackErrorCallback;
import org.webrtc.voiceengine.WebRtcAudioUtils;

@@ -89,6 +89,9 @@ public class PeerConnectionClient {
      "WebRTC-H264HighProfile/Enabled/";
  private static final String DISABLE_WEBRTC_AGC_FIELDTRIAL =
      "WebRTC-Audio-MinimizeResamplingOnMobile/Enabled/";
  private static final String VIDEO_FRAME_EMIT_FIELDTRIAL =
      PeerConnectionFactory.VIDEO_FRAME_EMIT_TRIAL + "/" + PeerConnectionFactory.TRIAL_ENABLED
      + "/";
  private static final String AUDIO_CODEC_PARAM_BITRATE = "maxaveragebitrate";
  private static final String AUDIO_ECHO_CANCELLATION_CONSTRAINT = "googEchoCancellation";
  private static final String AUDIO_AUTO_GAIN_CONTROL_CONSTRAINT = "googAutoGainControl";
@@ -398,6 +401,7 @@ public class PeerConnectionClient {
      fieldTrials += DISABLE_WEBRTC_AGC_FIELDTRIAL;
      Log.d(TAG, "Disable WebRTC AGC field trial.");
    }
    fieldTrials += VIDEO_FRAME_EMIT_FIELDTRIAL;

    // Check preferred video codec.
    preferredVideoCodec = VIDEO_CODEC_VP8;
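The new trial string composes with the other trials PeerConnectionClient already appends; a rough illustration of the concatenation, using the constants above:

// Each entry is already terminated by '/', so entries concatenate directly:
// "WebRTC-Audio-MinimizeResamplingOnMobile/Enabled/" + "VideoFrameEmit/Enabled/"
//   -> "WebRTC-Audio-MinimizeResamplingOnMobile/Enabled/VideoFrameEmit/Enabled/"
String fieldTrials = DISABLE_WEBRTC_AGC_FIELDTRIAL + VIDEO_FRAME_EMIT_FIELDTRIAL;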
@@ -98,6 +98,7 @@ rtc_static_library("video_jni") {
    "src/jni/native_handle_impl.cc",
    "src/jni/native_handle_impl.h",
    "src/jni/nv12buffer_jni.cc",
    "src/jni/nv21buffer_jni.cc",
    "src/jni/pc/video_jni.cc",
    "src/jni/surfacetexturehelper_jni.cc",
    "src/jni/surfacetexturehelper_jni.h",
@@ -438,6 +439,7 @@ android_library("libjingle_peerconnection_java") {
    "src/java/org/webrtc/I420BufferImpl.java",
    "src/java/org/webrtc/MediaCodecUtils.java",
    "src/java/org/webrtc/NV12Buffer.java",
    "src/java/org/webrtc/NV21Buffer.java",
    "src/java/org/webrtc/TextureBufferImpl.java",
    "src/java/org/webrtc/VideoCodecType.java",
    "src/java/org/webrtc/VideoDecoderWrapperCallback.java",

@@ -29,6 +29,9 @@ public class PeerConnectionFactory {
    }
  }

  public static final String TRIAL_ENABLED = "Enabled";
  public static final String VIDEO_FRAME_EMIT_TRIAL = "VideoFrameEmit";

  private static final String TAG = "PeerConnectionFactory";
  private static final String VIDEO_CAPTURER_THREAD_NAME = "VideoCapturerThread";
  private final long nativeFactory;
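Code that needs the trial state queries it through the factory; a minimal sketch of the lookup pattern the camera sessions below use:

// fieldTrialsFindFullName() returns the configured group name ("" if unset),
// so comparing against TRIAL_ENABLED yields a simple boolean.
boolean videoFrameEmitTrialEnabled =
    PeerConnectionFactory.fieldTrialsFindFullName(PeerConnectionFactory.VIDEO_FRAME_EMIT_TRIAL)
        .equals(PeerConnectionFactory.TRIAL_ENABLED);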
@@ -59,6 +59,9 @@ public class Camera1CapturerUsingByteBufferTest {

  @Before
  public void setUp() {
    // Enable VideoFrame capture.
    PeerConnectionFactory.initializeFieldTrials(PeerConnectionFactory.VIDEO_FRAME_EMIT_TRIAL + "/"
        + PeerConnectionFactory.TRIAL_ENABLED + "/");
    fixtures = new CameraVideoCapturerTestFixtures(new TestObjectFactory());
  }

@@ -54,6 +54,9 @@ public class Camera1CapturerUsingTextureTest {

  @Before
  public void setUp() {
    // Enable VideoFrame capture.
    PeerConnectionFactory.initializeFieldTrials(PeerConnectionFactory.VIDEO_FRAME_EMIT_TRIAL + "/"
        + PeerConnectionFactory.TRIAL_ENABLED + "/");
    fixtures = new CameraVideoCapturerTestFixtures(new TestObjectFactory());
  }

@@ -183,6 +183,9 @@ public class Camera2CapturerTest {

  @Before
  public void setUp() {
    // Enable VideoFrame capture.
    PeerConnectionFactory.initializeFieldTrials(PeerConnectionFactory.VIDEO_FRAME_EMIT_TRIAL + "/"
        + PeerConnectionFactory.TRIAL_ENABLED + "/");
    fixtures = new CameraVideoCapturerTestFixtures(new TestObjectFactory());
  }
@@ -100,9 +100,7 @@ class CameraVideoCapturerTestFixtures {

  static private class FakeCapturerObserver implements CameraVideoCapturer.CapturerObserver {
    private int framesCaptured = 0;
    private int frameSize = 0;
    private int frameWidth = 0;
    private int frameHeight = 0;
    private VideoFrame videoFrame;
    final private Object frameLock = new Object();
    final private Object capturerStartLock = new Object();
    private boolean capturerStartResult = false;
@@ -126,32 +124,27 @@ class CameraVideoCapturerTestFixtures {
    @Override
    public void onByteBufferFrameCaptured(
        byte[] frame, int width, int height, int rotation, long timeStamp) {
      synchronized (frameLock) {
        ++framesCaptured;
        frameSize = frame.length;
        frameWidth = width;
        frameHeight = height;
        timestamps.add(timeStamp);
        frameLock.notify();
      }
      throw new RuntimeException("onByteBufferFrameCaptured called");
    }

    @Override
    public void onTextureFrameCaptured(int width, int height, int oesTextureId,
        float[] transformMatrix, int rotation, long timeStamp) {
      synchronized (frameLock) {
        ++framesCaptured;
        frameWidth = width;
        frameHeight = height;
        frameSize = 0;
        timestamps.add(timeStamp);
        frameLock.notify();
      }
      throw new RuntimeException("onTextureFrameCaptured called");
    }

    @Override
    public void onFrameCaptured(VideoFrame frame) {
      // Empty on purpose.
      synchronized (frameLock) {
        ++framesCaptured;
        if (videoFrame != null) {
          videoFrame.release();
        }
        videoFrame = frame;
        videoFrame.retain();
        timestamps.add(videoFrame.getTimestampNs());
        frameLock.notify();
      }
    }

    public boolean waitForCapturerToStart() throws InterruptedException {
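Taken together, the fixture methods support a capture test flow along these lines (a hedged sketch: the method names come from this file, while the resolution and assertions are illustrative):

// Illustrative single-frame test flow against the observer above.
void runOneFrame(CameraVideoCapturer capturer, FakeCapturerObserver observer)
    throws InterruptedException {
  capturer.startCapture(640 /* width */, 480 /* height */, 30 /* framerate */);
  assertTrue(observer.waitForCapturerToStart());
  observer.waitForNextCapturedFrame(); // blocks on frameLock until onFrameCaptured fires
  assertTrue(observer.frameWidth() > 0); // now read from the retained VideoFrame
  capturer.stopCapture();
  observer.releaseFrame(); // drop the fixture's reference so the buffer can be recycled
}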
@@ -170,21 +163,24 @@ class CameraVideoCapturerTestFixtures {
      }
    }

    int frameSize() {
      synchronized (frameLock) {
        return frameSize;
      }
    }

    int frameWidth() {
      synchronized (frameLock) {
        return frameWidth;
        return videoFrame.getBuffer().getWidth();
      }
    }

    int frameHeight() {
      synchronized (frameLock) {
        return frameHeight;
        return videoFrame.getBuffer().getHeight();
      }
    }

    void releaseFrame() {
      synchronized (frameLock) {
        if (videoFrame != null) {
          videoFrame.release();
          videoFrame = null;
        }
      }
    }
@@ -385,7 +381,7 @@ class CameraVideoCapturerTestFixtures {
    instance.capturer.stopCapture();
    instance.cameraEvents.waitForCameraClosed();
    instance.capturer.dispose();
    instance.surfaceTextureHelper.returnTextureFrame();
    instance.observer.releaseFrame();
    instance.surfaceTextureHelper.dispose();
  }

@@ -637,7 +633,7 @@ class CameraVideoCapturerTestFixtures {
    // Make sure camera is started and then stop it.
    assertTrue(capturerInstance.observer.waitForCapturerToStart());
    capturerInstance.capturer.stopCapture();
    capturerInstance.surfaceTextureHelper.returnTextureFrame();
    capturerInstance.observer.releaseFrame();

    // We can't change |capturer| at this point, but we should not crash.
    capturerInstance.capturer.switchCamera(null /* switchEventsHandler */);
@@ -687,13 +683,8 @@ class CameraVideoCapturerTestFixtures {
          + capturerInstance.format.height + "x" + capturerInstance.format.width);
      }

      if (testObjectFactory.isCapturingToTexture()) {
        assertEquals(0, capturerInstance.observer.frameSize());
      } else {
        assertTrue(capturerInstance.format.frameSize() <= capturerInstance.observer.frameSize());
      }
      capturerInstance.capturer.stopCapture();
      capturerInstance.surfaceTextureHelper.returnTextureFrame();
      capturerInstance.observer.releaseFrame();
    }
    disposeCapturer(capturerInstance);
  }
@@ -710,7 +701,7 @@ class CameraVideoCapturerTestFixtures {

    startCapture(capturerInstance, 1);
    capturerInstance.observer.waitForCapturerToStart();
    capturerInstance.surfaceTextureHelper.returnTextureFrame();
    capturerInstance.observer.releaseFrame();

    capturerInstance.observer.waitForNextCapturedFrame();
    capturerInstance.capturer.stopCapture();
@@ -36,6 +36,8 @@ class Camera1Session implements CameraSession {

  private static enum SessionState { RUNNING, STOPPED }

  private final boolean videoFrameEmitTrialEnabled;

  private final Handler cameraThreadHandler;
  private final Events events;
  private final boolean captureToTexture;
@@ -85,7 +87,6 @@ class Camera1Session implements CameraSession {

    updateCameraParameters(camera, parameters, captureFormat, pictureSize, captureToTexture);

    // Initialize the capture buffers.
    if (!captureToTexture) {
      final int frameSize = captureFormat.frameSize();
      for (int i = 0; i < NUMBER_OF_CAPTURE_BUFFERS; ++i) {
@@ -151,6 +152,9 @@ class Camera1Session implements CameraSession {
      android.hardware.Camera camera, android.hardware.Camera.CameraInfo info,
      CaptureFormat captureFormat, long constructionTimeNs) {
    Logging.d(TAG, "Create new camera1 session on camera " + cameraId);
    videoFrameEmitTrialEnabled =
        PeerConnectionFactory.fieldTrialsFindFullName(PeerConnectionFactory.VIDEO_FRAME_EMIT_TRIAL)
            .equals(PeerConnectionFactory.TRIAL_ENABLED);

    this.cameraThreadHandler = new Handler();
    this.events = events;
@@ -267,8 +271,17 @@ class Camera1Session implements CameraSession {
          transformMatrix = RendererCommon.multiplyMatrices(
              transformMatrix, RendererCommon.horizontalFlipMatrix());
        }
        events.onTextureFrameCaptured(Camera1Session.this, captureFormat.width,
            captureFormat.height, oesTextureId, transformMatrix, rotation, timestampNs);
        if (videoFrameEmitTrialEnabled) {
          final VideoFrame.Buffer buffer =
              surfaceTextureHelper.createTextureBuffer(captureFormat.width, captureFormat.height,
                  RendererCommon.convertMatrixToAndroidGraphicsMatrix(transformMatrix));
          final VideoFrame frame = new VideoFrame(buffer, rotation, timestampNs);
          events.onFrameCaptured(Camera1Session.this, frame);
          frame.release();
        } else {
          events.onTextureFrameCaptured(Camera1Session.this, captureFormat.width,
              captureFormat.height, oesTextureId, transformMatrix, rotation, timestampNs);
        }
      }
    });
  }
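The create/deliver/release sequence above is the VideoFrame reference-counting contract; a minimal producer-side sketch (a hypothetical helper using names from this CL):

// A freshly constructed VideoFrame carries one reference owned by its creator.
// Deliver it, then release the creator's reference; any sink that keeps the
// frame beyond the callback must retain() it first.
void emitTextureFrame(CameraSession.Events events, CameraSession session,
    VideoFrame.Buffer buffer, int rotation, long timestampNs) {
  final VideoFrame frame = new VideoFrame(buffer, rotation, timestampNs);
  events.onFrameCaptured(session, frame); // sinks retain() as needed
  frame.release(); // the underlying buffer is freed once all references are gone
}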
@@ -276,7 +289,7 @@ class Camera1Session implements CameraSession {
  private void listenForBytebufferFrames() {
    camera.setPreviewCallbackWithBuffer(new android.hardware.Camera.PreviewCallback() {
      @Override
      public void onPreviewFrame(byte[] data, android.hardware.Camera callbackCamera) {
      public void onPreviewFrame(final byte[] data, android.hardware.Camera callbackCamera) {
        checkIsOnCameraThread();

        if (callbackCamera != camera) {
@@ -298,9 +311,22 @@ class Camera1Session implements CameraSession {
          firstFrameReported = true;
        }

        events.onByteBufferFrameCaptured(Camera1Session.this, data, captureFormat.width,
            captureFormat.height, getFrameOrientation(), captureTimeNs);
        camera.addCallbackBuffer(data);
        if (videoFrameEmitTrialEnabled) {
          VideoFrame.Buffer frameBuffer = new NV21Buffer(data, captureFormat.width,
              captureFormat.height, () -> cameraThreadHandler.post(() -> {
                if (state == SessionState.RUNNING) {
                  camera.addCallbackBuffer(data);
                }
              }));
          final VideoFrame frame =
              new VideoFrame(frameBuffer, getFrameOrientation(), captureTimeNs);
          events.onFrameCaptured(Camera1Session.this, frame);
          frame.release();
        } else {
          events.onByteBufferFrameCaptured(Camera1Session.this, data, captureFormat.width,
              captureFormat.height, getFrameOrientation(), captureTimeNs);
          camera.addCallbackBuffer(data);
        }
      }
    });
  }
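A note on the lambda above: the android.hardware.Camera object is not thread-safe, so addCallbackBuffer() must run on the thread that owns it, while release() on the wrapping NV21Buffer can fire from whichever thread drops the last reference. The release callback therefore hops back to the camera thread and re-checks the session state, roughly:

// Reduced sketch of the buffer hand-back used above: recycle the preview
// byte[] only on the camera thread and only while the session still runs;
// otherwise the buffer is simply dropped and garbage collected.
Runnable releaseCallback = () -> cameraThreadHandler.post(() -> {
  if (state == SessionState.RUNNING) {
    camera.addCallbackBuffer(data);
  }
});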
@@ -12,6 +12,7 @@ package org.webrtc;

import android.annotation.TargetApi;
import android.content.Context;
import android.graphics.Matrix;
import android.graphics.SurfaceTexture;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCaptureSession;
@@ -44,6 +45,8 @@ class Camera2Session implements CameraSession {

  private static enum SessionState { RUNNING, STOPPED }

  private final boolean videoFrameEmitTrialEnabled;

  private final Handler cameraThreadHandler;
  private final CreateSessionCallback callback;
  private final Events events;
@@ -225,8 +228,17 @@ class Camera2Session implements CameraSession {
          transformMatrix =
              RendererCommon.rotateTextureMatrix(transformMatrix, -cameraOrientation);

          events.onTextureFrameCaptured(Camera2Session.this, captureFormat.width,
              captureFormat.height, oesTextureId, transformMatrix, rotation, timestampNs);
          if (videoFrameEmitTrialEnabled) {
            VideoFrame.Buffer buffer = surfaceTextureHelper.createTextureBuffer(
                captureFormat.width, captureFormat.height,
                RendererCommon.convertMatrixToAndroidGraphicsMatrix(transformMatrix));
            final VideoFrame frame = new VideoFrame(buffer, rotation, timestampNs);
            events.onFrameCaptured(Camera2Session.this, frame);
            frame.release();
          } else {
            events.onTextureFrameCaptured(Camera2Session.this, captureFormat.width,
                captureFormat.height, oesTextureId, transformMatrix, rotation, timestampNs);
          }
        }
      });
      Logging.d(TAG, "Camera device successfully started.");
@@ -301,6 +313,9 @@ class Camera2Session implements CameraSession {
      CameraManager cameraManager, SurfaceTextureHelper surfaceTextureHelper,
      MediaRecorder mediaRecorder, String cameraId, int width, int height, int framerate) {
    Logging.d(TAG, "Create new camera2 session on camera " + cameraId);
    videoFrameEmitTrialEnabled =
        PeerConnectionFactory.fieldTrialsFindFullName(PeerConnectionFactory.VIDEO_FRAME_EMIT_TRIAL)
            .equals(PeerConnectionFactory.TRIAL_ENABLED);

    constructionTimeNs = System.nanoTime();

@@ -176,6 +176,23 @@ abstract class CameraCapturer implements CameraVideoCapturer {
      }
    }

    @Override
    public void onFrameCaptured(CameraSession session, VideoFrame frame) {
      checkIsOnCameraThread();
      synchronized (stateLock) {
        if (session != currentSession) {
          Logging.w(TAG, "onFrameCaptured from another session.");
          return;
        }
        if (!firstFrameObserved) {
          eventsHandler.onFirstFrameAvailable();
          firstFrameObserved = true;
        }
        cameraStatistics.addFrame();
        capturerObserver.onFrameCaptured(frame);
      }
    }

    @Override
    public void onByteBufferFrameCaptured(
        CameraSession session, byte[] data, int width, int height, int rotation, long timestamp) {

@@ -25,6 +25,9 @@ interface CameraSession {
    void onCameraError(CameraSession session, String error);
    void onCameraDisconnected(CameraSession session);
    void onCameraClosed(CameraSession session);
    void onFrameCaptured(CameraSession session, VideoFrame frame);

    // The old way of passing frames. Will be removed eventually.
    void onByteBufferFrameCaptured(
        CameraSession session, byte[] data, int width, int height, int rotation, long timestamp);
    void onTextureFrameCaptured(CameraSession session, int width, int height, int oesTextureId,
webrtc/sdk/android/src/java/org/webrtc/NV21Buffer.java (new file, 77 lines)
@@ -0,0 +1,77 @@
/*
 * Copyright 2017 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

package org.webrtc;

import java.nio.ByteBuffer;

public class NV21Buffer implements VideoFrame.Buffer {
  private final byte[] data;
  private final int width;
  private final int height;
  private final Runnable releaseCallback;
  private final Object refCountLock = new Object();

  private int refCount = 1;

  public NV21Buffer(byte[] data, int width, int height, Runnable releaseCallback) {
    this.data = data;
    this.width = width;
    this.height = height;
    this.releaseCallback = releaseCallback;
  }

  @Override
  public int getWidth() {
    return width;
  }

  @Override
  public int getHeight() {
    return height;
  }

  @Override
  public VideoFrame.I420Buffer toI420() {
    // Cropping converts the frame to I420. Just crop and scale to the whole image.
    return (VideoFrame.I420Buffer) cropAndScale(0 /* cropX */, 0 /* cropY */, width /* cropWidth */,
        height /* cropHeight */, width /* scaleWidth */, height /* scaleHeight */);
  }

  @Override
  public void retain() {
    synchronized (refCountLock) {
      ++refCount;
    }
  }

  @Override
  public void release() {
    synchronized (refCountLock) {
      if (--refCount == 0 && releaseCallback != null) {
        releaseCallback.run();
      }
    }
  }

  @Override
  public VideoFrame.Buffer cropAndScale(
      int cropX, int cropY, int cropWidth, int cropHeight, int scaleWidth, int scaleHeight) {
    I420BufferImpl newBuffer = I420BufferImpl.allocate(scaleWidth, scaleHeight);
    nativeCropAndScale(cropX, cropY, cropWidth, cropHeight, scaleWidth, scaleHeight, data, width,
        height, newBuffer.getDataY(), newBuffer.getStrideY(), newBuffer.getDataU(),
        newBuffer.getStrideU(), newBuffer.getDataV(), newBuffer.getStrideV());
    return newBuffer;
  }

  private static native void nativeCropAndScale(int cropX, int cropY, int cropWidth, int cropHeight,
      int scaleWidth, int scaleHeight, byte[] src, int srcWidth, int srcHeight, ByteBuffer dstY,
      int dstStrideY, ByteBuffer dstU, int dstStrideU, ByteBuffer dstV, int dstStrideV);
}
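A hedged usage sketch of the new buffer type: wrap an NV21 preview array and convert it to I420 (the sizes and the print statement are illustrative):

// Hypothetical usage: an NV21 frame is width*height luma bytes followed by
// width*height/2 interleaved chroma bytes.
byte[] nv21 = new byte[640 * 480 * 3 / 2];
NV21Buffer buffer = new NV21Buffer(nv21, 640, 480,
    () -> System.out.println("buffer can be recycled"));
VideoFrame.I420Buffer i420 = buffer.toI420(); // crops/scales via the native helper
// ... consume i420 ...
i420.release();
buffer.release(); // refCount reaches zero; the release callback runs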
@@ -1479,6 +1479,7 @@ void MediaCodecVideoEncoderFactory::DestroyVideoEncoder(

JOW(void, MediaCodecVideoEncoder_nativeFillBuffer)
(JNIEnv* jni,
 jclass,
 jlong native_encoder,
 jint input_buffer,
 jobject j_buffer_y,
webrtc/sdk/android/src/jni/nv21buffer_jni.cc (new file, 73 lines)
@@ -0,0 +1,73 @@
/*
 * Copyright 2017 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

#include <jni.h>
#include <vector>

#include "third_party/libyuv/include/libyuv/convert.h"
#include "third_party/libyuv/include/libyuv/scale.h"

#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
#include "webrtc/rtc_base/checks.h"

namespace webrtc_jni {

extern "C" JNIEXPORT void JNICALL
Java_org_webrtc_NV21Buffer_nativeCropAndScale(JNIEnv* jni,
                                              jclass,
                                              jint crop_x,
                                              jint crop_y,
                                              jint crop_width,
                                              jint crop_height,
                                              jint scale_width,
                                              jint scale_height,
                                              jbyteArray j_src,
                                              jint src_width,
                                              jint src_height,
                                              jobject j_dst_y,
                                              jint dst_stride_y,
                                              jobject j_dst_u,
                                              jint dst_stride_u,
                                              jobject j_dst_v,
                                              jint dst_stride_v) {
  const int src_stride_y = src_width;
  const int src_stride_uv = src_width;
  const int crop_chroma_x = crop_x / 2;
  const int crop_chroma_y = crop_y / 2;
  const int crop_chroma_width = (crop_width + 1) / 2;
  const int crop_chroma_height = (crop_height + 1) / 2;
  const int tmp_stride_u = crop_chroma_width;
  const int tmp_stride_v = crop_chroma_width;
  const int tmp_size = crop_chroma_height * (tmp_stride_u + tmp_stride_v);

  jboolean was_copy;
  jbyte* src_bytes = jni->GetByteArrayElements(j_src, &was_copy);
  RTC_DCHECK(!was_copy);
  uint8_t const* src_y = reinterpret_cast<uint8_t const*>(src_bytes);
  uint8_t const* src_uv = src_y + src_height * src_stride_y;

  uint8_t* dst_y = static_cast<uint8_t*>(jni->GetDirectBufferAddress(j_dst_y));
  uint8_t* dst_u = static_cast<uint8_t*>(jni->GetDirectBufferAddress(j_dst_u));
  uint8_t* dst_v = static_cast<uint8_t*>(jni->GetDirectBufferAddress(j_dst_v));

  // Crop using pointer arithmetic.
  src_y += crop_x + crop_y * src_stride_y;
  src_uv += crop_chroma_x + crop_chroma_y * src_stride_uv;

  webrtc::NV12ToI420Scaler scaler;
  // U- and V-planes are swapped because this is NV21, not NV12.
  scaler.NV12ToI420Scale(src_y, src_stride_y, src_uv, src_stride_uv, crop_width,
                         crop_height, dst_y, dst_stride_y, dst_v, dst_stride_v,
                         dst_u, dst_stride_u, scale_width, scale_height);

  jni->ReleaseByteArrayElements(j_src, src_bytes, JNI_ABORT);
}

}  // namespace webrtc_jni
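A brief note on the swap above: NV21 stores a full Y plane followed by interleaved V,U pairs, while NV12 interleaves U,V; handing the V destination where the NV12 scaler expects U (and vice versa) lets one scaler serve both layouts. In Java terms (the offsets are a sketch for a width x height frame):

// NV21 layout: [0, w*h) = Y plane (stride w); [w*h, w*h*3/2) = V,U interleaved.
int w = 640, h = 480;
byte[] nv21 = new byte[w * h * 3 / 2];
int row = 10, col = 20; // chroma coordinates, half resolution in each axis
byte v = nv21[w * h + row * w + 2 * col];     // V comes first in NV21
byte u = nv21[w * h + row * w + 2 * col + 1]; // U second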