Moving src/webrtc into src/.

In order to eliminate the WebRTC Subtree mirror in Chromium, 
WebRTC is moving the content of the src/webrtc directory up
to the src/ directory.

NOPRESUBMIT=true
NOTREECHECKS=true
NOTRY=true
TBR=tommi@webrtc.org

Bug: chromium:611808
Change-Id: Iac59c5b51b950f174119565bac87955a7994bc38
Reviewed-on: https://webrtc-review.googlesource.com/1560
Commit-Queue: Mirko Bonadei <mbonadei@webrtc.org>
Reviewed-by: Henrik Kjellander <kjellander@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#19845}
Author: Mirko Bonadei
Date: 2017-09-15 06:15:48 +02:00
Committed by: Commit Bot
Parent: 6674846b4a
Commit: bb547203bf

4576 changed files with 1092 additions and 1196 deletions


@@ -0,0 +1,60 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
/** An implementation of CapturerObserver that forwards all calls from Java to the C layer. */
class AndroidVideoTrackSourceObserver implements VideoCapturer.CapturerObserver {
// Pointer to VideoTrackSourceProxy proxying AndroidVideoTrackSource.
private final long nativeSource;
public AndroidVideoTrackSourceObserver(long nativeSource) {
this.nativeSource = nativeSource;
}
@Override
public void onCapturerStarted(boolean success) {
nativeCapturerStarted(nativeSource, success);
}
@Override
public void onCapturerStopped() {
nativeCapturerStopped(nativeSource);
}
@Override
public void onByteBufferFrameCaptured(
byte[] data, int width, int height, int rotation, long timeStamp) {
nativeOnByteBufferFrameCaptured(
nativeSource, data, data.length, width, height, rotation, timeStamp);
}
@Override
public void onTextureFrameCaptured(int width, int height, int oesTextureId,
float[] transformMatrix, int rotation, long timestamp) {
nativeOnTextureFrameCaptured(
nativeSource, width, height, oesTextureId, transformMatrix, rotation, timestamp);
}
@Override
public void onFrameCaptured(VideoFrame frame) {
nativeOnFrameCaptured(nativeSource, frame.getBuffer().getWidth(), frame.getBuffer().getHeight(),
frame.getRotation(), frame.getTimestampNs(), frame.getBuffer());
}
private native void nativeCapturerStarted(long nativeSource, boolean success);
private native void nativeCapturerStopped(long nativeSource);
private native void nativeOnByteBufferFrameCaptured(long nativeSource, byte[] data, int length,
int width, int height, int rotation, long timeStamp);
private native void nativeOnTextureFrameCaptured(long nativeSource, int width, int height,
int oesTextureId, float[] transformMatrix, int rotation, long timestamp);
private native void nativeOnFrameCaptured(long nativeSource, int width, int height, int rotation,
long timestampNs, VideoFrame.Buffer frame);
}
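For orientation, here is roughly how a capturer implementation drives this observer; a minimal sketch with hypothetical names (FakeCapturer, nv21Data are illustrative, not part of this commit), assuming the observer was constructed with a valid native source pointer.

// Hypothetical capturer (sketch only): each observer call is forwarded
// into the C++ VideoTrackSourceProxy via the native methods above.
class FakeCapturer {
  private final VideoCapturer.CapturerObserver observer;

  FakeCapturer(VideoCapturer.CapturerObserver observer) {
    this.observer = observer;
  }

  void start(int width, int height) {
    observer.onCapturerStarted(true /* success */);
    // NV21 uses 12 bits per pixel: width * height luma plus half-size chroma.
    byte[] nv21Data = new byte[width * height * 3 / 2];
    observer.onByteBufferFrameCaptured(
        nv21Data, width, height, 0 /* rotation */, System.nanoTime());
  }

  void stop() {
    observer.onCapturerStopped();
  }
}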


@@ -0,0 +1,38 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
/** BitrateAdjuster that tracks bitrate and framerate but does not adjust them. */
class BaseBitrateAdjuster implements BitrateAdjuster {
protected int targetBitrateBps = 0;
protected int targetFps = 0;
@Override
public void setTargets(int targetBitrateBps, int targetFps) {
this.targetBitrateBps = targetBitrateBps;
this.targetFps = targetFps;
}
@Override
public void reportEncodedFrame(int size) {
// No op.
}
@Override
public int getAdjustedBitrateBps() {
return targetBitrateBps;
}
@Override
public int getAdjustedFramerate() {
return targetFps;
}
}


@@ -0,0 +1,31 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
/** Object that adjusts the bitrate of a hardware codec. */
interface BitrateAdjuster {
/**
* Sets the target bitrate in bits per second and framerate in frames per second.
*/
void setTargets(int targetBitrateBps, int targetFps);
/**
* Reports that a frame of the given size has been encoded.
*/
void reportEncodedFrame(int size);
/** Gets the current bitrate. */
int getAdjustedBitrateBps();
/** Gets the current framerate. */
int getAdjustedFramerate();
}
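To illustrate the intended call pattern, a minimal sketch of a hypothetical encoder wrapper (EncoderRateController and reconfigureCodec are illustrative names, not part of this commit): targets are set when the rate allocator updates, each encoded frame is reported, and the adjusted values are fed back into the codec.

// Sketch: a hypothetical encoder wrapper consulting its BitrateAdjuster.
class EncoderRateController {
  private final BitrateAdjuster adjuster = new DynamicBitrateAdjuster();

  void onRateUpdate(int newBitrateBps, int newFps) {
    adjuster.setTargets(newBitrateBps, newFps);
  }

  void onFrameEncoded(byte[] encodedFrame) {
    adjuster.reportEncodedFrame(encodedFrame.length);
    // Feed the (possibly) adjusted values back into the hardware codec.
    int bitrateBps = adjuster.getAdjustedBitrateBps();
    int fps = adjuster.getAdjustedFramerate();
    // reconfigureCodec(bitrateBps, fps); // hypothetical codec call
  }
}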


@@ -0,0 +1,369 @@
/*
* Copyright 2016 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import android.content.Context;
import android.media.MediaRecorder;
import android.os.Handler;
import android.os.SystemClock;
import android.view.Surface;
import android.view.WindowManager;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.List;
import java.util.concurrent.TimeUnit;
import org.webrtc.CameraEnumerationAndroid.CaptureFormat;
@SuppressWarnings("deprecation")
class Camera1Session implements CameraSession {
private static final String TAG = "Camera1Session";
private static final int NUMBER_OF_CAPTURE_BUFFERS = 3;
private static final Histogram camera1StartTimeMsHistogram =
Histogram.createCounts("WebRTC.Android.Camera1.StartTimeMs", 1, 10000, 50);
private static final Histogram camera1StopTimeMsHistogram =
Histogram.createCounts("WebRTC.Android.Camera1.StopTimeMs", 1, 10000, 50);
private static final Histogram camera1ResolutionHistogram = Histogram.createEnumeration(
"WebRTC.Android.Camera1.Resolution", CameraEnumerationAndroid.COMMON_RESOLUTIONS.size());
private static enum SessionState { RUNNING, STOPPED }
private final boolean videoFrameEmitTrialEnabled;
private final Handler cameraThreadHandler;
private final Events events;
private final boolean captureToTexture;
private final Context applicationContext;
private final SurfaceTextureHelper surfaceTextureHelper;
private final int cameraId;
private final android.hardware.Camera camera;
private final android.hardware.Camera.CameraInfo info;
private final CaptureFormat captureFormat;
// Used only for stats. Only used on the camera thread.
private final long constructionTimeNs; // Construction time of this class.
private SessionState state;
private boolean firstFrameReported = false;
public static void create(final CreateSessionCallback callback, final Events events,
final boolean captureToTexture, final Context applicationContext,
final SurfaceTextureHelper surfaceTextureHelper, final MediaRecorder mediaRecorder,
final int cameraId, final int width, final int height, final int framerate) {
final long constructionTimeNs = System.nanoTime();
Logging.d(TAG, "Open camera " + cameraId);
events.onCameraOpening();
final android.hardware.Camera camera;
try {
camera = android.hardware.Camera.open(cameraId);
} catch (RuntimeException e) {
callback.onFailure(FailureType.ERROR, e.getMessage());
return;
}
try {
camera.setPreviewTexture(surfaceTextureHelper.getSurfaceTexture());
} catch (IOException e) {
camera.release();
callback.onFailure(FailureType.ERROR, e.getMessage());
return;
}
final android.hardware.Camera.CameraInfo info = new android.hardware.Camera.CameraInfo();
android.hardware.Camera.getCameraInfo(cameraId, info);
final android.hardware.Camera.Parameters parameters = camera.getParameters();
final CaptureFormat captureFormat =
findClosestCaptureFormat(parameters, width, height, framerate);
final Size pictureSize = findClosestPictureSize(parameters, width, height);
updateCameraParameters(camera, parameters, captureFormat, pictureSize, captureToTexture);
if (!captureToTexture) {
final int frameSize = captureFormat.frameSize();
for (int i = 0; i < NUMBER_OF_CAPTURE_BUFFERS; ++i) {
final ByteBuffer buffer = ByteBuffer.allocateDirect(frameSize);
camera.addCallbackBuffer(buffer.array());
}
}
// Calculate orientation manually and send it as CVO instead.
camera.setDisplayOrientation(0 /* degrees */);
callback.onDone(
new Camera1Session(events, captureToTexture, applicationContext, surfaceTextureHelper,
mediaRecorder, cameraId, camera, info, captureFormat, constructionTimeNs));
}
private static void updateCameraParameters(android.hardware.Camera camera,
android.hardware.Camera.Parameters parameters, CaptureFormat captureFormat, Size pictureSize,
boolean captureToTexture) {
final List<String> focusModes = parameters.getSupportedFocusModes();
parameters.setPreviewFpsRange(captureFormat.framerate.min, captureFormat.framerate.max);
parameters.setPreviewSize(captureFormat.width, captureFormat.height);
parameters.setPictureSize(pictureSize.width, pictureSize.height);
if (!captureToTexture) {
parameters.setPreviewFormat(captureFormat.imageFormat);
}
if (parameters.isVideoStabilizationSupported()) {
parameters.setVideoStabilization(true);
}
if (focusModes.contains(android.hardware.Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO)) {
parameters.setFocusMode(android.hardware.Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
}
camera.setParameters(parameters);
}
private static CaptureFormat findClosestCaptureFormat(
android.hardware.Camera.Parameters parameters, int width, int height, int framerate) {
// Find closest supported format for |width| x |height| @ |framerate|.
final List<CaptureFormat.FramerateRange> supportedFramerates =
Camera1Enumerator.convertFramerates(parameters.getSupportedPreviewFpsRange());
Logging.d(TAG, "Available fps ranges: " + supportedFramerates);
final CaptureFormat.FramerateRange fpsRange =
CameraEnumerationAndroid.getClosestSupportedFramerateRange(supportedFramerates, framerate);
final Size previewSize = CameraEnumerationAndroid.getClosestSupportedSize(
Camera1Enumerator.convertSizes(parameters.getSupportedPreviewSizes()), width, height);
CameraEnumerationAndroid.reportCameraResolution(camera1ResolutionHistogram, previewSize);
return new CaptureFormat(previewSize.width, previewSize.height, fpsRange);
}
private static Size findClosestPictureSize(
android.hardware.Camera.Parameters parameters, int width, int height) {
return CameraEnumerationAndroid.getClosestSupportedSize(
Camera1Enumerator.convertSizes(parameters.getSupportedPictureSizes()), width, height);
}
private Camera1Session(Events events, boolean captureToTexture, Context applicationContext,
SurfaceTextureHelper surfaceTextureHelper, MediaRecorder mediaRecorder, int cameraId,
android.hardware.Camera camera, android.hardware.Camera.CameraInfo info,
CaptureFormat captureFormat, long constructionTimeNs) {
Logging.d(TAG, "Create new camera1 session on camera " + cameraId);
videoFrameEmitTrialEnabled =
PeerConnectionFactory.fieldTrialsFindFullName(PeerConnectionFactory.VIDEO_FRAME_EMIT_TRIAL)
.equals(PeerConnectionFactory.TRIAL_ENABLED);
this.cameraThreadHandler = new Handler();
this.events = events;
this.captureToTexture = captureToTexture;
this.applicationContext = applicationContext;
this.surfaceTextureHelper = surfaceTextureHelper;
this.cameraId = cameraId;
this.camera = camera;
this.info = info;
this.captureFormat = captureFormat;
this.constructionTimeNs = constructionTimeNs;
startCapturing();
if (mediaRecorder != null) {
camera.unlock();
mediaRecorder.setCamera(camera);
}
}
@Override
public void stop() {
Logging.d(TAG, "Stop camera1 session on camera " + cameraId);
checkIsOnCameraThread();
if (state != SessionState.STOPPED) {
final long stopStartTime = System.nanoTime();
stopInternal();
final int stopTimeMs = (int) TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - stopStartTime);
camera1StopTimeMsHistogram.addSample(stopTimeMs);
}
}
private void startCapturing() {
Logging.d(TAG, "Start capturing");
checkIsOnCameraThread();
state = SessionState.RUNNING;
camera.setErrorCallback(new android.hardware.Camera.ErrorCallback() {
@Override
public void onError(int error, android.hardware.Camera camera) {
String errorMessage;
if (error == android.hardware.Camera.CAMERA_ERROR_SERVER_DIED) {
errorMessage = "Camera server died!";
} else {
errorMessage = "Camera error: " + error;
}
Logging.e(TAG, errorMessage);
stopInternal();
if (error == android.hardware.Camera.CAMERA_ERROR_EVICTED) {
events.onCameraDisconnected(Camera1Session.this);
} else {
events.onCameraError(Camera1Session.this, errorMessage);
}
}
});
if (captureToTexture) {
listenForTextureFrames();
} else {
listenForBytebufferFrames();
}
try {
camera.startPreview();
} catch (RuntimeException e) {
stopInternal();
events.onCameraError(this, e.getMessage());
}
}
private void stopInternal() {
Logging.d(TAG, "Stop internal");
checkIsOnCameraThread();
if (state == SessionState.STOPPED) {
Logging.d(TAG, "Camera is already stopped");
return;
}
state = SessionState.STOPPED;
surfaceTextureHelper.stopListening();
// Note: stopPreview or other driver code might deadlock. Deadlock in
// android.hardware.Camera._stopPreview(Native Method) has been observed on
// Nexus 5 (hammerhead), OS version LMY48I.
camera.stopPreview();
camera.release();
events.onCameraClosed(this);
Logging.d(TAG, "Stop done");
}
private void listenForTextureFrames() {
surfaceTextureHelper.startListening(new SurfaceTextureHelper.OnTextureFrameAvailableListener() {
@Override
public void onTextureFrameAvailable(
int oesTextureId, float[] transformMatrix, long timestampNs) {
checkIsOnCameraThread();
if (state != SessionState.RUNNING) {
Logging.d(TAG, "Texture frame captured but camera is no longer running.");
surfaceTextureHelper.returnTextureFrame();
return;
}
if (!firstFrameReported) {
final int startTimeMs =
(int) TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - constructionTimeNs);
camera1StartTimeMsHistogram.addSample(startTimeMs);
firstFrameReported = true;
}
int rotation = getFrameOrientation();
if (info.facing == android.hardware.Camera.CameraInfo.CAMERA_FACING_FRONT) {
// Undo the mirror that the OS "helps" us with.
// http://developer.android.com/reference/android/hardware/Camera.html#setDisplayOrientation(int)
transformMatrix = RendererCommon.multiplyMatrices(
transformMatrix, RendererCommon.horizontalFlipMatrix());
}
if (videoFrameEmitTrialEnabled) {
final VideoFrame.Buffer buffer =
surfaceTextureHelper.createTextureBuffer(captureFormat.width, captureFormat.height,
RendererCommon.convertMatrixToAndroidGraphicsMatrix(transformMatrix));
final VideoFrame frame = new VideoFrame(buffer, rotation, timestampNs);
events.onFrameCaptured(Camera1Session.this, frame);
frame.release();
} else {
events.onTextureFrameCaptured(Camera1Session.this, captureFormat.width,
captureFormat.height, oesTextureId, transformMatrix, rotation, timestampNs);
}
}
});
}
private void listenForBytebufferFrames() {
camera.setPreviewCallbackWithBuffer(new android.hardware.Camera.PreviewCallback() {
@Override
public void onPreviewFrame(final byte[] data, android.hardware.Camera callbackCamera) {
checkIsOnCameraThread();
if (callbackCamera != camera) {
Logging.e(TAG, "Callback from a different camera. This should never happen.");
return;
}
if (state != SessionState.RUNNING) {
Logging.d(TAG, "Bytebuffer frame captured but camera is no longer running.");
return;
}
final long captureTimeNs = TimeUnit.MILLISECONDS.toNanos(SystemClock.elapsedRealtime());
if (!firstFrameReported) {
final int startTimeMs =
(int) TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - constructionTimeNs);
camera1StartTimeMsHistogram.addSample(startTimeMs);
firstFrameReported = true;
}
if (videoFrameEmitTrialEnabled) {
VideoFrame.Buffer frameBuffer = new NV21Buffer(data, captureFormat.width,
captureFormat.height, () -> cameraThreadHandler.post(() -> {
if (state == SessionState.RUNNING) {
camera.addCallbackBuffer(data);
}
}));
final VideoFrame frame =
new VideoFrame(frameBuffer, getFrameOrientation(), captureTimeNs);
events.onFrameCaptured(Camera1Session.this, frame);
frame.release();
} else {
events.onByteBufferFrameCaptured(Camera1Session.this, data, captureFormat.width,
captureFormat.height, getFrameOrientation(), captureTimeNs);
camera.addCallbackBuffer(data);
}
}
});
}
private int getDeviceOrientation() {
int orientation = 0;
WindowManager wm = (WindowManager) applicationContext.getSystemService(Context.WINDOW_SERVICE);
switch (wm.getDefaultDisplay().getRotation()) {
case Surface.ROTATION_90:
orientation = 90;
break;
case Surface.ROTATION_180:
orientation = 180;
break;
case Surface.ROTATION_270:
orientation = 270;
break;
case Surface.ROTATION_0:
default:
orientation = 0;
break;
}
return orientation;
}
private int getFrameOrientation() {
int rotation = getDeviceOrientation();
if (info.facing == android.hardware.Camera.CameraInfo.CAMERA_FACING_BACK) {
rotation = 360 - rotation;
}
return (info.orientation + rotation) % 360;
}
private void checkIsOnCameraThread() {
if (Thread.currentThread() != cameraThreadHandler.getLooper().getThread()) {
throw new IllegalStateException("Wrong thread");
}
}
}
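The rotation bookkeeping in getDeviceOrientation()/getFrameOrientation() above can be restated in standalone form; a sketch with assumed sensor orientations (actual values vary per device):

// Standalone restatement of the formula used by getFrameOrientation().
static int frameOrientation(
    int sensorOrientationDegrees, int deviceOrientationDegrees, boolean backFacing) {
  // Back-facing sensors rotate against the device; front-facing ones rotate with it.
  int rotation =
      backFacing ? (360 - deviceOrientationDegrees) % 360 : deviceOrientationDegrees;
  return (sensorOrientationDegrees + rotation) % 360;
}
// Example (assumed orientations): a back camera mounted at 90 degrees on a
// device rotated 90 degrees yields frameOrientation(90, 90, true) == 0,
// i.e. the frame needs no further rotation.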


@@ -0,0 +1,480 @@
/*
* Copyright 2016 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import android.annotation.TargetApi;
import android.content.Context;
import android.graphics.Matrix;
import android.graphics.SurfaceTexture;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CameraManager;
import android.hardware.camera2.CameraMetadata;
import android.hardware.camera2.CaptureFailure;
import android.hardware.camera2.CaptureRequest;
import android.media.MediaRecorder;
import android.os.Handler;
import android.util.Range;
import android.view.Surface;
import android.view.WindowManager;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.TimeUnit;
import org.webrtc.CameraEnumerationAndroid.CaptureFormat;
@TargetApi(21)
class Camera2Session implements CameraSession {
private static final String TAG = "Camera2Session";
private static final Histogram camera2StartTimeMsHistogram =
Histogram.createCounts("WebRTC.Android.Camera2.StartTimeMs", 1, 10000, 50);
private static final Histogram camera2StopTimeMsHistogram =
Histogram.createCounts("WebRTC.Android.Camera2.StopTimeMs", 1, 10000, 50);
private static final Histogram camera2ResolutionHistogram = Histogram.createEnumeration(
"WebRTC.Android.Camera2.Resolution", CameraEnumerationAndroid.COMMON_RESOLUTIONS.size());
private static enum SessionState { RUNNING, STOPPED }
private final boolean videoFrameEmitTrialEnabled;
private final Handler cameraThreadHandler;
private final CreateSessionCallback callback;
private final Events events;
private final Context applicationContext;
private final CameraManager cameraManager;
private final SurfaceTextureHelper surfaceTextureHelper;
private final Surface mediaRecorderSurface;
private final String cameraId;
private final int width;
private final int height;
private final int framerate;
// Initialized at start
private CameraCharacteristics cameraCharacteristics;
private int cameraOrientation;
private boolean isCameraFrontFacing;
private int fpsUnitFactor;
private CaptureFormat captureFormat;
// Initialized when camera opens
private CameraDevice cameraDevice;
private Surface surface;
// Initialized when capture session is created
private CameraCaptureSession captureSession;
// State
private SessionState state = SessionState.RUNNING;
private boolean firstFrameReported = false;
// Used only for stats. Only used on the camera thread.
private final long constructionTimeNs; // Construction time of this class.
private class CameraStateCallback extends CameraDevice.StateCallback {
private String getErrorDescription(int errorCode) {
switch (errorCode) {
case CameraDevice.StateCallback.ERROR_CAMERA_DEVICE:
return "Camera device has encountered a fatal error.";
case CameraDevice.StateCallback.ERROR_CAMERA_DISABLED:
return "Camera device could not be opened due to a device policy.";
case CameraDevice.StateCallback.ERROR_CAMERA_IN_USE:
return "Camera device is in use already.";
case CameraDevice.StateCallback.ERROR_CAMERA_SERVICE:
return "Camera service has encountered a fatal error.";
case CameraDevice.StateCallback.ERROR_MAX_CAMERAS_IN_USE:
return "Camera device could not be opened because"
+ " there are too many other open camera devices.";
default:
return "Unknown camera error: " + errorCode;
}
}
@Override
public void onDisconnected(CameraDevice camera) {
checkIsOnCameraThread();
final boolean startFailure = (captureSession == null) && (state != SessionState.STOPPED);
state = SessionState.STOPPED;
stopInternal();
if (startFailure) {
callback.onFailure(FailureType.DISCONNECTED, "Camera disconnected / evicted.");
} else {
events.onCameraDisconnected(Camera2Session.this);
}
}
@Override
public void onError(CameraDevice camera, int errorCode) {
checkIsOnCameraThread();
reportError(getErrorDescription(errorCode));
}
@Override
public void onOpened(CameraDevice camera) {
checkIsOnCameraThread();
Logging.d(TAG, "Camera opened.");
cameraDevice = camera;
final SurfaceTexture surfaceTexture = surfaceTextureHelper.getSurfaceTexture();
surfaceTexture.setDefaultBufferSize(captureFormat.width, captureFormat.height);
surface = new Surface(surfaceTexture);
List<Surface> surfaces = new ArrayList<Surface>();
surfaces.add(surface);
if (mediaRecorderSurface != null) {
Logging.d(TAG, "Add MediaRecorder surface to capture session.");
surfaces.add(mediaRecorderSurface);
}
try {
camera.createCaptureSession(surfaces, new CaptureSessionCallback(), cameraThreadHandler);
} catch (CameraAccessException e) {
reportError("Failed to create capture session. " + e);
return;
}
}
@Override
public void onClosed(CameraDevice camera) {
checkIsOnCameraThread();
Logging.d(TAG, "Camera device closed.");
events.onCameraClosed(Camera2Session.this);
}
}
private class CaptureSessionCallback extends CameraCaptureSession.StateCallback {
@Override
public void onConfigureFailed(CameraCaptureSession session) {
checkIsOnCameraThread();
session.close();
reportError("Failed to configure capture session.");
}
@Override
public void onConfigured(CameraCaptureSession session) {
checkIsOnCameraThread();
Logging.d(TAG, "Camera capture session configured.");
captureSession = session;
try {
/*
* The viable options for video capture requests are:
* TEMPLATE_PREVIEW: High frame rate is given priority over the highest-quality
* post-processing.
* TEMPLATE_RECORD: Stable frame rate is used, and post-processing is set for recording
* quality.
*/
final CaptureRequest.Builder captureRequestBuilder =
cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_RECORD);
// Set auto exposure fps range.
captureRequestBuilder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE,
new Range<Integer>(captureFormat.framerate.min / fpsUnitFactor,
captureFormat.framerate.max / fpsUnitFactor));
captureRequestBuilder.set(
CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON);
captureRequestBuilder.set(CaptureRequest.CONTROL_AE_LOCK, false);
chooseStabilizationMode(captureRequestBuilder);
chooseFocusMode(captureRequestBuilder);
captureRequestBuilder.addTarget(surface);
if (mediaRecorderSurface != null) {
Logging.d(TAG, "Add MediaRecorder surface to CaptureRequest.Builder");
captureRequestBuilder.addTarget(mediaRecorderSurface);
}
session.setRepeatingRequest(
captureRequestBuilder.build(), new CameraCaptureCallback(), cameraThreadHandler);
} catch (CameraAccessException e) {
reportError("Failed to start capture request. " + e);
return;
}
surfaceTextureHelper.startListening(
new SurfaceTextureHelper.OnTextureFrameAvailableListener() {
@Override
public void onTextureFrameAvailable(
int oesTextureId, float[] transformMatrix, long timestampNs) {
checkIsOnCameraThread();
if (state != SessionState.RUNNING) {
Logging.d(TAG, "Texture frame captured but camera is no longer running.");
surfaceTextureHelper.returnTextureFrame();
return;
}
if (!firstFrameReported) {
firstFrameReported = true;
final int startTimeMs =
(int) TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - constructionTimeNs);
camera2StartTimeMsHistogram.addSample(startTimeMs);
}
int rotation = getFrameOrientation();
if (isCameraFrontFacing) {
// Undo the mirror that the OS "helps" us with.
// http://developer.android.com/reference/android/hardware/Camera.html#setDisplayOrientation(int)
transformMatrix = RendererCommon.multiplyMatrices(
transformMatrix, RendererCommon.horizontalFlipMatrix());
}
// Undo camera orientation - we report it as rotation instead.
transformMatrix =
RendererCommon.rotateTextureMatrix(transformMatrix, -cameraOrientation);
if (videoFrameEmitTrialEnabled) {
VideoFrame.Buffer buffer = surfaceTextureHelper.createTextureBuffer(
captureFormat.width, captureFormat.height,
RendererCommon.convertMatrixToAndroidGraphicsMatrix(transformMatrix));
final VideoFrame frame = new VideoFrame(buffer, rotation, timestampNs);
events.onFrameCaptured(Camera2Session.this, frame);
frame.release();
} else {
events.onTextureFrameCaptured(Camera2Session.this, captureFormat.width,
captureFormat.height, oesTextureId, transformMatrix, rotation, timestampNs);
}
}
});
Logging.d(TAG, "Camera device successfully started.");
callback.onDone(Camera2Session.this);
}
// Prefers optical stabilization over software stabilization if available. Only enables one of
// the stabilization modes at a time because having both enabled can cause strange results.
private void chooseStabilizationMode(CaptureRequest.Builder captureRequestBuilder) {
final int[] availableOpticalStabilization = cameraCharacteristics.get(
CameraCharacteristics.LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION);
if (availableOpticalStabilization != null) {
for (int mode : availableOpticalStabilization) {
if (mode == CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE_ON) {
captureRequestBuilder.set(CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE,
CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE_ON);
captureRequestBuilder.set(CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE,
CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE_OFF);
Logging.d(TAG, "Using optical stabilization.");
return;
}
}
}
// If no optical mode is available, try software.
final int[] availableVideoStabilization = cameraCharacteristics.get(
CameraCharacteristics.CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES);
for (int mode : availableVideoStabilization) {
if (mode == CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE_ON) {
captureRequestBuilder.set(CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE,
CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE_ON);
captureRequestBuilder.set(CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE,
CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE_OFF);
Logging.d(TAG, "Using video stabilization.");
return;
}
}
Logging.d(TAG, "Stabilization not available.");
}
private void chooseFocusMode(CaptureRequest.Builder captureRequestBuilder) {
final int[] availableFocusModes =
cameraCharacteristics.get(CameraCharacteristics.CONTROL_AF_AVAILABLE_MODES);
for (int mode : availableFocusModes) {
if (mode == CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_VIDEO) {
captureRequestBuilder.set(
CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_VIDEO);
Logging.d(TAG, "Using continuous video auto-focus.");
return;
}
}
Logging.d(TAG, "Auto-focus is not available.");
}
}
private class CameraCaptureCallback extends CameraCaptureSession.CaptureCallback {
@Override
public void onCaptureFailed(
CameraCaptureSession session, CaptureRequest request, CaptureFailure failure) {
Logging.d(TAG, "Capture failed: " + failure);
}
}
public static void create(CreateSessionCallback callback, Events events,
Context applicationContext, CameraManager cameraManager,
SurfaceTextureHelper surfaceTextureHelper, MediaRecorder mediaRecorder, String cameraId,
int width, int height, int framerate) {
new Camera2Session(callback, events, applicationContext, cameraManager, surfaceTextureHelper,
mediaRecorder, cameraId, width, height, framerate);
}
private Camera2Session(CreateSessionCallback callback, Events events, Context applicationContext,
CameraManager cameraManager, SurfaceTextureHelper surfaceTextureHelper,
MediaRecorder mediaRecorder, String cameraId, int width, int height, int framerate) {
Logging.d(TAG, "Create new camera2 session on camera " + cameraId);
videoFrameEmitTrialEnabled =
PeerConnectionFactory.fieldTrialsFindFullName(PeerConnectionFactory.VIDEO_FRAME_EMIT_TRIAL)
.equals(PeerConnectionFactory.TRIAL_ENABLED);
constructionTimeNs = System.nanoTime();
this.cameraThreadHandler = new Handler();
this.callback = callback;
this.events = events;
this.applicationContext = applicationContext;
this.cameraManager = cameraManager;
this.surfaceTextureHelper = surfaceTextureHelper;
this.mediaRecorderSurface = (mediaRecorder != null) ? mediaRecorder.getSurface() : null;
this.cameraId = cameraId;
this.width = width;
this.height = height;
this.framerate = framerate;
start();
}
private void start() {
checkIsOnCameraThread();
Logging.d(TAG, "start");
try {
cameraCharacteristics = cameraManager.getCameraCharacteristics(cameraId);
} catch (final CameraAccessException e) {
reportError("getCameraCharacteristics(): " + e.getMessage());
return;
}
cameraOrientation = cameraCharacteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
isCameraFrontFacing = cameraCharacteristics.get(CameraCharacteristics.LENS_FACING)
== CameraMetadata.LENS_FACING_FRONT;
findCaptureFormat();
openCamera();
}
private void findCaptureFormat() {
checkIsOnCameraThread();
Range<Integer>[] fpsRanges =
cameraCharacteristics.get(CameraCharacteristics.CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES);
fpsUnitFactor = Camera2Enumerator.getFpsUnitFactor(fpsRanges);
List<CaptureFormat.FramerateRange> framerateRanges =
Camera2Enumerator.convertFramerates(fpsRanges, fpsUnitFactor);
List<Size> sizes = Camera2Enumerator.getSupportedSizes(cameraCharacteristics);
Logging.d(TAG, "Available preview sizes: " + sizes);
Logging.d(TAG, "Available fps ranges: " + framerateRanges);
if (framerateRanges.isEmpty() || sizes.isEmpty()) {
reportError("No supported capture formats.");
return;
}
final CaptureFormat.FramerateRange bestFpsRange =
CameraEnumerationAndroid.getClosestSupportedFramerateRange(framerateRanges, framerate);
final Size bestSize = CameraEnumerationAndroid.getClosestSupportedSize(sizes, width, height);
CameraEnumerationAndroid.reportCameraResolution(camera2ResolutionHistogram, bestSize);
captureFormat = new CaptureFormat(bestSize.width, bestSize.height, bestFpsRange);
Logging.d(TAG, "Using capture format: " + captureFormat);
}
private void openCamera() {
checkIsOnCameraThread();
Logging.d(TAG, "Opening camera " + cameraId);
events.onCameraOpening();
try {
cameraManager.openCamera(cameraId, new CameraStateCallback(), cameraThreadHandler);
} catch (CameraAccessException e) {
reportError("Failed to open camera: " + e);
return;
}
}
@Override
public void stop() {
Logging.d(TAG, "Stop camera2 session on camera " + cameraId);
checkIsOnCameraThread();
if (state != SessionState.STOPPED) {
final long stopStartTime = System.nanoTime();
state = SessionState.STOPPED;
stopInternal();
final int stopTimeMs = (int) TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - stopStartTime);
camera2StopTimeMsHistogram.addSample(stopTimeMs);
}
}
private void stopInternal() {
Logging.d(TAG, "Stop internal");
checkIsOnCameraThread();
surfaceTextureHelper.stopListening();
if (captureSession != null) {
captureSession.close();
captureSession = null;
}
if (surface != null) {
surface.release();
surface = null;
}
if (cameraDevice != null) {
cameraDevice.close();
cameraDevice = null;
}
Logging.d(TAG, "Stop done");
}
private void reportError(String error) {
checkIsOnCameraThread();
Logging.e(TAG, "Error: " + error);
final boolean startFailure = (captureSession == null) && (state != SessionState.STOPPED);
state = SessionState.STOPPED;
stopInternal();
if (startFailure) {
callback.onFailure(FailureType.ERROR, error);
} else {
events.onCameraError(this, error);
}
}
private int getDeviceOrientation() {
int orientation = 0;
WindowManager wm = (WindowManager) applicationContext.getSystemService(Context.WINDOW_SERVICE);
switch (wm.getDefaultDisplay().getRotation()) {
case Surface.ROTATION_90:
orientation = 90;
break;
case Surface.ROTATION_180:
orientation = 180;
break;
case Surface.ROTATION_270:
orientation = 270;
break;
case Surface.ROTATION_0:
default:
orientation = 0;
break;
}
return orientation;
}
private int getFrameOrientation() {
int rotation = getDeviceOrientation();
if (!isCameraFrontFacing) {
rotation = 360 - rotation;
}
return (cameraOrientation + rotation) % 360;
}
private void checkIsOnCameraThread() {
if (Thread.currentThread() != cameraThreadHandler.getLooper().getThread()) {
throw new IllegalStateException("Wrong thread");
}
}
}
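One subtlety above is fpsUnitFactor: some Camera2 HALs report CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES scaled by 1000 (e.g. [15000, 30000] meaning 15-30 fps). The actual heuristic lives in Camera2Enumerator.getFpsUnitFactor, which is not part of this diff; the sketch below shows the assumed idea, not the shipped implementation:

// Sketch (assumption): normalize fps ranges that appear scaled by 1000
// into plain frames per second.
static int guessFpsUnitFactor(android.util.Range<Integer>[] fpsRanges) {
  if (fpsRanges.length == 0) {
    return 1;
  }
  return fpsRanges[0].getUpper() > 1000 ? 1000 : 1;
}
// A reported range of [15000, 30000] then becomes [15, 30] after dividing by
// the factor, matching the division by fpsUnitFactor in onConfigured() above.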


@@ -0,0 +1,590 @@
/*
* Copyright 2016 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import android.content.Context;
import android.media.MediaRecorder;
import android.os.Handler;
import android.os.Looper;
import java.util.Arrays;
@SuppressWarnings("deprecation")
abstract class CameraCapturer implements CameraVideoCapturer {
enum SwitchState {
IDLE, // No switch requested.
PENDING, // Waiting for previous capture session to open.
IN_PROGRESS, // Waiting for new switched capture session to start.
}
enum MediaRecorderState {
IDLE, // No media recording update (add or remove) requested.
IDLE_TO_ACTIVE, // Waiting for new capture session with added MediaRecorder surface to start.
ACTIVE_TO_IDLE, // Waiting for new capture session with removed MediaRecorder surface to start.
ACTIVE, // MediaRecorder was successfully added to camera pipeline.
}
private static final String TAG = "CameraCapturer";
private static final int MAX_OPEN_CAMERA_ATTEMPTS = 3;
private static final int OPEN_CAMERA_DELAY_MS = 500;
private static final int OPEN_CAMERA_TIMEOUT = 10000;
private final CameraEnumerator cameraEnumerator;
private final CameraEventsHandler eventsHandler;
private final Handler uiThreadHandler;
private final CameraSession.CreateSessionCallback createSessionCallback =
new CameraSession.CreateSessionCallback() {
@Override
public void onDone(CameraSession session) {
checkIsOnCameraThread();
Logging.d(TAG,
"Create session done. Switch state: " + switchState
+ ". MediaRecorder state: " + mediaRecorderState);
uiThreadHandler.removeCallbacks(openCameraTimeoutRunnable);
synchronized (stateLock) {
capturerObserver.onCapturerStarted(true /* success */);
sessionOpening = false;
currentSession = session;
cameraStatistics = new CameraStatistics(surfaceHelper, eventsHandler);
firstFrameObserved = false;
stateLock.notifyAll();
if (switchState == SwitchState.IN_PROGRESS) {
if (switchEventsHandler != null) {
switchEventsHandler.onCameraSwitchDone(cameraEnumerator.isFrontFacing(cameraName));
switchEventsHandler = null;
}
switchState = SwitchState.IDLE;
} else if (switchState == SwitchState.PENDING) {
switchState = SwitchState.IDLE;
switchCameraInternal(switchEventsHandler);
}
if (mediaRecorderState == MediaRecorderState.IDLE_TO_ACTIVE
|| mediaRecorderState == MediaRecorderState.ACTIVE_TO_IDLE) {
if (mediaRecorderEventsHandler != null) {
mediaRecorderEventsHandler.onMediaRecorderSuccess();
mediaRecorderEventsHandler = null;
}
if (mediaRecorderState == MediaRecorderState.IDLE_TO_ACTIVE) {
mediaRecorderState = MediaRecorderState.ACTIVE;
} else {
mediaRecorderState = MediaRecorderState.IDLE;
}
}
}
}
@Override
public void onFailure(CameraSession.FailureType failureType, String error) {
checkIsOnCameraThread();
uiThreadHandler.removeCallbacks(openCameraTimeoutRunnable);
synchronized (stateLock) {
capturerObserver.onCapturerStarted(false /* success */);
openAttemptsRemaining--;
if (openAttemptsRemaining <= 0) {
Logging.w(TAG, "Opening camera failed, passing: " + error);
sessionOpening = false;
stateLock.notifyAll();
if (switchState != SwitchState.IDLE) {
if (switchEventsHandler != null) {
switchEventsHandler.onCameraSwitchError(error);
switchEventsHandler = null;
}
switchState = SwitchState.IDLE;
}
if (mediaRecorderState != MediaRecorderState.IDLE) {
if (mediaRecorderEventsHandler != null) {
mediaRecorderEventsHandler.onMediaRecorderError(error);
mediaRecorderEventsHandler = null;
}
mediaRecorderState = MediaRecorderState.IDLE;
}
if (failureType == CameraSession.FailureType.DISCONNECTED) {
eventsHandler.onCameraDisconnected();
} else {
eventsHandler.onCameraError(error);
}
} else {
Logging.w(TAG, "Opening camera failed, retry: " + error);
createSessionInternal(OPEN_CAMERA_DELAY_MS, null /* mediaRecorder */);
}
}
}
};
private final CameraSession.Events cameraSessionEventsHandler = new CameraSession.Events() {
@Override
public void onCameraOpening() {
checkIsOnCameraThread();
synchronized (stateLock) {
if (currentSession != null) {
Logging.w(TAG, "onCameraOpening while session was open.");
return;
}
eventsHandler.onCameraOpening(cameraName);
}
}
@Override
public void onCameraError(CameraSession session, String error) {
checkIsOnCameraThread();
synchronized (stateLock) {
if (session != currentSession) {
Logging.w(TAG, "onCameraError from another session: " + error);
return;
}
eventsHandler.onCameraError(error);
stopCapture();
}
}
@Override
public void onCameraDisconnected(CameraSession session) {
checkIsOnCameraThread();
synchronized (stateLock) {
if (session != currentSession) {
Logging.w(TAG, "onCameraDisconnected from another session.");
return;
}
eventsHandler.onCameraDisconnected();
stopCapture();
}
}
@Override
public void onCameraClosed(CameraSession session) {
checkIsOnCameraThread();
synchronized (stateLock) {
if (session != currentSession && currentSession != null) {
Logging.d(TAG, "onCameraClosed from another session.");
return;
}
eventsHandler.onCameraClosed();
}
}
@Override
public void onFrameCaptured(CameraSession session, VideoFrame frame) {
checkIsOnCameraThread();
synchronized (stateLock) {
if (session != currentSession) {
Logging.w(TAG, "onTextureFrameCaptured from another session.");
return;
}
if (!firstFrameObserved) {
eventsHandler.onFirstFrameAvailable();
firstFrameObserved = true;
}
cameraStatistics.addFrame();
capturerObserver.onFrameCaptured(frame);
}
}
@Override
public void onByteBufferFrameCaptured(
CameraSession session, byte[] data, int width, int height, int rotation, long timestamp) {
checkIsOnCameraThread();
synchronized (stateLock) {
if (session != currentSession) {
Logging.w(TAG, "onByteBufferFrameCaptured from another session.");
return;
}
if (!firstFrameObserved) {
eventsHandler.onFirstFrameAvailable();
firstFrameObserved = true;
}
cameraStatistics.addFrame();
capturerObserver.onByteBufferFrameCaptured(data, width, height, rotation, timestamp);
}
}
@Override
public void onTextureFrameCaptured(CameraSession session, int width, int height,
int oesTextureId, float[] transformMatrix, int rotation, long timestamp) {
checkIsOnCameraThread();
synchronized (stateLock) {
if (session != currentSession) {
Logging.w(TAG, "onTextureFrameCaptured from another session.");
surfaceHelper.returnTextureFrame();
return;
}
if (!firstFrameObserved) {
eventsHandler.onFirstFrameAvailable();
firstFrameObserved = true;
}
cameraStatistics.addFrame();
capturerObserver.onTextureFrameCaptured(
width, height, oesTextureId, transformMatrix, rotation, timestamp);
}
}
};
private final Runnable openCameraTimeoutRunnable = new Runnable() {
@Override
public void run() {
eventsHandler.onCameraError("Camera failed to start within timeout.");
}
};
// Initialized on initialize
// -------------------------
private Handler cameraThreadHandler;
private Context applicationContext;
private CapturerObserver capturerObserver;
private SurfaceTextureHelper surfaceHelper;
private final Object stateLock = new Object();
private boolean sessionOpening; /* guarded by stateLock */
private CameraSession currentSession; /* guarded by stateLock */
private String cameraName; /* guarded by stateLock */
private int width; /* guarded by stateLock */
private int height; /* guarded by stateLock */
private int framerate; /* guarded by stateLock */
private int openAttemptsRemaining; /* guarded by stateLock */
private SwitchState switchState = SwitchState.IDLE; /* guarded by stateLock */
private CameraSwitchHandler switchEventsHandler; /* guarded by stateLock */
// Valid from onDone call until stopCapture, otherwise null.
private CameraStatistics cameraStatistics; /* guarded by stateLock */
private boolean firstFrameObserved; /* guarded by stateLock */
// Variables used on camera thread - do not require stateLock synchronization.
private MediaRecorderState mediaRecorderState = MediaRecorderState.IDLE;
private MediaRecorderHandler mediaRecorderEventsHandler;
public CameraCapturer(
String cameraName, CameraEventsHandler eventsHandler, CameraEnumerator cameraEnumerator) {
if (eventsHandler == null) {
eventsHandler = new CameraEventsHandler() {
@Override
public void onCameraError(String errorDescription) {}
@Override
public void onCameraDisconnected() {}
@Override
public void onCameraFreezed(String errorDescription) {}
@Override
public void onCameraOpening(String cameraName) {}
@Override
public void onFirstFrameAvailable() {}
@Override
public void onCameraClosed() {}
};
}
this.eventsHandler = eventsHandler;
this.cameraEnumerator = cameraEnumerator;
this.cameraName = cameraName;
uiThreadHandler = new Handler(Looper.getMainLooper());
final String[] deviceNames = cameraEnumerator.getDeviceNames();
if (deviceNames.length == 0) {
throw new RuntimeException("No cameras attached.");
}
if (!Arrays.asList(deviceNames).contains(this.cameraName)) {
throw new IllegalArgumentException(
"Camera name " + this.cameraName + " does not match any known camera device.");
}
}
@Override
public void initialize(SurfaceTextureHelper surfaceTextureHelper, Context applicationContext,
CapturerObserver capturerObserver) {
this.applicationContext = applicationContext;
this.capturerObserver = capturerObserver;
this.surfaceHelper = surfaceTextureHelper;
this.cameraThreadHandler =
surfaceTextureHelper == null ? null : surfaceTextureHelper.getHandler();
}
@Override
public void startCapture(int width, int height, int framerate) {
Logging.d(TAG, "startCapture: " + width + "x" + height + "@" + framerate);
if (applicationContext == null) {
throw new RuntimeException("CameraCapturer must be initialized before calling startCapture.");
}
synchronized (stateLock) {
if (sessionOpening || currentSession != null) {
Logging.w(TAG, "Session already open");
return;
}
this.width = width;
this.height = height;
this.framerate = framerate;
sessionOpening = true;
openAttemptsRemaining = MAX_OPEN_CAMERA_ATTEMPTS;
createSessionInternal(0, null /* mediaRecorder */);
}
}
private void createSessionInternal(int delayMs, final MediaRecorder mediaRecorder) {
uiThreadHandler.postDelayed(openCameraTimeoutRunnable, delayMs + OPEN_CAMERA_TIMEOUT);
cameraThreadHandler.postDelayed(new Runnable() {
@Override
public void run() {
createCameraSession(createSessionCallback, cameraSessionEventsHandler, applicationContext,
surfaceHelper, mediaRecorder, cameraName, width, height, framerate);
}
}, delayMs);
}
@Override
public void stopCapture() {
Logging.d(TAG, "Stop capture");
synchronized (stateLock) {
while (sessionOpening) {
Logging.d(TAG, "Stop capture: Waiting for session to open");
ThreadUtils.waitUninterruptibly(stateLock);
}
if (currentSession != null) {
Logging.d(TAG, "Stop capture: Nulling session");
cameraStatistics.release();
cameraStatistics = null;
final CameraSession oldSession = currentSession;
cameraThreadHandler.post(new Runnable() {
@Override
public void run() {
oldSession.stop();
}
});
currentSession = null;
capturerObserver.onCapturerStopped();
} else {
Logging.d(TAG, "Stop capture: No session open");
}
}
Logging.d(TAG, "Stop capture done");
}
@Override
public void changeCaptureFormat(int width, int height, int framerate) {
Logging.d(TAG, "changeCaptureFormat: " + width + "x" + height + "@" + framerate);
synchronized (stateLock) {
stopCapture();
startCapture(width, height, framerate);
}
}
@Override
public void dispose() {
Logging.d(TAG, "dispose");
stopCapture();
}
@Override
public void switchCamera(final CameraSwitchHandler switchEventsHandler) {
Logging.d(TAG, "switchCamera");
cameraThreadHandler.post(new Runnable() {
@Override
public void run() {
switchCameraInternal(switchEventsHandler);
}
});
}
@Override
public void addMediaRecorderToCamera(
final MediaRecorder mediaRecorder, final MediaRecorderHandler mediaRecoderEventsHandler) {
Logging.d(TAG, "addMediaRecorderToCamera");
cameraThreadHandler.post(new Runnable() {
@Override
public void run() {
updateMediaRecorderInternal(mediaRecorder, mediaRecoderEventsHandler);
}
});
}
@Override
public void removeMediaRecorderFromCamera(final MediaRecorderHandler mediaRecoderEventsHandler) {
Logging.d(TAG, "removeMediaRecorderFromCamera");
cameraThreadHandler.post(new Runnable() {
@Override
public void run() {
updateMediaRecorderInternal(null /* mediaRecorder */, mediaRecoderEventsHandler);
}
});
}
@Override
public boolean isScreencast() {
return false;
}
public void printStackTrace() {
Thread cameraThread = null;
if (cameraThreadHandler != null) {
cameraThread = cameraThreadHandler.getLooper().getThread();
}
if (cameraThread != null) {
StackTraceElement[] cameraStackTrace = cameraThread.getStackTrace();
if (cameraStackTrace.length > 0) {
Logging.d(TAG, "CameraCapturer stack trace:");
for (StackTraceElement traceElem : cameraStackTrace) {
Logging.d(TAG, traceElem.toString());
}
}
}
}
private void reportCameraSwitchError(String error, CameraSwitchHandler switchEventsHandler) {
Logging.e(TAG, error);
if (switchEventsHandler != null) {
switchEventsHandler.onCameraSwitchError(error);
}
}
private void switchCameraInternal(final CameraSwitchHandler switchEventsHandler) {
Logging.d(TAG, "switchCamera internal");
final String[] deviceNames = cameraEnumerator.getDeviceNames();
if (deviceNames.length < 2) {
if (switchEventsHandler != null) {
switchEventsHandler.onCameraSwitchError("No camera to switch to.");
}
return;
}
synchronized (stateLock) {
if (switchState != SwitchState.IDLE) {
reportCameraSwitchError("Camera switch already in progress.", switchEventsHandler);
return;
}
if (mediaRecorderState != MediaRecorderState.IDLE) {
reportCameraSwitchError("switchCamera: media recording is active", switchEventsHandler);
return;
}
if (!sessionOpening && currentSession == null) {
reportCameraSwitchError("switchCamera: camera is not running.", switchEventsHandler);
return;
}
this.switchEventsHandler = switchEventsHandler;
if (sessionOpening) {
switchState = SwitchState.PENDING;
return;
} else {
switchState = SwitchState.IN_PROGRESS;
}
Logging.d(TAG, "switchCamera: Stopping session");
cameraStatistics.release();
cameraStatistics = null;
final CameraSession oldSession = currentSession;
cameraThreadHandler.post(new Runnable() {
@Override
public void run() {
oldSession.stop();
}
});
currentSession = null;
int cameraNameIndex = Arrays.asList(deviceNames).indexOf(cameraName);
cameraName = deviceNames[(cameraNameIndex + 1) % deviceNames.length];
sessionOpening = true;
openAttemptsRemaining = 1;
createSessionInternal(0, null /* mediaRecorder */);
}
Logging.d(TAG, "switchCamera done");
}
private void reportUpdateMediaRecorderError(
String error, MediaRecorderHandler mediaRecoderEventsHandler) {
checkIsOnCameraThread();
Logging.e(TAG, error);
if (mediaRecoderEventsHandler != null) {
mediaRecoderEventsHandler.onMediaRecorderError(error);
}
}
private void updateMediaRecorderInternal(
MediaRecorder mediaRecorder, MediaRecorderHandler mediaRecoderEventsHandler) {
checkIsOnCameraThread();
boolean addMediaRecorder = (mediaRecorder != null);
Logging.d(TAG,
"updateMediaRecoderInternal internal. State: " + mediaRecorderState
+ ". Switch state: " + switchState + ". Add MediaRecorder: " + addMediaRecorder);
synchronized (stateLock) {
if ((addMediaRecorder && mediaRecorderState != MediaRecorderState.IDLE)
|| (!addMediaRecorder && mediaRecorderState != MediaRecorderState.ACTIVE)) {
reportUpdateMediaRecorderError(
"Incorrect state for MediaRecorder update.", mediaRecoderEventsHandler);
return;
}
if (switchState != SwitchState.IDLE) {
reportUpdateMediaRecorderError(
"MediaRecorder update while camera is switching.", mediaRecoderEventsHandler);
return;
}
if (currentSession == null) {
reportUpdateMediaRecorderError(
"MediaRecorder update while camera is closed.", mediaRecoderEventsHandler);
return;
}
if (sessionOpening) {
reportUpdateMediaRecorderError(
"MediaRecorder update while camera is still opening.", mediaRecoderEventsHandler);
return;
}
this.mediaRecorderEventsHandler = mediaRecoderEventsHandler;
mediaRecorderState =
addMediaRecorder ? MediaRecorderState.IDLE_TO_ACTIVE : MediaRecorderState.ACTIVE_TO_IDLE;
Logging.d(TAG, "updateMediaRecoder: Stopping session");
cameraStatistics.release();
cameraStatistics = null;
final CameraSession oldSession = currentSession;
cameraThreadHandler.post(new Runnable() {
@Override
public void run() {
oldSession.stop();
}
});
currentSession = null;
sessionOpening = true;
openAttemptsRemaining = 1;
createSessionInternal(0, mediaRecorder);
}
Logging.d(TAG, "updateMediaRecoderInternal done");
}
private void checkIsOnCameraThread() {
if (Thread.currentThread() != cameraThreadHandler.getLooper().getThread()) {
Logging.e(TAG, "Check is on camera thread failed.");
throw new RuntimeException("Not on camera thread.");
}
}
protected String getCameraName() {
synchronized (stateLock) {
return cameraName;
}
}
protected abstract void createCameraSession(
    CameraSession.CreateSessionCallback createSessionCallback, CameraSession.Events events,
    Context applicationContext, SurfaceTextureHelper surfaceTextureHelper,
    MediaRecorder mediaRecorder, String cameraName, int width, int height, int framerate);
}
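For completeness, the switch-state machine above is driven from the application side through CameraVideoCapturer.switchCamera; a minimal usage sketch (the helper method and log tag are illustrative):

// Sketch: request a switch to the next camera and observe the outcome.
void switchToNextCamera(CameraVideoCapturer capturer) {
  capturer.switchCamera(new CameraVideoCapturer.CameraSwitchHandler() {
    @Override
    public void onCameraSwitchDone(boolean isFrontCamera) {
      Logging.d("SwitchDemo", "Switch done, front facing: " + isFrontCamera);
    }

    @Override
    public void onCameraSwitchError(String errorDescription) {
      Logging.e("SwitchDemo", "Switch failed: " + errorDescription);
    }
  });
}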


@@ -0,0 +1,42 @@
/*
* Copyright 2016 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
interface CameraSession {
enum FailureType { ERROR, DISCONNECTED }
// Callbacks are fired on the camera thread.
public interface CreateSessionCallback {
void onDone(CameraSession session);
void onFailure(FailureType failureType, String error);
}
// Events are fired on the camera thread.
public interface Events {
void onCameraOpening();
void onCameraError(CameraSession session, String error);
void onCameraDisconnected(CameraSession session);
void onCameraClosed(CameraSession session);
void onFrameCaptured(CameraSession session, VideoFrame frame);
// The old way of passing frames. Will be removed eventually.
void onByteBufferFrameCaptured(
CameraSession session, byte[] data, int width, int height, int rotation, long timestamp);
void onTextureFrameCaptured(CameraSession session, int width, int height, int oesTextureId,
float[] transformMatrix, int rotation, long timestamp);
}
/**
 * Stops the capture. Waits until no more calls to the capture observer will be made.
 */
void stop();
}
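As a reading aid, a skeletal implementation of the creation callback (no-op bodies, for illustration only); as noted above, it is invoked on the camera thread:

// Sketch: minimal callbacks a session consumer would supply.
CameraSession.CreateSessionCallback createCallback = new CameraSession.CreateSessionCallback() {
  @Override
  public void onDone(CameraSession session) {
    // Session is running; keep a reference so it can be stopped later.
  }

  @Override
  public void onFailure(CameraSession.FailureType failureType, String error) {
    // DISCONNECTED (camera evicted) and ERROR are typically handled differently.
  }
};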


@@ -0,0 +1,95 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
/**
* BitrateAdjuster that tracks the bandwidth produced by an encoder and dynamically adjusts the
* bitrate. Used for hardware codecs that pay attention to framerate but still deviate from the
* target bitrate by unacceptable margins.
*/
class DynamicBitrateAdjuster extends BaseBitrateAdjuster {
// Change the bitrate at most once every three seconds.
private static final double BITRATE_ADJUSTMENT_SEC = 3.0;
// Maximum bitrate adjustment scale - no more than 4 times.
private static final double BITRATE_ADJUSTMENT_MAX_SCALE = 4;
// Amount of adjustment steps to reach maximum scale.
private static final int BITRATE_ADJUSTMENT_STEPS = 20;
private static final double BITS_PER_BYTE = 8.0;
// How far the codec has deviated above (or below) the target bitrate (tracked in bytes).
private double deviationBytes = 0;
private double timeSinceLastAdjustmentMs = 0;
private int bitrateAdjustmentScaleExp = 0;
@Override
public void setTargets(int targetBitrateBps, int targetFps) {
if (this.targetBitrateBps > 0 && targetBitrateBps < this.targetBitrateBps) {
// Rescale the accumulator level if the accumulator max decreases
deviationBytes = deviationBytes * targetBitrateBps / this.targetBitrateBps;
}
super.setTargets(targetBitrateBps, targetFps);
}
@Override
public void reportEncodedFrame(int size) {
if (targetFps == 0) {
return;
}
// Accumulate the difference between actual and expected frame sizes.
double expectedBytesPerFrame = (targetBitrateBps / BITS_PER_BYTE) / targetFps;
deviationBytes += (size - expectedBytesPerFrame);
timeSinceLastAdjustmentMs += 1000.0 / targetFps;
// Adjust the bitrate when the encoder accumulates one second's worth of data in excess or
// shortfall of the target.
double deviationThresholdBytes = targetBitrateBps / BITS_PER_BYTE;
// Cap the deviation, i.e., don't let it grow beyond some level to avoid using too old data for
// bitrate adjustment. This also prevents taking more than 3 "steps" in a given 3-second cycle.
double deviationCap = BITRATE_ADJUSTMENT_SEC * deviationThresholdBytes;
deviationBytes = Math.min(deviationBytes, deviationCap);
deviationBytes = Math.max(deviationBytes, -deviationCap);
// Do bitrate adjustment every 3 seconds if actual encoder bitrate deviates too much
// from the target value.
if (timeSinceLastAdjustmentMs <= 1000 * BITRATE_ADJUSTMENT_SEC) {
return;
}
if (deviationBytes > deviationThresholdBytes) {
// Encoder generates too high bitrate - need to reduce the scale.
int bitrateAdjustmentInc = (int) (deviationBytes / deviationThresholdBytes + 0.5);
bitrateAdjustmentScaleExp -= bitrateAdjustmentInc;
// Don't let the adjustment scale drop below -BITRATE_ADJUSTMENT_STEPS.
// This sets a minimum exponent of -1 (bitrateAdjustmentScaleExp / BITRATE_ADJUSTMENT_STEPS).
bitrateAdjustmentScaleExp = Math.max(bitrateAdjustmentScaleExp, -BITRATE_ADJUSTMENT_STEPS);
deviationBytes = deviationThresholdBytes;
} else if (deviationBytes < -deviationThresholdBytes) {
// Encoder generates too low bitrate - need to increase the scale.
int bitrateAdjustmentInc = (int) (-deviationBytes / deviationThresholdBytes + 0.5);
bitrateAdjustmentScaleExp += bitrateAdjustmentInc;
// Don't let the adjustment scale exceed BITRATE_ADJUSTMENT_STEPS.
// This sets a maximum exponent of 1 (bitrateAdjustmentScaleExp / BITRATE_ADJUSTMENT_STEPS).
bitrateAdjustmentScaleExp = Math.min(bitrateAdjustmentScaleExp, BITRATE_ADJUSTMENT_STEPS);
deviationBytes = -deviationThresholdBytes;
}
timeSinceLastAdjustmentMs = 0;
}
@Override
public int getAdjustedBitrateBps() {
return (int) (targetBitrateBps
* Math.pow(BITRATE_ADJUSTMENT_MAX_SCALE,
(double) bitrateAdjustmentScaleExp / BITRATE_ADJUSTMENT_STEPS));
}
}
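To make the accumulator behavior concrete, a small simulation sketch (all numbers assumed for illustration): at 1 Mbps / 30 fps the expected frame size is about 4167 bytes; feeding frames 40% larger builds up deviationBytes until, at a 3-second checkpoint, it exceeds the one-second threshold (125000 bytes) and the scale exponent steps down.

// Simulation sketch: an overshooting encoder drives the adjusted bitrate down.
// Must live in package org.webrtc, since DynamicBitrateAdjuster is package-private.
public class DynamicBitrateAdjusterDemo {
  public static void main(String[] args) {
    DynamicBitrateAdjuster adjuster = new DynamicBitrateAdjuster();
    adjuster.setTargets(1000000 /* bps */, 30 /* fps */);
    int expectedBytesPerFrame = 1000000 / 8 / 30; // ~4166 bytes.
    int oversizedFrame = (int) (expectedBytesPerFrame * 1.4); // 40% overshoot.
    for (int frame = 1; frame <= 300; frame++) { // ~10 simulated seconds.
      adjuster.reportEncodedFrame(oversizedFrame);
      if (frame % 30 == 0) { // Print once per simulated second.
        System.out.println("t=" + (frame / 30) + "s adjusted bitrate: "
            + adjuster.getAdjustedBitrateBps() + " bps");
      }
    }
  }
}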


@@ -0,0 +1,313 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import android.graphics.Canvas;
import android.graphics.Rect;
import android.graphics.SurfaceTexture;
import android.view.Surface;
import android.view.SurfaceHolder;
import javax.microedition.khronos.egl.EGL10;
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.egl.EGLContext;
import javax.microedition.khronos.egl.EGLDisplay;
import javax.microedition.khronos.egl.EGLSurface;
/**
* Holds EGL state and utility methods for handling an EGL 1.0 EGLContext, an EGLDisplay,
* and an EGLSurface.
*/
class EglBase10 extends EglBase {
// This constant is taken from EGL14.EGL_CONTEXT_CLIENT_VERSION.
private static final int EGL_CONTEXT_CLIENT_VERSION = 0x3098;
private final EGL10 egl;
private EGLContext eglContext;
private EGLConfig eglConfig;
private EGLDisplay eglDisplay;
private EGLSurface eglSurface = EGL10.EGL_NO_SURFACE;
// EGL wrapper for an actual EGLContext.
public static class Context extends EglBase.Context {
private final EGLContext eglContext;
public Context(EGLContext eglContext) {
this.eglContext = eglContext;
}
}
// Create a new context with the specified config type, sharing data with sharedContext.
public EglBase10(Context sharedContext, int[] configAttributes) {
this.egl = (EGL10) EGLContext.getEGL();
eglDisplay = getEglDisplay();
eglConfig = getEglConfig(eglDisplay, configAttributes);
eglContext = createEglContext(sharedContext, eglDisplay, eglConfig);
}
@Override
public void createSurface(Surface surface) {
/**
* We have to wrap Surface in a SurfaceHolder because for some reason eglCreateWindowSurface
* couldn't actually take a Surface object until API 17. Older versions fortunately just call
* SurfaceHolder.getSurface(), so we'll do that. No other methods are relevant.
*/
class FakeSurfaceHolder implements SurfaceHolder {
private final Surface surface;
FakeSurfaceHolder(Surface surface) {
this.surface = surface;
}
@Override
public void addCallback(Callback callback) {}
@Override
public void removeCallback(Callback callback) {}
@Override
public boolean isCreating() {
return false;
}
@Deprecated
@Override
public void setType(int i) {}
@Override
public void setFixedSize(int i, int i2) {}
@Override
public void setSizeFromLayout() {}
@Override
public void setFormat(int i) {}
@Override
public void setKeepScreenOn(boolean b) {}
@Override
public Canvas lockCanvas() {
return null;
}
@Override
public Canvas lockCanvas(Rect rect) {
return null;
}
@Override
public void unlockCanvasAndPost(Canvas canvas) {}
@Override
public Rect getSurfaceFrame() {
return null;
}
@Override
public Surface getSurface() {
return surface;
}
}
createSurfaceInternal(new FakeSurfaceHolder(surface));
}
// Create EGLSurface from the Android SurfaceTexture.
@Override
public void createSurface(SurfaceTexture surfaceTexture) {
createSurfaceInternal(surfaceTexture);
}
// Create EGLSurface from either a SurfaceHolder or a SurfaceTexture.
private void createSurfaceInternal(Object nativeWindow) {
if (!(nativeWindow instanceof SurfaceHolder) && !(nativeWindow instanceof SurfaceTexture)) {
throw new IllegalStateException("Input must be either a SurfaceHolder or SurfaceTexture");
}
checkIsNotReleased();
if (eglSurface != EGL10.EGL_NO_SURFACE) {
throw new RuntimeException("Already has an EGLSurface");
}
int[] surfaceAttribs = {EGL10.EGL_NONE};
eglSurface = egl.eglCreateWindowSurface(eglDisplay, eglConfig, nativeWindow, surfaceAttribs);
if (eglSurface == EGL10.EGL_NO_SURFACE) {
throw new RuntimeException(
"Failed to create window surface: 0x" + Integer.toHexString(egl.eglGetError()));
}
}
// Create dummy 1x1 pixel buffer surface so the context can be made current.
@Override
public void createDummyPbufferSurface() {
createPbufferSurface(1, 1);
}
@Override
public void createPbufferSurface(int width, int height) {
checkIsNotReleased();
if (eglSurface != EGL10.EGL_NO_SURFACE) {
throw new RuntimeException("Already has an EGLSurface");
}
int[] surfaceAttribs = {EGL10.EGL_WIDTH, width, EGL10.EGL_HEIGHT, height, EGL10.EGL_NONE};
eglSurface = egl.eglCreatePbufferSurface(eglDisplay, eglConfig, surfaceAttribs);
if (eglSurface == EGL10.EGL_NO_SURFACE) {
throw new RuntimeException("Failed to create pixel buffer surface with size " + width + "x"
+ height + ": 0x" + Integer.toHexString(egl.eglGetError()));
}
}
@Override
public org.webrtc.EglBase.Context getEglBaseContext() {
return new EglBase10.Context(eglContext);
}
@Override
public boolean hasSurface() {
return eglSurface != EGL10.EGL_NO_SURFACE;
}
@Override
public int surfaceWidth() {
final int[] widthArray = new int[1];
egl.eglQuerySurface(eglDisplay, eglSurface, EGL10.EGL_WIDTH, widthArray);
return widthArray[0];
}
@Override
public int surfaceHeight() {
final int[] heightArray = new int[1];
egl.eglQuerySurface(eglDisplay, eglSurface, EGL10.EGL_HEIGHT, heightArray);
return heightArray[0];
}
@Override
public void releaseSurface() {
if (eglSurface != EGL10.EGL_NO_SURFACE) {
egl.eglDestroySurface(eglDisplay, eglSurface);
eglSurface = EGL10.EGL_NO_SURFACE;
}
}
private void checkIsNotReleased() {
if (eglDisplay == EGL10.EGL_NO_DISPLAY || eglContext == EGL10.EGL_NO_CONTEXT
|| eglConfig == null) {
throw new RuntimeException("This object has been released");
}
}
@Override
public void release() {
checkIsNotReleased();
releaseSurface();
detachCurrent();
egl.eglDestroyContext(eglDisplay, eglContext);
egl.eglTerminate(eglDisplay);
eglContext = EGL10.EGL_NO_CONTEXT;
eglDisplay = EGL10.EGL_NO_DISPLAY;
eglConfig = null;
}
@Override
public void makeCurrent() {
checkIsNotReleased();
if (eglSurface == EGL10.EGL_NO_SURFACE) {
throw new RuntimeException("No EGLSurface - can't make current");
}
synchronized (EglBase.lock) {
if (!egl.eglMakeCurrent(eglDisplay, eglSurface, eglSurface, eglContext)) {
throw new RuntimeException(
"eglMakeCurrent failed: 0x" + Integer.toHexString(egl.eglGetError()));
}
}
}
// Detach the current EGL context, so that it can be made current on another thread.
@Override
public void detachCurrent() {
synchronized (EglBase.lock) {
if (!egl.eglMakeCurrent(
eglDisplay, EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_CONTEXT)) {
throw new RuntimeException(
"eglDetachCurrent failed: 0x" + Integer.toHexString(egl.eglGetError()));
}
}
}
@Override
public void swapBuffers() {
checkIsNotReleased();
if (eglSurface == EGL10.EGL_NO_SURFACE) {
throw new RuntimeException("No EGLSurface - can't swap buffers");
}
synchronized (EglBase.lock) {
egl.eglSwapBuffers(eglDisplay, eglSurface);
}
}
@Override
public void swapBuffers(long timeStampNs) {
// Setting presentation time is not supported for EGL 1.0.
swapBuffers();
}
// Return an EGLDisplay, or die trying.
private EGLDisplay getEglDisplay() {
EGLDisplay eglDisplay = egl.eglGetDisplay(EGL10.EGL_DEFAULT_DISPLAY);
if (eglDisplay == EGL10.EGL_NO_DISPLAY) {
throw new RuntimeException(
"Unable to get EGL10 display: 0x" + Integer.toHexString(egl.eglGetError()));
}
int[] version = new int[2];
if (!egl.eglInitialize(eglDisplay, version)) {
throw new RuntimeException(
"Unable to initialize EGL10: 0x" + Integer.toHexString(egl.eglGetError()));
}
return eglDisplay;
}
// Return an EGLConfig, or die trying.
private EGLConfig getEglConfig(EGLDisplay eglDisplay, int[] configAttributes) {
EGLConfig[] configs = new EGLConfig[1];
int[] numConfigs = new int[1];
if (!egl.eglChooseConfig(eglDisplay, configAttributes, configs, configs.length, numConfigs)) {
throw new RuntimeException(
"eglChooseConfig failed: 0x" + Integer.toHexString(egl.eglGetError()));
}
if (numConfigs[0] <= 0) {
throw new RuntimeException("Unable to find any matching EGL config");
}
final EGLConfig eglConfig = configs[0];
if (eglConfig == null) {
throw new RuntimeException("eglChooseConfig returned null");
}
return eglConfig;
}
// Return an EGLContext, or die trying.
private EGLContext createEglContext(
Context sharedContext, EGLDisplay eglDisplay, EGLConfig eglConfig) {
if (sharedContext != null && sharedContext.eglContext == EGL10.EGL_NO_CONTEXT) {
throw new RuntimeException("Invalid sharedContext");
}
int[] contextAttributes = {EGL_CONTEXT_CLIENT_VERSION, 2, EGL10.EGL_NONE};
EGLContext rootContext =
sharedContext == null ? EGL10.EGL_NO_CONTEXT : sharedContext.eglContext;
final EGLContext eglContext;
synchronized (EglBase.lock) {
eglContext = egl.eglCreateContext(eglDisplay, eglConfig, rootContext, contextAttributes);
}
if (eglContext == EGL10.EGL_NO_CONTEXT) {
throw new RuntimeException(
"Failed to create EGL context: 0x" + Integer.toHexString(egl.eglGetError()));
}
return eglContext;
}
}
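// Usage sketch (illustrative only; assumes EglBase.CONFIG_PLAIN is one of the
// config-attribute arrays defined on EglBase, alongside the CONFIG_RECORDABLE
// referenced by HardwareVideoEncoder later in this change):
//
//   EglBase10 eglBase = new EglBase10(null /* sharedContext */, EglBase.CONFIG_PLAIN);
//   eglBase.createDummyPbufferSurface();
//   eglBase.makeCurrent();
//   // ... issue GLES20 draw calls ...
//   eglBase.release();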

View File

@ -0,0 +1,266 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import android.annotation.TargetApi;
import android.graphics.SurfaceTexture;
import android.opengl.EGL14;
import android.opengl.EGLConfig;
import android.opengl.EGLContext;
import android.opengl.EGLDisplay;
import android.opengl.EGLExt;
import android.opengl.EGLSurface;
import android.view.Surface;
/**
* Holds EGL state and utility methods for handling an EGL14 EGLContext, an EGLDisplay,
* and an EGLSurface.
*/
@TargetApi(18)
class EglBase14 extends EglBase {
private static final String TAG = "EglBase14";
private static final int EGLExt_SDK_VERSION = android.os.Build.VERSION_CODES.JELLY_BEAN_MR2;
private static final int CURRENT_SDK_VERSION = android.os.Build.VERSION.SDK_INT;
private EGLContext eglContext;
private EGLConfig eglConfig;
private EGLDisplay eglDisplay;
private EGLSurface eglSurface = EGL14.EGL_NO_SURFACE;
// EGL 1.4 is supported from API 17. But EGLExt that is used for setting presentation
// time stamp on a surface is supported from 18 so we require 18.
public static boolean isEGL14Supported() {
Logging.d(TAG,
"SDK version: " + CURRENT_SDK_VERSION
+ ". isEGL14Supported: " + (CURRENT_SDK_VERSION >= EGLExt_SDK_VERSION));
return (CURRENT_SDK_VERSION >= EGLExt_SDK_VERSION);
}
public static class Context extends EglBase.Context {
private final android.opengl.EGLContext egl14Context;
public Context(android.opengl.EGLContext eglContext) {
this.egl14Context = eglContext;
}
}
// Create a new context with the specified config type, sharing data with sharedContext.
// |sharedContext| may be null.
public EglBase14(EglBase14.Context sharedContext, int[] configAttributes) {
eglDisplay = getEglDisplay();
eglConfig = getEglConfig(eglDisplay, configAttributes);
eglContext = createEglContext(sharedContext, eglDisplay, eglConfig);
}
// Create EGLSurface from the Android Surface.
@Override
public void createSurface(Surface surface) {
createSurfaceInternal(surface);
}
// Create EGLSurface from the Android SurfaceTexture.
@Override
public void createSurface(SurfaceTexture surfaceTexture) {
createSurfaceInternal(surfaceTexture);
}
// Create EGLSurface from either Surface or SurfaceTexture.
private void createSurfaceInternal(Object surface) {
if (!(surface instanceof Surface) && !(surface instanceof SurfaceTexture)) {
throw new IllegalStateException("Input must be either a Surface or SurfaceTexture");
}
checkIsNotReleased();
if (eglSurface != EGL14.EGL_NO_SURFACE) {
throw new RuntimeException("Already has an EGLSurface");
}
int[] surfaceAttribs = {EGL14.EGL_NONE};
eglSurface = EGL14.eglCreateWindowSurface(eglDisplay, eglConfig, surface, surfaceAttribs, 0);
if (eglSurface == EGL14.EGL_NO_SURFACE) {
throw new RuntimeException(
"Failed to create window surface: 0x" + Integer.toHexString(EGL14.eglGetError()));
}
}
@Override
public void createDummyPbufferSurface() {
createPbufferSurface(1, 1);
}
@Override
public void createPbufferSurface(int width, int height) {
checkIsNotReleased();
if (eglSurface != EGL14.EGL_NO_SURFACE) {
throw new RuntimeException("Already has an EGLSurface");
}
int[] surfaceAttribs = {EGL14.EGL_WIDTH, width, EGL14.EGL_HEIGHT, height, EGL14.EGL_NONE};
eglSurface = EGL14.eglCreatePbufferSurface(eglDisplay, eglConfig, surfaceAttribs, 0);
if (eglSurface == EGL14.EGL_NO_SURFACE) {
throw new RuntimeException("Failed to create pixel buffer surface with size " + width + "x"
+ height + ": 0x" + Integer.toHexString(EGL14.eglGetError()));
}
}
@Override
public Context getEglBaseContext() {
return new EglBase14.Context(eglContext);
}
@Override
public boolean hasSurface() {
return eglSurface != EGL14.EGL_NO_SURFACE;
}
@Override
public int surfaceWidth() {
final int[] widthArray = new int[1];
EGL14.eglQuerySurface(eglDisplay, eglSurface, EGL14.EGL_WIDTH, widthArray, 0);
return widthArray[0];
}
@Override
public int surfaceHeight() {
final int[] heightArray = new int[1];
EGL14.eglQuerySurface(eglDisplay, eglSurface, EGL14.EGL_HEIGHT, heightArray, 0);
return heightArray[0];
}
@Override
public void releaseSurface() {
if (eglSurface != EGL14.EGL_NO_SURFACE) {
EGL14.eglDestroySurface(eglDisplay, eglSurface);
eglSurface = EGL14.EGL_NO_SURFACE;
}
}
private void checkIsNotReleased() {
if (eglDisplay == EGL14.EGL_NO_DISPLAY || eglContext == EGL14.EGL_NO_CONTEXT
|| eglConfig == null) {
throw new RuntimeException("This object has been released");
}
}
@Override
public void release() {
checkIsNotReleased();
releaseSurface();
detachCurrent();
EGL14.eglDestroyContext(eglDisplay, eglContext);
EGL14.eglReleaseThread();
EGL14.eglTerminate(eglDisplay);
eglContext = EGL14.EGL_NO_CONTEXT;
eglDisplay = EGL14.EGL_NO_DISPLAY;
eglConfig = null;
}
@Override
public void makeCurrent() {
checkIsNotReleased();
if (eglSurface == EGL14.EGL_NO_SURFACE) {
throw new RuntimeException("No EGLSurface - can't make current");
}
synchronized (EglBase.lock) {
if (!EGL14.eglMakeCurrent(eglDisplay, eglSurface, eglSurface, eglContext)) {
throw new RuntimeException(
"eglMakeCurrent failed: 0x" + Integer.toHexString(EGL14.eglGetError()));
}
}
}
// Detach the current EGL context, so that it can be made current on another thread.
@Override
public void detachCurrent() {
synchronized (EglBase.lock) {
if (!EGL14.eglMakeCurrent(
eglDisplay, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_CONTEXT)) {
throw new RuntimeException(
"eglDetachCurrent failed: 0x" + Integer.toHexString(EGL14.eglGetError()));
}
}
}
@Override
public void swapBuffers() {
checkIsNotReleased();
if (eglSurface == EGL14.EGL_NO_SURFACE) {
throw new RuntimeException("No EGLSurface - can't swap buffers");
}
synchronized (EglBase.lock) {
EGL14.eglSwapBuffers(eglDisplay, eglSurface);
}
}
@Override
public void swapBuffers(long timeStampNs) {
checkIsNotReleased();
if (eglSurface == EGL14.EGL_NO_SURFACE) {
throw new RuntimeException("No EGLSurface - can't swap buffers");
}
synchronized (EglBase.lock) {
// See
// https://android.googlesource.com/platform/frameworks/native/+/tools_r22.2/opengl/specs/EGL_ANDROID_presentation_time.txt
EGLExt.eglPresentationTimeANDROID(eglDisplay, eglSurface, timeStampNs);
EGL14.eglSwapBuffers(eglDisplay, eglSurface);
}
}
// Return an EGLDisplay, or die trying.
private static EGLDisplay getEglDisplay() {
EGLDisplay eglDisplay = EGL14.eglGetDisplay(EGL14.EGL_DEFAULT_DISPLAY);
if (eglDisplay == EGL14.EGL_NO_DISPLAY) {
throw new RuntimeException(
"Unable to get EGL14 display: 0x" + Integer.toHexString(EGL14.eglGetError()));
}
int[] version = new int[2];
if (!EGL14.eglInitialize(eglDisplay, version, 0, version, 1)) {
throw new RuntimeException(
"Unable to initialize EGL14: 0x" + Integer.toHexString(EGL14.eglGetError()));
}
return eglDisplay;
}
// Return an EGLConfig, or die trying.
private static EGLConfig getEglConfig(EGLDisplay eglDisplay, int[] configAttributes) {
EGLConfig[] configs = new EGLConfig[1];
int[] numConfigs = new int[1];
if (!EGL14.eglChooseConfig(
eglDisplay, configAttributes, 0, configs, 0, configs.length, numConfigs, 0)) {
throw new RuntimeException(
"eglChooseConfig failed: 0x" + Integer.toHexString(EGL14.eglGetError()));
}
if (numConfigs[0] <= 0) {
throw new RuntimeException("Unable to find any matching EGL config");
}
final EGLConfig eglConfig = configs[0];
if (eglConfig == null) {
throw new RuntimeException("eglChooseConfig returned null");
}
return eglConfig;
}
// Return an EGLContext, or die trying.
private static EGLContext createEglContext(
EglBase14.Context sharedContext, EGLDisplay eglDisplay, EGLConfig eglConfig) {
if (sharedContext != null && sharedContext.egl14Context == EGL14.EGL_NO_CONTEXT) {
throw new RuntimeException("Invalid sharedContext");
}
int[] contextAttributes = {EGL14.EGL_CONTEXT_CLIENT_VERSION, 2, EGL14.EGL_NONE};
EGLContext rootContext =
sharedContext == null ? EGL14.EGL_NO_CONTEXT : sharedContext.egl14Context;
final EGLContext eglContext;
synchronized (EglBase.lock) {
eglContext = EGL14.eglCreateContext(eglDisplay, eglConfig, rootContext, contextAttributes, 0);
}
if (eglContext == EGL14.EGL_NO_CONTEXT) {
throw new RuntimeException(
"Failed to create EGL context: 0x" + Integer.toHexString(EGL14.eglGetError()));
}
return eglContext;
}
}
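// Usage sketch (illustrative only; mirrors how HardwareVideoEncoder later in
// this change drives EglBase14 in texture mode):
//
//   EglBase14 eglBase = new EglBase14(sharedContext, EglBase.CONFIG_RECORDABLE);
//   eglBase.createSurface(codecInputSurface);
//   eglBase.makeCurrent();
//   drawer.drawOes(textureId, transformMatrix, width, height, 0, 0, width, height);
//   eglBase.swapBuffers(frame.getTimestampNs()); // Also sets the presentation time.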

View File

@ -0,0 +1,30 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
/**
* BitrateAdjuster that adjusts the bitrate to compensate for changes in the framerate. Used with
* hardware codecs that assume the framerate never changes.
*/
class FramerateBitrateAdjuster extends BaseBitrateAdjuster {
private static final int INITIAL_FPS = 30;
@Override
public void setTargets(int targetBitrateBps, int targetFps) {
if (this.targetFps == 0) {
// Framerate-based bitrate adjustment always initializes to the same framerate.
targetFps = INITIAL_FPS;
}
super.setTargets(targetBitrateBps, targetFps);
// Multiply before dividing so integer arithmetic does not truncate the ratio to zero.
this.targetBitrateBps = this.targetBitrateBps * INITIAL_FPS / this.targetFps;
}
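// Worked example (illustrative only): with INITIAL_FPS = 30, targets of
// 600 kbps at 15 fps are rescaled to 600 * 30 / 15 = 1200 kbps at 30 fps, so a
// codec that assumes 30 fps still spends the intended ~40 kbit per frame.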
}

View File

@ -0,0 +1,700 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import android.annotation.TargetApi;
import android.media.MediaCodec;
import android.media.MediaCodecInfo.CodecCapabilities;
import android.media.MediaFormat;
import android.os.SystemClock;
import android.view.Surface;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.concurrent.BlockingDeque;
import java.util.concurrent.LinkedBlockingDeque;
import java.util.concurrent.TimeUnit;
import org.webrtc.ThreadUtils.ThreadChecker;
/** Android hardware video decoder. */
@TargetApi(16)
@SuppressWarnings("deprecation") // Cannot support API 16 without using deprecated methods.
class HardwareVideoDecoder
implements VideoDecoder, SurfaceTextureHelper.OnTextureFrameAvailableListener {
private static final String TAG = "HardwareVideoDecoder";
// TODO(magjed): Use MediaFormat.KEY_* constants when part of the public API.
private static final String MEDIA_FORMAT_KEY_STRIDE = "stride";
private static final String MEDIA_FORMAT_KEY_SLICE_HEIGHT = "slice-height";
private static final String MEDIA_FORMAT_KEY_CROP_LEFT = "crop-left";
private static final String MEDIA_FORMAT_KEY_CROP_RIGHT = "crop-right";
private static final String MEDIA_FORMAT_KEY_CROP_TOP = "crop-top";
private static final String MEDIA_FORMAT_KEY_CROP_BOTTOM = "crop-bottom";
// MediaCodec.release() occasionally hangs. Release stops waiting and reports failure after
// this timeout.
private static final int MEDIA_CODEC_RELEASE_TIMEOUT_MS = 5000;
// WebRTC queues input frames quickly at the beginning of the call. Wait for input buffers with a
// long timeout (500 ms) to prevent this from causing the codec to return an error.
private static final int DEQUEUE_INPUT_TIMEOUT_US = 500000;
// Dequeuing an output buffer will block until a buffer is available (up to 100 milliseconds).
// If this timeout is exceeded, the output thread will unblock and check if the decoder is still
// running. If it is, it will block on dequeue again. Otherwise, it will stop and release the
// MediaCodec.
private static final int DEQUEUE_OUTPUT_BUFFER_TIMEOUT_US = 100000;
private final String codecName;
private final VideoCodecType codecType;
private static class FrameInfo {
final long decodeStartTimeMs;
final int rotation;
FrameInfo(long decodeStartTimeMs, int rotation) {
this.decodeStartTimeMs = decodeStartTimeMs;
this.rotation = rotation;
}
}
private final BlockingDeque<FrameInfo> frameInfos;
private int colorFormat;
// Output thread runs a loop which polls MediaCodec for decoded output buffers. It reformats
// those buffers into VideoFrames and delivers them to the callback. Variable is set on decoder
// thread and is immutable while the codec is running.
private Thread outputThread;
// Checker that ensures work is run on the output thread.
private ThreadChecker outputThreadChecker;
// Checker that ensures work is run on the decoder thread. The decoder thread is owned by the
// caller and must be used to call initDecode, decode, and release.
private ThreadChecker decoderThreadChecker;
private volatile boolean running = false;
private volatile Exception shutdownException = null;
// Prevents the decoder from being released before all output buffers have been released.
private final Object activeOutputBuffersLock = new Object();
private int activeOutputBuffers = 0; // Guarded by activeOutputBuffersLock
// Dimensions (width, height, stride, and sliceHeight) may be accessed by either the decoder
// thread or the output thread. Accesses should be protected with this lock.
private final Object dimensionLock = new Object();
private int width;
private int height;
private int stride;
private int sliceHeight;
// Whether the decoder has finished the first frame. The codec may not change output dimensions
// after delivering the first frame. Only accessed on the output thread while the decoder is
// running.
private boolean hasDecodedFirstFrame;
// Whether the decoder has seen a key frame. The first frame must be a key frame. Only accessed
// on the decoder thread.
private boolean keyFrameRequired;
private final EglBase.Context sharedContext;
// Valid and immutable while the decoder is running.
private SurfaceTextureHelper surfaceTextureHelper;
private Surface surface = null;
private static class DecodedTextureMetadata {
final int width;
final int height;
final int rotation;
final long presentationTimestampUs;
final Integer decodeTimeMs;
DecodedTextureMetadata(
int width, int height, int rotation, long presentationTimestampUs, Integer decodeTimeMs) {
this.width = width;
this.height = height;
this.rotation = rotation;
this.presentationTimestampUs = presentationTimestampUs;
this.decodeTimeMs = decodeTimeMs;
}
}
// Metadata for the last frame rendered to the texture. Only accessed on the texture helper's
// thread.
private DecodedTextureMetadata renderedTextureMetadata;
// Decoding proceeds asynchronously. This callback returns decoded frames to the caller. Valid
// and immutable while the decoder is running.
private Callback callback;
// Valid and immutable while the decoder is running.
private MediaCodec codec = null;
HardwareVideoDecoder(
String codecName, VideoCodecType codecType, int colorFormat, EglBase.Context sharedContext) {
if (!isSupportedColorFormat(colorFormat)) {
throw new IllegalArgumentException("Unsupported color format: " + colorFormat);
}
this.codecName = codecName;
this.codecType = codecType;
this.colorFormat = colorFormat;
this.sharedContext = sharedContext;
this.frameInfos = new LinkedBlockingDeque<>();
}
@Override
public VideoCodecStatus initDecode(Settings settings, Callback callback) {
this.decoderThreadChecker = new ThreadChecker();
this.callback = callback;
if (sharedContext != null) {
surfaceTextureHelper = SurfaceTextureHelper.create("decoder-texture-thread", sharedContext);
surface = new Surface(surfaceTextureHelper.getSurfaceTexture());
surfaceTextureHelper.startListening(this);
}
return initDecodeInternal(settings.width, settings.height);
}
// Internal variant is used when restarting the codec due to reconfiguration.
private VideoCodecStatus initDecodeInternal(int width, int height) {
decoderThreadChecker.checkIsOnValidThread();
Logging.d(TAG, "initDecodeInternal");
if (outputThread != null) {
Logging.e(TAG, "initDecodeInternal called while the codec is already running");
return VideoCodecStatus.ERROR;
}
// Note: it is not necessary to initialize dimensions under the lock, since the output thread
// is not running.
this.width = width;
this.height = height;
stride = width;
sliceHeight = height;
hasDecodedFirstFrame = false;
keyFrameRequired = true;
try {
codec = MediaCodec.createByCodecName(codecName);
} catch (IOException | IllegalArgumentException e) {
Logging.e(TAG, "Cannot create media decoder " + codecName);
return VideoCodecStatus.ERROR;
}
try {
MediaFormat format = MediaFormat.createVideoFormat(codecType.mimeType(), width, height);
if (sharedContext == null) {
format.setInteger(MediaFormat.KEY_COLOR_FORMAT, colorFormat);
}
codec.configure(format, surface, null, 0);
codec.start();
} catch (IllegalStateException e) {
Logging.e(TAG, "initDecode failed", e);
release();
return VideoCodecStatus.ERROR;
}
running = true;
outputThread = createOutputThread();
outputThread.start();
Logging.d(TAG, "initDecodeInternal done");
return VideoCodecStatus.OK;
}
@Override
public VideoCodecStatus decode(EncodedImage frame, DecodeInfo info) {
decoderThreadChecker.checkIsOnValidThread();
if (codec == null || callback == null) {
Logging.d(TAG, "decode uninitalized, codec: " + codec + ", callback: " + callback);
return VideoCodecStatus.UNINITIALIZED;
}
if (frame.buffer == null) {
Logging.e(TAG, "decode() - no input data");
return VideoCodecStatus.ERR_PARAMETER;
}
int size = frame.buffer.remaining();
if (size == 0) {
Logging.e(TAG, "decode() - input buffer empty");
return VideoCodecStatus.ERR_PARAMETER;
}
// Load dimensions from shared memory under the dimension lock.
int width, height;
synchronized (dimensionLock) {
width = this.width;
height = this.height;
}
// Check if the resolution changed and reset the codec if necessary.
if (frame.encodedWidth * frame.encodedHeight > 0
&& (frame.encodedWidth != width || frame.encodedHeight != height)) {
VideoCodecStatus status = reinitDecode(frame.encodedWidth, frame.encodedHeight);
if (status != VideoCodecStatus.OK) {
return status;
}
}
if (keyFrameRequired) {
// Need to process a key frame first.
if (frame.frameType != EncodedImage.FrameType.VideoFrameKey) {
Logging.e(TAG, "decode() - key frame required first");
return VideoCodecStatus.ERROR;
}
if (!frame.completeFrame) {
Logging.e(TAG, "decode() - complete frame required first");
return VideoCodecStatus.ERROR;
}
}
int index;
try {
index = codec.dequeueInputBuffer(DEQUEUE_INPUT_TIMEOUT_US);
} catch (IllegalStateException e) {
Logging.e(TAG, "dequeueInputBuffer failed", e);
return VideoCodecStatus.ERROR;
}
if (index < 0) {
// Decoder is falling behind. No input buffers available.
// The decoder can't simply drop frames; it might lose a key frame.
Logging.e(TAG, "decode() - no HW buffers available; decoder falling behind");
return VideoCodecStatus.ERROR;
}
ByteBuffer buffer;
try {
buffer = codec.getInputBuffers()[index];
} catch (IllegalStateException e) {
Logging.e(TAG, "getInputBuffers failed", e);
return VideoCodecStatus.ERROR;
}
if (buffer.capacity() < size) {
Logging.e(TAG, "decode() - HW buffer too small");
return VideoCodecStatus.ERROR;
}
buffer.put(frame.buffer);
frameInfos.offer(new FrameInfo(SystemClock.elapsedRealtime(), frame.rotation));
try {
codec.queueInputBuffer(index, 0 /* offset */, size,
TimeUnit.NANOSECONDS.toMicros(frame.captureTimeNs), 0 /* flags */);
} catch (IllegalStateException e) {
Logging.e(TAG, "queueInputBuffer failed", e);
frameInfos.pollLast();
return VideoCodecStatus.ERROR;
}
if (keyFrameRequired) {
keyFrameRequired = false;
}
return VideoCodecStatus.OK;
}
@Override
public boolean getPrefersLateDecoding() {
return true;
}
@Override
public String getImplementationName() {
return "HardwareVideoDecoder: " + codecName;
}
@Override
public VideoCodecStatus release() {
// TODO(sakal): This is not called on the correct thread but is still called synchronously.
// Re-enable the check once this is called on the correct thread.
// decoderThreadChecker.checkIsOnValidThread();
Logging.d(TAG, "release");
VideoCodecStatus status = releaseInternal();
if (surface != null) {
surface.release();
surface = null;
surfaceTextureHelper.stopListening();
surfaceTextureHelper.dispose();
surfaceTextureHelper = null;
}
callback = null;
frameInfos.clear();
return status;
}
// Internal variant is used when restarting the codec due to reconfiguration.
private VideoCodecStatus releaseInternal() {
if (!running) {
Logging.d(TAG, "release: Decoder is not running.");
return VideoCodecStatus.OK;
}
try {
// The outputThread actually stops and releases the codec once running is false.
running = false;
if (!ThreadUtils.joinUninterruptibly(outputThread, MEDIA_CODEC_RELEASE_TIMEOUT_MS)) {
// Log an exception to capture the stack trace and turn it into a TIMEOUT error.
Logging.e(TAG, "Media decoder release timeout", new RuntimeException());
return VideoCodecStatus.TIMEOUT;
}
if (shutdownException != null) {
// Log the exception and turn it into an error. Wrap the exception in a new exception to
// capture both the output thread's stack trace and this thread's stack trace.
Logging.e(TAG, "Media decoder release error", new RuntimeException(shutdownException));
shutdownException = null;
return VideoCodecStatus.ERROR;
}
} finally {
codec = null;
outputThread = null;
}
return VideoCodecStatus.OK;
}
private VideoCodecStatus reinitDecode(int newWidth, int newHeight) {
decoderThreadChecker.checkIsOnValidThread();
VideoCodecStatus status = releaseInternal();
if (status != VideoCodecStatus.OK) {
return status;
}
return initDecodeInternal(newWidth, newHeight);
}
private Thread createOutputThread() {
return new Thread("HardwareVideoDecoder.outputThread") {
@Override
public void run() {
outputThreadChecker = new ThreadChecker();
while (running) {
deliverDecodedFrame();
}
releaseCodecOnOutputThread();
}
};
}
private void deliverDecodedFrame() {
outputThreadChecker.checkIsOnValidThread();
try {
MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
// Block until an output buffer is available (up to 100 milliseconds). If the timeout is
// exceeded, deliverDecodedFrame() will be called again on the next iteration of the output
// thread's loop. Blocking here prevents the output thread from busy-waiting while the codec
// is idle.
int result = codec.dequeueOutputBuffer(info, DEQUEUE_OUTPUT_BUFFER_TIMEOUT_US);
if (result == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
reformat(codec.getOutputFormat());
return;
}
if (result < 0) {
Logging.v(TAG, "dequeueOutputBuffer returned " + result);
return;
}
FrameInfo frameInfo = frameInfos.poll();
Integer decodeTimeMs = null;
int rotation = 0;
if (frameInfo != null) {
decodeTimeMs = (int) (SystemClock.elapsedRealtime() - frameInfo.decodeStartTimeMs);
rotation = frameInfo.rotation;
}
hasDecodedFirstFrame = true;
if (surfaceTextureHelper != null) {
deliverTextureFrame(result, info, rotation, decodeTimeMs);
} else {
deliverByteFrame(result, info, rotation, decodeTimeMs);
}
} catch (IllegalStateException e) {
Logging.e(TAG, "deliverDecodedFrame failed", e);
}
}
private void deliverTextureFrame(final int index, final MediaCodec.BufferInfo info,
final int rotation, final Integer decodeTimeMs) {
// Load dimensions from shared memory under the dimension lock.
final int width, height;
synchronized (dimensionLock) {
width = this.width;
height = this.height;
}
surfaceTextureHelper.getHandler().post(new Runnable() {
@Override
public void run() {
renderedTextureMetadata = new DecodedTextureMetadata(
width, height, rotation, info.presentationTimeUs, decodeTimeMs);
codec.releaseOutputBuffer(index, true);
}
});
}
@Override
public void onTextureFrameAvailable(int oesTextureId, float[] transformMatrix, long timestampNs) {
VideoFrame.TextureBuffer oesBuffer = surfaceTextureHelper.createTextureBuffer(
renderedTextureMetadata.width, renderedTextureMetadata.height,
RendererCommon.convertMatrixToAndroidGraphicsMatrix(transformMatrix));
VideoFrame frame = new VideoFrame(oesBuffer, renderedTextureMetadata.rotation,
renderedTextureMetadata.presentationTimestampUs * 1000);
callback.onDecodedFrame(frame, renderedTextureMetadata.decodeTimeMs, null /* qp */);
frame.release();
}
private void deliverByteFrame(
int result, MediaCodec.BufferInfo info, int rotation, Integer decodeTimeMs) {
// Load dimensions from shared memory under the dimension lock.
int width, height, stride, sliceHeight;
synchronized (dimensionLock) {
width = this.width;
height = this.height;
stride = this.stride;
sliceHeight = this.sliceHeight;
}
// Output must be at least width * height bytes for Y channel, plus (width / 2) * (height / 2)
// bytes for each of the U and V channels.
if (info.size < width * height * 3 / 2) {
Logging.e(TAG, "Insufficient output buffer size: " + info.size);
return;
}
if (info.size < stride * height * 3 / 2 && sliceHeight == height && stride > width) {
// Some codecs (Exynos) report an incorrect stride. Correct it here.
// Expected size == stride * height * 3 / 2. A bit of algebra gives the correct stride as
// 2 * size / (3 * height).
stride = info.size * 2 / (height * 3);
}
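// Example (illustrative only): for a 640x480 frame where the codec reports
// stride 1024 but delivers info.size == 552960 bytes, the corrected stride is
// 552960 * 2 / (480 * 3) = 768.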
ByteBuffer buffer = codec.getOutputBuffers()[result];
buffer.position(info.offset);
buffer.limit(info.offset + info.size);
buffer = buffer.slice();
final VideoFrame.Buffer frameBuffer;
if (colorFormat == CodecCapabilities.COLOR_FormatYUV420Planar) {
if (sliceHeight % 2 == 0) {
frameBuffer = wrapI420Buffer(buffer, result, stride, sliceHeight, width, height);
} else {
// WebRTC rounds chroma plane size conversions up so we have to repeat the last row.
frameBuffer = copyI420Buffer(buffer, result, stride, sliceHeight, width, height);
}
} else {
// All other supported color formats are NV12.
frameBuffer = wrapNV12Buffer(buffer, result, stride, sliceHeight, width, height);
}
long presentationTimeNs = info.presentationTimeUs * 1000;
VideoFrame frame = new VideoFrame(frameBuffer, rotation, presentationTimeNs);
// Note that qp is parsed on the C++ side.
callback.onDecodedFrame(frame, decodeTimeMs, null /* qp */);
frame.release();
}
private VideoFrame.Buffer wrapNV12Buffer(ByteBuffer buffer, int outputBufferIndex, int stride,
int sliceHeight, int width, int height) {
synchronized (activeOutputBuffersLock) {
activeOutputBuffers++;
}
return new NV12Buffer(width, height, stride, sliceHeight, buffer, () -> {
codec.releaseOutputBuffer(outputBufferIndex, false);
synchronized (activeOutputBuffersLock) {
activeOutputBuffers--;
activeOutputBuffersLock.notifyAll();
}
});
}
private VideoFrame.Buffer copyI420Buffer(ByteBuffer buffer, int outputBufferIndex, int stride,
int sliceHeight, int width, int height) {
final int uvStride = stride / 2;
final int yPos = 0;
final int uPos = yPos + stride * sliceHeight;
final int uEnd = uPos + uvStride * (sliceHeight / 2);
final int vPos = uPos + uvStride * sliceHeight / 2;
final int vEnd = vPos + uvStride * (sliceHeight / 2);
VideoFrame.I420Buffer frameBuffer = I420BufferImpl.allocate(width, height);
ByteBuffer dataY = frameBuffer.getDataY();
dataY.position(0); // Ensure we are in the beginning.
buffer.position(yPos);
buffer.limit(uPos);
dataY.put(buffer);
dataY.position(0); // Go back to beginning.
ByteBuffer dataU = frameBuffer.getDataU();
dataU.position(0); // Ensure we are in the beginning.
buffer.position(uPos);
buffer.limit(uEnd);
dataU.put(buffer);
if (sliceHeight % 2 != 0) {
buffer.position(uEnd - uvStride); // Repeat the last row.
dataU.put(buffer);
}
dataU.position(0); // Go back to beginning.
ByteBuffer dataV = frameBuffer.getDataV();
dataV.position(0); // Ensure we are in the beginning.
buffer.position(vPos);
buffer.limit(vEnd);
dataV.put(buffer);
if (sliceHeight % 2 != 0) {
buffer.position(vEnd - uvStride); // Repeat the last row.
dataV.put(buffer);
}
dataV.position(0); // Go back to beginning.
codec.releaseOutputBuffer(outputBufferIndex, false);
return frameBuffer;
}
private VideoFrame.Buffer wrapI420Buffer(ByteBuffer buffer, int outputBufferIndex, int stride,
int sliceHeight, int width, int height) {
final int uvStride = stride / 2;
final int yPos = 0;
final int uPos = yPos + stride * sliceHeight;
final int uEnd = uPos + uvStride * (sliceHeight / 2);
final int vPos = uPos + uvStride * sliceHeight / 2;
final int vEnd = vPos + uvStride * (sliceHeight / 2);
synchronized (activeOutputBuffersLock) {
activeOutputBuffers++;
}
Runnable releaseCallback = () -> {
codec.releaseOutputBuffer(outputBufferIndex, false);
synchronized (activeOutputBuffersLock) {
activeOutputBuffers--;
activeOutputBuffersLock.notifyAll();
}
};
buffer.position(yPos);
buffer.limit(uPos);
ByteBuffer dataY = buffer.slice();
buffer.position(uPos);
buffer.limit(uEnd);
ByteBuffer dataU = buffer.slice();
buffer.position(vPos);
buffer.limit(vEnd);
ByteBuffer dataV = buffer.slice();
return new I420BufferImpl(
width, height, dataY, stride, dataU, uvStride, dataV, uvStride, releaseCallback);
}
private void reformat(MediaFormat format) {
outputThreadChecker.checkIsOnValidThread();
Logging.d(TAG, "Decoder format changed: " + format.toString());
final int newWidth;
final int newHeight;
if (format.containsKey(MEDIA_FORMAT_KEY_CROP_LEFT)
&& format.containsKey(MEDIA_FORMAT_KEY_CROP_RIGHT)
&& format.containsKey(MEDIA_FORMAT_KEY_CROP_BOTTOM)
&& format.containsKey(MEDIA_FORMAT_KEY_CROP_TOP)) {
newWidth = 1 + format.getInteger(MEDIA_FORMAT_KEY_CROP_RIGHT)
- format.getInteger(MEDIA_FORMAT_KEY_CROP_LEFT);
newHeight = 1 + format.getInteger(MEDIA_FORMAT_KEY_CROP_BOTTOM)
- format.getInteger(MEDIA_FORMAT_KEY_CROP_TOP);
} else {
newWidth = format.getInteger(MediaFormat.KEY_WIDTH);
newHeight = format.getInteger(MediaFormat.KEY_HEIGHT);
}
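// Example (illustrative only): crop-left 0, crop-right 639, crop-top 0 and
// crop-bottom 359 describe a visible 640x360 region inside a possibly larger
// coded frame.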
// Compare to existing width, height, and save values under the dimension lock.
synchronized (dimensionLock) {
if (hasDecodedFirstFrame && (width != newWidth || height != newHeight)) {
stopOnOutputThread(new RuntimeException("Unexpected size change. Configured " + width + "*"
+ height + ". New " + newWidth + "*" + newHeight));
return;
}
width = newWidth;
height = newHeight;
}
// Note: texture mode ignores colorFormat. Hence, if the texture helper is non-null, skip
// color format updates.
if (surfaceTextureHelper == null && format.containsKey(MediaFormat.KEY_COLOR_FORMAT)) {
colorFormat = format.getInteger(MediaFormat.KEY_COLOR_FORMAT);
Logging.d(TAG, "Color: 0x" + Integer.toHexString(colorFormat));
if (!isSupportedColorFormat(colorFormat)) {
stopOnOutputThread(new IllegalStateException("Unsupported color format: " + colorFormat));
return;
}
}
// Save stride and sliceHeight under the dimension lock.
synchronized (dimensionLock) {
if (format.containsKey(MEDIA_FORMAT_KEY_STRIDE)) {
stride = format.getInteger(MEDIA_FORMAT_KEY_STRIDE);
}
if (format.containsKey(MEDIA_FORMAT_KEY_SLICE_HEIGHT)) {
sliceHeight = format.getInteger(MEDIA_FORMAT_KEY_SLICE_HEIGHT);
}
Logging.d(TAG, "Frame stride and slice height: " + stride + " x " + sliceHeight);
stride = Math.max(width, stride);
sliceHeight = Math.max(height, sliceHeight);
}
}
private void releaseCodecOnOutputThread() {
outputThreadChecker.checkIsOnValidThread();
Logging.d(TAG, "Releasing MediaCodec on output thread");
waitOutputBuffersReleasedOnOutputThread();
try {
codec.stop();
} catch (Exception e) {
Logging.e(TAG, "Media decoder stop failed", e);
}
try {
codec.release();
} catch (Exception e) {
Logging.e(TAG, "Media decoder release failed", e);
// Propagate exceptions caught during release back to the main thread.
shutdownException = e;
}
Logging.d(TAG, "Release on output thread done");
}
private void waitOutputBuffersReleasedOnOutputThread() {
outputThreadChecker.checkIsOnValidThread();
synchronized (activeOutputBuffersLock) {
while (activeOutputBuffers > 0) {
Logging.d(TAG, "Waiting for all frames to be released.");
try {
activeOutputBuffersLock.wait();
} catch (InterruptedException e) {
Logging.e(TAG, "Interrupted while waiting for output buffers to be released.", e);
return;
}
}
}
}
private void stopOnOutputThread(Exception e) {
outputThreadChecker.checkIsOnValidThread();
running = false;
shutdownException = e;
}
private boolean isSupportedColorFormat(int colorFormat) {
for (int supported : MediaCodecUtils.DECODER_COLOR_FORMATS) {
if (supported == colorFormat) {
return true;
}
}
return false;
}
}

View File

@ -0,0 +1,581 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import android.annotation.TargetApi;
import android.graphics.Matrix;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;
import android.opengl.GLES20;
import android.os.Bundle;
import android.view.Surface;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.Deque;
import java.util.Map;
import java.util.concurrent.LinkedBlockingDeque;
import java.util.concurrent.TimeUnit;
/** Android hardware video encoder. */
@TargetApi(19)
@SuppressWarnings("deprecation") // Cannot support API level 19 without using deprecated methods.
class HardwareVideoEncoder implements VideoEncoder {
private static final String TAG = "HardwareVideoEncoder";
// Bitrate modes - should be in sync with OMX_VIDEO_CONTROLRATETYPE defined
// in OMX_Video.h
private static final int VIDEO_ControlRateConstant = 2;
// Key associated with the bitrate control mode value (above). Not present as a MediaFormat
// constant until API level 21.
private static final String KEY_BITRATE_MODE = "bitrate-mode";
private static final int VIDEO_AVC_PROFILE_HIGH = 8;
private static final int VIDEO_AVC_LEVEL_3 = 0x100;
private static final int MAX_VIDEO_FRAMERATE = 30;
// See MAX_ENCODER_Q_SIZE in androidmediaencoder_jni.cc.
private static final int MAX_ENCODER_Q_SIZE = 2;
private static final int MEDIA_CODEC_RELEASE_TIMEOUT_MS = 5000;
private static final int DEQUEUE_OUTPUT_BUFFER_TIMEOUT_US = 100000;
private final String codecName;
private final VideoCodecType codecType;
private final int colorFormat;
private final Map<String, String> params;
private final ColorFormat inputColorFormat;
// Base interval for generating key frames.
private final int keyFrameIntervalSec;
// Interval at which to force a key frame. Used to reduce color distortions caused by some
// Qualcomm video encoders.
private final long forcedKeyFrameNs;
// Presentation timestamp of the last requested (or forced) key frame.
private long lastKeyFrameNs;
private final BitrateAdjuster bitrateAdjuster;
private int adjustedBitrate;
// A queue of EncodedImage.Builders that correspond to frames in the codec. These builders are
// pre-populated with all the information that can't be sent through MediaCodec.
private final Deque<EncodedImage.Builder> outputBuilders;
// Thread that delivers encoded frames to the user callback.
private Thread outputThread;
// Whether the encoder is running. Volatile so that the output thread can watch this value and
// exit when the encoder stops.
private volatile boolean running = false;
// Any exception thrown during shutdown. The output thread releases the MediaCodec and uses this
// value to send exceptions thrown during release back to the encoder thread.
private volatile Exception shutdownException = null;
// Surface objects for texture-mode encoding.
// EGL context shared with the application. Used to access texture inputs.
private EglBase14.Context textureContext;
// EGL base wrapping the shared texture context. Holds hooks to both the shared context and the
// input surface. Making this base current allows textures from the context to be drawn onto the
// surface.
private EglBase14 textureEglBase;
// Input surface for the codec. The encoder will draw input textures onto this surface.
private Surface textureInputSurface;
// Drawer used to draw input textures onto the codec's input surface.
private GlRectDrawer textureDrawer;
private MediaCodec codec;
private Callback callback;
private boolean automaticResizeOn;
private int width;
private int height;
// Contents of the last observed config frame output by the MediaCodec. Used by H.264.
private ByteBuffer configBuffer = null;
/**
* Creates a new HardwareVideoEncoder with the given codecName, codecType, colorFormat, key frame
* intervals, and bitrateAdjuster.
*
* @param codecName the hardware codec implementation to use
* @param codecType the type of the given video codec (e.g. VP8, VP9, or H264)
* @param colorFormat color format used by the input buffer
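* @param params additional codec-specific format parameters, e.g. the H264 profile-level-id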
* @param keyFrameIntervalSec interval in seconds between key frames; used to initialize the codec
* @param forceKeyFrameIntervalMs interval at which to force a key frame if one is not requested;
* used to reduce distortion caused by some codec implementations
* @param bitrateAdjuster algorithm used to correct codec implementations that do not produce the
* desired bitrates
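* @param textureContext shared EGL context for texture-mode encoding, or null to encode from
*     byte buffers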
* @throws IllegalArgumentException if colorFormat is unsupported
*/
public HardwareVideoEncoder(String codecName, VideoCodecType codecType, int colorFormat,
Map<String, String> params, int keyFrameIntervalSec, int forceKeyFrameIntervalMs,
BitrateAdjuster bitrateAdjuster, EglBase14.Context textureContext) {
this.codecName = codecName;
this.codecType = codecType;
this.colorFormat = colorFormat;
this.params = params;
if (textureContext == null) {
this.inputColorFormat = ColorFormat.valueOf(colorFormat);
} else {
// ColorFormat copies bytes between buffers. It is not used in texture mode.
this.inputColorFormat = null;
}
this.keyFrameIntervalSec = keyFrameIntervalSec;
this.forcedKeyFrameNs = TimeUnit.MILLISECONDS.toNanos(forceKeyFrameIntervalMs);
this.bitrateAdjuster = bitrateAdjuster;
this.outputBuilders = new LinkedBlockingDeque<>();
this.textureContext = textureContext;
}
@Override
public VideoCodecStatus initEncode(Settings settings, Callback callback) {
automaticResizeOn = settings.automaticResizeOn;
return initEncodeInternal(
settings.width, settings.height, settings.startBitrate, settings.maxFramerate, callback);
}
private VideoCodecStatus initEncodeInternal(
int width, int height, int bitrateKbps, int fps, Callback callback) {
Logging.d(
TAG, "initEncode: " + width + " x " + height + ". @ " + bitrateKbps + "kbps. Fps: " + fps);
this.width = width;
this.height = height;
if (bitrateKbps != 0 && fps != 0) {
bitrateAdjuster.setTargets(bitrateKbps * 1000, fps);
}
adjustedBitrate = bitrateAdjuster.getAdjustedBitrateBps();
this.callback = callback;
lastKeyFrameNs = -1;
try {
codec = MediaCodec.createByCodecName(codecName);
} catch (IOException | IllegalArgumentException e) {
Logging.e(TAG, "Cannot create media encoder " + codecName);
return VideoCodecStatus.ERROR;
}
try {
MediaFormat format = MediaFormat.createVideoFormat(codecType.mimeType(), width, height);
format.setInteger(MediaFormat.KEY_BIT_RATE, adjustedBitrate);
format.setInteger(KEY_BITRATE_MODE, VIDEO_ControlRateConstant);
format.setInteger(MediaFormat.KEY_COLOR_FORMAT, colorFormat);
format.setInteger(MediaFormat.KEY_FRAME_RATE, bitrateAdjuster.getAdjustedFramerate());
format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, keyFrameIntervalSec);
if (codecType == VideoCodecType.H264) {
String profileLevelId = params.get(VideoCodecInfo.H264_FMTP_PROFILE_LEVEL_ID);
if (profileLevelId == null) {
profileLevelId = VideoCodecInfo.H264_CONSTRAINED_BASELINE_3_1;
}
switch (profileLevelId) {
case VideoCodecInfo.H264_CONSTRAINED_HIGH_3_1:
format.setInteger("profile", VIDEO_AVC_PROFILE_HIGH);
format.setInteger("level", VIDEO_AVC_LEVEL_3);
break;
case VideoCodecInfo.H264_CONSTRAINED_BASELINE_3_1:
break;
default:
Logging.w(TAG, "Unknown profile level id: " + profileLevelId);
}
}
Logging.d(TAG, "Format: " + format);
codec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
if (textureContext != null) {
// Texture mode.
textureEglBase = new EglBase14(textureContext, EglBase.CONFIG_RECORDABLE);
textureInputSurface = codec.createInputSurface();
textureEglBase.createSurface(textureInputSurface);
textureDrawer = new GlRectDrawer();
}
codec.start();
} catch (IllegalStateException e) {
Logging.e(TAG, "initEncode failed", e);
release();
return VideoCodecStatus.ERROR;
}
running = true;
outputThread = createOutputThread();
outputThread.start();
return VideoCodecStatus.OK;
}
@Override
public VideoCodecStatus release() {
try {
if (outputThread == null) {
return VideoCodecStatus.OK;
}
// The outputThread actually stops and releases the codec once running is false.
running = false;
if (!ThreadUtils.joinUninterruptibly(outputThread, MEDIA_CODEC_RELEASE_TIMEOUT_MS)) {
Logging.e(TAG, "Media encoder release timeout");
return VideoCodecStatus.TIMEOUT;
}
if (shutdownException != null) {
// Log the exception and turn it into an error.
Logging.e(TAG, "Media encoder release exception", shutdownException);
return VideoCodecStatus.ERROR;
}
} finally {
codec = null;
outputThread = null;
outputBuilders.clear();
if (textureDrawer != null) {
textureDrawer.release();
textureDrawer = null;
}
if (textureEglBase != null) {
textureEglBase.release();
textureEglBase = null;
}
if (textureInputSurface != null) {
textureInputSurface.release();
textureInputSurface = null;
}
}
return VideoCodecStatus.OK;
}
@Override
public VideoCodecStatus encode(VideoFrame videoFrame, EncodeInfo encodeInfo) {
if (codec == null) {
return VideoCodecStatus.UNINITIALIZED;
}
// If input resolution changed, restart the codec with the new resolution.
int frameWidth = videoFrame.getBuffer().getWidth();
int frameHeight = videoFrame.getBuffer().getHeight();
if (frameWidth != width || frameHeight != height) {
VideoCodecStatus status = resetCodec(frameWidth, frameHeight);
if (status != VideoCodecStatus.OK) {
return status;
}
}
if (outputBuilders.size() > MAX_ENCODER_Q_SIZE) {
// Too many frames in the encoder. Drop this frame.
Logging.e(TAG, "Dropped frame, encoder queue full");
return VideoCodecStatus.OK; // See webrtc bug 2887.
}
boolean requestedKeyFrame = false;
for (EncodedImage.FrameType frameType : encodeInfo.frameTypes) {
if (frameType == EncodedImage.FrameType.VideoFrameKey) {
requestedKeyFrame = true;
}
}
if (requestedKeyFrame || shouldForceKeyFrame(videoFrame.getTimestampNs())) {
requestKeyFrame(videoFrame.getTimestampNs());
}
VideoFrame.Buffer videoFrameBuffer = videoFrame.getBuffer();
// Number of bytes in the video buffer. Y channel is sampled at one byte per pixel; U and V are
// subsampled at one byte per four pixels.
int bufferSize = videoFrameBuffer.getHeight() * videoFrameBuffer.getWidth() * 3 / 2;
EncodedImage.Builder builder = EncodedImage.builder()
.setCaptureTimeNs(videoFrame.getTimestampNs())
.setCompleteFrame(true)
.setEncodedWidth(videoFrame.getBuffer().getWidth())
.setEncodedHeight(videoFrame.getBuffer().getHeight())
.setRotation(videoFrame.getRotation());
outputBuilders.offer(builder);
if (textureContext != null) {
if (!(videoFrameBuffer instanceof VideoFrame.TextureBuffer)) {
Logging.e(TAG, "Cannot encode non-texture buffer in texture mode");
return VideoCodecStatus.ERROR;
}
VideoFrame.TextureBuffer textureBuffer = (VideoFrame.TextureBuffer) videoFrameBuffer;
return encodeTextureBuffer(videoFrame, textureBuffer);
} else {
if (videoFrameBuffer instanceof VideoFrame.TextureBuffer) {
Logging.w(TAG, "Encoding texture buffer in byte mode; this may be inefficient");
}
return encodeByteBuffer(videoFrame, videoFrameBuffer, bufferSize);
}
}
private VideoCodecStatus encodeTextureBuffer(
VideoFrame videoFrame, VideoFrame.TextureBuffer textureBuffer) {
Matrix matrix = textureBuffer.getTransformMatrix();
float[] transformationMatrix = RendererCommon.convertMatrixFromAndroidGraphicsMatrix(matrix);
try {
textureEglBase.makeCurrent();
// TODO(perkj): glClear() shouldn't be necessary since every pixel is covered anyway,
// but it's a workaround for bug webrtc:5147.
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
switch (textureBuffer.getType()) {
case OES:
textureDrawer.drawOes(textureBuffer.getTextureId(), transformationMatrix, width, height,
0, 0, width, height);
break;
case RGB:
textureDrawer.drawRgb(textureBuffer.getTextureId(), transformationMatrix, width, height,
0, 0, width, height);
break;
}
textureEglBase.swapBuffers(videoFrame.getTimestampNs());
} catch (RuntimeException e) {
Logging.e(TAG, "encodeTexture failed", e);
// Keep the output builders in sync with buffers in the codec.
outputBuilders.pollLast();
return VideoCodecStatus.ERROR;
}
return VideoCodecStatus.OK;
}
private VideoCodecStatus encodeByteBuffer(
VideoFrame videoFrame, VideoFrame.Buffer videoFrameBuffer, int bufferSize) {
// Frame timestamp rounded to the nearest microsecond.
long presentationTimestampUs = (videoFrame.getTimestampNs() + 500) / 1000;
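// Example (illustrative only): 33,366,999 ns rounds up to 33,367 us, whereas
// plain integer division would truncate to 33,366 us.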
// No timeout. Don't block for an input buffer, drop frames if the encoder falls behind.
int index;
try {
index = codec.dequeueInputBuffer(0 /* timeout */);
} catch (IllegalStateException e) {
Logging.e(TAG, "dequeueInputBuffer failed", e);
return VideoCodecStatus.FALLBACK_SOFTWARE;
}
if (index == -1) {
// Encoder is falling behind. No input buffers available. Drop the frame.
Logging.e(TAG, "Dropped frame, no input buffers available");
return VideoCodecStatus.OK; // See webrtc bug 2887.
}
ByteBuffer buffer;
try {
buffer = codec.getInputBuffers()[index];
} catch (IllegalStateException e) {
Logging.e(TAG, "getInputBuffers failed", e);
return VideoCodecStatus.ERROR;
}
VideoFrame.I420Buffer i420 = videoFrameBuffer.toI420();
inputColorFormat.fillBufferFromI420(buffer, i420);
i420.release();
try {
codec.queueInputBuffer(
index, 0 /* offset */, bufferSize, presentationTimestampUs, 0 /* flags */);
} catch (IllegalStateException e) {
Logging.e(TAG, "queueInputBuffer failed", e);
// Keep the output builders in sync with buffers in the codec.
outputBuilders.pollLast();
// IllegalStateException thrown when the codec is in the wrong state.
return VideoCodecStatus.ERROR;
}
return VideoCodecStatus.OK;
}
@Override
public VideoCodecStatus setChannelParameters(short packetLoss, long roundTripTimeMs) {
// No op.
return VideoCodecStatus.OK;
}
@Override
public VideoCodecStatus setRateAllocation(BitrateAllocation bitrateAllocation, int framerate) {
if (framerate > MAX_VIDEO_FRAMERATE) {
framerate = MAX_VIDEO_FRAMERATE;
}
bitrateAdjuster.setTargets(bitrateAllocation.getSum(), framerate);
return updateBitrate();
}
@Override
public ScalingSettings getScalingSettings() {
return new ScalingSettings(automaticResizeOn);
}
@Override
public String getImplementationName() {
return "HardwareVideoEncoder: " + codecName;
}
private VideoCodecStatus resetCodec(int newWidth, int newHeight) {
VideoCodecStatus status = release();
if (status != VideoCodecStatus.OK) {
return status;
}
// Zero bitrate and framerate indicate not to change the targets.
return initEncodeInternal(newWidth, newHeight, 0, 0, callback);
}
private boolean shouldForceKeyFrame(long presentationTimestampNs) {
return forcedKeyFrameNs > 0 && presentationTimestampNs > lastKeyFrameNs + forcedKeyFrameNs;
}
private void requestKeyFrame(long presentationTimestampNs) {
// Ideally MediaCodec would honor BUFFER_FLAG_SYNC_FRAME so we could
// indicate this in queueInputBuffer() below and guarantee _this_ frame
// be encoded as a key frame, but sadly that flag is ignored. Instead,
// we request a key frame "soon".
try {
Bundle b = new Bundle();
b.putInt(MediaCodec.PARAMETER_KEY_REQUEST_SYNC_FRAME, 0);
codec.setParameters(b);
} catch (IllegalStateException e) {
Logging.e(TAG, "requestKeyFrame failed", e);
return;
}
lastKeyFrameNs = presentationTimestampNs;
}
private Thread createOutputThread() {
return new Thread() {
@Override
public void run() {
while (running) {
deliverEncodedImage();
}
releaseCodecOnOutputThread();
}
};
}
private void deliverEncodedImage() {
try {
MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
int index = codec.dequeueOutputBuffer(info, DEQUEUE_OUTPUT_BUFFER_TIMEOUT_US);
if (index < 0) {
return;
}
ByteBuffer codecOutputBuffer = codec.getOutputBuffers()[index];
codecOutputBuffer.position(info.offset);
codecOutputBuffer.limit(info.offset + info.size);
if ((info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
Logging.d(TAG, "Config frame generated. Offset: " + info.offset + ". Size: " + info.size);
configBuffer = ByteBuffer.allocateDirect(info.size);
configBuffer.put(codecOutputBuffer);
} else {
bitrateAdjuster.reportEncodedFrame(info.size);
if (adjustedBitrate != bitrateAdjuster.getAdjustedBitrateBps()) {
updateBitrate();
}
ByteBuffer frameBuffer;
boolean isKeyFrame = (info.flags & MediaCodec.BUFFER_FLAG_SYNC_FRAME) != 0;
if (isKeyFrame && codecType == VideoCodecType.H264) {
Logging.d(TAG,
"Prepending config frame of size " + configBuffer.capacity()
+ " to output buffer with offset " + info.offset + ", size " + info.size);
// For H.264 key frame prepend SPS and PPS NALs at the start.
frameBuffer = ByteBuffer.allocateDirect(info.size + configBuffer.capacity());
configBuffer.rewind();
frameBuffer.put(configBuffer);
} else {
frameBuffer = ByteBuffer.allocateDirect(info.size);
}
frameBuffer.put(codecOutputBuffer);
frameBuffer.rewind();
EncodedImage.FrameType frameType = EncodedImage.FrameType.VideoFrameDelta;
if (isKeyFrame) {
Logging.d(TAG, "Sync frame generated");
frameType = EncodedImage.FrameType.VideoFrameKey;
}
EncodedImage.Builder builder = outputBuilders.poll();
builder.setBuffer(frameBuffer).setFrameType(frameType);
// TODO(mellem): Set codec-specific info.
callback.onEncodedFrame(builder.createEncodedImage(), new CodecSpecificInfo());
}
codec.releaseOutputBuffer(index, false);
} catch (IllegalStateException e) {
Logging.e(TAG, "deliverOutput failed", e);
}
}
private void releaseCodecOnOutputThread() {
Logging.d(TAG, "Releasing MediaCodec on output thread");
try {
codec.stop();
} catch (Exception e) {
Logging.e(TAG, "Media encoder stop failed", e);
}
try {
codec.release();
} catch (Exception e) {
Logging.e(TAG, "Media encoder release failed", e);
// Propagate exceptions caught during release back to the main thread.
shutdownException = e;
}
Logging.d(TAG, "Release on output thread done");
}
private VideoCodecStatus updateBitrate() {
adjustedBitrate = bitrateAdjuster.getAdjustedBitrateBps();
try {
Bundle params = new Bundle();
params.putInt(MediaCodec.PARAMETER_KEY_VIDEO_BITRATE, adjustedBitrate);
codec.setParameters(params);
return VideoCodecStatus.OK;
} catch (IllegalStateException e) {
Logging.e(TAG, "updateBitrate failed", e);
return VideoCodecStatus.ERROR;
}
}
/**
* Enumeration of supported color formats used for MediaCodec's input.
*/
private enum ColorFormat {
I420 {
@Override
void fillBufferFromI420(ByteBuffer buffer, VideoFrame.I420Buffer i420) {
buffer.put(i420.getDataY());
buffer.put(i420.getDataU());
buffer.put(i420.getDataV());
}
},
NV12 {
@Override
void fillBufferFromI420(ByteBuffer buffer, VideoFrame.I420Buffer i420) {
buffer.put(i420.getDataY());
// Interleave the bytes from the U and V portions, starting with U.
ByteBuffer u = i420.getDataU();
ByteBuffer v = i420.getDataV();
while (u.hasRemaining() && v.hasRemaining()) {
buffer.put(u.get());
buffer.put(v.get());
}
}
};
abstract void fillBufferFromI420(ByteBuffer buffer, VideoFrame.I420Buffer i420);
static ColorFormat valueOf(int colorFormat) {
switch (colorFormat) {
case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar:
return I420;
case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar:
case MediaCodecInfo.CodecCapabilities.COLOR_QCOM_FormatYUV420SemiPlanar:
case MediaCodecUtils.COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m:
return NV12;
default:
throw new IllegalArgumentException("Unsupported colorFormat: " + colorFormat);
}
}
}
}
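
The NV12 case above interleaves the U and V planes into the semi-planar layout MediaCodec expects. A minimal standalone sketch of that interleaving, using a 2x2 frame whose chroma planes each hold one sample (class name and values are illustrative, not part of this CL):

import java.nio.ByteBuffer;

/** Illustrative only: mirrors ColorFormat.NV12.fillBufferFromI420 above on a 2x2 frame. */
class Nv12InterleaveDemo {
  public static void main(String[] args) {
    ByteBuffer u = ByteBuffer.wrap(new byte[] {10}); // A 2x2 frame has one U sample...
    ByteBuffer v = ByteBuffer.wrap(new byte[] {20}); // ...and one V sample.
    ByteBuffer out = ByteBuffer.allocate(2);
    while (u.hasRemaining() && v.hasRemaining()) {
      out.put(u.get()); // U first,
      out.put(v.get()); // then V: NV12 is the Y plane followed by interleaved UV pairs.
    }
    // out now holds {10, 20}. NV21, the Android camera format, interleaves V first,
    // which is why other paths in this CL swap U and V when converting camera frames.
  }
}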

View File

@ -0,0 +1,44 @@
/*
* Copyright 2016 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
/**
* Class for holding the native pointer of a histogram. Since there is no way to destroy a
* histogram, please don't create unnecessary instances of this object. This class is thread safe.
*
* Usage example:
* private static final Histogram someMetricHistogram =
* Histogram.createCounts("WebRTC.Video.SomeMetric", 1, 10000, 50);
* someMetricHistogram.addSample(someVariable);
*/
class Histogram {
private final long handle;
private Histogram(long handle) {
this.handle = handle;
}
public static Histogram createCounts(String name, int min, int max, int bucketCount) {
return new Histogram(nativeCreateCounts(name, min, max, bucketCount));
}
public static Histogram createEnumeration(String name, int max) {
return new Histogram(nativeCreateEnumeration(name, max));
}
public void addSample(int sample) {
nativeAddSample(handle, sample);
}
private static native long nativeCreateCounts(String name, int min, int max, int bucketCount);
private static native long nativeCreateEnumeration(String name, int max);
private static native void nativeAddSample(long handle, int sample);
}

View File

@ -0,0 +1,140 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import java.nio.ByteBuffer;
import org.webrtc.VideoFrame.I420Buffer;
/** Implementation of an I420 VideoFrame buffer. */
class I420BufferImpl implements VideoFrame.I420Buffer {
private final int width;
private final int height;
private final ByteBuffer dataY;
private final ByteBuffer dataU;
private final ByteBuffer dataV;
private final int strideY;
private final int strideU;
private final int strideV;
private final Runnable releaseCallback;
private final Object refCountLock = new Object();
private int refCount;
/** Constructs an I420Buffer backed by existing data. */
I420BufferImpl(int width, int height, ByteBuffer dataY, int strideY, ByteBuffer dataU,
int strideU, ByteBuffer dataV, int strideV, Runnable releaseCallback) {
this.width = width;
this.height = height;
this.dataY = dataY;
this.dataU = dataU;
this.dataV = dataV;
this.strideY = strideY;
this.strideU = strideU;
this.strideV = strideV;
this.releaseCallback = releaseCallback;
this.refCount = 1;
}
/** Allocates an empty I420Buffer suitable for an image of the given dimensions. */
static I420BufferImpl allocate(int width, int height) {
int chromaHeight = (height + 1) / 2;
int strideUV = (width + 1) / 2;
int yPos = 0;
int uPos = yPos + width * height;
int vPos = uPos + strideUV * chromaHeight;
ByteBuffer buffer = ByteBuffer.allocateDirect(width * height + 2 * strideUV * chromaHeight);
buffer.position(yPos);
buffer.limit(uPos);
ByteBuffer dataY = buffer.slice();
buffer.position(uPos);
buffer.limit(vPos);
ByteBuffer dataU = buffer.slice();
buffer.position(vPos);
buffer.limit(vPos + strideUV * chromaHeight);
ByteBuffer dataV = buffer.slice();
return new I420BufferImpl(width, height, dataY, width, dataU, strideUV, dataV, strideUV, null);
}
@Override
public int getWidth() {
return width;
}
@Override
public int getHeight() {
return height;
}
@Override
public ByteBuffer getDataY() {
return dataY;
}
@Override
public ByteBuffer getDataU() {
return dataU;
}
@Override
public ByteBuffer getDataV() {
return dataV;
}
@Override
public int getStrideY() {
return strideY;
}
@Override
public int getStrideU() {
return strideU;
}
@Override
public int getStrideV() {
return strideV;
}
@Override
public I420Buffer toI420() {
retain();
return this;
}
@Override
public void retain() {
synchronized (refCountLock) {
++refCount;
}
}
@Override
public void release() {
synchronized (refCountLock) {
if (--refCount == 0 && releaseCallback != null) {
releaseCallback.run();
}
}
}
@Override
public VideoFrame.Buffer cropAndScale(
int cropX, int cropY, int cropWidth, int cropHeight, int scaleWidth, int scaleHeight) {
return VideoFrame.cropAndScaleI420(
this, cropX, cropY, cropWidth, cropHeight, scaleWidth, scaleHeight);
}
}
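
allocate() above packs all three planes into a single direct buffer, rounding the chroma dimensions up so odd sizes are handled. A small standalone sketch of the same offset arithmetic for a 6x4 frame (illustrative only):

/** Illustrative only: the plane offsets computed by I420BufferImpl.allocate for 6x4. */
class I420LayoutDemo {
  public static void main(String[] args) {
    int width = 6, height = 4;
    int chromaHeight = (height + 1) / 2;       // 2 (rounded up for odd heights)
    int strideUV = (width + 1) / 2;            // 3 (rounded up for odd widths)
    int yPos = 0;                              // Y plane starts at offset 0.
    int uPos = yPos + width * height;          // 24: U follows the Y plane.
    int vPos = uPos + strideUV * chromaHeight; // 30: V follows the U plane.
    int capacity = width * height + 2 * strideUV * chromaHeight; // 36 bytes in total.
    System.out.println("uPos=" + uPos + " vPos=" + vPos + " capacity=" + capacity);
  }
}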

View File

@ -0,0 +1,18 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
/** Class with static JNI helper functions that are used in many places. */
class JniCommon {
/** Functions to increment/decrement an rtc::RefCountInterface pointer. */
static native void nativeAddRef(long nativeRefCountedPointer);
static native void nativeReleaseRef(long nativeRefCountedPointer);
}

View File

@ -0,0 +1,78 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import android.annotation.TargetApi;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaCodecInfo.CodecCapabilities;
/** Container class for static constants and helpers used with MediaCodec. */
@TargetApi(18)
class MediaCodecUtils {
private static final String TAG = "MediaCodecUtils";
// Prefixes for supported hardware encoder/decoder component names.
static final String EXYNOS_PREFIX = "OMX.Exynos.";
static final String INTEL_PREFIX = "OMX.Intel.";
static final String NVIDIA_PREFIX = "OMX.Nvidia.";
static final String QCOM_PREFIX = "OMX.qcom.";
// NV12 color format supported by QCOM codec, but not declared in MediaCodec -
// see /hardware/qcom/media/mm-core/inc/OMX_QCOMExtns.h
static final int COLOR_QCOM_FORMATYVU420PackedSemiPlanar32m4ka = 0x7FA30C01;
static final int COLOR_QCOM_FORMATYVU420PackedSemiPlanar16m4ka = 0x7FA30C02;
static final int COLOR_QCOM_FORMATYVU420PackedSemiPlanar64x32Tile2m8ka = 0x7FA30C03;
static final int COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m = 0x7FA30C04;
// Color formats supported by hardware decoder - in order of preference.
static final int[] DECODER_COLOR_FORMATS = new int[] {CodecCapabilities.COLOR_FormatYUV420Planar,
CodecCapabilities.COLOR_FormatYUV420SemiPlanar,
CodecCapabilities.COLOR_QCOM_FormatYUV420SemiPlanar,
MediaCodecUtils.COLOR_QCOM_FORMATYVU420PackedSemiPlanar32m4ka,
MediaCodecUtils.COLOR_QCOM_FORMATYVU420PackedSemiPlanar16m4ka,
MediaCodecUtils.COLOR_QCOM_FORMATYVU420PackedSemiPlanar64x32Tile2m8ka,
MediaCodecUtils.COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m};
// Color formats supported by hardware encoder - in order of preference.
static final int[] ENCODER_COLOR_FORMATS = {
MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar,
MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar,
MediaCodecInfo.CodecCapabilities.COLOR_QCOM_FormatYUV420SemiPlanar,
MediaCodecUtils.COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m};
// Color formats supported by texture mode encoding - in order of preference.
static final int[] TEXTURE_COLOR_FORMATS = {MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface};
static Integer selectColorFormat(int[] supportedColorFormats, CodecCapabilities capabilities) {
for (int supportedColorFormat : supportedColorFormats) {
for (int codecColorFormat : capabilities.colorFormats) {
if (codecColorFormat == supportedColorFormat) {
return codecColorFormat;
}
}
}
return null;
}
static boolean codecSupportsType(MediaCodecInfo info, VideoCodecType type) {
for (String mimeType : info.getSupportedTypes()) {
if (type.mimeType().equals(mimeType)) {
return true;
}
}
return false;
}
private MediaCodecUtils() {
// This class should not be instantiated.
}
}
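
A hedged sketch of how a caller might combine these helpers to probe for a hardware H.264 encoder and pick an input color format. The MediaCodecList enumeration (API 21+) is an assumption for illustration, and the class is presumed to live in org.webrtc since MediaCodecUtils and VideoCodecType are package-private; only the MediaCodecUtils and VideoCodecType calls are taken from the code above:

package org.webrtc;

import android.media.MediaCodecInfo;
import android.media.MediaCodecList;

/** Hedged sketch, not part of this CL. */
class EncoderProbeDemo {
  /** Returns a preferred input color format for a hardware H.264 encoder, or null. */
  static Integer probeH264EncoderColorFormat() {
    for (MediaCodecInfo info : new MediaCodecList(MediaCodecList.ALL_CODECS).getCodecInfos()) {
      if (!info.isEncoder() || !MediaCodecUtils.codecSupportsType(info, VideoCodecType.H264)) {
        continue;
      }
      MediaCodecInfo.CodecCapabilities capabilities =
          info.getCapabilitiesForType(VideoCodecType.H264.mimeType());
      // First match wins: ENCODER_COLOR_FORMATS is ordered by preference.
      return MediaCodecUtils.selectColorFormat(
          MediaCodecUtils.ENCODER_COLOR_FORMATS, capabilities);
    }
    return null; // No hardware H.264 encoder available.
  }
}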

View File

@ -0,0 +1,83 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import java.nio.ByteBuffer;
public class NV12Buffer implements VideoFrame.Buffer {
private final int width;
private final int height;
private final int stride;
private final int sliceHeight;
private final ByteBuffer buffer;
private final Runnable releaseCallback;
private final Object refCountLock = new Object();
private int refCount;
public NV12Buffer(int width, int height, int stride, int sliceHeight, ByteBuffer buffer,
Runnable releaseCallback) {
this.width = width;
this.height = height;
this.stride = stride;
this.sliceHeight = sliceHeight;
this.buffer = buffer;
this.releaseCallback = releaseCallback;
refCount = 1;
}
@Override
public int getWidth() {
return width;
}
@Override
public int getHeight() {
return height;
}
@Override
public VideoFrame.I420Buffer toI420() {
return (VideoFrame.I420Buffer) cropAndScale(0, 0, width, height, width, height);
}
@Override
public void retain() {
synchronized (refCountLock) {
++refCount;
}
}
@Override
public void release() {
synchronized (refCountLock) {
if (--refCount == 0 && releaseCallback != null) {
releaseCallback.run();
}
}
}
@Override
public VideoFrame.Buffer cropAndScale(
int cropX, int cropY, int cropWidth, int cropHeight, int scaleWidth, int scaleHeight) {
I420BufferImpl newBuffer = I420BufferImpl.allocate(scaleWidth, scaleHeight);
nativeCropAndScale(cropX, cropY, cropWidth, cropHeight, scaleWidth, scaleHeight, buffer, width,
height, stride, sliceHeight, newBuffer.getDataY(), newBuffer.getStrideY(),
newBuffer.getDataU(), newBuffer.getStrideU(), newBuffer.getDataV(), newBuffer.getStrideV());
return newBuffer;
}
private static native void nativeCropAndScale(int cropX, int cropY, int cropWidth, int cropHeight,
int scaleWidth, int scaleHeight, ByteBuffer src, int srcWidth, int srcHeight, int srcStride,
int srcSliceHeight, ByteBuffer dstY, int dstStrideY, ByteBuffer dstU, int dstStrideU,
ByteBuffer dstV, int dstStrideV);
}

View File

@ -0,0 +1,77 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import java.nio.ByteBuffer;
public class NV21Buffer implements VideoFrame.Buffer {
private final byte[] data;
private final int width;
private final int height;
private final Runnable releaseCallback;
private final Object refCountLock = new Object();
private int refCount = 1;
public NV21Buffer(byte[] data, int width, int height, Runnable releaseCallback) {
this.data = data;
this.width = width;
this.height = height;
this.releaseCallback = releaseCallback;
}
@Override
public int getWidth() {
return width;
}
@Override
public int getHeight() {
return height;
}
@Override
public VideoFrame.I420Buffer toI420() {
// Cropping converts the frame to I420. Just crop and scale to the whole image.
return (VideoFrame.I420Buffer) cropAndScale(0 /* cropX */, 0 /* cropY */, width /* cropWidth */,
height /* cropHeight */, width /* scaleWidth */, height /* scaleHeight */);
}
@Override
public void retain() {
synchronized (refCountLock) {
++refCount;
}
}
@Override
public void release() {
synchronized (refCountLock) {
if (--refCount == 0 && releaseCallback != null) {
releaseCallback.run();
}
}
}
@Override
public VideoFrame.Buffer cropAndScale(
int cropX, int cropY, int cropWidth, int cropHeight, int scaleWidth, int scaleHeight) {
I420BufferImpl newBuffer = I420BufferImpl.allocate(scaleWidth, scaleHeight);
nativeCropAndScale(cropX, cropY, cropWidth, cropHeight, scaleWidth, scaleHeight, data, width,
height, newBuffer.getDataY(), newBuffer.getStrideY(), newBuffer.getDataU(),
newBuffer.getStrideU(), newBuffer.getDataV(), newBuffer.getStrideV());
return newBuffer;
}
private static native void nativeCropAndScale(int cropX, int cropY, int cropWidth, int cropHeight,
int scaleWidth, int scaleHeight, byte[] src, int srcWidth, int srcHeight, ByteBuffer dstY,
int dstStrideY, ByteBuffer dstU, int dstStrideU, ByteBuffer dstV, int dstStrideV);
}

View File

@ -0,0 +1,139 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import android.graphics.Matrix;
import java.nio.ByteBuffer;
/**
* Android texture buffer backed by a SurfaceTextureHelper's texture. The buffer calls
* |releaseCallback| when it is released.
*/
class TextureBufferImpl implements VideoFrame.TextureBuffer {
private final int width;
private final int height;
private final Type type;
private final int id;
private final Matrix transformMatrix;
private final SurfaceTextureHelper surfaceTextureHelper;
private final Runnable releaseCallback;
private final Object refCountLock = new Object();
private int refCount;
public TextureBufferImpl(int width, int height, Type type, int id, Matrix transformMatrix,
SurfaceTextureHelper surfaceTextureHelper, Runnable releaseCallback) {
this.width = width;
this.height = height;
this.type = type;
this.id = id;
this.transformMatrix = transformMatrix;
this.surfaceTextureHelper = surfaceTextureHelper;
this.releaseCallback = releaseCallback;
this.refCount = 1; // Creator implicitly holds a reference.
}
@Override
public VideoFrame.TextureBuffer.Type getType() {
return type;
}
@Override
public int getTextureId() {
return id;
}
@Override
public Matrix getTransformMatrix() {
return transformMatrix;
}
@Override
public int getWidth() {
return width;
}
@Override
public int getHeight() {
return height;
}
@Override
public VideoFrame.I420Buffer toI420() {
if (type == Type.RGB) {
throw new RuntimeException("toI420 for RGB frames not implemented yet");
}
// SurfaceTextureHelper requires a stride that is divisible by 8. Round width up.
// See SurfaceTextureHelper for details on the size and format.
int stride = ((width + 7) / 8) * 8;
int uvHeight = (height + 1) / 2;
// Due to the layout used by SurfaceTextureHelper, vPos + stride * uvHeight would overrun the
// buffer. Add one row at the bottom to compensate for this. There will never be data in the
// extra row, but now other code does not have to deal with v stride * v height exceeding the
// buffer's capacity.
int size = stride * (height + uvHeight + 1);
ByteBuffer buffer = ByteBuffer.allocateDirect(size);
surfaceTextureHelper.textureToYUV(buffer, width, height, stride, id,
RendererCommon.convertMatrixFromAndroidGraphicsMatrix(transformMatrix));
int yPos = 0;
int uPos = yPos + stride * height;
// Rows of U and V alternate in the buffer, so V data starts after the first row of U.
int vPos = uPos + stride / 2;
buffer.position(yPos);
buffer.limit(yPos + stride * height);
ByteBuffer dataY = buffer.slice();
buffer.position(uPos);
buffer.limit(uPos + stride * uvHeight);
ByteBuffer dataU = buffer.slice();
buffer.position(vPos);
buffer.limit(vPos + stride * uvHeight);
ByteBuffer dataV = buffer.slice();
// SurfaceTextureHelper uses the same stride for Y, U, and V data.
return new I420BufferImpl(width, height, dataY, stride, dataU, stride, dataV, stride, null);
}
@Override
public void retain() {
synchronized (refCountLock) {
++refCount;
}
}
@Override
public void release() {
synchronized (refCountLock) {
if (--refCount == 0 && releaseCallback != null) {
releaseCallback.run();
}
}
}
@Override
public VideoFrame.Buffer cropAndScale(
int cropX, int cropY, int cropWidth, int cropHeight, int scaleWidth, int scaleHeight) {
retain();
Matrix newMatrix = new Matrix(transformMatrix);
newMatrix.postScale(cropWidth / (float) width, cropHeight / (float) height);
newMatrix.postTranslate(cropX / (float) width, cropY / (float) height);
return new TextureBufferImpl(
scaleWidth, scaleHeight, type, id, newMatrix, surfaceTextureHelper, new Runnable() {
@Override
public void run() {
release();
}
});
}
}
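
cropAndScale() above never touches pixels; it only composes the texture transform, so postScale followed by postTranslate maps the unit square onto the crop region. A plain-Java sketch of the same affine composition with illustrative numbers (android.graphics.Matrix.mapPoints would produce the same values):

/** Illustrative only: the affine map cropAndScale builds with postScale + postTranslate. */
class CropMatrixDemo {
  public static void main(String[] args) {
    float width = 640f, height = 480f;
    float cropX = 64f, cropY = 48f, cropWidth = 320f, cropHeight = 240f;
    // postScale then postTranslate compose to p' = p * scale + offset.
    float sx = cropWidth / width, sy = cropHeight / height; // 0.5, 0.5
    float tx = cropX / width, ty = cropY / height;          // 0.1, 0.1
    System.out.printf("(0,0) -> (%.2f, %.2f)%n", 0f * sx + tx, 0f * sy + ty); // (0.10, 0.10)
    System.out.printf("(1,1) -> (%.2f, %.2f)%n", 1f * sx + tx, 1f * sy + ty); // (0.60, 0.60)
    // The unit square of texture coordinates now samples exactly
    // [cropX, cropX + cropWidth) x [cropY, cropY + cropHeight) of the source.
  }
}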

View File

@ -0,0 +1,28 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
/** Enumeration of supported video codec types. */
enum VideoCodecType {
VP8("video/x-vnd.on2.vp8"),
VP9("video/x-vnd.on2.vp9"),
H264("video/avc");
private final String mimeType;
private VideoCodecType(String mimeType) {
this.mimeType = mimeType;
}
String mimeType() {
return mimeType;
}
}

View File

@ -0,0 +1,30 @@
/*
* Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
/**
* VideoDecoder callback that calls VideoDecoderWrapper.OnDecodedFrame for the decoded frames.
*/
class VideoDecoderWrapperCallback implements VideoDecoder.Callback {
private final long nativeDecoder;
public VideoDecoderWrapperCallback(long nativeDecoder) {
this.nativeDecoder = nativeDecoder;
}
@Override
public void onDecodedFrame(VideoFrame frame, Integer decodeTimeMs, Integer qp) {
nativeOnDecodedFrame(nativeDecoder, frame, decodeTimeMs, qp);
}
private static native void nativeOnDecodedFrame(
long nativeDecoder, VideoFrame frame, Integer decodeTimeMs, Integer qp);
}

View File

@ -0,0 +1,35 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import java.nio.ByteBuffer;
/**
* VideoEncoder callback that calls VideoEncoderWrapper.OnEncodedFrame for the encoded frames.
*/
class VideoEncoderWrapperCallback implements VideoEncoder.Callback {
private final long nativeEncoder;
public VideoEncoderWrapperCallback(long nativeEncoder) {
this.nativeEncoder = nativeEncoder;
}
@Override
public void onEncodedFrame(EncodedImage frame, VideoEncoder.CodecSpecificInfo info) {
nativeOnEncodedFrame(nativeEncoder, frame.buffer, frame.encodedWidth, frame.encodedHeight,
frame.captureTimeNs, frame.frameType.getNative(), frame.rotation, frame.completeFrame,
frame.qp);
}
private static native void nativeOnEncodedFrame(long nativeEncoder, ByteBuffer buffer,
int encodedWidth, int encodedHeight, long captureTimeNs, int frameType, int rotation,
boolean completeFrame, Integer qp);
}

View File

@ -0,0 +1,106 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import java.nio.ByteBuffer;
/**
* This class wraps a webrtc::I420BufferInterface into a VideoFrame.I420Buffer.
*/
class WrappedNativeI420Buffer implements VideoFrame.I420Buffer {
private final int width;
private final int height;
private final ByteBuffer dataY;
private final int strideY;
private final ByteBuffer dataU;
private final int strideU;
private final ByteBuffer dataV;
private final int strideV;
private final long nativeBuffer;
WrappedNativeI420Buffer(int width, int height, ByteBuffer dataY, int strideY, ByteBuffer dataU,
int strideU, ByteBuffer dataV, int strideV, long nativeBuffer) {
this.width = width;
this.height = height;
this.dataY = dataY;
this.strideY = strideY;
this.dataU = dataU;
this.strideU = strideU;
this.dataV = dataV;
this.strideV = strideV;
this.nativeBuffer = nativeBuffer;
retain();
}
@Override
public int getWidth() {
return width;
}
@Override
public int getHeight() {
return height;
}
@Override
public ByteBuffer getDataY() {
return dataY;
}
@Override
public ByteBuffer getDataU() {
return dataU;
}
@Override
public ByteBuffer getDataV() {
return dataV;
}
@Override
public int getStrideY() {
return strideY;
}
@Override
public int getStrideU() {
return strideU;
}
@Override
public int getStrideV() {
return strideV;
}
@Override
public VideoFrame.I420Buffer toI420() {
retain();
return this;
}
@Override
public void retain() {
JniCommon.nativeAddRef(nativeBuffer);
}
@Override
public void release() {
JniCommon.nativeReleaseRef(nativeBuffer);
}
@Override
public VideoFrame.Buffer cropAndScale(
int cropX, int cropY, int cropWidth, int cropHeight, int scaleWidth, int scaleHeight) {
return VideoFrame.cropAndScaleI420(
this, cropX, cropY, cropWidth, cropHeight, scaleWidth, scaleHeight);
}
}

View File

@ -0,0 +1,223 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import android.opengl.GLES11Ext;
import android.opengl.GLES20;
import java.nio.ByteBuffer;
import java.nio.FloatBuffer;
/**
* Class for converting OES textures to a YUV ByteBuffer. It should be constructed on a thread with
* an active EGL context, and only be used from that thread.
*/
class YuvConverter {
// Vertex coordinates in Normalized Device Coordinates, i.e.
// (-1, -1) is bottom-left and (1, 1) is top-right.
private static final FloatBuffer DEVICE_RECTANGLE = GlUtil.createFloatBuffer(new float[] {
-1.0f, -1.0f, // Bottom left.
1.0f, -1.0f, // Bottom right.
-1.0f, 1.0f, // Top left.
1.0f, 1.0f, // Top right.
});
// Texture coordinates - (0, 0) is bottom-left and (1, 1) is top-right.
private static final FloatBuffer TEXTURE_RECTANGLE = GlUtil.createFloatBuffer(new float[] {
0.0f, 0.0f, // Bottom left.
1.0f, 0.0f, // Bottom right.
0.0f, 1.0f, // Top left.
1.0f, 1.0f // Top right.
});
// clang-format off
private static final String VERTEX_SHADER =
"varying vec2 interp_tc;\n"
+ "attribute vec4 in_pos;\n"
+ "attribute vec4 in_tc;\n"
+ "\n"
+ "uniform mat4 texMatrix;\n"
+ "\n"
+ "void main() {\n"
+ " gl_Position = in_pos;\n"
+ " interp_tc = (texMatrix * in_tc).xy;\n"
+ "}\n";
private static final String FRAGMENT_SHADER =
"#extension GL_OES_EGL_image_external : require\n"
+ "precision mediump float;\n"
+ "varying vec2 interp_tc;\n"
+ "\n"
+ "uniform samplerExternalOES oesTex;\n"
// Difference in texture coordinate corresponding to one
// sub-pixel in the x direction.
+ "uniform vec2 xUnit;\n"
// Color conversion coefficients, including constant term
+ "uniform vec4 coeffs;\n"
+ "\n"
+ "void main() {\n"
// Since the alpha read from the texture is always 1, this could
// be written as a mat4 x vec4 multiply. However, that seems to
// give a worse framerate, possibly because the additional
// multiplies by 1.0 consume resources. TODO(nisse): Could also
// try to do it as a vec3 x mat3x4, followed by an add in of a
// constant vector.
+ " gl_FragColor.r = coeffs.a + dot(coeffs.rgb,\n"
+ " texture2D(oesTex, interp_tc - 1.5 * xUnit).rgb);\n"
+ " gl_FragColor.g = coeffs.a + dot(coeffs.rgb,\n"
+ " texture2D(oesTex, interp_tc - 0.5 * xUnit).rgb);\n"
+ " gl_FragColor.b = coeffs.a + dot(coeffs.rgb,\n"
+ " texture2D(oesTex, interp_tc + 0.5 * xUnit).rgb);\n"
+ " gl_FragColor.a = coeffs.a + dot(coeffs.rgb,\n"
+ " texture2D(oesTex, interp_tc + 1.5 * xUnit).rgb);\n"
+ "}\n";
// clang-format on
private final GlTextureFrameBuffer textureFrameBuffer;
private final GlShader shader;
private final int texMatrixLoc;
private final int xUnitLoc;
private final int coeffsLoc;
private final ThreadUtils.ThreadChecker threadChecker = new ThreadUtils.ThreadChecker();
private boolean released = false;
/**
* This class should be constructed on a thread that has an active EGL context.
*/
public YuvConverter() {
threadChecker.checkIsOnValidThread();
textureFrameBuffer = new GlTextureFrameBuffer(GLES20.GL_RGBA);
shader = new GlShader(VERTEX_SHADER, FRAGMENT_SHADER);
shader.useProgram();
texMatrixLoc = shader.getUniformLocation("texMatrix");
xUnitLoc = shader.getUniformLocation("xUnit");
coeffsLoc = shader.getUniformLocation("coeffs");
GLES20.glUniform1i(shader.getUniformLocation("oesTex"), 0);
GlUtil.checkNoGLES2Error("Initialize fragment shader uniform values.");
// Initialize vertex shader attributes.
shader.setVertexAttribArray("in_pos", 2, DEVICE_RECTANGLE);
// If the width is not a multiple of 4 pixels, the texture
// will be scaled up slightly and clipped at the right border.
shader.setVertexAttribArray("in_tc", 2, TEXTURE_RECTANGLE);
}
public void convert(ByteBuffer buf, int width, int height, int stride, int srcTextureId,
float[] transformMatrix) {
threadChecker.checkIsOnValidThread();
if (released) {
throw new IllegalStateException("YuvConverter.convert called on released object");
}
// We draw into a buffer laid out like
//
// +---------+
// | |
// | Y |
// | |
// | |
// +----+----+
// | U | V |
// | | |
// +----+----+
//
// In memory, we use the same stride for all of Y, U and V. The
// U data starts at offset |height| * |stride| from the Y data,
// and the V data starts at offset |stride/2| from the U
// data, with rows of U and V data alternating.
//
// Now, it would have made sense to allocate a pixel buffer with
// a single byte per pixel (EGL10.EGL_COLOR_BUFFER_TYPE,
// EGL10.EGL_LUMINANCE_BUFFER), but that seems to be
// unsupported by devices. So do the following hack: Allocate an
// RGBA buffer, of width |stride|/4. To render each of these
// large pixels, sample the texture at 4 different x coordinates
// and store the results in the four components.
//
// Since the V data needs to start on a boundary of such a
// larger pixel, it is not sufficient that |stride| is even; it
// has to be a multiple of 8 pixels.
if (stride % 8 != 0) {
throw new IllegalArgumentException("Invalid stride, must be a multiple of 8");
}
if (stride < width) {
throw new IllegalArgumentException("Invalid stride, must >= width");
}
int y_width = (width + 3) / 4;
int uv_width = (width + 7) / 8;
int uv_height = (height + 1) / 2;
int total_height = height + uv_height;
int size = stride * total_height;
if (buf.capacity() < size) {
throw new IllegalArgumentException("YuvConverter.convert called with too small buffer");
}
// Produce a frame buffer starting at top-left corner, not
// bottom-left.
transformMatrix =
RendererCommon.multiplyMatrices(transformMatrix, RendererCommon.verticalFlipMatrix());
final int frameBufferWidth = stride / 4;
final int frameBufferHeight = total_height;
textureFrameBuffer.setSize(frameBufferWidth, frameBufferHeight);
// Bind our framebuffer.
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, textureFrameBuffer.getFrameBufferId());
GlUtil.checkNoGLES2Error("glBindFramebuffer");
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, srcTextureId);
GLES20.glUniformMatrix4fv(texMatrixLoc, 1, false, transformMatrix, 0);
// Draw Y
GLES20.glViewport(0, 0, y_width, height);
// Matrix * (1;0;0;0) / width. Note that OpenGL uses column-major order.
GLES20.glUniform2f(xUnitLoc, transformMatrix[0] / width, transformMatrix[1] / width);
// Y'UV444 to RGB888, see
// https://en.wikipedia.org/wiki/YUV#Y.27UV444_to_RGB888_conversion.
// We use the ITU-R coefficients for U and V.
GLES20.glUniform4f(coeffsLoc, 0.299f, 0.587f, 0.114f, 0.0f);
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
// Draw U
GLES20.glViewport(0, height, uv_width, uv_height);
// Matrix * (1;0;0;0) / (width / 2). Note that OpenGL uses column-major order.
GLES20.glUniform2f(
xUnitLoc, 2.0f * transformMatrix[0] / width, 2.0f * transformMatrix[1] / width);
GLES20.glUniform4f(coeffsLoc, -0.169f, -0.331f, 0.499f, 0.5f);
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
// Draw V
GLES20.glViewport(stride / 8, height, uv_width, uv_height);
GLES20.glUniform4f(coeffsLoc, 0.499f, -0.418f, -0.0813f, 0.5f);
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
GLES20.glReadPixels(
0, 0, frameBufferWidth, frameBufferHeight, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, buf);
GlUtil.checkNoGLES2Error("YuvConverter.convert");
// Restore normal framebuffer.
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
// Unbind texture. Reportedly needed on some devices to get
// the texture updated from the camera.
GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, 0);
}
public void release() {
threadChecker.checkIsOnValidThread();
released = true;
shader.release();
textureFrameBuffer.release();
}
}
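
The comments in convert() describe a packed layout with one shared stride and a half-height chroma block. A small sketch of the size and offset arithmetic a caller needs when allocating |buf| (638x360 is an illustrative size; the stride rounding matches TextureBufferImpl.toI420 above):

/** Illustrative only: buffer geometry YuvConverter.convert expects for 638x360. */
class YuvLayoutDemo {
  public static void main(String[] args) {
    int width = 638, height = 360;
    int stride = ((width + 7) / 8) * 8; // 640: a multiple of 8 that is >= width.
    int uvHeight = (height + 1) / 2;    // 180: chroma block is half height, rounded up.
    int totalHeight = height + uvHeight;          // 540
    int size = stride * totalHeight;              // 345600: minimum capacity for buf.
    int uPos = stride * height;                   // 230400: U rows start after Y.
    int vPos = uPos + stride / 2;                 // 230720: V alternates, half a stride in.
    System.out.println("stride=" + stride + " size=" + size + " uPos=" + uPos + " vPos=" + vPos);
  }
}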

sdk/android/src/jni/DEPS
View File

@ -0,0 +1,15 @@
include_rules = [
"+third_party/libyuv",
"+webrtc/call/callfactoryinterface.h",
"+webrtc/common_video",
"+webrtc/logging/rtc_event_log/rtc_event_log_factory_interface.h",
"+webrtc/media/base",
"+webrtc/media/engine",
"+webrtc/modules/audio_processing/include/audio_processing.h",
"+webrtc/modules/include",
"+webrtc/modules/utility/include/jvm_android.h",
"+webrtc/modules/video_coding",
"+webrtc/pc",
"+webrtc/system_wrappers/include",
"+webrtc/voice_engine/include/voe_base.h",
]

View File

@ -0,0 +1,4 @@
per-file androidhistogram_jni.cc=sakal@webrtc.org
per-file androidmetrics_jni.cc=sakal@webrtc.org
per-file androidvideotracksource.*=sakal@webrtc.org
per-file androidvideotracksource_jni.cc=sakal@webrtc.org

View File

@ -0,0 +1,57 @@
/*
* Copyright 2016 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include <map>
#include <memory>
#include "webrtc/sdk/android/src/jni/classreferenceholder.h"
#include "webrtc/sdk/android/src/jni/jni_helpers.h"
#include "webrtc/system_wrappers/include/metrics.h"
// Enables the creation and collection of native histograms.
namespace webrtc {
namespace jni {
JNI_FUNCTION_DECLARATION(jlong,
Histogram_nativeCreateCounts,
JNIEnv* jni,
jclass,
jstring j_name,
jint min,
jint max,
jint buckets) {
std::string name = JavaToStdString(jni, j_name);
return jlongFromPointer(
metrics::HistogramFactoryGetCounts(name, min, max, buckets));
}
JNI_FUNCTION_DECLARATION(jlong,
Histogram_nativeCreateEnumeration,
JNIEnv* jni,
jclass,
jstring j_name,
jint max) {
std::string name = JavaToStdString(jni, j_name);
return jlongFromPointer(metrics::HistogramFactoryGetEnumeration(name, max));
}
JNI_FUNCTION_DECLARATION(void,
Histogram_nativeAddSample,
JNIEnv* jni,
jclass,
jlong histogram,
jint sample) {
if (histogram) {
HistogramAdd(reinterpret_cast<metrics::Histogram*>(histogram), sample);
}
}
} // namespace jni
} // namespace webrtc

View File

@ -0,0 +1,89 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_SDK_ANDROID_SRC_JNI_ANDROIDMEDIACODECCOMMON_H_
#define WEBRTC_SDK_ANDROID_SRC_JNI_ANDROIDMEDIACODECCOMMON_H_
#include <android/log.h>
#include <string>
#include "webrtc/rtc_base/logging.h"
#include "webrtc/rtc_base/thread.h"
#include "webrtc/sdk/android/src/jni/classreferenceholder.h"
#include "webrtc/sdk/android/src/jni/jni_helpers.h"
namespace webrtc {
namespace jni {
// Uncomment this define to enable verbose logging for every encoded/decoded
// video frame.
//#define TRACK_BUFFER_TIMING
#define TAG_COMMON "MediaCodecVideo"
// Color formats supported by encoder or decoder - should include all
// colors from supportedColorList in MediaCodecVideoEncoder.java and
// MediaCodecVideoDecoder.java. Supported color format set in encoder
// and decoder could be different.
enum COLOR_FORMATTYPE {
COLOR_FormatYUV420Planar = 0x13,
COLOR_FormatYUV420SemiPlanar = 0x15,
COLOR_QCOM_FormatYUV420SemiPlanar = 0x7FA30C00,
// NV12 color format supported by QCOM codec, but not declared in MediaCodec -
// see /hardware/qcom/media/mm-core/inc/OMX_QCOMExtns.h
// This format is presumably similar to COLOR_FormatYUV420SemiPlanar,
// but requires some (16, 32?) byte alignment.
COLOR_QCOM_FORMATYVU420PackedSemiPlanar32m4ka = 0x7FA30C01,
COLOR_QCOM_FORMATYVU420PackedSemiPlanar16m4ka = 0x7FA30C02,
COLOR_QCOM_FORMATYVU420PackedSemiPlanar64x32Tile2m8ka = 0x7FA30C03,
COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m = 0x7FA30C04
};
// Arbitrary interval to poll the codec for new outputs.
enum { kMediaCodecPollMs = 10 };
// Arbitrary interval to poll at when there should be no more frames.
enum { kMediaCodecPollNoFramesMs = 100 };
// Media codec maximum output buffer ready timeout.
enum { kMediaCodecTimeoutMs = 1000 };
// Interval to print codec statistics (bitrate, fps, encoding/decoding time).
enum { kMediaCodecStatisticsIntervalMs = 3000 };
// Maximum amount of pending frames for VP8 decoder.
enum { kMaxPendingFramesVp8 = 1 };
// Maximum amount of pending frames for VP9 decoder.
enum { kMaxPendingFramesVp9 = 1 };
// Maximum amount of pending frames for H.264 decoder.
enum { kMaxPendingFramesH264 = 4 };
// Maximum amount of decoded frames for which per-frame logging is enabled.
enum { kMaxDecodedLogFrames = 10 };
// Maximum amount of encoded frames for which per-frame logging is enabled.
enum { kMaxEncodedLogFrames = 10 };
static inline void AllowBlockingCalls() {
rtc::Thread* current_thread = rtc::Thread::Current();
if (current_thread != NULL)
current_thread->SetAllowBlockingCalls(true);
}
// Checks for any Java exception, prints stack backtrace and clears
// currently thrown exception.
static inline bool CheckException(JNIEnv* jni) {
if (jni->ExceptionCheck()) {
LOG_TAG(rtc::LS_ERROR, TAG_COMMON) << "Java JNI exception.";
jni->ExceptionDescribe();
jni->ExceptionClear();
return true;
}
return false;
}
} // namespace jni
} // namespace webrtc
#endif // WEBRTC_SDK_ANDROID_SRC_JNI_ANDROIDMEDIACODECCOMMON_H_

File diff suppressed because it is too large

View File

@ -0,0 +1,42 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_SDK_ANDROID_SRC_JNI_ANDROIDMEDIADECODER_JNI_H_
#define WEBRTC_SDK_ANDROID_SRC_JNI_ANDROIDMEDIADECODER_JNI_H_
#include "webrtc/sdk/android/src/jni/jni_helpers.h"
#include "webrtc/media/engine/webrtcvideodecoderfactory.h"
namespace webrtc {
namespace jni {
// Implementation of Android MediaCodec based decoder factory.
class MediaCodecVideoDecoderFactory
: public cricket::WebRtcVideoDecoderFactory {
public:
MediaCodecVideoDecoderFactory();
virtual ~MediaCodecVideoDecoderFactory();
void SetEGLContext(JNIEnv* jni, jobject render_egl_context);
// WebRtcVideoDecoderFactory implementation.
VideoDecoder* CreateVideoDecoder(VideoCodecType type) override;
void DestroyVideoDecoder(VideoDecoder* decoder) override;
private:
jobject egl_context_;
std::vector<VideoCodecType> supported_codec_types_;
};
} // namespace jni
} // namespace webrtc
#endif // WEBRTC_SDK_ANDROID_SRC_JNI_ANDROIDMEDIADECODER_JNI_H_

File diff suppressed because it is too large

View File

@ -0,0 +1,47 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_SDK_ANDROID_SRC_JNI_ANDROIDMEDIAENCODER_JNI_H_
#define WEBRTC_SDK_ANDROID_SRC_JNI_ANDROIDMEDIAENCODER_JNI_H_
#include <vector>
#include "webrtc/sdk/android/src/jni/jni_helpers.h"
#include "webrtc/media/engine/webrtcvideoencoderfactory.h"
namespace webrtc {
namespace jni {
// Implementation of Android MediaCodec based encoder factory.
class MediaCodecVideoEncoderFactory
: public cricket::WebRtcVideoEncoderFactory {
public:
MediaCodecVideoEncoderFactory();
virtual ~MediaCodecVideoEncoderFactory();
void SetEGLContext(JNIEnv* jni, jobject egl_context);
// WebRtcVideoEncoderFactory implementation.
VideoEncoder* CreateVideoEncoder(const cricket::VideoCodec& codec) override;
const std::vector<cricket::VideoCodec>& supported_codecs() const override;
void DestroyVideoEncoder(VideoEncoder* encoder) override;
private:
jobject egl_context_;
// Empty if platform support is lacking, const after ctor returns.
std::vector<cricket::VideoCodec> supported_codecs_;
std::vector<cricket::VideoCodec> supported_codecs_with_h264_hp_;
};
} // namespace jni
} // namespace webrtc
#endif // WEBRTC_SDK_ANDROID_SRC_JNI_ANDROIDMEDIAENCODER_JNI_H_

View File

@ -0,0 +1,65 @@
/*
* Copyright 2016 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include <map>
#include <memory>
#include "webrtc/sdk/android/src/jni/classreferenceholder.h"
#include "webrtc/sdk/android/src/jni/jni_helpers.h"
#include "webrtc/system_wrappers/include/metrics.h"
#include "webrtc/system_wrappers/include/metrics_default.h"
// Enables the creation and collection of native histograms.
namespace webrtc {
namespace jni {
JNI_FUNCTION_DECLARATION(void, Metrics_nativeEnable, JNIEnv* jni, jclass) {
metrics::Enable();
}
// Gets and clears native histograms.
JNI_FUNCTION_DECLARATION(jobject,
Metrics_nativeGetAndReset,
JNIEnv* jni,
jclass) {
jclass j_metrics_class = jni->FindClass("org/webrtc/Metrics");
jmethodID j_add =
GetMethodID(jni, j_metrics_class, "add",
"(Ljava/lang/String;Lorg/webrtc/Metrics$HistogramInfo;)V");
jclass j_info_class = jni->FindClass("org/webrtc/Metrics$HistogramInfo");
jmethodID j_add_sample = GetMethodID(jni, j_info_class, "addSample", "(II)V");
// Create |Metrics|.
jobject j_metrics = jni->NewObject(
j_metrics_class, GetMethodID(jni, j_metrics_class, "<init>", "()V"));
std::map<std::string, std::unique_ptr<metrics::SampleInfo>> histograms;
metrics::GetAndReset(&histograms);
for (const auto& kv : histograms) {
// Create and add samples to |HistogramInfo|.
jobject j_info = jni->NewObject(
j_info_class, GetMethodID(jni, j_info_class, "<init>", "(III)V"),
kv.second->min, kv.second->max,
static_cast<int>(kv.second->bucket_count));
for (const auto& sample : kv.second->samples) {
jni->CallVoidMethod(j_info, j_add_sample, sample.first, sample.second);
}
// Add |HistogramInfo| to |Metrics|.
jstring j_name = jni->NewStringUTF(kv.first.c_str());
jni->CallVoidMethod(j_metrics, j_add, j_name, j_info);
jni->DeleteLocalRef(j_name);
jni->DeleteLocalRef(j_info);
}
CHECK_EXCEPTION(jni);
return j_metrics;
}
} // namespace jni
} // namespace webrtc

View File

@ -0,0 +1,14 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
// TODO(deadbeef): Remove this file when clients are updated to new include
// path.
#include "webrtc/sdk/android/src/jni/pc/androidnetworkmonitor_jni.h"

View File

@ -0,0 +1,211 @@
/*
* Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "webrtc/sdk/android/src/jni/androidvideotracksource.h"
#include <utility>
#include "webrtc/rtc_base/logging.h"
#include "webrtc/sdk/android/src/jni/classreferenceholder.h"
namespace {
// MediaCodec wants resolution to be divisible by 2.
const int kRequiredResolutionAlignment = 2;
}  // namespace
namespace webrtc {
namespace jni {
AndroidVideoTrackSource::AndroidVideoTrackSource(
rtc::Thread* signaling_thread,
JNIEnv* jni,
jobject j_surface_texture_helper,
bool is_screencast)
: AdaptedVideoTrackSource(kRequiredResolutionAlignment),
signaling_thread_(signaling_thread),
surface_texture_helper_(new rtc::RefCountedObject<SurfaceTextureHelper>(
jni,
j_surface_texture_helper)),
video_buffer_factory_(jni),
is_screencast_(is_screencast) {
LOG(LS_INFO) << "AndroidVideoTrackSource ctor";
camera_thread_checker_.DetachFromThread();
jclass j_video_frame_buffer_class =
FindClass(jni, "org/webrtc/VideoFrame$Buffer");
j_crop_and_scale_id_ =
jni->GetMethodID(j_video_frame_buffer_class, "cropAndScale",
"(IIIIII)Lorg/webrtc/VideoFrame$Buffer;");
}
void AndroidVideoTrackSource::SetState(SourceState state) {
if (rtc::Thread::Current() != signaling_thread_) {
invoker_.AsyncInvoke<void>(
RTC_FROM_HERE, signaling_thread_,
rtc::Bind(&AndroidVideoTrackSource::SetState, this, state));
return;
}
if (state_ != state) {
state_ = state;
FireOnChanged();
}
}
void AndroidVideoTrackSource::OnByteBufferFrameCaptured(const void* frame_data,
int length,
int width,
int height,
VideoRotation rotation,
int64_t timestamp_ns) {
RTC_DCHECK(camera_thread_checker_.CalledOnValidThread());
int64_t camera_time_us = timestamp_ns / rtc::kNumNanosecsPerMicrosec;
int64_t translated_camera_time_us =
timestamp_aligner_.TranslateTimestamp(camera_time_us, rtc::TimeMicros());
int adapted_width;
int adapted_height;
int crop_width;
int crop_height;
int crop_x;
int crop_y;
if (!AdaptFrame(width, height, camera_time_us, &adapted_width,
&adapted_height, &crop_width, &crop_height, &crop_x,
&crop_y)) {
return;
}
const uint8_t* y_plane = static_cast<const uint8_t*>(frame_data);
const uint8_t* uv_plane = y_plane + width * height;
const int uv_width = (width + 1) / 2;
RTC_CHECK_GE(length, width * height + 2 * uv_width * ((height + 1) / 2));
// Can only crop at even pixels.
crop_x &= ~1;
crop_y &= ~1;
// Crop just by modifying pointers.
y_plane += width * crop_y + crop_x;
uv_plane += uv_width * crop_y + crop_x;
rtc::scoped_refptr<I420Buffer> buffer =
buffer_pool_.CreateBuffer(adapted_width, adapted_height);
nv12toi420_scaler_.NV12ToI420Scale(
y_plane, width, uv_plane, uv_width * 2, crop_width, crop_height,
buffer->MutableDataY(), buffer->StrideY(),
// Swap U and V, since we have NV21, not NV12.
buffer->MutableDataV(), buffer->StrideV(), buffer->MutableDataU(),
buffer->StrideU(), buffer->width(), buffer->height());
OnFrame(VideoFrame(buffer, rotation, translated_camera_time_us));
}
void AndroidVideoTrackSource::OnTextureFrameCaptured(
int width,
int height,
VideoRotation rotation,
int64_t timestamp_ns,
const NativeHandleImpl& handle) {
RTC_DCHECK(camera_thread_checker_.CalledOnValidThread());
int64_t camera_time_us = timestamp_ns / rtc::kNumNanosecsPerMicrosec;
int64_t translated_camera_time_us =
timestamp_aligner_.TranslateTimestamp(camera_time_us, rtc::TimeMicros());
int adapted_width;
int adapted_height;
int crop_width;
int crop_height;
int crop_x;
int crop_y;
if (!AdaptFrame(width, height, camera_time_us, &adapted_width,
&adapted_height, &crop_width, &crop_height, &crop_x,
&crop_y)) {
surface_texture_helper_->ReturnTextureFrame();
return;
}
Matrix matrix = handle.sampling_matrix;
matrix.Crop(crop_width / static_cast<float>(width),
crop_height / static_cast<float>(height),
crop_x / static_cast<float>(width),
crop_y / static_cast<float>(height));
// Note that apply_rotation() may change under our feet, so we should only
// check once.
if (apply_rotation()) {
if (rotation == kVideoRotation_90 || rotation == kVideoRotation_270) {
std::swap(adapted_width, adapted_height);
}
matrix.Rotate(rotation);
rotation = kVideoRotation_0;
}
OnFrame(VideoFrame(surface_texture_helper_->CreateTextureFrame(
adapted_width, adapted_height,
NativeHandleImpl(handle.oes_texture_id, matrix)),
rotation, translated_camera_time_us));
}
void AndroidVideoTrackSource::OnFrameCaptured(JNIEnv* jni,
int width,
int height,
int64_t timestamp_ns,
VideoRotation rotation,
jobject j_video_frame_buffer) {
RTC_DCHECK(camera_thread_checker_.CalledOnValidThread());
int64_t camera_time_us = timestamp_ns / rtc::kNumNanosecsPerMicrosec;
int64_t translated_camera_time_us =
timestamp_aligner_.TranslateTimestamp(camera_time_us, rtc::TimeMicros());
int adapted_width;
int adapted_height;
int crop_width;
int crop_height;
int crop_x;
int crop_y;
if (!AdaptFrame(width, height, camera_time_us, &adapted_width,
&adapted_height, &crop_width, &crop_height, &crop_x,
&crop_y)) {
return;
}
jobject j_adapted_video_frame_buffer = jni->CallObjectMethod(
j_video_frame_buffer, j_crop_and_scale_id_, crop_x, crop_y, crop_width,
crop_height, adapted_width, adapted_height);
rtc::scoped_refptr<VideoFrameBuffer> buffer =
video_buffer_factory_.WrapBuffer(jni, j_adapted_video_frame_buffer);
// AdaptedVideoTrackSource handles applying rotation for I420 frames.
if (apply_rotation()) {
buffer = buffer->ToI420();
}
OnFrame(VideoFrame(buffer, rotation, translated_camera_time_us));
}
void AndroidVideoTrackSource::OnOutputFormatRequest(int width,
int height,
int fps) {
cricket::VideoFormat format(width, height,
cricket::VideoFormat::FpsToInterval(fps), 0);
video_adapter()->OnOutputFormatRequest(format);
}
} // namespace jni
} // namespace webrtc

View File

@ -0,0 +1,96 @@
/*
* Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_API_ANDROID_JNI_ANDROIDVIDEOTRACKSOURCE_H_
#define WEBRTC_API_ANDROID_JNI_ANDROIDVIDEOTRACKSOURCE_H_
#include <jni.h>
#include "webrtc/common_video/include/i420_buffer_pool.h"
#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
#include "webrtc/media/base/adaptedvideotracksource.h"
#include "webrtc/rtc_base/asyncinvoker.h"
#include "webrtc/rtc_base/checks.h"
#include "webrtc/rtc_base/thread_checker.h"
#include "webrtc/rtc_base/timestampaligner.h"
#include "webrtc/sdk/android/src/jni/native_handle_impl.h"
#include "webrtc/sdk/android/src/jni/surfacetexturehelper_jni.h"
namespace webrtc {
namespace jni {
class AndroidVideoTrackSource : public rtc::AdaptedVideoTrackSource {
public:
AndroidVideoTrackSource(rtc::Thread* signaling_thread,
JNIEnv* jni,
jobject j_surface_texture_helper,
bool is_screencast = false);
bool is_screencast() const override { return is_screencast_; }
// Indicates that the encoder should denoise video before encoding it.
// If it is not set, the default configuration is used which is different
// depending on video codec.
rtc::Optional<bool> needs_denoising() const override {
return rtc::Optional<bool>(false);
}
// Called by the native capture observer
void SetState(SourceState state);
SourceState state() const override { return state_; }
bool remote() const override { return false; }
void OnByteBufferFrameCaptured(const void* frame_data,
int length,
int width,
int height,
VideoRotation rotation,
int64_t timestamp_ns);
void OnTextureFrameCaptured(int width,
int height,
VideoRotation rotation,
int64_t timestamp_ns,
const NativeHandleImpl& handle);
void OnFrameCaptured(JNIEnv* jni,
int width,
int height,
int64_t timestamp_ns,
VideoRotation rotation,
jobject j_video_frame_buffer);
void OnOutputFormatRequest(int width, int height, int fps);
rtc::scoped_refptr<SurfaceTextureHelper> surface_texture_helper() {
return surface_texture_helper_;
}
private:
rtc::Thread* signaling_thread_;
rtc::AsyncInvoker invoker_;
rtc::ThreadChecker camera_thread_checker_;
SourceState state_;
rtc::TimestampAligner timestamp_aligner_;
NV12ToI420Scaler nv12toi420_scaler_;
I420BufferPool buffer_pool_;
rtc::scoped_refptr<SurfaceTextureHelper> surface_texture_helper_;
AndroidVideoBufferFactory video_buffer_factory_;
const bool is_screencast_;
jmethodID j_crop_and_scale_id_;
};
} // namespace jni
} // namespace webrtc
#endif // WEBRTC_API_ANDROID_JNI_ANDROIDVIDEOTRACKSOURCE_H_

View File

@ -0,0 +1,132 @@
/*
* Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "webrtc/api/video/video_rotation.h"
#include "webrtc/api/videosourceproxy.h"
#include "webrtc/rtc_base/logging.h"
#include "webrtc/sdk/android/src/jni/androidvideotracksource.h"
#include "webrtc/sdk/android/src/jni/classreferenceholder.h"
namespace webrtc {
namespace {
static VideoRotation jintToVideoRotation(jint rotation) {
RTC_DCHECK(rotation == 0 || rotation == 90 || rotation == 180 ||
rotation == 270);
return static_cast<VideoRotation>(rotation);
}
} // namespace
namespace jni {
static AndroidVideoTrackSource* AndroidVideoTrackSourceFromJavaProxy(
jlong j_proxy) {
auto proxy_source = reinterpret_cast<VideoTrackSourceProxy*>(j_proxy);
return reinterpret_cast<AndroidVideoTrackSource*>(proxy_source->internal());
}
JNI_FUNCTION_DECLARATION(
void,
AndroidVideoTrackSourceObserver_nativeOnByteBufferFrameCaptured,
JNIEnv* jni,
jclass,
jlong j_source,
jbyteArray j_frame,
jint length,
jint width,
jint height,
jint rotation,
jlong timestamp) {
AndroidVideoTrackSource* source =
AndroidVideoTrackSourceFromJavaProxy(j_source);
jbyte* bytes = jni->GetByteArrayElements(j_frame, nullptr);
source->OnByteBufferFrameCaptured(bytes, length, width, height,
jintToVideoRotation(rotation), timestamp);
jni->ReleaseByteArrayElements(j_frame, bytes, JNI_ABORT);
}
JNI_FUNCTION_DECLARATION(
void,
AndroidVideoTrackSourceObserver_nativeOnTextureFrameCaptured,
JNIEnv* jni,
jclass,
jlong j_source,
jint j_width,
jint j_height,
jint j_oes_texture_id,
jfloatArray j_transform_matrix,
jint j_rotation,
jlong j_timestamp) {
AndroidVideoTrackSource* source =
AndroidVideoTrackSourceFromJavaProxy(j_source);
source->OnTextureFrameCaptured(
j_width, j_height, jintToVideoRotation(j_rotation), j_timestamp,
NativeHandleImpl(jni, j_oes_texture_id, j_transform_matrix));
}
JNI_FUNCTION_DECLARATION(void,
AndroidVideoTrackSourceObserver_nativeOnFrameCaptured,
JNIEnv* jni,
jclass,
jlong j_source,
jint j_width,
jint j_height,
jint j_rotation,
jlong j_timestamp_ns,
jobject j_video_frame_buffer) {
AndroidVideoTrackSource* source =
AndroidVideoTrackSourceFromJavaProxy(j_source);
source->OnFrameCaptured(jni, j_width, j_height, j_timestamp_ns,
jintToVideoRotation(j_rotation),
j_video_frame_buffer);
}
JNI_FUNCTION_DECLARATION(void,
AndroidVideoTrackSourceObserver_nativeCapturerStarted,
JNIEnv* jni,
jclass,
jlong j_source,
jboolean j_success) {
LOG(LS_INFO) << "AndroidVideoTrackSourceObserve_nativeCapturerStarted";
AndroidVideoTrackSource* source =
AndroidVideoTrackSourceFromJavaProxy(j_source);
source->SetState(j_success ? AndroidVideoTrackSource::SourceState::kLive
: AndroidVideoTrackSource::SourceState::kEnded);
}
JNI_FUNCTION_DECLARATION(void,
AndroidVideoTrackSourceObserver_nativeCapturerStopped,
JNIEnv* jni,
jclass,
jlong j_source) {
LOG(LS_INFO) << "AndroidVideoTrackSourceObserve_nativeCapturerStopped";
AndroidVideoTrackSource* source =
AndroidVideoTrackSourceFromJavaProxy(j_source);
source->SetState(AndroidVideoTrackSource::SourceState::kEnded);
}
JNI_FUNCTION_DECLARATION(void,
VideoSource_nativeAdaptOutputFormat,
JNIEnv* jni,
jclass,
jlong j_source,
jint j_width,
jint j_height,
jint j_fps) {
LOG(LS_INFO) << "VideoSource_nativeAdaptOutputFormat";
AndroidVideoTrackSource* source =
AndroidVideoTrackSourceFromJavaProxy(j_source);
source->OnOutputFormatRequest(j_width, j_height, j_fps);
}
} // namespace jni
} // namespace webrtc

View File

@ -0,0 +1,165 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "webrtc/sdk/android/src/jni/classreferenceholder.h"
#include "webrtc/sdk/android/src/jni/jni_helpers.h"
namespace webrtc {
namespace jni {
// ClassReferenceHolder holds global references to Java classes in app/webrtc.
class ClassReferenceHolder {
public:
explicit ClassReferenceHolder(JNIEnv* jni);
~ClassReferenceHolder();
void FreeReferences(JNIEnv* jni);
jclass GetClass(const std::string& name);
private:
void LoadClass(JNIEnv* jni, const std::string& name);
std::map<std::string, jclass> classes_;
};
// Allocated in LoadGlobalClassReferenceHolder(),
// freed in FreeGlobalClassReferenceHolder().
static ClassReferenceHolder* g_class_reference_holder = nullptr;
void LoadGlobalClassReferenceHolder() {
RTC_CHECK(g_class_reference_holder == nullptr);
g_class_reference_holder = new ClassReferenceHolder(GetEnv());
}
void FreeGlobalClassReferenceHolder() {
g_class_reference_holder->FreeReferences(AttachCurrentThreadIfNeeded());
delete g_class_reference_holder;
g_class_reference_holder = nullptr;
}
ClassReferenceHolder::ClassReferenceHolder(JNIEnv* jni) {
LoadClass(jni, "android/graphics/SurfaceTexture");
LoadClass(jni, "java/lang/Boolean");
LoadClass(jni, "java/lang/Double");
LoadClass(jni, "java/lang/Integer");
LoadClass(jni, "java/lang/Long");
LoadClass(jni, "java/lang/String");
LoadClass(jni, "java/math/BigInteger");
LoadClass(jni, "java/nio/ByteBuffer");
LoadClass(jni, "java/util/ArrayList");
LoadClass(jni, "java/util/LinkedHashMap");
LoadClass(jni, "org/webrtc/AudioTrack");
LoadClass(jni, "org/webrtc/Camera1Enumerator");
LoadClass(jni, "org/webrtc/Camera2Enumerator");
LoadClass(jni, "org/webrtc/CameraEnumerationAndroid");
LoadClass(jni, "org/webrtc/DataChannel");
LoadClass(jni, "org/webrtc/DataChannel$Buffer");
LoadClass(jni, "org/webrtc/DataChannel$Init");
LoadClass(jni, "org/webrtc/DataChannel$State");
LoadClass(jni, "org/webrtc/EglBase");
LoadClass(jni, "org/webrtc/EglBase$Context");
LoadClass(jni, "org/webrtc/EglBase14$Context");
LoadClass(jni, "org/webrtc/EncodedImage");
LoadClass(jni, "org/webrtc/EncodedImage$FrameType");
LoadClass(jni, "org/webrtc/IceCandidate");
LoadClass(jni, "org/webrtc/MediaCodecVideoDecoder");
LoadClass(jni, "org/webrtc/MediaCodecVideoDecoder$DecodedOutputBuffer");
LoadClass(jni, "org/webrtc/MediaCodecVideoDecoder$DecodedTextureBuffer");
LoadClass(jni, "org/webrtc/MediaCodecVideoDecoder$VideoCodecType");
LoadClass(jni, "org/webrtc/MediaCodecVideoEncoder");
LoadClass(jni, "org/webrtc/MediaCodecVideoEncoder$OutputBufferInfo");
LoadClass(jni, "org/webrtc/MediaCodecVideoEncoder$VideoCodecType");
LoadClass(jni, "org/webrtc/MediaSource$State");
LoadClass(jni, "org/webrtc/MediaStream");
LoadClass(jni, "org/webrtc/MediaStreamTrack$MediaType");
LoadClass(jni, "org/webrtc/MediaStreamTrack$State");
LoadClass(jni, "org/webrtc/NetworkMonitor");
LoadClass(jni, "org/webrtc/NetworkMonitorAutoDetect$ConnectionType");
LoadClass(jni, "org/webrtc/NetworkMonitorAutoDetect$IPAddress");
LoadClass(jni, "org/webrtc/NetworkMonitorAutoDetect$NetworkInformation");
LoadClass(jni, "org/webrtc/PeerConnection$BundlePolicy");
LoadClass(jni, "org/webrtc/PeerConnection$CandidateNetworkPolicy");
LoadClass(jni, "org/webrtc/PeerConnection$ContinualGatheringPolicy");
LoadClass(jni, "org/webrtc/PeerConnection$IceConnectionState");
LoadClass(jni, "org/webrtc/PeerConnection$IceGatheringState");
LoadClass(jni, "org/webrtc/PeerConnection$IceTransportsType");
LoadClass(jni, "org/webrtc/PeerConnection$KeyType");
LoadClass(jni, "org/webrtc/PeerConnection$RtcpMuxPolicy");
LoadClass(jni, "org/webrtc/PeerConnection$SignalingState");
LoadClass(jni, "org/webrtc/PeerConnection$TcpCandidatePolicy");
LoadClass(jni, "org/webrtc/PeerConnection$TlsCertPolicy");
LoadClass(jni, "org/webrtc/PeerConnectionFactory");
LoadClass(jni, "org/webrtc/RTCStats");
LoadClass(jni, "org/webrtc/RTCStatsReport");
LoadClass(jni, "org/webrtc/RtpReceiver");
LoadClass(jni, "org/webrtc/RtpSender");
LoadClass(jni, "org/webrtc/SessionDescription");
LoadClass(jni, "org/webrtc/SessionDescription$Type");
LoadClass(jni, "org/webrtc/StatsReport");
LoadClass(jni, "org/webrtc/StatsReport$Value");
LoadClass(jni, "org/webrtc/SurfaceTextureHelper");
LoadClass(jni, "org/webrtc/VideoCapturer");
LoadClass(jni, "org/webrtc/VideoCodecInfo");
LoadClass(jni, "org/webrtc/VideoCodecStatus");
LoadClass(jni, "org/webrtc/VideoDecoder$Settings");
LoadClass(jni, "org/webrtc/VideoDecoderWrapperCallback");
LoadClass(jni, "org/webrtc/VideoEncoder");
LoadClass(jni, "org/webrtc/VideoEncoder$BitrateAllocation");
LoadClass(jni, "org/webrtc/VideoEncoder$EncodeInfo");
LoadClass(jni, "org/webrtc/VideoEncoder$ScalingSettings");
LoadClass(jni, "org/webrtc/VideoEncoder$Settings");
LoadClass(jni, "org/webrtc/VideoEncoderWrapperCallback");
LoadClass(jni, "org/webrtc/VideoFrame");
LoadClass(jni, "org/webrtc/VideoFrame$Buffer");
LoadClass(jni, "org/webrtc/VideoFrame$I420Buffer");
LoadClass(jni, "org/webrtc/VideoFrame$TextureBuffer");
LoadClass(jni, "org/webrtc/VideoRenderer$I420Frame");
LoadClass(jni, "org/webrtc/VideoSink");
LoadClass(jni, "org/webrtc/VideoTrack");
LoadClass(jni, "org/webrtc/WrappedNativeI420Buffer");
}
ClassReferenceHolder::~ClassReferenceHolder() {
RTC_CHECK(classes_.empty()) << "Must call FreeReferences() before dtor!";
}
void ClassReferenceHolder::FreeReferences(JNIEnv* jni) {
for (std::map<std::string, jclass>::const_iterator it = classes_.begin();
it != classes_.end(); ++it) {
jni->DeleteGlobalRef(it->second);
}
classes_.clear();
}
jclass ClassReferenceHolder::GetClass(const std::string& name) {
std::map<std::string, jclass>::iterator it = classes_.find(name);
RTC_CHECK(it != classes_.end()) << "Unexpected GetClass() call for: " << name;
return it->second;
}
void ClassReferenceHolder::LoadClass(JNIEnv* jni, const std::string& name) {
jclass localRef = jni->FindClass(name.c_str());
CHECK_EXCEPTION(jni) << "error during FindClass: " << name;
RTC_CHECK(localRef) << name;
jclass globalRef = reinterpret_cast<jclass>(jni->NewGlobalRef(localRef));
CHECK_EXCEPTION(jni) << "error during NewGlobalRef: " << name;
RTC_CHECK(globalRef) << name;
bool inserted = classes_.insert(std::make_pair(name, globalRef)).second;
RTC_CHECK(inserted) << "Duplicate class name: " << name;
}
// Returns a global reference guaranteed to be valid for the lifetime of the
// process.
jclass FindClass(JNIEnv* jni, const char* name) {
return g_class_reference_holder->GetClass(name);
}
} // namespace jni
} // namespace webrtc

View File

@ -0,0 +1,46 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
// Android's FindClass() is trickier than usual because the app-specific
// ClassLoader is not consulted when there is no app-specific frame on the
// stack. Consequently, we only look up all classes once in app/webrtc.
// http://developer.android.com/training/articles/perf-jni.html#faq_FindClass
#ifndef WEBRTC_SDK_ANDROID_SRC_JNI_CLASSREFERENCEHOLDER_H_
#define WEBRTC_SDK_ANDROID_SRC_JNI_CLASSREFERENCEHOLDER_H_
#include <jni.h>
#include <map>
#include <string>
namespace webrtc {
namespace jni {
// LoadGlobalClassReferenceHolder must be called in JNI_OnLoad.
void LoadGlobalClassReferenceHolder();
// FreeGlobalClassReferenceHolder must be called in JNI_OnUnload.
void FreeGlobalClassReferenceHolder();
// Returns a global reference guaranteed to be valid for the lifetime of the
// process.
jclass FindClass(JNIEnv* jni, const char* name);
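// Usage sketch (illustrative): the returned reference is owned by the holder,
// so callers never release it themselves.
//   jclass list_class = FindClass(jni, "java/util/ArrayList");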
} // namespace jni
} // namespace webrtc
// TODO(magjed): Remove once external clients are updated.
namespace webrtc_jni {
using webrtc::jni::LoadGlobalClassReferenceHolder;
using webrtc::jni::FreeGlobalClassReferenceHolder;
} // namespace webrtc_jni
#endif // WEBRTC_SDK_ANDROID_SRC_JNI_CLASSREFERENCEHOLDER_H_

View File

@ -0,0 +1,62 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include <jni.h>
#include "third_party/libyuv/include/libyuv/convert_from.h"
#include "webrtc/rtc_base/checks.h"
#include "webrtc/rtc_base/logging.h"
namespace webrtc {
namespace jni {
extern "C" JNIEXPORT void JNICALL
Java_org_webrtc_FileVideoCapturer_nativeI420ToNV21(JNIEnv* jni,
jclass,
jbyteArray j_src_buffer,
jint width,
jint height,
jbyteArray j_dst_buffer) {
size_t src_size = jni->GetArrayLength(j_src_buffer);
size_t dst_size = jni->GetArrayLength(j_dst_buffer);
int src_stride = width;
int dst_stride = width;
RTC_CHECK_GE(src_size, src_stride * height * 3 / 2);
RTC_CHECK_GE(dst_size, dst_stride * height * 3 / 2);
jbyte* src_bytes = jni->GetByteArrayElements(j_src_buffer, 0);
uint8_t* src = reinterpret_cast<uint8_t*>(src_bytes);
jbyte* dst_bytes = jni->GetByteArrayElements(j_dst_buffer, 0);
uint8_t* dst = reinterpret_cast<uint8_t*>(dst_bytes);
uint8_t* src_y = src;
size_t src_stride_y = src_stride;
uint8_t* src_u = src + src_stride * height;
size_t src_stride_u = src_stride / 2;
uint8_t* src_v = src + src_stride * height * 5 / 4;
size_t src_stride_v = src_stride / 2;
uint8_t* dst_y = dst;
size_t dst_stride_y = dst_stride;
size_t dst_stride_uv = dst_stride;
uint8_t* dst_uv = dst + dst_stride * height;
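// Layout sketch (illustrative): for width = 4, height = 4, stride = 4, the
// 24-byte I420 source holds Y in [0, 16), U in [16, 20) and V in [20, 24);
// the 24-byte NV21 destination holds Y in [0, 16) followed by interleaved
// V/U pairs in [16, 24).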
int ret = libyuv::I420ToNV21(src_y, src_stride_y, src_u, src_stride_u, src_v,
src_stride_v, dst_y, dst_stride_y, dst_uv,
dst_stride_uv, width, height);
jni->ReleaseByteArrayElements(j_src_buffer, src_bytes, 0);
jni->ReleaseByteArrayElements(j_dst_buffer, dst_bytes, 0);
if (ret) {
LOG(LS_ERROR) << "Error converting I420 frame to NV21: " << ret;
}
}
} // namespace jni
} // namespace webrtc

View File

@ -0,0 +1,34 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "webrtc/rtc_base/refcount.h"
#include "webrtc/sdk/android/src/jni/jni_helpers.h"
namespace webrtc_jni {
JNI_FUNCTION_DECLARATION(void,
JniCommon_nativeAddRef,
JNIEnv* jni,
jclass,
jlong j_native_ref_counted_pointer) {
reinterpret_cast<rtc::RefCountInterface*>(j_native_ref_counted_pointer)
->AddRef();
}
JNI_FUNCTION_DECLARATION(void,
JniCommon_nativeReleaseRef,
JNIEnv* jni,
jclass,
jlong j_native_ref_counted_pointer) {
reinterpret_cast<rtc::RefCountInterface*>(j_native_ref_counted_pointer)
->Release();
}
} // namespace webrtc_jni

View File

@ -0,0 +1,437 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "webrtc/sdk/android/src/jni/jni_helpers.h"
#include <asm/unistd.h>
#include <sys/prctl.h>
#include <sys/syscall.h>
#include <unistd.h>
#include <vector>
#include "webrtc/sdk/android/src/jni/classreferenceholder.h"
namespace webrtc {
namespace jni {
static JavaVM* g_jvm = nullptr;
static pthread_once_t g_jni_ptr_once = PTHREAD_ONCE_INIT;
// Key for per-thread JNIEnv* data. Non-NULL in threads attached to |g_jvm| by
// AttachCurrentThreadIfNeeded(), NULL in unattached threads and threads that
// were attached by the JVM because of a Java->native call.
static pthread_key_t g_jni_ptr;
JavaVM *GetJVM() {
RTC_CHECK(g_jvm) << "JNI_OnLoad failed to run?";
return g_jvm;
}
// Return a |JNIEnv*| usable on this thread or NULL if this thread is detached.
JNIEnv* GetEnv() {
void* env = nullptr;
jint status = g_jvm->GetEnv(&env, JNI_VERSION_1_6);
RTC_CHECK(((env != nullptr) && (status == JNI_OK)) ||
((env == nullptr) && (status == JNI_EDETACHED)))
<< "Unexpected GetEnv return: " << status << ":" << env;
return reinterpret_cast<JNIEnv*>(env);
}
static void ThreadDestructor(void* prev_jni_ptr) {
// This function only runs on threads where |g_jni_ptr| is non-NULL, meaning
// we were responsible for originally attaching the thread, so are responsible
// for detaching it now. However, because some JVM implementations (notably
// Oracle's http://goo.gl/eHApYT) also use the pthread_key_create mechanism,
// the JVM's accounting info for this thread may already be wiped out by the
// time this is called. Thus it may appear we are already detached even though
// it was our responsibility to detach! Oh well.
if (!GetEnv())
return;
RTC_CHECK(GetEnv() == prev_jni_ptr)
<< "Detaching from another thread: " << prev_jni_ptr << ":" << GetEnv();
jint status = g_jvm->DetachCurrentThread();
RTC_CHECK(status == JNI_OK) << "Failed to detach thread: " << status;
RTC_CHECK(!GetEnv()) << "Detaching was a successful no-op???";
}
static void CreateJNIPtrKey() {
RTC_CHECK(!pthread_key_create(&g_jni_ptr, &ThreadDestructor))
<< "pthread_key_create";
}
jint InitGlobalJniVariables(JavaVM *jvm) {
RTC_CHECK(!g_jvm) << "InitGlobalJniVariables!";
g_jvm = jvm;
RTC_CHECK(g_jvm) << "InitGlobalJniVariables handed NULL?";
RTC_CHECK(!pthread_once(&g_jni_ptr_once, &CreateJNIPtrKey)) << "pthread_once";
JNIEnv* jni = nullptr;
if (jvm->GetEnv(reinterpret_cast<void**>(&jni), JNI_VERSION_1_6) != JNI_OK)
return -1;
return JNI_VERSION_1_6;
}
// Return thread ID as a string.
static std::string GetThreadId() {
char buf[21]; // Big enough to hold a kuint64max plus terminating NUL.
RTC_CHECK_LT(snprintf(buf, sizeof(buf), "%ld",
static_cast<long>(syscall(__NR_gettid))),
sizeof(buf))
<< "Thread id is bigger than uint64??";
return std::string(buf);
}
// Return the current thread's name.
static std::string GetThreadName() {
char name[17] = {0};
if (prctl(PR_GET_NAME, name) != 0)
return std::string("<noname>");
return std::string(name);
}
// Return a |JNIEnv*| usable on this thread. Attaches to |g_jvm| if necessary.
JNIEnv* AttachCurrentThreadIfNeeded() {
JNIEnv* jni = GetEnv();
if (jni)
return jni;
RTC_CHECK(!pthread_getspecific(g_jni_ptr))
<< "TLS has a JNIEnv* but not attached?";
std::string name(GetThreadName() + " - " + GetThreadId());
JavaVMAttachArgs args;
args.version = JNI_VERSION_1_6;
args.name = &name[0];
args.group = nullptr;
// Deal with difference in signatures between Oracle's jni.h and Android's.
#ifdef _JAVASOFT_JNI_H_ // Oracle's jni.h violates the JNI spec!
void* env = nullptr;
#else
JNIEnv* env = nullptr;
#endif
RTC_CHECK(!g_jvm->AttachCurrentThread(&env, &args))
<< "Failed to attach thread";
RTC_CHECK(env) << "AttachCurrentThread handed back NULL!";
jni = reinterpret_cast<JNIEnv*>(env);
RTC_CHECK(!pthread_setspecific(g_jni_ptr, jni)) << "pthread_setspecific";
return jni;
}
// Return a |jlong| that will correctly convert back to |ptr|. This is needed
// because the alternative (of silently passing a 32-bit pointer to a vararg
// function expecting a 64-bit param) picks up garbage in the high 32 bits.
jlong jlongFromPointer(void* ptr) {
static_assert(sizeof(intptr_t) <= sizeof(jlong),
"Time to rethink the use of jlongs");
// Going through intptr_t to be obvious about the definedness of the
// conversion from pointer to integral type. intptr_t to jlong is a standard
// widening by the static_assert above.
jlong ret = reinterpret_cast<intptr_t>(ptr);
RTC_DCHECK(reinterpret_cast<void*>(ret) == ptr);
return ret;
}
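// Usage sketch (illustrative; |MyNativeObject| is hypothetical): round-trip a
// native pointer through a Java long.
//   jlong handle = jlongFromPointer(new MyNativeObject());
//   auto* obj = reinterpret_cast<MyNativeObject*>(handle);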
// JNIEnv-helper methods that RTC_CHECK success: no Java exception thrown and
// found object/class/method/field is non-null.
jmethodID GetMethodID(
JNIEnv* jni, jclass c, const std::string& name, const char* signature) {
jmethodID m = jni->GetMethodID(c, name.c_str(), signature);
CHECK_EXCEPTION(jni) << "error during GetMethodID: " << name << ", "
<< signature;
RTC_CHECK(m) << name << ", " << signature;
return m;
}
jmethodID GetStaticMethodID(
JNIEnv* jni, jclass c, const char* name, const char* signature) {
jmethodID m = jni->GetStaticMethodID(c, name, signature);
CHECK_EXCEPTION(jni) << "error during GetStaticMethodID: " << name << ", "
<< signature;
RTC_CHECK(m) << name << ", " << signature;
return m;
}
jfieldID GetFieldID(
JNIEnv* jni, jclass c, const char* name, const char* signature) {
jfieldID f = jni->GetFieldID(c, name, signature);
CHECK_EXCEPTION(jni) << "error during GetFieldID";
RTC_CHECK(f) << name << ", " << signature;
return f;
}
jfieldID GetStaticFieldID(JNIEnv* jni,
jclass c,
const char* name,
const char* signature) {
jfieldID f = jni->GetStaticFieldID(c, name, signature);
CHECK_EXCEPTION(jni) << "error during GetStaticFieldID";
RTC_CHECK(f) << name << ", " << signature;
return f;
}
jclass GetObjectClass(JNIEnv* jni, jobject object) {
jclass c = jni->GetObjectClass(object);
CHECK_EXCEPTION(jni) << "error during GetObjectClass";
RTC_CHECK(c) << "GetObjectClass returned NULL";
return c;
}
jobject GetObjectField(JNIEnv* jni, jobject object, jfieldID id) {
jobject o = jni->GetObjectField(object, id);
CHECK_EXCEPTION(jni) << "error during GetObjectField";
RTC_CHECK(!IsNull(jni, o)) << "GetObjectField returned NULL";
return o;
}
jobject GetStaticObjectField(JNIEnv* jni, jclass c, jfieldID id) {
jobject o = jni->GetStaticObjectField(c, id);
CHECK_EXCEPTION(jni) << "error during GetStaticObjectField";
RTC_CHECK(!IsNull(jni, o)) << "GetStaticObjectField returned NULL";
return o;
}
jobject GetNullableObjectField(JNIEnv* jni, jobject object, jfieldID id) {
jobject o = jni->GetObjectField(object, id);
CHECK_EXCEPTION(jni) << "error during GetObjectField";
return o;
}
jstring GetStringField(JNIEnv* jni, jobject object, jfieldID id) {
return static_cast<jstring>(GetObjectField(jni, object, id));
}
jlong GetLongField(JNIEnv* jni, jobject object, jfieldID id) {
jlong l = jni->GetLongField(object, id);
CHECK_EXCEPTION(jni) << "error during GetLongField";
return l;
}
jint GetIntField(JNIEnv* jni, jobject object, jfieldID id) {
jint i = jni->GetIntField(object, id);
CHECK_EXCEPTION(jni) << "error during GetIntField";
return i;
}
bool GetBooleanField(JNIEnv* jni, jobject object, jfieldID id) {
jboolean b = jni->GetBooleanField(object, id);
CHECK_EXCEPTION(jni) << "error during GetBooleanField";
return b;
}
bool IsNull(JNIEnv* jni, jobject obj) {
return jni->IsSameObject(obj, nullptr);
}
// Given a UTF-8 encoded |native| string return a new (UTF-16) jstring.
jstring JavaStringFromStdString(JNIEnv* jni, const std::string& native) {
jstring jstr = jni->NewStringUTF(native.c_str());
CHECK_EXCEPTION(jni) << "error during NewStringUTF";
return jstr;
}
// Given a jstring, reinterprets it to a new native string.
std::string JavaToStdString(JNIEnv* jni, const jstring& j_string) {
// Invoke String.getBytes(String charsetName) method to convert |j_string|
// to a byte array.
const jclass string_class = GetObjectClass(jni, j_string);
const jmethodID get_bytes =
GetMethodID(jni, string_class, "getBytes", "(Ljava/lang/String;)[B");
const jstring charset_name = jni->NewStringUTF("ISO-8859-1");
CHECK_EXCEPTION(jni) << "error during NewStringUTF";
const jbyteArray j_byte_array =
(jbyteArray)jni->CallObjectMethod(j_string, get_bytes, charset_name);
CHECK_EXCEPTION(jni) << "error during CallObjectMethod";
const size_t len = jni->GetArrayLength(j_byte_array);
CHECK_EXCEPTION(jni) << "error during GetArrayLength";
std::vector<char> buf(len);
jni->GetByteArrayRegion(j_byte_array, 0, len,
reinterpret_cast<jbyte*>(&buf[0]));
CHECK_EXCEPTION(jni) << "error during GetByteArrayRegion";
return std::string(buf.begin(), buf.end());
}
// Given a list of jstrings, reinterprets it to a new vector of native strings.
std::vector<std::string> JavaToStdVectorStrings(JNIEnv* jni, jobject list) {
std::vector<std::string> converted_list;
if (list != nullptr) {
for (jobject str : Iterable(jni, list)) {
converted_list.push_back(
JavaToStdString(jni, reinterpret_cast<jstring>(str)));
}
}
return converted_list;
}
// Return the (singleton) Java Enum object corresponding to |index|;
jobject JavaEnumFromIndex(JNIEnv* jni, jclass state_class,
const std::string& state_class_name, int index) {
jmethodID state_values_id = GetStaticMethodID(
jni, state_class, "values", ("()[L" + state_class_name + ";").c_str());
jobjectArray state_values = static_cast<jobjectArray>(
jni->CallStaticObjectMethod(state_class, state_values_id));
CHECK_EXCEPTION(jni) << "error during CallStaticObjectMethod";
jobject ret = jni->GetObjectArrayElement(state_values, index);
CHECK_EXCEPTION(jni) << "error during GetObjectArrayElement";
return ret;
}
jobject JavaEnumFromIndexAndClassName(JNIEnv* jni,
const std::string& state_class_fragment,
int index) {
const std::string state_class = "org/webrtc/" + state_class_fragment;
return JavaEnumFromIndex(jni, FindClass(jni, state_class.c_str()),
state_class, index);
}
std::string GetJavaEnumName(JNIEnv* jni,
const std::string& className,
jobject j_enum) {
jclass enumClass = FindClass(jni, className.c_str());
jmethodID nameMethod =
GetMethodID(jni, enumClass, "name", "()Ljava/lang/String;");
jstring name =
reinterpret_cast<jstring>(jni->CallObjectMethod(j_enum, nameMethod));
CHECK_EXCEPTION(jni) << "error during CallObjectMethod for " << className
<< ".name";
return JavaToStdString(jni, name);
}
std::map<std::string, std::string> JavaToStdMapStrings(JNIEnv* jni,
jobject j_map) {
jclass map_class = jni->FindClass("java/util/Map");
jclass set_class = jni->FindClass("java/util/Set");
jclass iterator_class = jni->FindClass("java/util/Iterator");
jclass entry_class = jni->FindClass("java/util/Map$Entry");
jmethodID entry_set_method =
jni->GetMethodID(map_class, "entrySet", "()Ljava/util/Set;");
jmethodID iterator_method =
jni->GetMethodID(set_class, "iterator", "()Ljava/util/Iterator;");
jmethodID has_next_method =
jni->GetMethodID(iterator_class, "hasNext", "()Z");
jmethodID next_method =
jni->GetMethodID(iterator_class, "next", "()Ljava/lang/Object;");
jmethodID get_key_method =
jni->GetMethodID(entry_class, "getKey", "()Ljava/lang/Object;");
jmethodID get_value_method =
jni->GetMethodID(entry_class, "getValue", "()Ljava/lang/Object;");
jobject j_entry_set = jni->CallObjectMethod(j_map, entry_set_method);
jobject j_iterator = jni->CallObjectMethod(j_entry_set, iterator_method);
std::map<std::string, std::string> result;
while (jni->CallBooleanMethod(j_iterator, has_next_method)) {
jobject j_entry = jni->CallObjectMethod(j_iterator, next_method);
jstring j_key =
static_cast<jstring>(jni->CallObjectMethod(j_entry, get_key_method));
jstring j_value =
static_cast<jstring>(jni->CallObjectMethod(j_entry, get_value_method));
result[JavaToStdString(jni, j_key)] = JavaToStdString(jni, j_value);
}
return result;
}
jobject NewGlobalRef(JNIEnv* jni, jobject o) {
jobject ret = jni->NewGlobalRef(o);
CHECK_EXCEPTION(jni) << "error during NewGlobalRef";
RTC_CHECK(ret);
return ret;
}
void DeleteGlobalRef(JNIEnv* jni, jobject o) {
jni->DeleteGlobalRef(o);
CHECK_EXCEPTION(jni) << "error during DeleteGlobalRef";
}
// Scope Java local references to the lifetime of this object. Use in all C++
// callbacks (i.e. entry points that don't originate in a Java callstack
// through a "native" method call).
ScopedLocalRefFrame::ScopedLocalRefFrame(JNIEnv* jni) : jni_(jni) {
RTC_CHECK(!jni_->PushLocalFrame(0)) << "Failed to PushLocalFrame";
}
ScopedLocalRefFrame::~ScopedLocalRefFrame() {
jni_->PopLocalFrame(nullptr);
}
// Creates an iterator representing the end of any collection.
Iterable::Iterator::Iterator() : iterator_(nullptr) {}
// Creates an iterator pointing to the beginning of the specified collection.
Iterable::Iterator::Iterator(JNIEnv* jni, jobject iterable) : jni_(jni) {
jclass j_class = GetObjectClass(jni, iterable);
jmethodID iterator_id =
GetMethodID(jni, j_class, "iterator", "()Ljava/util/Iterator;");
iterator_ = jni->CallObjectMethod(iterable, iterator_id);
CHECK_EXCEPTION(jni) << "error during CallObjectMethod";
RTC_CHECK(iterator_ != nullptr);
jclass iterator_class = GetObjectClass(jni, iterator_);
has_next_id_ = GetMethodID(jni, iterator_class, "hasNext", "()Z");
next_id_ = GetMethodID(jni, iterator_class, "next", "()Ljava/lang/Object;");
// Start at the first element in the collection.
++(*this);
}
// Move constructor - necessary to be able to return iterator types from
// functions.
Iterable::Iterator::Iterator(Iterator&& other)
: jni_(std::move(other.jni_)),
iterator_(std::move(other.iterator_)),
value_(std::move(other.value_)),
has_next_id_(std::move(other.has_next_id_)),
next_id_(std::move(other.next_id_)),
thread_checker_(std::move(other.thread_checker_)) {}
// Advances the iterator one step.
Iterable::Iterator& Iterable::Iterator::operator++() {
RTC_CHECK(thread_checker_.CalledOnValidThread());
if (AtEnd()) {
// Can't move past the end.
return *this;
}
bool has_next = jni_->CallBooleanMethod(iterator_, has_next_id_);
CHECK_EXCEPTION(jni_) << "error during CallBooleanMethod";
if (!has_next) {
iterator_ = nullptr;
value_ = nullptr;
return *this;
}
value_ = jni_->CallObjectMethod(iterator_, next_id_);
CHECK_EXCEPTION(jni_) << "error during CallObjectMethod";
return *this;
}
// Provides a way to compare the iterator with itself and with the end iterator.
// Note: all other comparison results are undefined, just like for C++ input
// iterators.
bool Iterable::Iterator::operator==(const Iterable::Iterator& other) {
// Two different active iterators should never be compared.
RTC_DCHECK(this == &other || AtEnd() || other.AtEnd());
return AtEnd() == other.AtEnd();
}
jobject Iterable::Iterator::operator*() {
RTC_CHECK(!AtEnd());
return value_;
}
bool Iterable::Iterator::AtEnd() const {
RTC_CHECK(thread_checker_.CalledOnValidThread());
return jni_ == nullptr || IsNull(jni_, iterator_);
}
} // namespace jni
} // namespace webrtc

View File

@ -0,0 +1,229 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
// This file contains convenience functions and classes for JNI.
// Before using any of the methods, InitGlobalJniVariables must be called.
#ifndef WEBRTC_SDK_ANDROID_SRC_JNI_JNI_HELPERS_H_
#define WEBRTC_SDK_ANDROID_SRC_JNI_JNI_HELPERS_H_
#include <jni.h>
#include <map>
#include <string>
#include <vector>
#include "webrtc/rtc_base/checks.h"
#include "webrtc/rtc_base/constructormagic.h"
#include "webrtc/rtc_base/thread_checker.h"
// Abort the process if |jni| has a Java exception pending.
// This macro uses the comma operator to execute ExceptionDescribe
// and ExceptionClear ignoring their return values and sending ""
// to the error stream.
#define CHECK_EXCEPTION(jni) \
RTC_CHECK(!jni->ExceptionCheck()) \
<< (jni->ExceptionDescribe(), jni->ExceptionClear(), "")
// Helper that calls ptr->Release() and aborts the process with a useful
// message if that didn't actually delete *ptr because of extra refcounts.
#define CHECK_RELEASE(ptr) \
RTC_CHECK_EQ(0, (ptr)->Release()) << "Unexpected refcount."
// Convenience macro defining JNI-accessible methods in the org.webrtc package.
// Eliminates unnecessary boilerplate and line-wraps, reducing visual clutter.
#define JNI_FUNCTION_DECLARATION(rettype, name, ...) \
extern "C" JNIEXPORT rettype JNICALL Java_org_webrtc_##name(__VA_ARGS__)
namespace webrtc {
namespace jni {
jint InitGlobalJniVariables(JavaVM *jvm);
// Return a |JNIEnv*| usable on this thread or NULL if this thread is detached.
JNIEnv* GetEnv();
JavaVM *GetJVM();
// Return a |JNIEnv*| usable on this thread. Attaches to |g_jvm| if necessary.
JNIEnv* AttachCurrentThreadIfNeeded();
// Return a |jlong| that will correctly convert back to |ptr|. This is needed
// because the alternative (of silently passing a 32-bit pointer to a vararg
// function expecting a 64-bit param) picks up garbage in the high 32 bits.
jlong jlongFromPointer(void* ptr);
// JNIEnv-helper methods that RTC_CHECK success: no Java exception thrown and
// found object/class/method/field is non-null.
jmethodID GetMethodID(
JNIEnv* jni, jclass c, const std::string& name, const char* signature);
jmethodID GetStaticMethodID(
JNIEnv* jni, jclass c, const char* name, const char* signature);
jfieldID GetFieldID(JNIEnv* jni, jclass c, const char* name,
const char* signature);
jfieldID GetStaticFieldID(JNIEnv* jni,
jclass c,
const char* name,
const char* signature);
jclass GetObjectClass(JNIEnv* jni, jobject object);
// Aborts the process if the object field is null.
jobject GetObjectField(JNIEnv* jni, jobject object, jfieldID id);
jobject GetStaticObjectField(JNIEnv* jni, jclass c, jfieldID id);
jobject GetNullableObjectField(JNIEnv* jni, jobject object, jfieldID id);
jstring GetStringField(JNIEnv* jni, jobject object, jfieldID id);
jlong GetLongField(JNIEnv* jni, jobject object, jfieldID id);
jint GetIntField(JNIEnv* jni, jobject object, jfieldID id);
bool GetBooleanField(JNIEnv* jni, jobject object, jfieldID id);
// Returns true if |obj| == null in Java.
bool IsNull(JNIEnv* jni, jobject obj);
// Given a UTF-8 encoded |native| string return a new (UTF-16) jstring.
jstring JavaStringFromStdString(JNIEnv* jni, const std::string& native);
// Given a (UTF-16) jstring return a new UTF-8 native string.
std::string JavaToStdString(JNIEnv* jni, const jstring& j_string);
// Given a List of (UTF-16) jstrings
// return a new vector of UTF-8 native strings.
std::vector<std::string> JavaToStdVectorStrings(JNIEnv* jni, jobject list);
// Return the (singleton) Java Enum object corresponding to |index|;
jobject JavaEnumFromIndex(JNIEnv* jni, jclass state_class,
const std::string& state_class_name, int index);
// Return the (singleton) Java Enum object corresponding to |index|;
// |state_class_fragment| is something like "MediaSource$State".
jobject JavaEnumFromIndexAndClassName(JNIEnv* jni,
const std::string& state_class_fragment,
int index);
// Parses Map<String, String> to std::map<std::string, std::string>.
std::map<std::string, std::string> JavaToStdMapStrings(JNIEnv* jni,
jobject j_map);
// Returns the name of a Java enum.
std::string GetJavaEnumName(JNIEnv* jni,
const std::string& className,
jobject j_enum);
jobject NewGlobalRef(JNIEnv* jni, jobject o);
void DeleteGlobalRef(JNIEnv* jni, jobject o);
// Scope Java local references to the lifetime of this object. Use in all C++
// callbacks (i.e. entry points that don't originate in a Java callstack
// through a "native" method call).
class ScopedLocalRefFrame {
public:
explicit ScopedLocalRefFrame(JNIEnv* jni);
~ScopedLocalRefFrame();
private:
JNIEnv* jni_;
};
// Scoped holder for global Java refs.
template<class T> // T is jclass, jobject, jintArray, etc.
class ScopedGlobalRef {
public:
ScopedGlobalRef(JNIEnv* jni, T obj)
: obj_(static_cast<T>(jni->NewGlobalRef(obj))) {}
~ScopedGlobalRef() {
DeleteGlobalRef(AttachCurrentThreadIfNeeded(), obj_);
}
T operator*() const {
return obj_;
}
private:
T obj_;
};
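// Usage sketch (illustrative): pairing a global class reference with a cached
// method id, as the factories in native_handle_impl.cc do.
//   ScopedGlobalRef<jclass> j_string_class(
//       jni, FindClass(jni, "java/lang/String"));
//   jmethodID length_id = GetMethodID(jni, *j_string_class, "length", "()I");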
// Provides a convenient way to iterate over a Java Iterable using the
// C++ range-for loop.
// E.g. for (jobject value : Iterable(jni, j_iterable)) { ... }
// Note: Since Java iterators cannot be duplicated, the iterator class is not
// copyable to prevent creating multiple C++ iterators that refer to the same
// Java iterator.
class Iterable {
public:
Iterable(JNIEnv* jni, jobject iterable) : jni_(jni), iterable_(iterable) {}
class Iterator {
public:
// Creates an iterator representing the end of any collection.
Iterator();
// Creates an iterator pointing to the beginning of the specified
// collection.
Iterator(JNIEnv* jni, jobject iterable);
// Move constructor - necessary to be able to return iterator types from
// functions.
Iterator(Iterator&& other);
// Move assignment should not be used.
Iterator& operator=(Iterator&&) = delete;
// Advances the iterator one step.
Iterator& operator++();
// Provides a way to compare the iterator with itself and with the end
// iterator.
// Note: all other comparison results are undefined, just like for C++ input
// iterators.
bool operator==(const Iterator& other);
bool operator!=(const Iterator& other) { return !(*this == other); }
jobject operator*();
private:
bool AtEnd() const;
JNIEnv* jni_ = nullptr;
jobject iterator_ = nullptr;
jobject value_ = nullptr;
jmethodID has_next_id_ = nullptr;
jmethodID next_id_ = nullptr;
rtc::ThreadChecker thread_checker_;
RTC_DISALLOW_COPY_AND_ASSIGN(Iterator);
};
Iterable::Iterator begin() { return Iterable::Iterator(jni_, iterable_); }
Iterable::Iterator end() { return Iterable::Iterator(); }
private:
JNIEnv* jni_;
jobject iterable_;
RTC_DISALLOW_COPY_AND_ASSIGN(Iterable);
};
} // namespace jni
} // namespace webrtc
// TODO(magjed): Remove once external clients are updated.
namespace webrtc_jni {
using webrtc::jni::AttachCurrentThreadIfNeeded;
using webrtc::jni::InitGlobalJniVariables;
} // namespace webrtc_jni
#endif // WEBRTC_SDK_ANDROID_SRC_JNI_JNI_HELPERS_H_

View File

@ -0,0 +1,40 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include <jni.h>
#undef JNIEXPORT
#define JNIEXPORT __attribute__((visibility("default")))
#include "webrtc/rtc_base/ssladapter.h"
#include "webrtc/sdk/android/src/jni/classreferenceholder.h"
#include "webrtc/sdk/android/src/jni/jni_helpers.h"
namespace webrtc {
namespace jni {
extern "C" jint JNIEXPORT JNICALL JNI_OnLoad(JavaVM *jvm, void *reserved) {
jint ret = InitGlobalJniVariables(jvm);
RTC_DCHECK_GE(ret, 0);
if (ret < 0)
return -1;
RTC_CHECK(rtc::InitializeSSL()) << "Failed to InitializeSSL()";
LoadGlobalClassReferenceHolder();
return ret;
}
extern "C" void JNIEXPORT JNICALL JNI_OnUnLoad(JavaVM *jvm, void *reserved) {
FreeGlobalClassReferenceHolder();
RTC_CHECK(rtc::CleanupSSL()) << "Failed to CleanupSSL()";
}
} // namespace jni
} // namespace webrtc

View File

@ -0,0 +1,512 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "webrtc/sdk/android/src/jni/native_handle_impl.h"
#include <memory>
#include "webrtc/common_video/include/video_frame_buffer.h"
#include "webrtc/rtc_base/bind.h"
#include "webrtc/rtc_base/checks.h"
#include "webrtc/rtc_base/keep_ref_until_done.h"
#include "webrtc/rtc_base/logging.h"
#include "webrtc/rtc_base/scoped_ref_ptr.h"
#include "webrtc/rtc_base/timeutils.h"
#include "webrtc/sdk/android/src/jni/classreferenceholder.h"
#include "webrtc/sdk/android/src/jni/jni_helpers.h"
#include "webrtc/sdk/android/src/jni/wrapped_native_i420_buffer.h"
#include "webrtc/system_wrappers/include/aligned_malloc.h"
namespace webrtc {
namespace jni {
namespace {
class AndroidVideoI420Buffer : public I420BufferInterface {
public:
// Wraps an existing reference to a Java VideoBuffer. Retain will not be
// called but release will be called when the C++ object is destroyed.
static rtc::scoped_refptr<AndroidVideoI420Buffer> WrapReference(
JNIEnv* jni,
jmethodID j_release_id,
int width,
int height,
jobject j_video_frame_buffer);
protected:
AndroidVideoI420Buffer(JNIEnv* jni,
jmethodID j_retain_id,
jmethodID j_release_id,
int width,
int height,
jobject j_video_frame_buffer);
// Should not be called directly. Wraps a reference. Use
// AndroidVideoI420Buffer::WrapReference instead for clarity.
AndroidVideoI420Buffer(JNIEnv* jni,
jmethodID j_release_id,
int width,
int height,
jobject j_video_frame_buffer);
~AndroidVideoI420Buffer();
private:
const uint8_t* DataY() const override { return data_y_; }
const uint8_t* DataU() const override { return data_u_; }
const uint8_t* DataV() const override { return data_v_; }
int StrideY() const override { return stride_y_; }
int StrideU() const override { return stride_u_; }
int StrideV() const override { return stride_v_; }
int width() const override { return width_; }
int height() const override { return height_; }
const jmethodID j_release_id_;
const int width_;
const int height_;
// Holds a VideoFrame.I420Buffer.
const ScopedGlobalRef<jobject> j_video_frame_buffer_;
const uint8_t* data_y_;
const uint8_t* data_u_;
const uint8_t* data_v_;
int stride_y_;
int stride_u_;
int stride_v_;
};
rtc::scoped_refptr<AndroidVideoI420Buffer>
AndroidVideoI420Buffer::WrapReference(JNIEnv* jni,
jmethodID j_release_id,
int width,
int height,
jobject j_video_frame_buffer) {
return new rtc::RefCountedObject<AndroidVideoI420Buffer>(
jni, j_release_id, width, height, j_video_frame_buffer);
}
AndroidVideoI420Buffer::AndroidVideoI420Buffer(JNIEnv* jni,
jmethodID j_retain_id,
jmethodID j_release_id,
int width,
int height,
jobject j_video_frame_buffer)
: AndroidVideoI420Buffer(jni,
j_release_id,
width,
height,
j_video_frame_buffer) {
jni->CallVoidMethod(j_video_frame_buffer, j_retain_id);
}
AndroidVideoI420Buffer::AndroidVideoI420Buffer(JNIEnv* jni,
jmethodID j_release_id,
int width,
int height,
jobject j_video_frame_buffer)
: j_release_id_(j_release_id),
width_(width),
height_(height),
j_video_frame_buffer_(jni, j_video_frame_buffer) {
jclass j_video_frame_i420_buffer_class =
FindClass(jni, "org/webrtc/VideoFrame$I420Buffer");
jmethodID j_get_data_y_id = jni->GetMethodID(
j_video_frame_i420_buffer_class, "getDataY", "()Ljava/nio/ByteBuffer;");
jmethodID j_get_data_u_id = jni->GetMethodID(
j_video_frame_i420_buffer_class, "getDataU", "()Ljava/nio/ByteBuffer;");
jmethodID j_get_data_v_id = jni->GetMethodID(
j_video_frame_i420_buffer_class, "getDataV", "()Ljava/nio/ByteBuffer;");
jmethodID j_get_stride_y_id =
jni->GetMethodID(j_video_frame_i420_buffer_class, "getStrideY", "()I");
jmethodID j_get_stride_u_id =
jni->GetMethodID(j_video_frame_i420_buffer_class, "getStrideU", "()I");
jmethodID j_get_stride_v_id =
jni->GetMethodID(j_video_frame_i420_buffer_class, "getStrideV", "()I");
jobject j_data_y =
jni->CallObjectMethod(j_video_frame_buffer, j_get_data_y_id);
jobject j_data_u =
jni->CallObjectMethod(j_video_frame_buffer, j_get_data_u_id);
jobject j_data_v =
jni->CallObjectMethod(j_video_frame_buffer, j_get_data_v_id);
data_y_ = static_cast<const uint8_t*>(jni->GetDirectBufferAddress(j_data_y));
data_u_ = static_cast<const uint8_t*>(jni->GetDirectBufferAddress(j_data_u));
data_v_ = static_cast<const uint8_t*>(jni->GetDirectBufferAddress(j_data_v));
stride_y_ = jni->CallIntMethod(j_video_frame_buffer, j_get_stride_y_id);
stride_u_ = jni->CallIntMethod(j_video_frame_buffer, j_get_stride_u_id);
stride_v_ = jni->CallIntMethod(j_video_frame_buffer, j_get_stride_v_id);
}
AndroidVideoI420Buffer::~AndroidVideoI420Buffer() {
JNIEnv* jni = AttachCurrentThreadIfNeeded();
jni->CallVoidMethod(*j_video_frame_buffer_, j_release_id_);
}
} // namespace
Matrix::Matrix(JNIEnv* jni, jfloatArray a) {
RTC_CHECK_EQ(16, jni->GetArrayLength(a));
jfloat* ptr = jni->GetFloatArrayElements(a, nullptr);
for (int i = 0; i < 16; ++i) {
elem_[i] = ptr[i];
}
jni->ReleaseFloatArrayElements(a, ptr, 0);
}
jfloatArray Matrix::ToJava(JNIEnv* jni) const {
jfloatArray matrix = jni->NewFloatArray(16);
jni->SetFloatArrayRegion(matrix, 0, 16, elem_);
return matrix;
}
void Matrix::Rotate(VideoRotation rotation) {
// Texture coordinates are in the range 0 to 1. The transformation of the last
// row in each rotation matrix is needed for proper translation, e.g., to
// mirror x, we don't replace x by -x, but by 1-x.
switch (rotation) {
case kVideoRotation_0:
break;
case kVideoRotation_90: {
const float ROTATE_90[16] =
{ elem_[4], elem_[5], elem_[6], elem_[7],
-elem_[0], -elem_[1], -elem_[2], -elem_[3],
elem_[8], elem_[9], elem_[10], elem_[11],
elem_[0] + elem_[12], elem_[1] + elem_[13],
elem_[2] + elem_[14], elem_[3] + elem_[15]};
memcpy(elem_, ROTATE_90, sizeof(elem_));
} break;
case kVideoRotation_180: {
const float ROTATE_180[16] =
{ -elem_[0], -elem_[1], -elem_[2], -elem_[3],
-elem_[4], -elem_[5], -elem_[6], -elem_[7],
elem_[8], elem_[9], elem_[10], elem_[11],
elem_[0] + elem_[4] + elem_[12], elem_[1] + elem_[5] + elem_[13],
elem_[2] + elem_[6] + elem_[14], elem_[3] + elem_[7] + elem_[15]};
memcpy(elem_, ROTATE_180, sizeof(elem_));
} break;
case kVideoRotation_270: {
const float ROTATE_270[16] =
{ -elem_[4], -elem_[5], -elem_[6], -elem_[7],
elem_[0], elem_[1], elem_[2], elem_[3],
elem_[8], elem_[9], elem_[10], elem_[11],
elem_[4] + elem_[12], elem_[5] + elem_[13],
elem_[6] + elem_[14], elem_[7] + elem_[15]};
memcpy(elem_, ROTATE_270, sizeof(elem_));
} break;
}
}
// Calculates result = a * b, in column-major order.
void Matrix::Multiply(const float a[16], const float b[16], float result[16]) {
for (int i = 0; i < 4; ++i) {
for (int j = 0; j < 4; ++j) {
float sum = 0;
for (int k = 0; k < 4; ++k) {
sum += a[k * 4 + j] * b[i * 4 + k];
}
result[i * 4 + j] = sum;
}
}
}
// Center crop by keeping xFraction of the width and yFraction of the height,
// so e.g. cropping from 640x480 to 640x360 would use
// xFraction=1, yFraction=360/480.
void Matrix::Crop(float xFraction,
float yFraction,
float xOffset,
float yOffset) {
const float crop_matrix[16] =
{xFraction, 0, 0, 0,
0, yFraction, 0, 0,
0, 0, 1, 0,
xOffset, yOffset, 0, 1};
const Matrix old = *this;
Multiply(crop_matrix, old.elem_, this->elem_);
}
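// Worked example (illustrative): center-cropping 640x480 content to 640x360
// keeps the full width and 360/480 = 0.75 of the height, centered with a
// vertical offset of (1 - 0.75) / 2:
//   matrix.Crop(1.0f, 0.75f, 0.0f, 0.125f);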
// Align pointers to 64 bytes for improved performance, e.g., to enable SIMD.
static const int kBufferAlignment = 64;
NativeHandleImpl::NativeHandleImpl(int id, const Matrix& matrix)
: oes_texture_id(id), sampling_matrix(matrix) {}
NativeHandleImpl::NativeHandleImpl(JNIEnv* jni,
jint j_oes_texture_id,
jfloatArray j_transform_matrix)
: oes_texture_id(j_oes_texture_id),
sampling_matrix(jni, j_transform_matrix) {}
AndroidTextureBuffer::AndroidTextureBuffer(
int width,
int height,
const NativeHandleImpl& native_handle,
jobject surface_texture_helper,
const rtc::Callback0<void>& no_longer_used)
: width_(width),
height_(height),
native_handle_(native_handle),
surface_texture_helper_(surface_texture_helper),
no_longer_used_cb_(no_longer_used) {}
AndroidTextureBuffer::~AndroidTextureBuffer() {
no_longer_used_cb_();
}
VideoFrameBuffer::Type AndroidTextureBuffer::type() const {
return Type::kNative;
}
NativeHandleImpl AndroidTextureBuffer::native_handle_impl() const {
return native_handle_;
}
int AndroidTextureBuffer::width() const {
return width_;
}
int AndroidTextureBuffer::height() const {
return height_;
}
rtc::scoped_refptr<I420BufferInterface> AndroidTextureBuffer::ToI420() {
int uv_width = (width() + 7) / 8;
int stride = 8 * uv_width;
int uv_height = (height() + 1) / 2;
size_t size = stride * (height() + uv_height);
// The data is owned by the frame, and the normal case is that the
// data is deleted by the frame's destructor callback.
//
// TODO(nisse): Use an I420BufferPool. We then need to extend that
// class, and I420Buffer, to support our memory layout.
// TODO(nisse): Depending on
// system_wrappers/include/aligned_malloc.h violates current DEPS
// rules. We get away with it for now only because it is indirectly
// included by i420_buffer.h.
std::unique_ptr<uint8_t, AlignedFreeDeleter> yuv_data(
static_cast<uint8_t*>(AlignedMalloc(size, kBufferAlignment)));
// See YuvConverter.java for the required layout.
uint8_t* y_data = yuv_data.get();
uint8_t* u_data = y_data + height() * stride;
uint8_t* v_data = u_data + stride/2;
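// Layout sketch (illustrative): for 640x480, uv_width = 80, stride = 640 and
// uv_height = 240, so the buffer is 480 rows of Y followed by 240 chroma rows
// in which U occupies bytes [0, 320) and V occupies bytes [320, 640).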
rtc::scoped_refptr<I420BufferInterface> copy = webrtc::WrapI420Buffer(
width(), height(), y_data, stride, u_data, stride, v_data, stride,
rtc::Bind(&AlignedFree, yuv_data.release()));
JNIEnv* jni = AttachCurrentThreadIfNeeded();
ScopedLocalRefFrame local_ref_frame(jni);
jmethodID transform_mid = GetMethodID(
jni,
GetObjectClass(jni, surface_texture_helper_),
"textureToYUV",
"(Ljava/nio/ByteBuffer;IIII[F)V");
jobject byte_buffer = jni->NewDirectByteBuffer(y_data, size);
jfloatArray sampling_matrix = native_handle_.sampling_matrix.ToJava(jni);
jni->CallVoidMethod(surface_texture_helper_,
transform_mid,
byte_buffer, width(), height(), stride,
native_handle_.oes_texture_id, sampling_matrix);
CHECK_EXCEPTION(jni) << "textureToYUV throwed an exception";
return copy;
}
rtc::scoped_refptr<AndroidVideoBuffer> AndroidVideoBuffer::WrapReference(
JNIEnv* jni,
jmethodID j_release_id,
int width,
int height,
jobject j_video_frame_buffer) {
return new rtc::RefCountedObject<AndroidVideoBuffer>(
jni, j_release_id, width, height, j_video_frame_buffer);
}
AndroidVideoBuffer::AndroidVideoBuffer(JNIEnv* jni,
jmethodID j_retain_id,
jmethodID j_release_id,
int width,
int height,
jobject j_video_frame_buffer)
: AndroidVideoBuffer(jni,
j_release_id,
width,
height,
j_video_frame_buffer) {
jni->CallVoidMethod(j_video_frame_buffer, j_retain_id);
}
AndroidVideoBuffer::AndroidVideoBuffer(JNIEnv* jni,
jmethodID j_release_id,
int width,
int height,
jobject j_video_frame_buffer)
: j_release_id_(j_release_id),
width_(width),
height_(height),
j_video_frame_buffer_(jni, j_video_frame_buffer) {}
AndroidVideoBuffer::~AndroidVideoBuffer() {
JNIEnv* jni = AttachCurrentThreadIfNeeded();
jni->CallVoidMethod(*j_video_frame_buffer_, j_release_id_);
}
jobject AndroidVideoBuffer::video_frame_buffer() const {
return *j_video_frame_buffer_;
}
VideoFrameBuffer::Type AndroidVideoBuffer::type() const {
return Type::kNative;
}
int AndroidVideoBuffer::width() const {
return width_;
}
int AndroidVideoBuffer::height() const {
return height_;
}
rtc::scoped_refptr<I420BufferInterface> AndroidVideoBuffer::ToI420() {
JNIEnv* jni = AttachCurrentThreadIfNeeded();
ScopedLocalRefFrame local_ref_frame(jni);
jclass j_video_frame_buffer_class =
FindClass(jni, "org/webrtc/VideoFrame$Buffer");
jmethodID j_to_i420_id =
jni->GetMethodID(j_video_frame_buffer_class, "toI420",
"()Lorg/webrtc/VideoFrame$I420Buffer;");
jobject j_i420_buffer =
jni->CallObjectMethod(*j_video_frame_buffer_, j_to_i420_id);
// We don't need to retain the buffer because toI420 returns a new object
// that we are assumed to take ownership of.
return AndroidVideoI420Buffer::WrapReference(jni, j_release_id_, width_,
height_, j_i420_buffer);
}
jobject AndroidVideoBuffer::ToJavaI420Frame(JNIEnv* jni, int rotation) {
jclass j_byte_buffer_class = jni->FindClass("java/nio/ByteBuffer");
jclass j_i420_frame_class =
FindClass(jni, "org/webrtc/VideoRenderer$I420Frame");
jmethodID j_i420_frame_ctor_id = GetMethodID(
jni, j_i420_frame_class, "<init>", "(ILorg/webrtc/VideoFrame$Buffer;J)V");
// Java code just uses the native frame to hold a reference to the buffer so
// this is okay.
VideoFrame* native_frame =
new VideoFrame(this, 0 /* timestamp */, 0 /* render_time_ms */,
VideoRotation::kVideoRotation_0 /* rotation */);
return jni->NewObject(j_i420_frame_class, j_i420_frame_ctor_id, rotation,
*j_video_frame_buffer_, jlongFromPointer(native_frame));
}
AndroidVideoBufferFactory::AndroidVideoBufferFactory(JNIEnv* jni)
: j_video_frame_class_(jni, FindClass(jni, "org/webrtc/VideoFrame")),
j_get_buffer_id_(GetMethodID(jni,
*j_video_frame_class_,
"getBuffer",
"()Lorg/webrtc/VideoFrame$Buffer;")),
j_get_rotation_id_(
GetMethodID(jni, *j_video_frame_class_, "getRotation", "()I")),
j_get_timestamp_ns_id_(
GetMethodID(jni, *j_video_frame_class_, "getTimestampNs", "()J")),
j_video_frame_buffer_class_(
jni,
FindClass(jni, "org/webrtc/VideoFrame$Buffer")),
j_retain_id_(
GetMethodID(jni, *j_video_frame_buffer_class_, "retain", "()V")),
j_release_id_(
GetMethodID(jni, *j_video_frame_buffer_class_, "release", "()V")),
j_get_width_id_(
GetMethodID(jni, *j_video_frame_buffer_class_, "getWidth", "()I")),
j_get_height_id_(
GetMethodID(jni, *j_video_frame_buffer_class_, "getHeight", "()I")) {}
VideoFrame AndroidVideoBufferFactory::CreateFrame(
JNIEnv* jni,
jobject j_video_frame,
uint32_t timestamp_rtp) const {
jobject j_video_frame_buffer =
jni->CallObjectMethod(j_video_frame, j_get_buffer_id_);
int rotation = jni->CallIntMethod(j_video_frame, j_get_rotation_id_);
int64_t timestamp_ns =
jni->CallLongMethod(j_video_frame, j_get_timestamp_ns_id_);
rtc::scoped_refptr<AndroidVideoBuffer> buffer =
CreateBuffer(jni, j_video_frame_buffer);
return VideoFrame(buffer, timestamp_rtp,
timestamp_ns / rtc::kNumNanosecsPerMillisec,
static_cast<VideoRotation>(rotation));
}
rtc::scoped_refptr<AndroidVideoBuffer> AndroidVideoBufferFactory::WrapBuffer(
JNIEnv* jni,
jobject j_video_frame_buffer) const {
int width = jni->CallIntMethod(j_video_frame_buffer, j_get_width_id_);
int height = jni->CallIntMethod(j_video_frame_buffer, j_get_height_id_);
return AndroidVideoBuffer::WrapReference(jni, j_release_id_, width, height,
j_video_frame_buffer);
}
rtc::scoped_refptr<AndroidVideoBuffer> AndroidVideoBufferFactory::CreateBuffer(
JNIEnv* jni,
jobject j_video_frame_buffer) const {
int width = jni->CallIntMethod(j_video_frame_buffer, j_get_width_id_);
int height = jni->CallIntMethod(j_video_frame_buffer, j_get_height_id_);
return new rtc::RefCountedObject<AndroidVideoBuffer>(
jni, j_retain_id_, j_release_id_, width, height, j_video_frame_buffer);
}
JavaVideoFrameFactory::JavaVideoFrameFactory(JNIEnv* jni)
: j_video_frame_class_(jni, FindClass(jni, "org/webrtc/VideoFrame")) {
j_video_frame_constructor_id_ =
GetMethodID(jni, *j_video_frame_class_, "<init>",
"(Lorg/webrtc/VideoFrame$Buffer;IJ)V");
}
static bool IsJavaVideoBuffer(rtc::scoped_refptr<VideoFrameBuffer> buffer) {
if (buffer->type() != VideoFrameBuffer::Type::kNative) {
return false;
}
AndroidVideoFrameBuffer* android_buffer =
static_cast<AndroidVideoFrameBuffer*>(buffer.get());
return android_buffer->android_type() ==
AndroidVideoFrameBuffer::AndroidType::kJavaBuffer;
}
jobject JavaVideoFrameFactory::ToJavaFrame(JNIEnv* jni,
const VideoFrame& frame) const {
rtc::scoped_refptr<VideoFrameBuffer> buffer = frame.video_frame_buffer();
jobject j_buffer;
if (IsJavaVideoBuffer(buffer)) {
RTC_DCHECK(buffer->type() == VideoFrameBuffer::Type::kNative);
AndroidVideoFrameBuffer* android_buffer =
static_cast<AndroidVideoFrameBuffer*>(buffer.get());
RTC_DCHECK(android_buffer->android_type() ==
AndroidVideoFrameBuffer::AndroidType::kJavaBuffer);
AndroidVideoBuffer* android_video_buffer =
static_cast<AndroidVideoBuffer*>(android_buffer);
j_buffer = android_video_buffer->video_frame_buffer();
} else {
j_buffer = WrapI420Buffer(jni, buffer->ToI420());
}
return jni->NewObject(
*j_video_frame_class_, j_video_frame_constructor_id_, j_buffer,
static_cast<jint>(frame.rotation()),
static_cast<jlong>(frame.timestamp_us() * rtc::kNumNanosecsPerMicrosec));
}
} // namespace jni
} // namespace webrtc

View File

@ -0,0 +1,197 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_SDK_ANDROID_SRC_JNI_NATIVE_HANDLE_IMPL_H_
#define WEBRTC_SDK_ANDROID_SRC_JNI_NATIVE_HANDLE_IMPL_H_
#include <jni.h>
#include "webrtc/api/video/video_frame.h"
#include "webrtc/api/video/video_frame_buffer.h"
#include "webrtc/api/video/video_rotation.h"
#include "webrtc/rtc_base/callback.h"
#include "webrtc/sdk/android/src/jni/jni_helpers.h"
namespace webrtc {
namespace jni {
// Open gl texture matrix, in column-major order. Operations are
// in-place.
class Matrix {
public:
Matrix(JNIEnv* jni, jfloatArray a);
static Matrix fromAndroidGraphicsMatrix(JNIEnv* jni, jobject j_matrix);
jfloatArray ToJava(JNIEnv* jni) const;
// Crop arguments are relative to original size.
void Crop(float cropped_width,
float cropped_height,
float crop_x,
float crop_y);
void Rotate(VideoRotation rotation);
private:
Matrix() {}
static void Multiply(const float a[16], const float b[16], float result[16]);
float elem_[16];
};
// Wrapper for texture object.
struct NativeHandleImpl {
NativeHandleImpl(JNIEnv* jni,
jint j_oes_texture_id,
jfloatArray j_transform_matrix);
NativeHandleImpl(int id, const Matrix& matrix);
const int oes_texture_id;
Matrix sampling_matrix;
};
// Base class to differentiate between the old texture frames and the new
// Java-based frames.
// TODO(sakal): Remove this and AndroidTextureBuffer once they are no longer
// needed.
class AndroidVideoFrameBuffer : public VideoFrameBuffer {
public:
enum class AndroidType { kTextureBuffer, kJavaBuffer };
virtual AndroidType android_type() = 0;
};
class AndroidTextureBuffer : public AndroidVideoFrameBuffer {
public:
AndroidTextureBuffer(int width,
int height,
const NativeHandleImpl& native_handle,
jobject surface_texture_helper,
const rtc::Callback0<void>& no_longer_used);
~AndroidTextureBuffer();
NativeHandleImpl native_handle_impl() const;
private:
Type type() const override;
int width() const override;
int height() const override;
rtc::scoped_refptr<I420BufferInterface> ToI420() override;
AndroidType android_type() override { return AndroidType::kTextureBuffer; }
const int width_;
const int height_;
NativeHandleImpl native_handle_;
// Raw object pointer, relying on the caller, i.e.,
// AndroidVideoCapturerJni or the C++ SurfaceTextureHelper, to keep
// a global reference. TODO(nisse): Make this a reference to the C++
// SurfaceTextureHelper instead, but that requires some refactoring
// of AndroidVideoCapturerJni.
jobject surface_texture_helper_;
rtc::Callback0<void> no_longer_used_cb_;
};
class AndroidVideoBuffer : public AndroidVideoFrameBuffer {
public:
// Wraps an existing reference to a Java VideoBuffer. Retain will not be
// called but release will be called when the C++ object is destroyed.
static rtc::scoped_refptr<AndroidVideoBuffer> WrapReference(
JNIEnv* jni,
jmethodID j_release_id,
int width,
int height,
jobject j_video_frame_buffer);
AndroidVideoBuffer(JNIEnv* jni,
jmethodID j_retain_id,
jmethodID j_release_id,
int width,
int height,
jobject j_video_frame_buffer);
// Should not be called directly. Wraps a reference. Use
// AndroidVideoBuffer::WrapReference instead for clarity.
AndroidVideoBuffer(JNIEnv* jni,
jmethodID j_release_id,
int width,
int height,
jobject j_video_frame_buffer);
~AndroidVideoBuffer() override;
jobject video_frame_buffer() const;
// Returns an instance of VideoRenderer.I420Frame (deprecated)
jobject ToJavaI420Frame(JNIEnv* jni, int rotation);
private:
Type type() const override;
int width() const override;
int height() const override;
rtc::scoped_refptr<I420BufferInterface> ToI420() override;
AndroidType android_type() override { return AndroidType::kJavaBuffer; }
const jmethodID j_release_id_;
const int width_;
const int height_;
// Holds a VideoFrame.Buffer.
const ScopedGlobalRef<jobject> j_video_frame_buffer_;
};
class AndroidVideoBufferFactory {
public:
explicit AndroidVideoBufferFactory(JNIEnv* jni);
VideoFrame CreateFrame(JNIEnv* jni,
jobject j_video_frame,
uint32_t timestamp_rtp) const;
// Wraps a buffer to AndroidVideoBuffer without incrementing the reference
// count.
rtc::scoped_refptr<AndroidVideoBuffer> WrapBuffer(
JNIEnv* jni,
jobject j_video_frame_buffer) const;
rtc::scoped_refptr<AndroidVideoBuffer> CreateBuffer(
JNIEnv* jni,
jobject j_video_frame_buffer) const;
private:
ScopedGlobalRef<jclass> j_video_frame_class_;
jmethodID j_get_buffer_id_;
jmethodID j_get_rotation_id_;
jmethodID j_get_timestamp_ns_id_;
ScopedGlobalRef<jclass> j_video_frame_buffer_class_;
jmethodID j_retain_id_;
jmethodID j_release_id_;
jmethodID j_get_width_id_;
jmethodID j_get_height_id_;
};
class JavaVideoFrameFactory {
public:
JavaVideoFrameFactory(JNIEnv* jni);
jobject ToJavaFrame(JNIEnv* jni, const VideoFrame& frame) const;
private:
ScopedGlobalRef<jclass> j_video_frame_class_;
jmethodID j_video_frame_constructor_id_;
};
} // namespace jni
} // namespace webrtc
#endif // WEBRTC_SDK_ANDROID_SRC_JNI_NATIVE_HANDLE_IMPL_H_
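The Crop() declared above amounts to a scale-plus-translate in texture-coordinate space. A minimal standalone sketch of that arithmetic, assuming the same column-major layout as Matrix::elem_ (Mat4 and CropTransform are illustrative names, not part of this file):

#include <array>

// Column-major 4x4, as in Matrix::elem_ above.
using Mat4 = std::array<float, 16>;

// Maps unit texture coordinates into the cropped sub-rectangle:
// u' = crop_x + u * cropped_width, v' = crop_y + v * cropped_height.
Mat4 CropTransform(float cropped_width, float cropped_height,
                   float crop_x, float crop_y) {
  Mat4 m = {};
  m[0] = cropped_width;    // Scale u (column 0, row 0).
  m[5] = cropped_height;   // Scale v (column 1, row 1).
  m[10] = 1.0f;
  m[12] = crop_x;          // Translate u (column 3, row 0).
  m[13] = crop_y;          // Translate v (column 3, row 1).
  m[15] = 1.0f;
  return m;
}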

View File

@ -0,0 +1,78 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include <jni.h>
#include <vector>
#include "third_party/libyuv/include/libyuv/convert.h"
#include "third_party/libyuv/include/libyuv/scale.h"
#include "webrtc/rtc_base/checks.h"
namespace webrtc {
namespace jni {
extern "C" JNIEXPORT void JNICALL
Java_org_webrtc_NV12Buffer_nativeCropAndScale(JNIEnv* jni,
jclass,
jint crop_x,
jint crop_y,
jint crop_width,
jint crop_height,
jint scale_width,
jint scale_height,
jobject j_src,
jint src_width,
jint src_height,
jint src_stride,
jint src_slice_height,
jobject j_dst_y,
jint dst_stride_y,
jobject j_dst_u,
jint dst_stride_u,
jobject j_dst_v,
jint dst_stride_v) {
const int src_stride_y = src_stride;
const int src_stride_uv = src_stride;
const int crop_chroma_x = crop_x / 2;
const int crop_chroma_y = crop_y / 2;
const int crop_chroma_width = (crop_width + 1) / 2;
const int crop_chroma_height = (crop_height + 1) / 2;
const int tmp_stride_u = crop_chroma_width;
const int tmp_stride_v = crop_chroma_width;
const int tmp_size = crop_chroma_height * (tmp_stride_u + tmp_stride_v);
uint8_t const* src_y =
static_cast<uint8_t const*>(jni->GetDirectBufferAddress(j_src));
uint8_t const* src_uv = src_y + src_slice_height * src_stride_y;
uint8_t* dst_y = static_cast<uint8_t*>(jni->GetDirectBufferAddress(j_dst_y));
uint8_t* dst_u = static_cast<uint8_t*>(jni->GetDirectBufferAddress(j_dst_u));
uint8_t* dst_v = static_cast<uint8_t*>(jni->GetDirectBufferAddress(j_dst_v));
// Crop using pointer arithmetic.
src_y += crop_x + crop_y * src_stride_y;
// Interleaved U/V means one chroma column is two bytes wide.
src_uv += 2 * crop_chroma_x + crop_chroma_y * src_stride_uv;
std::vector<uint8_t> tmp_buffer(tmp_size);
uint8_t* tmp_u = tmp_buffer.data();
uint8_t* tmp_v = tmp_u + crop_chroma_height * tmp_stride_u;
libyuv::SplitUVPlane(src_uv, src_stride_uv, tmp_u, tmp_stride_u, tmp_v,
tmp_stride_v, crop_chroma_width, crop_chroma_height);
libyuv::I420Scale(src_y, src_stride_y, tmp_u, tmp_stride_u, tmp_v,
tmp_stride_v, crop_width, crop_height, dst_y, dst_stride_y,
dst_u, dst_stride_u, dst_v, dst_stride_v, scale_width,
scale_height, libyuv::kFilterBox);
}
} // namespace jni
} // namespace webrtc
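NV12 places a full-resolution Y plane first and a single half-resolution, interleaved UV plane after slice_height rows, which is what the pointer arithmetic above encodes. A self-contained sketch of the same offset math with made-up dimensions (all names below are illustrative):

#include <cstdio>

int main() {
  const int stride = 640, slice_height = 480;
  const int crop_x = 100, crop_y = 50;  // Luma crop origin, even values.
  // Byte offset of the cropped luma origin within the Y plane.
  const int y_offset = crop_x + crop_y * stride;
  // The interleaved UV plane begins after slice_height full luma rows.
  const int uv_plane_start = slice_height * stride;
  // Chroma is subsampled 2x2 and U/V alternate, so a chroma column is two
  // bytes wide and chroma rows share the luma stride.
  const int uv_offset =
      uv_plane_start + 2 * (crop_x / 2) + (crop_y / 2) * stride;
  printf("Y crop starts at byte %d, UV crop at byte %d\n", y_offset,
         uv_offset);
  return 0;
}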

View File

@ -0,0 +1,75 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include <jni.h>
#include <vector>
#include "third_party/libyuv/include/libyuv/convert.h"
#include "third_party/libyuv/include/libyuv/scale.h"
#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
#include "webrtc/rtc_base/checks.h"
namespace webrtc {
namespace jni {
extern "C" JNIEXPORT void JNICALL
Java_org_webrtc_NV21Buffer_nativeCropAndScale(JNIEnv* jni,
jclass,
jint crop_x,
jint crop_y,
jint crop_width,
jint crop_height,
jint scale_width,
jint scale_height,
jbyteArray j_src,
jint src_width,
jint src_height,
jobject j_dst_y,
jint dst_stride_y,
jobject j_dst_u,
jint dst_stride_u,
jobject j_dst_v,
jint dst_stride_v) {
const int src_stride_y = src_width;
const int src_stride_uv = src_width;
const int crop_chroma_x = crop_x / 2;
const int crop_chroma_y = crop_y / 2;
const int crop_chroma_width = (crop_width + 1) / 2;
const int crop_chroma_height = (crop_height + 1) / 2;
const int tmp_stride_u = crop_chroma_width;
const int tmp_stride_v = crop_chroma_width;
const int tmp_size = crop_chroma_height * (tmp_stride_u + tmp_stride_v);
jboolean was_copy;
jbyte* src_bytes = jni->GetByteArrayElements(j_src, &was_copy);
RTC_DCHECK(!was_copy);
uint8_t const* src_y = reinterpret_cast<uint8_t const*>(src_bytes);
uint8_t const* src_uv = src_y + src_height * src_stride_y;
uint8_t* dst_y = static_cast<uint8_t*>(jni->GetDirectBufferAddress(j_dst_y));
uint8_t* dst_u = static_cast<uint8_t*>(jni->GetDirectBufferAddress(j_dst_u));
uint8_t* dst_v = static_cast<uint8_t*>(jni->GetDirectBufferAddress(j_dst_v));
// Crop using pointer arithmetic.
src_y += crop_x + crop_y * src_stride_y;
// Interleaved V/U means one chroma column is two bytes wide.
src_uv += 2 * crop_chroma_x + crop_chroma_y * src_stride_uv;
NV12ToI420Scaler scaler;
// U- and V-planes are swapped because this is NV21 not NV12.
scaler.NV12ToI420Scale(src_y, src_stride_y, src_uv, src_stride_uv, crop_width,
crop_height, dst_y, dst_stride_y, dst_v, dst_stride_v,
dst_u, dst_stride_u, scale_width, scale_height);
jni->ReleaseByteArrayElements(j_src, src_bytes, JNI_ABORT);
}
} // namespace jni
} // namespace webrtc
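The swap works because NV21 stores its chroma plane V-first. A self-contained demonstration, assuming libyuv's SplitUVPlane from planar_functions.h (the byte values are made up):

#include <cstdint>
#include <cstdio>
#include "third_party/libyuv/include/libyuv/planar_functions.h"

int main() {
  // Two interleaved NV21 chroma samples: V0 U0 V1 U1.
  const uint8_t nv21_uv[4] = {0x10, 0x20, 0x11, 0x21};
  uint8_t first[2];   // Receives the first byte of each pair: V for NV21.
  uint8_t second[2];  // Receives the second byte of each pair: U for NV21.
  libyuv::SplitUVPlane(nv21_uv, /*src_stride_uv=*/4, first, /*dst_stride=*/2,
                       second, /*dst_stride=*/2, /*width=*/2, /*height=*/1);
  // Routing "first" to the I420 V plane and "second" to the U plane is
  // exactly the dst_v/dst_u swap in the scaler call above.
  printf("V: %02x %02x  U: %02x %02x\n", first[0], first[1], second[0],
         second[1]);
  return 0;
}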

View File

@ -0,0 +1 @@
deadbeef@webrtc.org

View File

@ -0,0 +1,447 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "webrtc/sdk/android/src/jni/pc/androidnetworkmonitor_jni.h"
#include <dlfcn.h>
// RTLD_NOLOAD was added to dlfcn.h in Lollipop; define it for older headers.
#define RTLD_NOLOAD 4
#include "webrtc/rtc_base/bind.h"
#include "webrtc/rtc_base/checks.h"
#include "webrtc/rtc_base/ipaddress.h"
#include "webrtc/sdk/android/src/jni/classreferenceholder.h"
#include "webrtc/sdk/android/src/jni/jni_helpers.h"
namespace webrtc {
namespace jni {
enum AndroidSdkVersion {
SDK_VERSION_LOLLIPOP = 21,
SDK_VERSION_MARSHMALLOW = 23
};
int AndroidNetworkMonitor::android_sdk_int_ = 0;
static NetworkType GetNetworkTypeFromJava(JNIEnv* jni, jobject j_network_type) {
std::string enum_name =
GetJavaEnumName(jni, "org/webrtc/NetworkMonitorAutoDetect$ConnectionType",
j_network_type);
if (enum_name == "CONNECTION_UNKNOWN") {
return NetworkType::NETWORK_UNKNOWN;
}
if (enum_name == "CONNECTION_ETHERNET") {
return NetworkType::NETWORK_ETHERNET;
}
if (enum_name == "CONNECTION_WIFI") {
return NetworkType::NETWORK_WIFI;
}
if (enum_name == "CONNECTION_4G") {
return NetworkType::NETWORK_4G;
}
if (enum_name == "CONNECTION_3G") {
return NetworkType::NETWORK_3G;
}
if (enum_name == "CONNECTION_2G") {
return NetworkType::NETWORK_2G;
}
if (enum_name == "CONNECTION_UNKNOWN_CELLULAR") {
return NetworkType::NETWORK_UNKNOWN_CELLULAR;
}
if (enum_name == "CONNECTION_BLUETOOTH") {
return NetworkType::NETWORK_BLUETOOTH;
}
if (enum_name == "CONNECTION_NONE") {
return NetworkType::NETWORK_NONE;
}
RTC_NOTREACHED();
return NetworkType::NETWORK_UNKNOWN;
}
static rtc::AdapterType AdapterTypeFromNetworkType(NetworkType network_type) {
switch (network_type) {
case NETWORK_UNKNOWN:
return rtc::ADAPTER_TYPE_UNKNOWN;
case NETWORK_ETHERNET:
return rtc::ADAPTER_TYPE_ETHERNET;
case NETWORK_WIFI:
return rtc::ADAPTER_TYPE_WIFI;
case NETWORK_4G:
case NETWORK_3G:
case NETWORK_2G:
case NETWORK_UNKNOWN_CELLULAR:
return rtc::ADAPTER_TYPE_CELLULAR;
case NETWORK_BLUETOOTH:
// There is no corresponding mapping for bluetooth networks.
// Map it to VPN for now.
return rtc::ADAPTER_TYPE_VPN;
default:
RTC_NOTREACHED() << "Invalid network type " << network_type;
return rtc::ADAPTER_TYPE_UNKNOWN;
}
}
static rtc::IPAddress GetIPAddressFromJava(JNIEnv* jni, jobject j_ip_address) {
jclass j_ip_address_class = GetObjectClass(jni, j_ip_address);
jfieldID j_address_id = GetFieldID(jni, j_ip_address_class, "address", "[B");
jbyteArray j_addresses =
static_cast<jbyteArray>(GetObjectField(jni, j_ip_address, j_address_id));
size_t address_length = jni->GetArrayLength(j_addresses);
jbyte* addr_array = jni->GetByteArrayElements(j_addresses, nullptr);
CHECK_EXCEPTION(jni) << "Error during GetIPAddressFromJava";
if (address_length == 4) {
// IP4
struct in_addr ip4_addr;
memcpy(&ip4_addr.s_addr, addr_array, 4);
jni->ReleaseByteArrayElements(j_addresses, addr_array, JNI_ABORT);
return rtc::IPAddress(ip4_addr);
}
// IP6
RTC_CHECK(address_length == 16);
struct in6_addr ip6_addr;
memcpy(ip6_addr.s6_addr, addr_array, address_length);
jni->ReleaseByteArrayElements(j_addresses, addr_array, JNI_ABORT);
return rtc::IPAddress(ip6_addr);
}
static void GetIPAddressesFromJava(JNIEnv* jni,
jobjectArray j_ip_addresses,
std::vector<rtc::IPAddress>* ip_addresses) {
ip_addresses->clear();
size_t num_addresses = jni->GetArrayLength(j_ip_addresses);
CHECK_EXCEPTION(jni) << "Error during GetArrayLength";
for (size_t i = 0; i < num_addresses; ++i) {
jobject j_ip_address = jni->GetObjectArrayElement(j_ip_addresses, i);
CHECK_EXCEPTION(jni) << "Error during GetObjectArrayElement";
rtc::IPAddress ip = GetIPAddressFromJava(jni, j_ip_address);
ip_addresses->push_back(ip);
}
}
static NetworkInformation GetNetworkInformationFromJava(
JNIEnv* jni,
jobject j_network_info) {
jclass j_network_info_class = GetObjectClass(jni, j_network_info);
jfieldID j_interface_name_id =
GetFieldID(jni, j_network_info_class, "name", "Ljava/lang/String;");
jfieldID j_handle_id = GetFieldID(jni, j_network_info_class, "handle", "J");
jfieldID j_type_id =
GetFieldID(jni, j_network_info_class, "type",
"Lorg/webrtc/NetworkMonitorAutoDetect$ConnectionType;");
jfieldID j_ip_addresses_id =
GetFieldID(jni, j_network_info_class, "ipAddresses",
"[Lorg/webrtc/NetworkMonitorAutoDetect$IPAddress;");
NetworkInformation network_info;
network_info.interface_name = JavaToStdString(
jni, GetStringField(jni, j_network_info, j_interface_name_id));
network_info.handle = static_cast<NetworkHandle>(
GetLongField(jni, j_network_info, j_handle_id));
network_info.type = GetNetworkTypeFromJava(
jni, GetObjectField(jni, j_network_info, j_type_id));
jobjectArray j_ip_addresses = static_cast<jobjectArray>(
GetObjectField(jni, j_network_info, j_ip_addresses_id));
GetIPAddressesFromJava(jni, j_ip_addresses, &network_info.ip_addresses);
return network_info;
}
std::string NetworkInformation::ToString() const {
std::stringstream ss;
ss << "NetInfo[name " << interface_name << "; handle " << handle << "; type "
<< type << "; address";
for (const rtc::IPAddress& address : ip_addresses) {
ss << " " << address.ToString();
}
ss << "]";
return ss.str();
}
AndroidNetworkMonitor::AndroidNetworkMonitor()
: j_network_monitor_class_(jni(),
FindClass(jni(), "org/webrtc/NetworkMonitor")),
j_network_monitor_(
jni(),
jni()->CallStaticObjectMethod(
*j_network_monitor_class_,
GetStaticMethodID(jni(),
*j_network_monitor_class_,
"getInstance",
"()Lorg/webrtc/NetworkMonitor;"))) {
CHECK_EXCEPTION(jni()) << "Error during NetworkMonitor.init";
if (android_sdk_int_ <= 0) {
jmethodID m = GetStaticMethodID(jni(), *j_network_monitor_class_,
"androidSdkInt", "()I");
android_sdk_int_ = jni()->CallStaticIntMethod(*j_network_monitor_class_, m);
CHECK_EXCEPTION(jni()) << "Error during NetworkMonitor.androidSdkInt";
}
}
void AndroidNetworkMonitor::Start() {
RTC_CHECK(thread_checker_.CalledOnValidThread());
if (started_) {
return;
}
started_ = true;
// Register |this| as a NetworkBinder so that the SocketServer can bind the
// sockets it creates to a particular network.
worker_thread()->socketserver()->set_network_binder(this);
jmethodID m =
GetMethodID(jni(), *j_network_monitor_class_, "startMonitoring", "(J)V");
jni()->CallVoidMethod(*j_network_monitor_, m, jlongFromPointer(this));
CHECK_EXCEPTION(jni()) << "Error during CallVoidMethod";
}
void AndroidNetworkMonitor::Stop() {
RTC_CHECK(thread_checker_.CalledOnValidThread());
if (!started_) {
return;
}
started_ = false;
// Once the network monitor stops, it will clear all network information and
// it won't find the network handle to bind anyway.
if (worker_thread()->socketserver()->network_binder() == this) {
worker_thread()->socketserver()->set_network_binder(nullptr);
}
jmethodID m =
GetMethodID(jni(), *j_network_monitor_class_, "stopMonitoring", "(J)V");
jni()->CallVoidMethod(*j_network_monitor_, m, jlongFromPointer(this));
CHECK_EXCEPTION(jni()) << "Error during NetworkMonitor.stopMonitoring";
network_handle_by_address_.clear();
network_info_by_handle_.clear();
}
// The implementation is largely taken from UDPSocketPosix::BindToNetwork in
// https://cs.chromium.org/chromium/src/net/udp/udp_socket_posix.cc
rtc::NetworkBindingResult AndroidNetworkMonitor::BindSocketToNetwork(
int socket_fd,
const rtc::IPAddress& address) {
RTC_CHECK(thread_checker_.CalledOnValidThread());
if (socket_fd == 0 /* NETWORK_UNSPECIFIED */) {
return rtc::NetworkBindingResult::NOT_IMPLEMENTED;
}
jmethodID network_binding_supported_id = GetMethodID(
jni(), *j_network_monitor_class_, "networkBindingSupported", "()Z");
// Android prior to Lollipop didn't have support for binding sockets to
// networks. This may also occur if there is no connectivity manager service.
bool network_binding_supported = jni()->CallBooleanMethod(
*j_network_monitor_, network_binding_supported_id);
CHECK_EXCEPTION(jni())
<< "Error during NetworkMonitor.networkBindingSupported";
if (!network_binding_supported) {
LOG(LS_WARNING) << "BindSocketToNetwork is not supported on this platform "
<< "(Android SDK: " << android_sdk_int_ << ")";
return rtc::NetworkBindingResult::NOT_IMPLEMENTED;
}
auto iter = network_handle_by_address_.find(address);
if (iter == network_handle_by_address_.end()) {
return rtc::NetworkBindingResult::ADDRESS_NOT_FOUND;
}
NetworkHandle network_handle = iter->second;
int rv = 0;
if (android_sdk_int_ >= SDK_VERSION_MARSHMALLOW) {
// See declaration of android_setsocknetwork() here:
// http://androidxref.com/6.0.0_r1/xref/development/ndk/platforms/android-M/include/android/multinetwork.h#65
// Function cannot be called directly as it will cause app to fail to load
// on pre-marshmallow devices.
typedef int (*MarshmallowSetNetworkForSocket)(NetworkHandle net,
int socket);
static MarshmallowSetNetworkForSocket marshmallowSetNetworkForSocket;
// This is not thread-safe, but we are running this only on the worker
// thread.
if (!marshmallowSetNetworkForSocket) {
const std::string android_native_lib_path = "libandroid.so";
void* lib = dlopen(android_native_lib_path.c_str(), RTLD_NOW);
if (lib == nullptr) {
LOG(LS_ERROR) << "Library " << android_native_lib_path << " not found!";
return rtc::NetworkBindingResult::NOT_IMPLEMENTED;
}
marshmallowSetNetworkForSocket =
reinterpret_cast<MarshmallowSetNetworkForSocket>(
dlsym(lib, "android_setsocknetwork"));
}
if (!marshmallowSetNetworkForSocket) {
LOG(LS_ERROR) << "Symbol marshmallowSetNetworkForSocket is not found";
return rtc::NetworkBindingResult::NOT_IMPLEMENTED;
}
rv = marshmallowSetNetworkForSocket(network_handle, socket_fd);
} else {
// NOTE: This relies on Android implementation details, but it won't change
// because Lollipop is already released.
typedef int (*LollipopSetNetworkForSocket)(unsigned net, int socket);
static LollipopSetNetworkForSocket lollipopSetNetworkForSocket;
// This is not thread-safe, but we are running this only on the worker
// thread.
if (!lollipopSetNetworkForSocket) {
// Android's netd client library should always be loaded in our address
// space as it shims libc functions like connect().
const std::string net_library_path = "libnetd_client.so";
// Use RTLD_NOW to match Android's prior loading of the library:
// http://androidxref.com/6.0.0_r5/xref/bionic/libc/bionic/NetdClient.cpp#37
// Use RTLD_NOLOAD to assert that the library is already loaded and
// avoid doing any disk IO.
void* lib = dlopen(net_library_path.c_str(), RTLD_NOW | RTLD_NOLOAD);
if (lib == nullptr) {
LOG(LS_ERROR) << "Library " << net_library_path << " not found!";
return rtc::NetworkBindingResult::NOT_IMPLEMENTED;
}
lollipopSetNetworkForSocket =
reinterpret_cast<LollipopSetNetworkForSocket>(
dlsym(lib, "setNetworkForSocket"));
}
if (!lollipopSetNetworkForSocket) {
LOG(LS_ERROR) << "Symbol lollipopSetNetworkForSocket is not found ";
return rtc::NetworkBindingResult::NOT_IMPLEMENTED;
}
rv = lollipopSetNetworkForSocket(network_handle, socket_fd);
}
// If |network| has since disconnected, |rv| will be ENONET. Surface this as
// ERR_NETWORK_CHANGED, rather than MapSystemError(ENONET) which gives back
// the less descriptive ERR_FAILED.
if (rv == 0) {
return rtc::NetworkBindingResult::SUCCESS;
}
if (rv == ENONET) {
return rtc::NetworkBindingResult::NETWORK_CHANGED;
}
return rtc::NetworkBindingResult::FAILURE;
}
void AndroidNetworkMonitor::OnNetworkConnected(
const NetworkInformation& network_info) {
worker_thread()->Invoke<void>(
RTC_FROM_HERE, rtc::Bind(&AndroidNetworkMonitor::OnNetworkConnected_w,
this, network_info));
// Fire SignalNetworksChanged to update the list of networks.
OnNetworksChanged();
}
void AndroidNetworkMonitor::OnNetworkConnected_w(
const NetworkInformation& network_info) {
LOG(LS_INFO) << "Network connected: " << network_info.ToString();
adapter_type_by_name_[network_info.interface_name] =
AdapterTypeFromNetworkType(network_info.type);
network_info_by_handle_[network_info.handle] = network_info;
for (const rtc::IPAddress& address : network_info.ip_addresses) {
network_handle_by_address_[address] = network_info.handle;
}
}
void AndroidNetworkMonitor::OnNetworkDisconnected(NetworkHandle handle) {
LOG(LS_INFO) << "Network disconnected for handle " << handle;
worker_thread()->Invoke<void>(
RTC_FROM_HERE,
rtc::Bind(&AndroidNetworkMonitor::OnNetworkDisconnected_w, this, handle));
}
void AndroidNetworkMonitor::OnNetworkDisconnected_w(NetworkHandle handle) {
auto iter = network_info_by_handle_.find(handle);
if (iter != network_info_by_handle_.end()) {
for (const rtc::IPAddress& address : iter->second.ip_addresses) {
network_handle_by_address_.erase(address);
}
network_info_by_handle_.erase(iter);
}
}
void AndroidNetworkMonitor::SetNetworkInfos(
const std::vector<NetworkInformation>& network_infos) {
RTC_CHECK(thread_checker_.CalledOnValidThread());
network_handle_by_address_.clear();
network_info_by_handle_.clear();
LOG(LS_INFO) << "Android network monitor found " << network_infos.size()
<< " networks";
for (const NetworkInformation& network : network_infos) {
OnNetworkConnected_w(network);
}
}
rtc::AdapterType AndroidNetworkMonitor::GetAdapterType(
const std::string& if_name) {
auto iter = adapter_type_by_name_.find(if_name);
rtc::AdapterType type = (iter == adapter_type_by_name_.end())
? rtc::ADAPTER_TYPE_UNKNOWN
: iter->second;
if (type == rtc::ADAPTER_TYPE_UNKNOWN) {
LOG(LS_WARNING) << "Get an unknown type for the interface " << if_name;
}
return type;
}
rtc::NetworkMonitorInterface*
AndroidNetworkMonitorFactory::CreateNetworkMonitor() {
return new AndroidNetworkMonitor();
}
JNI_FUNCTION_DECLARATION(void,
NetworkMonitor_nativeNotifyConnectionTypeChanged,
JNIEnv* jni,
jobject j_monitor,
jlong j_native_monitor) {
rtc::NetworkMonitorInterface* network_monitor =
reinterpret_cast<rtc::NetworkMonitorInterface*>(j_native_monitor);
network_monitor->OnNetworksChanged();
}
JNI_FUNCTION_DECLARATION(void,
NetworkMonitor_nativeNotifyOfActiveNetworkList,
JNIEnv* jni,
jobject j_monitor,
jlong j_native_monitor,
jobjectArray j_network_infos) {
AndroidNetworkMonitor* network_monitor =
reinterpret_cast<AndroidNetworkMonitor*>(j_native_monitor);
std::vector<NetworkInformation> network_infos;
size_t num_networks = jni->GetArrayLength(j_network_infos);
for (size_t i = 0; i < num_networks; ++i) {
jobject j_network_info = jni->GetObjectArrayElement(j_network_infos, i);
CHECK_EXCEPTION(jni) << "Error during GetObjectArrayElement";
network_infos.push_back(GetNetworkInformationFromJava(jni, j_network_info));
}
network_monitor->SetNetworkInfos(network_infos);
}
JNI_FUNCTION_DECLARATION(void,
NetworkMonitor_nativeNotifyOfNetworkConnect,
JNIEnv* jni,
jobject j_monitor,
jlong j_native_monitor,
jobject j_network_info) {
AndroidNetworkMonitor* network_monitor =
reinterpret_cast<AndroidNetworkMonitor*>(j_native_monitor);
NetworkInformation network_info =
GetNetworkInformationFromJava(jni, j_network_info);
network_monitor->OnNetworkConnected(network_info);
}
JNI_FUNCTION_DECLARATION(void,
NetworkMonitor_nativeNotifyOfNetworkDisconnect,
JNIEnv* jni,
jobject j_monitor,
jlong j_native_monitor,
jlong network_handle) {
AndroidNetworkMonitor* network_monitor =
reinterpret_cast<AndroidNetworkMonitor*>(j_native_monitor);
network_monitor->OnNetworkDisconnected(
static_cast<NetworkHandle>(network_handle));
}
} // namespace jni
} // namespace webrtc
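Both SDK branches above share the same lazy dlopen()/dlsym() lookup; stripped to its generic shape it looks like this (a sketch with the logging removed, using the library and symbol names from the Lollipop branch):

#include <dlfcn.h>

typedef int (*SetNetworkForSocket)(unsigned net, int socket);

SetNetworkForSocket LoadSetNetworkForSocket() {
  static SetNetworkForSocket fn = nullptr;  // Cached after the first call.
  if (!fn) {
    // RTLD_NOLOAD succeeds only if libnetd_client.so is already mapped into
    // the process, so the lookup never touches disk.
    void* lib = dlopen("libnetd_client.so", RTLD_NOW | RTLD_NOLOAD);
    if (lib != nullptr) {
      fn = reinterpret_cast<SetNetworkForSocket>(
          dlsym(lib, "setNetworkForSocket"));
    }
  }
  return fn;  // May still be null; callers must check.
}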

View File

@ -0,0 +1,107 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_SDK_ANDROID_SRC_JNI_PC_ANDROIDNETWORKMONITOR_JNI_H_
#define WEBRTC_SDK_ANDROID_SRC_JNI_PC_ANDROIDNETWORKMONITOR_JNI_H_
#include <stdint.h>
#include <map>
#include <string>
#include <vector>
#include "webrtc/rtc_base/networkmonitor.h"
#include "webrtc/rtc_base/thread_checker.h"
#include "webrtc/sdk/android/src/jni/jni_helpers.h"
namespace webrtc {
namespace jni {
typedef int64_t NetworkHandle;
// C++ equivalent of the Java NetworkMonitorAutoDetect.ConnectionType enum.
enum NetworkType {
NETWORK_UNKNOWN,
NETWORK_ETHERNET,
NETWORK_WIFI,
NETWORK_4G,
NETWORK_3G,
NETWORK_2G,
NETWORK_UNKNOWN_CELLULAR,
NETWORK_BLUETOOTH,
NETWORK_NONE
};
// The information is collected from Android OS so that the native code can get
// the network type and handle (Android network ID) for each interface.
struct NetworkInformation {
std::string interface_name;
NetworkHandle handle;
NetworkType type;
std::vector<rtc::IPAddress> ip_addresses;
std::string ToString() const;
};
class AndroidNetworkMonitor : public rtc::NetworkMonitorBase,
public rtc::NetworkBinderInterface {
public:
AndroidNetworkMonitor();
// TODO(sakal): Remove once downstream dependencies have been updated.
static void SetAndroidContext(JNIEnv* jni, jobject context) {}
void Start() override;
void Stop() override;
rtc::NetworkBindingResult BindSocketToNetwork(
int socket_fd,
const rtc::IPAddress& address) override;
rtc::AdapterType GetAdapterType(const std::string& if_name) override;
void OnNetworkConnected(const NetworkInformation& network_info);
void OnNetworkDisconnected(NetworkHandle network_handle);
// Always expected to be called on the network thread.
void SetNetworkInfos(const std::vector<NetworkInformation>& network_infos);
private:
static jobject application_context_;
static int android_sdk_int_;
JNIEnv* jni() { return AttachCurrentThreadIfNeeded(); }
void OnNetworkConnected_w(const NetworkInformation& network_info);
void OnNetworkDisconnected_w(NetworkHandle network_handle);
ScopedGlobalRef<jclass> j_network_monitor_class_;
ScopedGlobalRef<jobject> j_network_monitor_;
rtc::ThreadChecker thread_checker_;
bool started_ = false;
std::map<std::string, rtc::AdapterType> adapter_type_by_name_;
std::map<rtc::IPAddress, NetworkHandle> network_handle_by_address_;
std::map<NetworkHandle, NetworkInformation> network_info_by_handle_;
};
class AndroidNetworkMonitorFactory : public rtc::NetworkMonitorFactory {
public:
AndroidNetworkMonitorFactory() {}
rtc::NetworkMonitorInterface* CreateNetworkMonitor() override;
};
} // namespace jni
} // namespace webrtc
// TODO(magjed): Remove once external clients are updated.
namespace webrtc_jni {
using webrtc::jni::AndroidNetworkMonitor;
using webrtc::jni::AndroidNetworkMonitorFactory;
} // namespace webrtc_jni
#endif // WEBRTC_SDK_ANDROID_SRC_JNI_PC_ANDROIDNETWORKMONITOR_JNI_H_
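How a monitor factory like this gets registered is version-dependent; a sketch assuming the static rtc::NetworkMonitorFactory::SetFactory() hook from rtc_base/networkmonitor.h:

#include "webrtc/rtc_base/networkmonitor.h"
#include "webrtc/sdk/android/src/jni/pc/androidnetworkmonitor_jni.h"

void InstallAndroidNetworkMonitor() {
  // Ownership of the factory passes to the global registry; from then on,
  // network monitors are created through CreateNetworkMonitor() above.
  rtc::NetworkMonitorFactory::SetFactory(
      new webrtc::jni::AndroidNetworkMonitorFactory());
}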

View File

@ -0,0 +1,28 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "webrtc/sdk/android/src/jni/pc/audio_jni.h"
#include "webrtc/api/audio_codecs/builtin_audio_decoder_factory.h"
#include "webrtc/api/audio_codecs/builtin_audio_encoder_factory.h"
namespace webrtc {
namespace jni {
rtc::scoped_refptr<AudioDecoderFactory> CreateAudioDecoderFactory() {
return CreateBuiltinAudioDecoderFactory();
}
rtc::scoped_refptr<AudioEncoderFactory> CreateAudioEncoderFactory() {
return CreateBuiltinAudioEncoderFactory();
}
} // namespace jni
} // namespace webrtc

View File

@ -0,0 +1,30 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_SDK_ANDROID_SRC_JNI_PC_AUDIO_JNI_H_
#define WEBRTC_SDK_ANDROID_SRC_JNI_PC_AUDIO_JNI_H_
// 'nogncheck' disables the GN include-headers check; we don't want this
// target to depend on audio-related targets.
#include "webrtc/api/audio_codecs/audio_decoder_factory.h" // nogncheck
#include "webrtc/api/audio_codecs/audio_encoder_factory.h" // nogncheck
#include "webrtc/rtc_base/scoped_ref_ptr.h"
namespace webrtc {
namespace jni {
rtc::scoped_refptr<AudioDecoderFactory> CreateAudioDecoderFactory();
rtc::scoped_refptr<AudioEncoderFactory> CreateAudioEncoderFactory();
} // namespace jni
} // namespace webrtc
#endif // WEBRTC_SDK_ANDROID_SRC_JNI_PC_AUDIO_JNI_H_
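A sketch of how these indirections are typically consumed when assembling a peer connection factory (only the two calls below come from this header; the surrounding wiring is omitted):

#include "webrtc/sdk/android/src/jni/pc/audio_jni.h"

void Example() {
  rtc::scoped_refptr<webrtc::AudioDecoderFactory> decoder_factory =
      webrtc::jni::CreateAudioDecoderFactory();
  rtc::scoped_refptr<webrtc::AudioEncoderFactory> encoder_factory =
      webrtc::jni::CreateAudioEncoderFactory();
  // Both are then handed to the PeerConnectionFactory construction path.
}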

View File

@ -0,0 +1,29 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "webrtc/api/mediastreaminterface.h"
#include "webrtc/sdk/android/src/jni/jni_helpers.h"
namespace webrtc {
namespace jni {
JNI_FUNCTION_DECLARATION(void,
AudioTrack_nativeSetVolume,
JNIEnv*,
jclass,
jlong j_p,
jdouble volume) {
rtc::scoped_refptr<AudioSourceInterface> source(
reinterpret_cast<AudioTrackInterface*>(j_p)->GetSource());
source->SetVolume(volume);
}
} // namespace jni
} // namespace webrtc

View File

@ -0,0 +1,81 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "webrtc/rtc_base/logsinks.h"
#include "webrtc/sdk/android/src/jni/jni_helpers.h"
namespace webrtc {
namespace jni {
JNI_FUNCTION_DECLARATION(jlong,
CallSessionFileRotatingLogSink_nativeAddSink,
JNIEnv* jni,
jclass,
jstring j_dirPath,
jint j_maxFileSize,
jint j_severity) {
std::string dir_path = JavaToStdString(jni, j_dirPath);
rtc::CallSessionFileRotatingLogSink* sink =
new rtc::CallSessionFileRotatingLogSink(dir_path, j_maxFileSize);
if (!sink->Init()) {
LOG_V(rtc::LoggingSeverity::LS_WARNING)
<< "Failed to init CallSessionFileRotatingLogSink for path "
<< dir_path;
delete sink;
return 0;
}
rtc::LogMessage::AddLogToStream(
sink, static_cast<rtc::LoggingSeverity>(j_severity));
return jlongFromPointer(sink);
}
JNI_FUNCTION_DECLARATION(void,
CallSessionFileRotatingLogSink_nativeDeleteSink,
JNIEnv* jni,
jclass,
jlong j_sink) {
rtc::CallSessionFileRotatingLogSink* sink =
reinterpret_cast<rtc::CallSessionFileRotatingLogSink*>(j_sink);
rtc::LogMessage::RemoveLogToStream(sink);
delete sink;
}
JNI_FUNCTION_DECLARATION(jbyteArray,
CallSessionFileRotatingLogSink_nativeGetLogData,
JNIEnv* jni,
jclass,
jstring j_dirPath) {
std::string dir_path = JavaToStdString(jni, j_dirPath);
std::unique_ptr<rtc::CallSessionFileRotatingStream> stream(
new rtc::CallSessionFileRotatingStream(dir_path));
if (!stream->Open()) {
LOG_V(rtc::LoggingSeverity::LS_WARNING)
<< "Failed to open CallSessionFileRotatingStream for path " << dir_path;
return jni->NewByteArray(0);
}
size_t log_size = 0;
if (!stream->GetSize(&log_size) || log_size == 0) {
LOG_V(rtc::LoggingSeverity::LS_WARNING)
<< "CallSessionFileRotatingStream returns 0 size for path " << dir_path;
return jni->NewByteArray(0);
}
size_t read = 0;
std::unique_ptr<jbyte[]> buffer(new jbyte[log_size]);
stream->ReadAll(buffer.get(), log_size, &read, nullptr);
jbyteArray result = jni->NewByteArray(read);
jni->SetByteArrayRegion(result, 0, read, buffer.get());
return result;
}
} // namespace jni
} // namespace webrtc
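The JNI entry points above are thin wrappers over the native sink; the equivalent direct C++ usage looks like this (a sketch; the directory path is hypothetical):

#include "webrtc/rtc_base/logging.h"
#include "webrtc/rtc_base/logsinks.h"

void AttachRotatingSink() {
  // 10 MB total across the rotating set of files.
  auto* sink = new rtc::CallSessionFileRotatingLogSink("/data/local/tmp/logs",
                                                       10 * 1024 * 1024);
  if (!sink->Init()) {
    delete sink;
    return;
  }
  rtc::LogMessage::AddLogToStream(sink, rtc::LS_INFO);
}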

View File

@ -0,0 +1,101 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include <limits>
#include <memory>
#include "webrtc/api/datachannelinterface.h"
#include "webrtc/sdk/android/src/jni/jni_helpers.h"
#include "webrtc/sdk/android/src/jni/pc/datachannelobserver_jni.h"
namespace webrtc {
namespace jni {
static DataChannelInterface* ExtractNativeDC(JNIEnv* jni, jobject j_dc) {
jfieldID native_dc_id =
GetFieldID(jni, GetObjectClass(jni, j_dc), "nativeDataChannel", "J");
jlong j_d = GetLongField(jni, j_dc, native_dc_id);
return reinterpret_cast<DataChannelInterface*>(j_d);
}
JNI_FUNCTION_DECLARATION(jlong,
DataChannel_registerObserverNative,
JNIEnv* jni,
jobject j_dc,
jobject j_observer) {
std::unique_ptr<DataChannelObserverJni> observer(
new DataChannelObserverJni(jni, j_observer));
ExtractNativeDC(jni, j_dc)->RegisterObserver(observer.get());
return jlongFromPointer(observer.release());
}
JNI_FUNCTION_DECLARATION(void,
DataChannel_unregisterObserverNative,
JNIEnv* jni,
jobject j_dc,
jlong native_observer) {
ExtractNativeDC(jni, j_dc)->UnregisterObserver();
delete reinterpret_cast<DataChannelObserverJni*>(native_observer);
}
JNI_FUNCTION_DECLARATION(jstring,
DataChannel_label,
JNIEnv* jni,
jobject j_dc) {
return JavaStringFromStdString(jni, ExtractNativeDC(jni, j_dc)->label());
}
JNI_FUNCTION_DECLARATION(jint, DataChannel_id, JNIEnv* jni, jobject j_dc) {
int id = ExtractNativeDC(jni, j_dc)->id();
RTC_CHECK_LE(id, std::numeric_limits<int32_t>::max())
<< "id overflowed jint!";
return static_cast<jint>(id);
}
JNI_FUNCTION_DECLARATION(jobject,
DataChannel_state,
JNIEnv* jni,
jobject j_dc) {
return JavaEnumFromIndexAndClassName(jni, "DataChannel$State",
ExtractNativeDC(jni, j_dc)->state());
}
JNI_FUNCTION_DECLARATION(jlong,
DataChannel_bufferedAmount,
JNIEnv* jni,
jobject j_dc) {
uint64_t buffered_amount = ExtractNativeDC(jni, j_dc)->buffered_amount();
RTC_CHECK_LE(buffered_amount, std::numeric_limits<int64_t>::max())
<< "buffered_amount overflowed jlong!";
return static_cast<jlong>(buffered_amount);
}
JNI_FUNCTION_DECLARATION(void, DataChannel_close, JNIEnv* jni, jobject j_dc) {
ExtractNativeDC(jni, j_dc)->Close();
}
JNI_FUNCTION_DECLARATION(jboolean,
DataChannel_sendNative,
JNIEnv* jni,
jobject j_dc,
jbyteArray data,
jboolean binary) {
jbyte* bytes = jni->GetByteArrayElements(data, NULL);
bool ret = ExtractNativeDC(jni, j_dc)->Send(DataBuffer(
rtc::CopyOnWriteBuffer(bytes, jni->GetArrayLength(data)), binary));
jni->ReleaseByteArrayElements(data, bytes, JNI_ABORT);
return ret;
}
JNI_FUNCTION_DECLARATION(void, DataChannel_dispose, JNIEnv* jni, jobject j_dc) {
CHECK_RELEASE(ExtractNativeDC(jni, j_dc));
}
} // namespace jni
} // namespace webrtc
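The helpers above round-trip raw C++ pointers through Java long fields. The mechanism in isolation (a plain C++ sketch; DataChannel is a stand-in type, and jlongFromPointer from jni_helpers.h is essentially the first cast):

#include <cstdint>

struct DataChannel {};  // Stand-in for DataChannelInterface.

int main() {
  DataChannel dc;
  // Store: pointer -> 64-bit value, as written into Java's nativeDataChannel.
  int64_t handle = reinterpret_cast<int64_t>(&dc);
  // Load: 64-bit value -> pointer, as done by ExtractNativeDC().
  DataChannel* back = reinterpret_cast<DataChannel*>(handle);
  return back == &dc ? 0 : 1;  // Lossless for pointers up to 64 bits.
}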

View File

@ -0,0 +1,67 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "webrtc/sdk/android/src/jni/pc/datachannelobserver_jni.h"
#include "webrtc/sdk/android/src/jni/classreferenceholder.h"
namespace webrtc {
namespace jni {
// Convenience, used since callbacks occur on the signaling thread, which may
// be a non-Java thread.
static JNIEnv* jni() {
return AttachCurrentThreadIfNeeded();
}
DataChannelObserverJni::DataChannelObserverJni(JNIEnv* jni, jobject j_observer)
: j_observer_global_(jni, j_observer),
j_observer_class_(jni, GetObjectClass(jni, j_observer)),
j_buffer_class_(jni, FindClass(jni, "org/webrtc/DataChannel$Buffer")),
j_on_buffered_amount_change_mid_(GetMethodID(jni,
*j_observer_class_,
"onBufferedAmountChange",
"(J)V")),
j_on_state_change_mid_(
GetMethodID(jni, *j_observer_class_, "onStateChange", "()V")),
j_on_message_mid_(GetMethodID(jni,
*j_observer_class_,
"onMessage",
"(Lorg/webrtc/DataChannel$Buffer;)V")),
j_buffer_ctor_(GetMethodID(jni,
*j_buffer_class_,
"<init>",
"(Ljava/nio/ByteBuffer;Z)V")) {}
void DataChannelObserverJni::OnBufferedAmountChange(uint64_t previous_amount) {
ScopedLocalRefFrame local_ref_frame(jni());
jni()->CallVoidMethod(*j_observer_global_, j_on_buffered_amount_change_mid_,
previous_amount);
CHECK_EXCEPTION(jni()) << "error during CallVoidMethod";
}
void DataChannelObserverJni::OnStateChange() {
ScopedLocalRefFrame local_ref_frame(jni());
jni()->CallVoidMethod(*j_observer_global_, j_on_state_change_mid_);
CHECK_EXCEPTION(jni()) << "error during CallVoidMethod";
}
void DataChannelObserverJni::OnMessage(const DataBuffer& buffer) {
ScopedLocalRefFrame local_ref_frame(jni());
jobject byte_buffer = jni()->NewDirectByteBuffer(
const_cast<char*>(buffer.data.data<char>()), buffer.data.size());
jobject j_buffer = jni()->NewObject(*j_buffer_class_, j_buffer_ctor_,
byte_buffer, buffer.binary);
jni()->CallVoidMethod(*j_observer_global_, j_on_message_mid_, j_buffer);
CHECK_EXCEPTION(jni()) << "error during CallVoidMethod";
}
} // namespace jni
} // namespace webrtc

View File

@ -0,0 +1,44 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_SDK_ANDROID_SRC_JNI_PC_DATACHANNELOBSERVER_JNI_H_
#define WEBRTC_SDK_ANDROID_SRC_JNI_PC_DATACHANNELOBSERVER_JNI_H_
#include "webrtc/api/datachannelinterface.h"
#include "webrtc/sdk/android/src/jni/jni_helpers.h"
namespace webrtc {
namespace jni {
// Adapter for a Java DataChannel$Observer presenting a C++ DataChannelObserver
// and dispatching the callback from C++ back to Java.
class DataChannelObserverJni : public DataChannelObserver {
public:
DataChannelObserverJni(JNIEnv* jni, jobject j_observer);
virtual ~DataChannelObserverJni() {}
void OnBufferedAmountChange(uint64_t previous_amount) override;
void OnStateChange() override;
void OnMessage(const DataBuffer& buffer) override;
private:
const ScopedGlobalRef<jobject> j_observer_global_;
const ScopedGlobalRef<jclass> j_observer_class_;
const ScopedGlobalRef<jclass> j_buffer_class_;
const jmethodID j_on_buffered_amount_change_mid_;
const jmethodID j_on_state_change_mid_;
const jmethodID j_on_message_mid_;
const jmethodID j_buffer_ctor_;
};
} // namespace jni
} // namespace webrtc
#endif // WEBRTC_SDK_ANDROID_SRC_JNI_PC_DATACHANNELOBSERVER_JNI_H_

View File

@ -0,0 +1,67 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "webrtc/api/dtmfsenderinterface.h"
#include "webrtc/sdk/android/src/jni/jni_helpers.h"
namespace webrtc {
namespace jni {
JNI_FUNCTION_DECLARATION(jboolean,
DtmfSender_nativeCanInsertDtmf,
JNIEnv* jni,
jclass,
jlong j_dtmf_sender_pointer) {
return reinterpret_cast<DtmfSenderInterface*>(j_dtmf_sender_pointer)
->CanInsertDtmf();
}
JNI_FUNCTION_DECLARATION(jboolean,
DtmfSender_nativeInsertDtmf,
JNIEnv* jni,
jclass,
jlong j_dtmf_sender_pointer,
jstring tones,
jint duration,
jint inter_tone_gap) {
return reinterpret_cast<DtmfSenderInterface*>(j_dtmf_sender_pointer)
->InsertDtmf(JavaToStdString(jni, tones), duration, inter_tone_gap);
}
JNI_FUNCTION_DECLARATION(jstring,
DtmfSender_nativeTones,
JNIEnv* jni,
jclass,
jlong j_dtmf_sender_pointer) {
return JavaStringFromStdString(
jni,
reinterpret_cast<DtmfSenderInterface*>(j_dtmf_sender_pointer)->tones());
}
JNI_FUNCTION_DECLARATION(jint,
DtmfSender_nativeDuration,
JNIEnv* jni,
jclass,
jlong j_dtmf_sender_pointer) {
return reinterpret_cast<DtmfSenderInterface*>(j_dtmf_sender_pointer)
->duration();
}
JNI_FUNCTION_DECLARATION(jint,
DtmfSender_nativeInterToneGap,
JNIEnv* jni,
jclass,
jlong j_dtmf_sender_pointer) {
return reinterpret_cast<DtmfSenderInterface*>(j_dtmf_sender_pointer)
->inter_tone_gap();
}
} // namespace jni
} // namespace webrtc

View File

@ -0,0 +1,711 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "webrtc/sdk/android/src/jni/pc/java_native_conversion.h"
#include <string>
#include "webrtc/pc/webrtcsdp.h"
#include "webrtc/sdk/android/src/jni/classreferenceholder.h"
namespace webrtc {
namespace jni {
DataChannelInit JavaToNativeDataChannelInit(JNIEnv* jni, jobject j_init) {
DataChannelInit init;
jclass j_init_class = FindClass(jni, "org/webrtc/DataChannel$Init");
jfieldID ordered_id = GetFieldID(jni, j_init_class, "ordered", "Z");
jfieldID max_retransmit_time_id =
GetFieldID(jni, j_init_class, "maxRetransmitTimeMs", "I");
jfieldID max_retransmits_id =
GetFieldID(jni, j_init_class, "maxRetransmits", "I");
jfieldID protocol_id =
GetFieldID(jni, j_init_class, "protocol", "Ljava/lang/String;");
jfieldID negotiated_id = GetFieldID(jni, j_init_class, "negotiated", "Z");
jfieldID id_id = GetFieldID(jni, j_init_class, "id", "I");
init.ordered = GetBooleanField(jni, j_init, ordered_id);
init.maxRetransmitTime = GetIntField(jni, j_init, max_retransmit_time_id);
init.maxRetransmits = GetIntField(jni, j_init, max_retransmits_id);
init.protocol =
JavaToStdString(jni, GetStringField(jni, j_init, protocol_id));
init.negotiated = GetBooleanField(jni, j_init, negotiated_id);
init.id = GetIntField(jni, j_init, id_id);
return init;
}
jobject NativeToJavaMediaType(JNIEnv* jni, cricket::MediaType media_type) {
jclass j_media_type_class =
FindClass(jni, "org/webrtc/MediaStreamTrack$MediaType");
const char* media_type_str = nullptr;
switch (media_type) {
case cricket::MEDIA_TYPE_AUDIO:
media_type_str = "MEDIA_TYPE_AUDIO";
break;
case cricket::MEDIA_TYPE_VIDEO:
media_type_str = "MEDIA_TYPE_VIDEO";
break;
case cricket::MEDIA_TYPE_DATA:
RTC_NOTREACHED();
break;
}
jfieldID j_media_type_fid =
GetStaticFieldID(jni, j_media_type_class, media_type_str,
"Lorg/webrtc/MediaStreamTrack$MediaType;");
return GetStaticObjectField(jni, j_media_type_class, j_media_type_fid);
}
cricket::MediaType JavaToNativeMediaType(JNIEnv* jni, jobject j_media_type) {
jclass j_media_type_class =
FindClass(jni, "org/webrtc/MediaStreamTrack$MediaType");
jmethodID j_name_id =
GetMethodID(jni, j_media_type_class, "name", "()Ljava/lang/String;");
jstring j_type_string =
(jstring)jni->CallObjectMethod(j_media_type, j_name_id);
CHECK_EXCEPTION(jni) << "error during CallObjectMethod";
std::string type_string = JavaToStdString(jni, j_type_string);
RTC_DCHECK(type_string == "MEDIA_TYPE_AUDIO" ||
type_string == "MEDIA_TYPE_VIDEO")
<< "Media type: " << type_string;
return type_string == "MEDIA_TYPE_AUDIO" ? cricket::MEDIA_TYPE_AUDIO
: cricket::MEDIA_TYPE_VIDEO;
}
cricket::Candidate JavaToNativeCandidate(JNIEnv* jni, jobject j_candidate) {
jclass j_candidate_class = GetObjectClass(jni, j_candidate);
jfieldID j_sdp_mid_id =
GetFieldID(jni, j_candidate_class, "sdpMid", "Ljava/lang/String;");
std::string sdp_mid =
JavaToStdString(jni, GetStringField(jni, j_candidate, j_sdp_mid_id));
jfieldID j_sdp_id =
GetFieldID(jni, j_candidate_class, "sdp", "Ljava/lang/String;");
std::string sdp =
JavaToStdString(jni, GetStringField(jni, j_candidate, j_sdp_id));
cricket::Candidate candidate;
if (!SdpDeserializeCandidate(sdp_mid, sdp, &candidate, NULL)) {
LOG(LS_ERROR) << "SdpDescrializeCandidate failed with sdp " << sdp;
}
return candidate;
}
jobject NativeToJavaCandidate(JNIEnv* jni,
jclass* candidate_class,
const cricket::Candidate& candidate) {
std::string sdp = SdpSerializeCandidate(candidate);
RTC_CHECK(!sdp.empty()) << "got an empty ICE candidate";
jmethodID ctor = GetMethodID(jni, *candidate_class, "<init>",
"(Ljava/lang/String;ILjava/lang/String;)V");
jstring j_mid = JavaStringFromStdString(jni, candidate.transport_name());
jstring j_sdp = JavaStringFromStdString(jni, sdp);
// sdp_mline_index is not used; pass the invalid value -1.
jobject j_candidate =
jni->NewObject(*candidate_class, ctor, j_mid, -1, j_sdp);
CHECK_EXCEPTION(jni) << "error during Java Candidate NewObject";
return j_candidate;
}
jobjectArray NativeToJavaCandidateArray(
JNIEnv* jni,
const std::vector<cricket::Candidate>& candidates) {
jclass candidate_class = FindClass(jni, "org/webrtc/IceCandidate");
jobjectArray java_candidates =
jni->NewObjectArray(candidates.size(), candidate_class, NULL);
int i = 0;
for (const cricket::Candidate& candidate : candidates) {
jobject j_candidate =
NativeToJavaCandidate(jni, &candidate_class, candidate);
jni->SetObjectArrayElement(java_candidates, i++, j_candidate);
}
return java_candidates;
}
SessionDescriptionInterface* JavaToNativeSessionDescription(JNIEnv* jni,
jobject j_sdp) {
jfieldID j_type_id = GetFieldID(jni, GetObjectClass(jni, j_sdp), "type",
"Lorg/webrtc/SessionDescription$Type;");
jobject j_type = GetObjectField(jni, j_sdp, j_type_id);
jmethodID j_canonical_form_id =
GetMethodID(jni, GetObjectClass(jni, j_type), "canonicalForm",
"()Ljava/lang/String;");
jstring j_type_string =
(jstring)jni->CallObjectMethod(j_type, j_canonical_form_id);
CHECK_EXCEPTION(jni) << "error during CallObjectMethod";
std::string std_type = JavaToStdString(jni, j_type_string);
jfieldID j_description_id = GetFieldID(jni, GetObjectClass(jni, j_sdp),
"description", "Ljava/lang/String;");
jstring j_description = (jstring)GetObjectField(jni, j_sdp, j_description_id);
std::string std_description = JavaToStdString(jni, j_description);
return CreateSessionDescription(std_type, std_description, NULL);
}
jobject NativeToJavaSessionDescription(
JNIEnv* jni,
const SessionDescriptionInterface* desc) {
std::string sdp;
RTC_CHECK(desc->ToString(&sdp)) << "got so far: " << sdp;
jstring j_description = JavaStringFromStdString(jni, sdp);
jclass j_type_class = FindClass(jni, "org/webrtc/SessionDescription$Type");
jmethodID j_type_from_canonical = GetStaticMethodID(
jni, j_type_class, "fromCanonicalForm",
"(Ljava/lang/String;)Lorg/webrtc/SessionDescription$Type;");
jstring j_type_string = JavaStringFromStdString(jni, desc->type());
jobject j_type = jni->CallStaticObjectMethod(
j_type_class, j_type_from_canonical, j_type_string);
CHECK_EXCEPTION(jni) << "error during CallObjectMethod";
jclass j_sdp_class = FindClass(jni, "org/webrtc/SessionDescription");
jmethodID j_sdp_ctor =
GetMethodID(jni, j_sdp_class, "<init>",
"(Lorg/webrtc/SessionDescription$Type;Ljava/lang/String;)V");
jobject j_sdp =
jni->NewObject(j_sdp_class, j_sdp_ctor, j_type, j_description);
CHECK_EXCEPTION(jni) << "error during NewObject";
return j_sdp;
}
PeerConnectionFactoryInterface::Options
JavaToNativePeerConnectionFactoryOptions(JNIEnv* jni, jobject options) {
jclass options_class = jni->GetObjectClass(options);
jfieldID network_ignore_mask_field =
jni->GetFieldID(options_class, "networkIgnoreMask", "I");
int network_ignore_mask =
jni->GetIntField(options, network_ignore_mask_field);
jfieldID disable_encryption_field =
jni->GetFieldID(options_class, "disableEncryption", "Z");
bool disable_encryption =
jni->GetBooleanField(options, disable_encryption_field);
jfieldID disable_network_monitor_field =
jni->GetFieldID(options_class, "disableNetworkMonitor", "Z");
bool disable_network_monitor =
jni->GetBooleanField(options, disable_network_monitor_field);
PeerConnectionFactoryInterface::Options native_options;
// This doesn't necessarily match the C++ version of this struct; feel free
// to add more parameters as necessary.
native_options.network_ignore_mask = network_ignore_mask;
native_options.disable_encryption = disable_encryption;
native_options.disable_network_monitor = disable_network_monitor;
return native_options;
}
PeerConnectionInterface::IceTransportsType JavaToNativeIceTransportsType(
JNIEnv* jni,
jobject j_ice_transports_type) {
std::string enum_name =
GetJavaEnumName(jni, "org/webrtc/PeerConnection$IceTransportsType",
j_ice_transports_type);
if (enum_name == "ALL")
return PeerConnectionInterface::kAll;
if (enum_name == "RELAY")
return PeerConnectionInterface::kRelay;
if (enum_name == "NOHOST")
return PeerConnectionInterface::kNoHost;
if (enum_name == "NONE")
return PeerConnectionInterface::kNone;
RTC_CHECK(false) << "Unexpected IceTransportsType enum_name " << enum_name;
return PeerConnectionInterface::kAll;
}
PeerConnectionInterface::BundlePolicy JavaToNativeBundlePolicy(
JNIEnv* jni,
jobject j_bundle_policy) {
std::string enum_name = GetJavaEnumName(
jni, "org/webrtc/PeerConnection$BundlePolicy", j_bundle_policy);
if (enum_name == "BALANCED")
return PeerConnectionInterface::kBundlePolicyBalanced;
if (enum_name == "MAXBUNDLE")
return PeerConnectionInterface::kBundlePolicyMaxBundle;
if (enum_name == "MAXCOMPAT")
return PeerConnectionInterface::kBundlePolicyMaxCompat;
RTC_CHECK(false) << "Unexpected BundlePolicy enum_name " << enum_name;
return PeerConnectionInterface::kBundlePolicyBalanced;
}
PeerConnectionInterface::RtcpMuxPolicy JavaToNativeRtcpMuxPolicy(
JNIEnv* jni,
jobject j_rtcp_mux_policy) {
std::string enum_name = GetJavaEnumName(
jni, "org/webrtc/PeerConnection$RtcpMuxPolicy", j_rtcp_mux_policy);
if (enum_name == "NEGOTIATE")
return PeerConnectionInterface::kRtcpMuxPolicyNegotiate;
if (enum_name == "REQUIRE")
return PeerConnectionInterface::kRtcpMuxPolicyRequire;
RTC_CHECK(false) << "Unexpected RtcpMuxPolicy enum_name " << enum_name;
return PeerConnectionInterface::kRtcpMuxPolicyNegotiate;
}
PeerConnectionInterface::TcpCandidatePolicy JavaToNativeTcpCandidatePolicy(
JNIEnv* jni,
jobject j_tcp_candidate_policy) {
std::string enum_name =
GetJavaEnumName(jni, "org/webrtc/PeerConnection$TcpCandidatePolicy",
j_tcp_candidate_policy);
if (enum_name == "ENABLED")
return PeerConnectionInterface::kTcpCandidatePolicyEnabled;
if (enum_name == "DISABLED")
return PeerConnectionInterface::kTcpCandidatePolicyDisabled;
RTC_CHECK(false) << "Unexpected TcpCandidatePolicy enum_name " << enum_name;
return PeerConnectionInterface::kTcpCandidatePolicyEnabled;
}
PeerConnectionInterface::CandidateNetworkPolicy
JavaToNativeCandidateNetworkPolicy(JNIEnv* jni,
jobject j_candidate_network_policy) {
std::string enum_name =
GetJavaEnumName(jni, "org/webrtc/PeerConnection$CandidateNetworkPolicy",
j_candidate_network_policy);
if (enum_name == "ALL")
return PeerConnectionInterface::kCandidateNetworkPolicyAll;
if (enum_name == "LOW_COST")
return PeerConnectionInterface::kCandidateNetworkPolicyLowCost;
RTC_CHECK(false) << "Unexpected CandidateNetworkPolicy enum_name "
<< enum_name;
return PeerConnectionInterface::kCandidateNetworkPolicyAll;
}
rtc::KeyType JavaToNativeKeyType(JNIEnv* jni, jobject j_key_type) {
std::string enum_name =
GetJavaEnumName(jni, "org/webrtc/PeerConnection$KeyType", j_key_type);
if (enum_name == "RSA")
return rtc::KT_RSA;
if (enum_name == "ECDSA")
return rtc::KT_ECDSA;
RTC_CHECK(false) << "Unexpected KeyType enum_name " << enum_name;
return rtc::KT_ECDSA;
}
PeerConnectionInterface::ContinualGatheringPolicy
JavaToNativeContinualGatheringPolicy(JNIEnv* jni, jobject j_gathering_policy) {
std::string enum_name =
GetJavaEnumName(jni, "org/webrtc/PeerConnection$ContinualGatheringPolicy",
j_gathering_policy);
if (enum_name == "GATHER_ONCE")
return PeerConnectionInterface::GATHER_ONCE;
if (enum_name == "GATHER_CONTINUALLY")
return PeerConnectionInterface::GATHER_CONTINUALLY;
RTC_CHECK(false) << "Unexpected ContinualGatheringPolicy enum name "
<< enum_name;
return PeerConnectionInterface::GATHER_ONCE;
}
PeerConnectionInterface::TlsCertPolicy JavaToNativeTlsCertPolicy(
JNIEnv* jni,
jobject j_ice_server_tls_cert_policy) {
std::string enum_name =
GetJavaEnumName(jni, "org/webrtc/PeerConnection$TlsCertPolicy",
j_ice_server_tls_cert_policy);
if (enum_name == "TLS_CERT_POLICY_SECURE")
return PeerConnectionInterface::kTlsCertPolicySecure;
if (enum_name == "TLS_CERT_POLICY_INSECURE_NO_CHECK")
return PeerConnectionInterface::kTlsCertPolicyInsecureNoCheck;
RTC_CHECK(false) << "Unexpected TlsCertPolicy enum_name " << enum_name;
return PeerConnectionInterface::kTlsCertPolicySecure;
}
void JavaToNativeIceServers(JNIEnv* jni,
jobject j_ice_servers,
PeerConnectionInterface::IceServers* ice_servers) {
for (jobject j_ice_server : Iterable(jni, j_ice_servers)) {
jclass j_ice_server_class = GetObjectClass(jni, j_ice_server);
jfieldID j_ice_server_urls_id =
GetFieldID(jni, j_ice_server_class, "urls", "Ljava/util/List;");
jfieldID j_ice_server_username_id =
GetFieldID(jni, j_ice_server_class, "username", "Ljava/lang/String;");
jfieldID j_ice_server_password_id =
GetFieldID(jni, j_ice_server_class, "password", "Ljava/lang/String;");
jfieldID j_ice_server_tls_cert_policy_id =
GetFieldID(jni, j_ice_server_class, "tlsCertPolicy",
"Lorg/webrtc/PeerConnection$TlsCertPolicy;");
jobject j_ice_server_tls_cert_policy =
GetObjectField(jni, j_ice_server, j_ice_server_tls_cert_policy_id);
jfieldID j_ice_server_hostname_id =
GetFieldID(jni, j_ice_server_class, "hostname", "Ljava/lang/String;");
jfieldID j_ice_server_tls_alpn_protocols_id = GetFieldID(
jni, j_ice_server_class, "tlsAlpnProtocols", "Ljava/util/List;");
jfieldID j_ice_server_tls_elliptic_curves_id = GetFieldID(
jni, j_ice_server_class, "tlsEllipticCurves", "Ljava/util/List;");
jobject urls = GetObjectField(jni, j_ice_server, j_ice_server_urls_id);
jstring username = reinterpret_cast<jstring>(
GetObjectField(jni, j_ice_server, j_ice_server_username_id));
jstring password = reinterpret_cast<jstring>(
GetObjectField(jni, j_ice_server, j_ice_server_password_id));
PeerConnectionInterface::TlsCertPolicy tls_cert_policy =
JavaToNativeTlsCertPolicy(jni, j_ice_server_tls_cert_policy);
jstring hostname = reinterpret_cast<jstring>(
GetObjectField(jni, j_ice_server, j_ice_server_hostname_id));
jobject tls_alpn_protocols = GetNullableObjectField(
jni, j_ice_server, j_ice_server_tls_alpn_protocols_id);
jobject tls_elliptic_curves = GetNullableObjectField(
jni, j_ice_server, j_ice_server_tls_elliptic_curves_id);
PeerConnectionInterface::IceServer server;
server.urls = JavaToStdVectorStrings(jni, urls);
server.username = JavaToStdString(jni, username);
server.password = JavaToStdString(jni, password);
server.tls_cert_policy = tls_cert_policy;
server.hostname = JavaToStdString(jni, hostname);
server.tls_alpn_protocols = JavaToStdVectorStrings(jni, tls_alpn_protocols);
server.tls_elliptic_curves =
JavaToStdVectorStrings(jni, tls_elliptic_curves);
ice_servers->push_back(server);
}
}
void JavaToNativeRTCConfiguration(
JNIEnv* jni,
jobject j_rtc_config,
PeerConnectionInterface::RTCConfiguration* rtc_config) {
jclass j_rtc_config_class = GetObjectClass(jni, j_rtc_config);
jfieldID j_ice_transports_type_id =
GetFieldID(jni, j_rtc_config_class, "iceTransportsType",
"Lorg/webrtc/PeerConnection$IceTransportsType;");
jobject j_ice_transports_type =
GetObjectField(jni, j_rtc_config, j_ice_transports_type_id);
jfieldID j_bundle_policy_id =
GetFieldID(jni, j_rtc_config_class, "bundlePolicy",
"Lorg/webrtc/PeerConnection$BundlePolicy;");
jobject j_bundle_policy =
GetObjectField(jni, j_rtc_config, j_bundle_policy_id);
jfieldID j_rtcp_mux_policy_id =
GetFieldID(jni, j_rtc_config_class, "rtcpMuxPolicy",
"Lorg/webrtc/PeerConnection$RtcpMuxPolicy;");
jobject j_rtcp_mux_policy =
GetObjectField(jni, j_rtc_config, j_rtcp_mux_policy_id);
jfieldID j_tcp_candidate_policy_id =
GetFieldID(jni, j_rtc_config_class, "tcpCandidatePolicy",
"Lorg/webrtc/PeerConnection$TcpCandidatePolicy;");
jobject j_tcp_candidate_policy =
GetObjectField(jni, j_rtc_config, j_tcp_candidate_policy_id);
jfieldID j_candidate_network_policy_id =
GetFieldID(jni, j_rtc_config_class, "candidateNetworkPolicy",
"Lorg/webrtc/PeerConnection$CandidateNetworkPolicy;");
jobject j_candidate_network_policy =
GetObjectField(jni, j_rtc_config, j_candidate_network_policy_id);
jfieldID j_ice_servers_id =
GetFieldID(jni, j_rtc_config_class, "iceServers", "Ljava/util/List;");
jobject j_ice_servers = GetObjectField(jni, j_rtc_config, j_ice_servers_id);
jfieldID j_audio_jitter_buffer_max_packets_id =
GetFieldID(jni, j_rtc_config_class, "audioJitterBufferMaxPackets", "I");
jfieldID j_audio_jitter_buffer_fast_accelerate_id = GetFieldID(
jni, j_rtc_config_class, "audioJitterBufferFastAccelerate", "Z");
jfieldID j_ice_connection_receiving_timeout_id =
GetFieldID(jni, j_rtc_config_class, "iceConnectionReceivingTimeout", "I");
jfieldID j_ice_backup_candidate_pair_ping_interval_id = GetFieldID(
jni, j_rtc_config_class, "iceBackupCandidatePairPingInterval", "I");
jfieldID j_continual_gathering_policy_id =
GetFieldID(jni, j_rtc_config_class, "continualGatheringPolicy",
"Lorg/webrtc/PeerConnection$ContinualGatheringPolicy;");
jobject j_continual_gathering_policy =
GetObjectField(jni, j_rtc_config, j_continual_gathering_policy_id);
jfieldID j_ice_candidate_pool_size_id =
GetFieldID(jni, j_rtc_config_class, "iceCandidatePoolSize", "I");
jfieldID j_presume_writable_when_fully_relayed_id = GetFieldID(
jni, j_rtc_config_class, "presumeWritableWhenFullyRelayed", "Z");
jfieldID j_prune_turn_ports_id =
GetFieldID(jni, j_rtc_config_class, "pruneTurnPorts", "Z");
jfieldID j_ice_check_min_interval_id = GetFieldID(
jni, j_rtc_config_class, "iceCheckMinInterval", "Ljava/lang/Integer;");
jclass j_integer_class = jni->FindClass("java/lang/Integer");
jmethodID int_value_id = GetMethodID(jni, j_integer_class, "intValue", "()I");
jfieldID j_disable_ipv6_on_wifi_id =
GetFieldID(jni, j_rtc_config_class, "disableIPv6OnWifi", "Z");
jfieldID j_max_ipv6_networks_id =
GetFieldID(jni, j_rtc_config_class, "maxIPv6Networks", "I");
jfieldID j_ice_regather_interval_range_id =
GetFieldID(jni, j_rtc_config_class, "iceRegatherIntervalRange",
"Lorg/webrtc/PeerConnection$IntervalRange;");
jclass j_interval_range_class =
jni->FindClass("org/webrtc/PeerConnection$IntervalRange");
jmethodID get_min_id =
GetMethodID(jni, j_interval_range_class, "getMin", "()I");
jmethodID get_max_id =
GetMethodID(jni, j_interval_range_class, "getMax", "()I");
rtc_config->type = JavaToNativeIceTransportsType(jni, j_ice_transports_type);
rtc_config->bundle_policy = JavaToNativeBundlePolicy(jni, j_bundle_policy);
rtc_config->rtcp_mux_policy =
JavaToNativeRtcpMuxPolicy(jni, j_rtcp_mux_policy);
rtc_config->tcp_candidate_policy =
JavaToNativeTcpCandidatePolicy(jni, j_tcp_candidate_policy);
rtc_config->candidate_network_policy =
JavaToNativeCandidateNetworkPolicy(jni, j_candidate_network_policy);
JavaToNativeIceServers(jni, j_ice_servers, &rtc_config->servers);
rtc_config->audio_jitter_buffer_max_packets =
GetIntField(jni, j_rtc_config, j_audio_jitter_buffer_max_packets_id);
rtc_config->audio_jitter_buffer_fast_accelerate = GetBooleanField(
jni, j_rtc_config, j_audio_jitter_buffer_fast_accelerate_id);
rtc_config->ice_connection_receiving_timeout =
GetIntField(jni, j_rtc_config, j_ice_connection_receiving_timeout_id);
rtc_config->ice_backup_candidate_pair_ping_interval = GetIntField(
jni, j_rtc_config, j_ice_backup_candidate_pair_ping_interval_id);
rtc_config->continual_gathering_policy =
JavaToNativeContinualGatheringPolicy(jni, j_continual_gathering_policy);
rtc_config->ice_candidate_pool_size =
GetIntField(jni, j_rtc_config, j_ice_candidate_pool_size_id);
rtc_config->prune_turn_ports =
GetBooleanField(jni, j_rtc_config, j_prune_turn_ports_id);
rtc_config->presume_writable_when_fully_relayed = GetBooleanField(
jni, j_rtc_config, j_presume_writable_when_fully_relayed_id);
jobject j_ice_check_min_interval =
GetNullableObjectField(jni, j_rtc_config, j_ice_check_min_interval_id);
if (!IsNull(jni, j_ice_check_min_interval)) {
int ice_check_min_interval_value =
jni->CallIntMethod(j_ice_check_min_interval, int_value_id);
rtc_config->ice_check_min_interval =
rtc::Optional<int>(ice_check_min_interval_value);
}
rtc_config->disable_ipv6_on_wifi =
GetBooleanField(jni, j_rtc_config, j_disable_ipv6_on_wifi_id);
rtc_config->max_ipv6_networks =
GetIntField(jni, j_rtc_config, j_max_ipv6_networks_id);
jobject j_ice_regather_interval_range = GetNullableObjectField(
jni, j_rtc_config, j_ice_regather_interval_range_id);
if (!IsNull(jni, j_ice_regather_interval_range)) {
int min = jni->CallIntMethod(j_ice_regather_interval_range, get_min_id);
int max = jni->CallIntMethod(j_ice_regather_interval_range, get_max_id);
rtc_config->ice_regather_interval_range.emplace(min, max);
}
}
void JavaToNativeRtpParameters(JNIEnv* jni,
jobject j_parameters,
RtpParameters* parameters) {
RTC_CHECK(parameters != nullptr);
jclass parameters_class = jni->FindClass("org/webrtc/RtpParameters");
jfieldID encodings_id =
GetFieldID(jni, parameters_class, "encodings", "Ljava/util/LinkedList;");
jfieldID codecs_id =
GetFieldID(jni, parameters_class, "codecs", "Ljava/util/LinkedList;");
// Convert encodings.
jobject j_encodings = GetObjectField(jni, j_parameters, encodings_id);
jclass j_encoding_parameters_class =
jni->FindClass("org/webrtc/RtpParameters$Encoding");
jfieldID active_id =
GetFieldID(jni, j_encoding_parameters_class, "active", "Z");
jfieldID bitrate_id = GetFieldID(jni, j_encoding_parameters_class,
"maxBitrateBps", "Ljava/lang/Integer;");
jfieldID ssrc_id =
GetFieldID(jni, j_encoding_parameters_class, "ssrc", "Ljava/lang/Long;");
jclass j_integer_class = jni->FindClass("java/lang/Integer");
jclass j_long_class = jni->FindClass("java/lang/Long");
jmethodID int_value_id = GetMethodID(jni, j_integer_class, "intValue", "()I");
jmethodID long_value_id = GetMethodID(jni, j_long_class, "longValue", "()J");
for (jobject j_encoding_parameters : Iterable(jni, j_encodings)) {
RtpEncodingParameters encoding;
encoding.active = GetBooleanField(jni, j_encoding_parameters, active_id);
jobject j_bitrate =
GetNullableObjectField(jni, j_encoding_parameters, bitrate_id);
if (!IsNull(jni, j_bitrate)) {
int bitrate_value = jni->CallIntMethod(j_bitrate, int_value_id);
CHECK_EXCEPTION(jni) << "error during CallIntMethod";
encoding.max_bitrate_bps = rtc::Optional<int>(bitrate_value);
}
jobject j_ssrc =
GetNullableObjectField(jni, j_encoding_parameters, ssrc_id);
if (!IsNull(jni, j_ssrc)) {
jlong ssrc_value = jni->CallLongMethod(j_ssrc, long_value_id);
CHECK_EXCEPTION(jni) << "error during CallLongMethod";
encoding.ssrc = rtc::Optional<uint32_t>(ssrc_value);
}
parameters->encodings.push_back(encoding);
}
// Convert codecs.
jobject j_codecs = GetObjectField(jni, j_parameters, codecs_id);
jclass codec_class = jni->FindClass("org/webrtc/RtpParameters$Codec");
jfieldID payload_type_id = GetFieldID(jni, codec_class, "payloadType", "I");
jfieldID name_id = GetFieldID(jni, codec_class, "name", "Ljava/lang/String;");
jfieldID kind_id = GetFieldID(jni, codec_class, "kind",
"Lorg/webrtc/MediaStreamTrack$MediaType;");
jfieldID clock_rate_id =
GetFieldID(jni, codec_class, "clockRate", "Ljava/lang/Integer;");
jfieldID num_channels_id =
GetFieldID(jni, codec_class, "numChannels", "Ljava/lang/Integer;");
for (jobject j_codec : Iterable(jni, j_codecs)) {
RtpCodecParameters codec;
codec.payload_type = GetIntField(jni, j_codec, payload_type_id);
codec.name = JavaToStdString(jni, GetStringField(jni, j_codec, name_id));
codec.kind =
JavaToNativeMediaType(jni, GetObjectField(jni, j_codec, kind_id));
jobject j_clock_rate = GetNullableObjectField(jni, j_codec, clock_rate_id);
if (!IsNull(jni, j_clock_rate)) {
int clock_rate_value = jni->CallIntMethod(j_clock_rate, int_value_id);
CHECK_EXCEPTION(jni) << "error during CallIntMethod";
codec.clock_rate = rtc::Optional<int>(clock_rate_value);
}
jobject j_num_channels =
GetNullableObjectField(jni, j_codec, num_channels_id);
if (!IsNull(jni, j_num_channels)) {
int num_channels_value = jni->CallIntMethod(j_num_channels, int_value_id);
CHECK_EXCEPTION(jni) << "error during CallIntMethod";
codec.num_channels = rtc::Optional<int>(num_channels_value);
}
parameters->codecs.push_back(codec);
}
}
jobject NativeToJavaRtpParameters(JNIEnv* jni,
const RtpParameters& parameters) {
jclass parameters_class = jni->FindClass("org/webrtc/RtpParameters");
jmethodID parameters_ctor =
GetMethodID(jni, parameters_class, "<init>", "()V");
jobject j_parameters = jni->NewObject(parameters_class, parameters_ctor);
CHECK_EXCEPTION(jni) << "error during NewObject";
// Add encodings.
jclass encoding_class = jni->FindClass("org/webrtc/RtpParameters$Encoding");
jmethodID encoding_ctor = GetMethodID(jni, encoding_class, "<init>", "()V");
jfieldID encodings_id =
GetFieldID(jni, parameters_class, "encodings", "Ljava/util/LinkedList;");
jobject j_encodings = GetObjectField(jni, j_parameters, encodings_id);
jmethodID encodings_add = GetMethodID(jni, GetObjectClass(jni, j_encodings),
"add", "(Ljava/lang/Object;)Z");
jfieldID active_id = GetFieldID(jni, encoding_class, "active", "Z");
jfieldID bitrate_id =
GetFieldID(jni, encoding_class, "maxBitrateBps", "Ljava/lang/Integer;");
jfieldID ssrc_id =
GetFieldID(jni, encoding_class, "ssrc", "Ljava/lang/Long;");
jclass integer_class = jni->FindClass("java/lang/Integer");
jclass long_class = jni->FindClass("java/lang/Long");
jmethodID integer_ctor = GetMethodID(jni, integer_class, "<init>", "(I)V");
jmethodID long_ctor = GetMethodID(jni, long_class, "<init>", "(J)V");
for (const RtpEncodingParameters& encoding : parameters.encodings) {
jobject j_encoding_parameters =
jni->NewObject(encoding_class, encoding_ctor);
CHECK_EXCEPTION(jni) << "error during NewObject";
jni->SetBooleanField(j_encoding_parameters, active_id, encoding.active);
CHECK_EXCEPTION(jni) << "error during SetBooleanField";
if (encoding.max_bitrate_bps) {
jobject j_bitrate_value = jni->NewObject(integer_class, integer_ctor,
*(encoding.max_bitrate_bps));
CHECK_EXCEPTION(jni) << "error during NewObject";
jni->SetObjectField(j_encoding_parameters, bitrate_id, j_bitrate_value);
CHECK_EXCEPTION(jni) << "error during SetObjectField";
}
if (encoding.ssrc) {
jobject j_ssrc_value = jni->NewObject(long_class, long_ctor,
static_cast<jlong>(*encoding.ssrc));
CHECK_EXCEPTION(jni) << "error during NewObject";
jni->SetObjectField(j_encoding_parameters, ssrc_id, j_ssrc_value);
CHECK_EXCEPTION(jni) << "error during SetObjectField";
}
jboolean added = jni->CallBooleanMethod(j_encodings, encodings_add,
j_encoding_parameters);
CHECK_EXCEPTION(jni) << "error during CallBooleanMethod";
RTC_CHECK(added);
}
// Add codecs.
jclass codec_class = jni->FindClass("org/webrtc/RtpParameters$Codec");
jmethodID codec_ctor = GetMethodID(jni, codec_class, "<init>", "()V");
jfieldID codecs_id =
GetFieldID(jni, parameters_class, "codecs", "Ljava/util/LinkedList;");
jobject j_codecs = GetObjectField(jni, j_parameters, codecs_id);
jmethodID codecs_add = GetMethodID(jni, GetObjectClass(jni, j_codecs), "add",
"(Ljava/lang/Object;)Z");
jfieldID payload_type_id = GetFieldID(jni, codec_class, "payloadType", "I");
jfieldID name_id = GetFieldID(jni, codec_class, "name", "Ljava/lang/String;");
jfieldID kind_id = GetFieldID(jni, codec_class, "kind",
"Lorg/webrtc/MediaStreamTrack$MediaType;");
jfieldID clock_rate_id =
GetFieldID(jni, codec_class, "clockRate", "Ljava/lang/Integer;");
jfieldID num_channels_id =
GetFieldID(jni, codec_class, "numChannels", "Ljava/lang/Integer;");
for (const RtpCodecParameters& codec : parameters.codecs) {
jobject j_codec = jni->NewObject(codec_class, codec_ctor);
CHECK_EXCEPTION(jni) << "error during NewObject";
jni->SetIntField(j_codec, payload_type_id, codec.payload_type);
CHECK_EXCEPTION(jni) << "error during SetIntField";
jni->SetObjectField(j_codec, name_id,
JavaStringFromStdString(jni, codec.name));
CHECK_EXCEPTION(jni) << "error during SetObjectField";
jni->SetObjectField(j_codec, kind_id,
NativeToJavaMediaType(jni, codec.kind));
CHECK_EXCEPTION(jni) << "error during SetObjectField";
if (codec.clock_rate) {
jobject j_clock_rate_value =
jni->NewObject(integer_class, integer_ctor, *(codec.clock_rate));
CHECK_EXCEPTION(jni) << "error during NewObject";
jni->SetObjectField(j_codec, clock_rate_id, j_clock_rate_value);
CHECK_EXCEPTION(jni) << "error during SetObjectField";
}
if (codec.num_channels) {
jobject j_num_channels_value =
jni->NewObject(integer_class, integer_ctor, *(codec.num_channels));
CHECK_EXCEPTION(jni) << "error during NewObject";
jni->SetObjectField(j_codec, num_channels_id, j_num_channels_value);
CHECK_EXCEPTION(jni) << "error during SetObjectField";
}
jboolean added = jni->CallBooleanMethod(j_codecs, codecs_add, j_codec);
CHECK_EXCEPTION(jni) << "error during CallBooleanMethod";
RTC_CHECK(added);
}
return j_parameters;
}
} // namespace jni
} // namespace webrtc
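
The two RtpParameters converters above are intended to round-trip. A minimal
sketch of that property, assuming an attached thread and a valid JNIEnv* named
`jni` (the checks mirror the fields the converters actually copy):

  RtpParameters original;
  RtpEncodingParameters encoding;
  encoding.active = true;
  encoding.max_bitrate_bps = rtc::Optional<int>(300000);
  original.encodings.push_back(encoding);
  // Native -> Java -> native should preserve every converted field.
  jobject j_parameters = NativeToJavaRtpParameters(jni, original);
  RtpParameters round_tripped;
  JavaToNativeRtpParameters(jni, j_parameters, &round_tripped);
  RTC_CHECK(round_tripped.encodings[0].active);
  RTC_CHECK(round_tripped.encodings[0].max_bitrate_bps ==
            rtc::Optional<int>(300000));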

View File

@ -0,0 +1,111 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_SDK_ANDROID_SRC_JNI_PC_JAVA_NATIVE_CONVERSION_H_
#define WEBRTC_SDK_ANDROID_SRC_JNI_PC_JAVA_NATIVE_CONVERSION_H_
#include <vector>
#include "webrtc/api/datachannelinterface.h"
#include "webrtc/api/jsep.h"
#include "webrtc/api/jsepicecandidate.h"
#include "webrtc/api/mediastreaminterface.h"
#include "webrtc/api/mediatypes.h"
#include "webrtc/api/peerconnectioninterface.h"
#include "webrtc/api/rtpparameters.h"
#include "webrtc/rtc_base/sslidentity.h"
#include "webrtc/sdk/android/src/jni/jni_helpers.h"
// This file contains helper methods for converting between simple C++ and Java
// PeerConnection-related structures. Similar to some methods in jni_helpers.h,
// but specifically for structures tied to the PeerConnection API.
namespace webrtc {
namespace jni {
DataChannelInit JavaToNativeDataChannelInit(JNIEnv* jni, jobject j_init);
cricket::MediaType JavaToNativeMediaType(JNIEnv* jni, jobject j_media_type);
jobject NativeToJavaMediaType(JNIEnv* jni, cricket::MediaType media_type);
cricket::Candidate JavaToNativeCandidate(JNIEnv* jni, jobject j_candidate);
jobject NativeToJavaCandidate(JNIEnv* jni,
jclass* candidate_class,
const cricket::Candidate& candidate);
jobjectArray NativeToJavaCandidateArray(
JNIEnv* jni,
const std::vector<cricket::Candidate>& candidates);
SessionDescriptionInterface* JavaToNativeSessionDescription(JNIEnv* jni,
jobject j_sdp);
jobject NativeToJavaSessionDescription(JNIEnv* jni,
const SessionDescriptionInterface* desc);
PeerConnectionFactoryInterface::Options
JavaToNativePeerConnectionFactoryOptions(JNIEnv* jni, jobject options);
/*****************************************************
* Below are all things that go into RTCConfiguration.
*****************************************************/
PeerConnectionInterface::IceTransportsType JavaToNativeIceTransportsType(
JNIEnv* jni,
jobject j_ice_transports_type);
PeerConnectionInterface::BundlePolicy JavaToNativeBundlePolicy(
JNIEnv* jni,
jobject j_bundle_policy);
PeerConnectionInterface::RtcpMuxPolicy JavaToNativeRtcpMuxPolicy(
JNIEnv* jni,
jobject j_rtcp_mux_policy);
PeerConnectionInterface::TcpCandidatePolicy JavaToNativeTcpCandidatePolicy(
JNIEnv* jni,
jobject j_tcp_candidate_policy);
PeerConnectionInterface::CandidateNetworkPolicy
JavaToNativeCandidateNetworkPolicy(JNIEnv* jni,
jobject j_candidate_network_policy);
rtc::KeyType JavaToNativeKeyType(JNIEnv* jni, jobject j_key_type);
PeerConnectionInterface::ContinualGatheringPolicy
JavaToNativeContinualGatheringPolicy(JNIEnv* jni, jobject j_gathering_policy);
PeerConnectionInterface::TlsCertPolicy JavaToNativeTlsCertPolicy(
JNIEnv* jni,
jobject j_ice_server_tls_cert_policy);
void JavaToNativeIceServers(JNIEnv* jni,
jobject j_ice_servers,
PeerConnectionInterface::IceServers* ice_servers);
void JavaToNativeRTCConfiguration(
JNIEnv* jni,
jobject j_rtc_config,
PeerConnectionInterface::RTCConfiguration* rtc_config);
/*********************************************************
* RtpParameters, used for RtpSender and RtpReceiver APIs.
*********************************************************/
void JavaToNativeRtpParameters(JNIEnv* jni,
jobject j_parameters,
RtpParameters* parameters);
jobject NativeToJavaRtpParameters(JNIEnv* jni, const RtpParameters& parameters);
} // namespace jni
} // namespace webrtc
#endif // WEBRTC_SDK_ANDROID_SRC_JNI_PC_JAVA_NATIVE_CONVERSION_H_
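
As a concrete instance of the naming convention declared above, the media-type
pair converts losslessly in both directions. A hedged sketch, again assuming an
attached JNIEnv* named `jni`:

  jobject j_type = NativeToJavaMediaType(jni, cricket::MEDIA_TYPE_AUDIO);
  RTC_CHECK(JavaToNativeMediaType(jni, j_type) == cricket::MEDIA_TYPE_AUDIO);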

View File

@ -0,0 +1,79 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include <memory>
#include "webrtc/rtc_base/logging.h"
#include "webrtc/sdk/android/src/jni/jni_helpers.h"
#include "webrtc/system_wrappers/include/logcat_trace_context.h"
#include "webrtc/system_wrappers/include/trace.h"
namespace webrtc {
namespace jni {
JNI_FUNCTION_DECLARATION(void,
Logging_nativeEnableTracing,
JNIEnv* jni,
jclass,
jstring j_path,
jint nativeLevels) {
std::string path = JavaToStdString(jni, j_path);
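  // A filter of kTraceNone leaves tracing disabled entirely.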
if (nativeLevels != kTraceNone) {
Trace::set_level_filter(nativeLevels);
if (path != "logcat:") {
RTC_CHECK_EQ(0, Trace::SetTraceFile(path.c_str(), false))
<< "SetTraceFile failed";
} else {
// Intentionally leak this to avoid needing to reason about its lifecycle.
// It keeps no state and functions only as a dispatch point.
static LogcatTraceContext* g_trace_callback = new LogcatTraceContext();
}
}
}
JNI_FUNCTION_DECLARATION(void,
Logging_nativeEnableLogToDebugOutput,
JNIEnv* jni,
jclass,
jint nativeSeverity) {
if (nativeSeverity >= rtc::LS_SENSITIVE && nativeSeverity <= rtc::LS_NONE) {
rtc::LogMessage::LogToDebug(
static_cast<rtc::LoggingSeverity>(nativeSeverity));
}
}
JNI_FUNCTION_DECLARATION(void,
Logging_nativeEnableLogThreads,
JNIEnv* jni,
jclass) {
rtc::LogMessage::LogThreads(true);
}
JNI_FUNCTION_DECLARATION(void,
Logging_nativeEnableLogTimeStamps,
JNIEnv* jni,
jclass) {
rtc::LogMessage::LogTimestamps(true);
}
JNI_FUNCTION_DECLARATION(void,
Logging_nativeLog,
JNIEnv* jni,
jclass,
jint j_severity,
jstring j_tag,
jstring j_message) {
std::string message = JavaToStdString(jni, j_message);
std::string tag = JavaToStdString(jni, j_tag);
LOG_TAG(static_cast<rtc::LoggingSeverity>(j_severity), tag) << message;
}
} // namespace jni
} // namespace webrtc

View File

@ -0,0 +1,41 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "webrtc/sdk/android/src/jni/pc/media_jni.h"
#include "webrtc/call/callfactoryinterface.h"
#include "webrtc/logging/rtc_event_log/rtc_event_log_factory_interface.h"
#include "webrtc/media/engine/webrtcmediaengine.h"
#include "webrtc/modules/audio_processing/include/audio_processing.h"
namespace webrtc {
namespace jni {
CallFactoryInterface* CreateCallFactory() {
return webrtc::CreateCallFactory().release();
}
RtcEventLogFactoryInterface* CreateRtcEventLogFactory() {
return webrtc::CreateRtcEventLogFactory().release();
}
cricket::MediaEngineInterface* CreateMediaEngine(
AudioDeviceModule* adm,
const rtc::scoped_refptr<AudioEncoderFactory>& audio_encoder_factory,
const rtc::scoped_refptr<AudioDecoderFactory>& audio_decoder_factory,
cricket::WebRtcVideoEncoderFactory* video_encoder_factory,
cricket::WebRtcVideoDecoderFactory* video_decoder_factory,
rtc::scoped_refptr<AudioMixer> audio_mixer) {
return cricket::WebRtcMediaEngineFactory::Create(
adm, audio_encoder_factory, audio_decoder_factory, video_encoder_factory,
video_decoder_factory, audio_mixer, AudioProcessing::Create());
}
} // namespace jni
} // namespace webrtc

View File

@ -0,0 +1,48 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_SDK_ANDROID_SRC_JNI_PC_MEDIA_JNI_H_
#define WEBRTC_SDK_ANDROID_SRC_JNI_PC_MEDIA_JNI_H_
#include "webrtc/rtc_base/scoped_ref_ptr.h"
namespace webrtc {
class AudioDeviceModule;
class CallFactoryInterface;
class AudioEncoderFactory;
class AudioDecoderFactory;
class RtcEventLogFactoryInterface;
class AudioMixer;
} // namespace webrtc
namespace cricket {
class MediaEngineInterface;
class WebRtcVideoEncoderFactory;
class WebRtcVideoDecoderFactory;
} // namespace cricket
namespace webrtc {
namespace jni {
CallFactoryInterface* CreateCallFactory();
RtcEventLogFactoryInterface* CreateRtcEventLogFactory();
cricket::MediaEngineInterface* CreateMediaEngine(
AudioDeviceModule* adm,
const rtc::scoped_refptr<AudioEncoderFactory>& audio_encoder_factory,
const rtc::scoped_refptr<AudioDecoderFactory>& audio_decoder_factory,
cricket::WebRtcVideoEncoderFactory* video_encoder_factory,
cricket::WebRtcVideoDecoderFactory* video_decoder_factory,
rtc::scoped_refptr<AudioMixer> audio_mixer);
} // namespace jni
} // namespace webrtc
#endif // WEBRTC_SDK_ANDROID_SRC_JNI_PC_MEDIA_JNI_H_

View File

@ -0,0 +1,49 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "webrtc/sdk/android/src/jni/pc/mediaconstraints_jni.h"
namespace webrtc {
namespace jni {
MediaConstraintsJni::MediaConstraintsJni(JNIEnv* jni, jobject j_constraints) {
PopulateConstraintsFromJavaPairList(jni, j_constraints, "mandatory",
&mandatory_);
PopulateConstraintsFromJavaPairList(jni, j_constraints, "optional",
&optional_);
}
// static
void MediaConstraintsJni::PopulateConstraintsFromJavaPairList(
JNIEnv* jni,
jobject j_constraints,
const char* field_name,
Constraints* field) {
jfieldID j_id = GetFieldID(jni, GetObjectClass(jni, j_constraints),
field_name, "Ljava/util/List;");
jobject j_list = GetObjectField(jni, j_constraints, j_id);
for (jobject entry : Iterable(jni, j_list)) {
jmethodID get_key = GetMethodID(jni, GetObjectClass(jni, entry), "getKey",
"()Ljava/lang/String;");
jstring j_key =
reinterpret_cast<jstring>(jni->CallObjectMethod(entry, get_key));
CHECK_EXCEPTION(jni) << "error during CallObjectMethod";
jmethodID get_value = GetMethodID(jni, GetObjectClass(jni, entry),
"getValue", "()Ljava/lang/String;");
jstring j_value =
reinterpret_cast<jstring>(jni->CallObjectMethod(entry, get_value));
CHECK_EXCEPTION(jni) << "error during CallObjectMethod";
field->push_back(
Constraint(JavaToStdString(jni, j_key), JavaToStdString(jni, j_value)));
}
}
} // namespace jni
} // namespace webrtc

View File

@ -0,0 +1,45 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_SDK_ANDROID_SRC_JNI_PC_MEDIACONSTRAINTS_JNI_H_
#define WEBRTC_SDK_ANDROID_SRC_JNI_PC_MEDIACONSTRAINTS_JNI_H_
#include "webrtc/api/mediaconstraintsinterface.h"
#include "webrtc/sdk/android/src/jni/jni_helpers.h"
namespace webrtc {
namespace jni {
// Wrapper for a Java MediaConstraints object. Copies all needed data in its
// constructor, so the Java object is no longer needed once it returns.
class MediaConstraintsJni : public MediaConstraintsInterface {
public:
MediaConstraintsJni(JNIEnv* jni, jobject j_constraints);
virtual ~MediaConstraintsJni() {}
// MediaConstraintsInterface.
const Constraints& GetMandatory() const override { return mandatory_; }
const Constraints& GetOptional() const override { return optional_; }
private:
// Helper for translating a List<Pair<String, String>> to a Constraints.
static void PopulateConstraintsFromJavaPairList(JNIEnv* jni,
jobject j_constraints,
const char* field_name,
Constraints* field);
Constraints mandatory_;
Constraints optional_;
};
} // namespace jni
} // namespace webrtc
#endif // WEBRTC_SDK_ANDROID_SRC_JNI_PC_MEDIACONSTRAINTS_JNI_H_
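
A minimal usage sketch for the wrapper above; `j_constraints` stands in for a
Java org.webrtc.MediaConstraints reference received through a JNI call, and
`jni` for an attached JNIEnv*:

  // The constructor copies everything, so the Java object may be collected
  // as soon as it returns.
  MediaConstraintsJni constraints(jni, j_constraints);
  for (const auto& constraint : constraints.GetMandatory()) {
    LOG(LS_INFO) << constraint.key << ": " << constraint.value;
  }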

View File

@ -0,0 +1,28 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "webrtc/api/mediastreaminterface.h"
#include "webrtc/sdk/android/src/jni/jni_helpers.h"
namespace webrtc {
namespace jni {
JNI_FUNCTION_DECLARATION(jobject,
MediaSource_nativeState,
JNIEnv* jni,
jclass,
jlong j_p) {
rtc::scoped_refptr<MediaSourceInterface> p(
reinterpret_cast<MediaSourceInterface*>(j_p));
return JavaEnumFromIndexAndClassName(jni, "MediaSource$State", p->state());
}
} // namespace jni
} // namespace webrtc

View File

@ -0,0 +1,71 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "webrtc/api/mediastreaminterface.h"
#include "webrtc/sdk/android/src/jni/jni_helpers.h"
namespace webrtc {
namespace jni {
JNI_FUNCTION_DECLARATION(jboolean,
MediaStream_nativeAddAudioTrack,
JNIEnv* jni,
jclass,
jlong pointer,
jlong j_audio_track_pointer) {
return reinterpret_cast<MediaStreamInterface*>(pointer)->AddTrack(
reinterpret_cast<AudioTrackInterface*>(j_audio_track_pointer));
}
JNI_FUNCTION_DECLARATION(jboolean,
MediaStream_nativeAddVideoTrack,
JNIEnv* jni,
jclass,
jlong pointer,
jlong j_video_track_pointer) {
return reinterpret_cast<MediaStreamInterface*>(pointer)->AddTrack(
reinterpret_cast<VideoTrackInterface*>(j_video_track_pointer));
}
JNI_FUNCTION_DECLARATION(jboolean,
MediaStream_nativeRemoveAudioTrack,
JNIEnv* jni,
jclass,
jlong pointer,
jlong j_audio_track_pointer) {
return reinterpret_cast<MediaStreamInterface*>(pointer)->RemoveTrack(
reinterpret_cast<AudioTrackInterface*>(j_audio_track_pointer));
}
JNI_FUNCTION_DECLARATION(jboolean,
MediaStream_nativeRemoveVideoTrack,
JNIEnv* jni,
jclass,
jlong pointer,
jlong j_video_track_pointer) {
return reinterpret_cast<MediaStreamInterface*>(pointer)->RemoveTrack(
reinterpret_cast<VideoTrackInterface*>(j_video_track_pointer));
}
JNI_FUNCTION_DECLARATION(jstring,
MediaStream_nativeLabel,
JNIEnv* jni,
jclass,
jlong j_p) {
return JavaStringFromStdString(
jni, reinterpret_cast<MediaStreamInterface*>(j_p)->label());
}
JNI_FUNCTION_DECLARATION(void, MediaStream_free, JNIEnv*, jclass, jlong j_p) {
CHECK_RELEASE(reinterpret_cast<MediaStreamInterface*>(j_p));
}
} // namespace jni
} // namespace webrtc

View File

@ -0,0 +1,64 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "webrtc/api/mediastreaminterface.h"
#include "webrtc/sdk/android/src/jni/jni_helpers.h"
namespace webrtc {
namespace jni {
JNI_FUNCTION_DECLARATION(jstring,
MediaStreamTrack_nativeId,
JNIEnv* jni,
jclass,
jlong j_p) {
return JavaStringFromStdString(
jni, reinterpret_cast<MediaStreamTrackInterface*>(j_p)->id());
}
JNI_FUNCTION_DECLARATION(jstring,
MediaStreamTrack_nativeKind,
JNIEnv* jni,
jclass,
jlong j_p) {
return JavaStringFromStdString(
jni, reinterpret_cast<MediaStreamTrackInterface*>(j_p)->kind());
}
JNI_FUNCTION_DECLARATION(jboolean,
MediaStreamTrack_nativeEnabled,
JNIEnv* jni,
jclass,
jlong j_p) {
return reinterpret_cast<MediaStreamTrackInterface*>(j_p)->enabled();
}
JNI_FUNCTION_DECLARATION(jobject,
MediaStreamTrack_nativeState,
JNIEnv* jni,
jclass,
jlong j_p) {
return JavaEnumFromIndexAndClassName(
jni, "MediaStreamTrack$State",
reinterpret_cast<MediaStreamTrackInterface*>(j_p)->state());
}
JNI_FUNCTION_DECLARATION(jboolean,
MediaStreamTrack_nativeSetEnabled,
JNIEnv* jni,
jclass,
jlong j_p,
jboolean enabled) {
return reinterpret_cast<MediaStreamTrackInterface*>(j_p)->set_enabled(
enabled);
}
} // namespace jni
} // namespace webrtc

View File

@ -0,0 +1,25 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "webrtc/sdk/android/src/jni/pc/audio_jni.h"
namespace webrtc {
namespace jni {
rtc::scoped_refptr<AudioDecoderFactory> CreateAudioDecoderFactory() {
return nullptr;
}
rtc::scoped_refptr<AudioEncoderFactory> CreateAudioEncoderFactory() {
return nullptr;
}
} // namespace jni
} // namespace webrtc

View File

@ -0,0 +1,35 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "webrtc/sdk/android/src/jni/pc/media_jni.h"
namespace webrtc {
namespace jni {
CallFactoryInterface* CreateCallFactory() {
return nullptr;
}
RtcEventLogFactoryInterface* CreateRtcEventLogFactory() {
return nullptr;
}
cricket::MediaEngineInterface* CreateMediaEngine(
AudioDeviceModule* adm,
const rtc::scoped_refptr<AudioEncoderFactory>& audio_encoder_factory,
const rtc::scoped_refptr<AudioDecoderFactory>& audio_decoder_factory,
cricket::WebRtcVideoEncoderFactory* video_encoder_factory,
cricket::WebRtcVideoDecoderFactory* video_decoder_factory,
rtc::scoped_refptr<AudioMixer> audio_mixer) {
return nullptr;
}
} // namespace jni
} // namespace webrtc

View File

@ -0,0 +1,34 @@
/*
* Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "webrtc/sdk/android/src/jni/pc/video_jni.h"
namespace webrtc {
namespace jni {
cricket::WebRtcVideoEncoderFactory* CreateVideoEncoderFactory(
JNIEnv* jni,
jobject j_encoder_factory) {
return nullptr;
}
cricket::WebRtcVideoDecoderFactory* CreateVideoDecoderFactory(
JNIEnv* jni,
jobject j_decoder_factory) {
return nullptr;
}
jobject GetJavaSurfaceTextureHelper(
const rtc::scoped_refptr<SurfaceTextureHelper>& surface_texture_helper) {
return nullptr;
}
} // namespace jni
} // namespace webrtc

View File

@ -0,0 +1,66 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "webrtc/sdk/android/src/jni/pc/ownedfactoryandthreads.h"
#include "webrtc/rtc_base/logging.h"
#include "webrtc/sdk/android/src/jni/classreferenceholder.h"
#include "webrtc/sdk/android/src/jni/jni_helpers.h"
namespace webrtc {
namespace jni {
PeerConnectionFactoryInterface* factoryFromJava(jlong j_p) {
return reinterpret_cast<OwnedFactoryAndThreads*>(j_p)->factory();
}
OwnedFactoryAndThreads::~OwnedFactoryAndThreads() {
CHECK_RELEASE(factory_);
if (network_monitor_factory_ != nullptr) {
rtc::NetworkMonitorFactory::ReleaseFactory(network_monitor_factory_);
}
}
void OwnedFactoryAndThreads::JavaCallbackOnFactoryThreads() {
JNIEnv* jni = AttachCurrentThreadIfNeeded();
ScopedLocalRefFrame local_ref_frame(jni);
jclass j_factory_class = FindClass(jni, "org/webrtc/PeerConnectionFactory");
jmethodID m = nullptr;
if (network_thread_->IsCurrent()) {
LOG(LS_INFO) << "Network thread JavaCallback";
m = GetStaticMethodID(jni, j_factory_class, "onNetworkThreadReady", "()V");
}
if (worker_thread_->IsCurrent()) {
LOG(LS_INFO) << "Worker thread JavaCallback";
m = GetStaticMethodID(jni, j_factory_class, "onWorkerThreadReady", "()V");
}
if (signaling_thread_->IsCurrent()) {
LOG(LS_INFO) << "Signaling thread JavaCallback";
m = GetStaticMethodID(jni, j_factory_class, "onSignalingThreadReady",
"()V");
}
if (m != nullptr) {
jni->CallStaticVoidMethod(j_factory_class, m);
CHECK_EXCEPTION(jni) << "error during JavaCallback::CallStaticVoidMethod";
}
}
void OwnedFactoryAndThreads::InvokeJavaCallbacksOnFactoryThreads() {
LOG(LS_INFO) << "InvokeJavaCallbacksOnFactoryThreads.";
network_thread_->Invoke<void>(RTC_FROM_HERE,
[this] { JavaCallbackOnFactoryThreads(); });
worker_thread_->Invoke<void>(RTC_FROM_HERE,
[this] { JavaCallbackOnFactoryThreads(); });
signaling_thread_->Invoke<void>(RTC_FROM_HERE,
[this] { JavaCallbackOnFactoryThreads(); });
}
} // namespace jni
} // namespace webrtc
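
The Invoke<void> calls above run the lambda synchronously, one thread at a
time, which is why it is safe to touch |this| from each callback. Reduced to
its core (a sketch with a hypothetical |thread|):

  // Blocks the caller until the lambda has finished running on |thread|.
  thread->Invoke<void>(RTC_FROM_HERE, [] { LOG(LS_INFO) << "on thread"; });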

View File

@ -0,0 +1,81 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_SDK_ANDROID_SRC_JNI_PC_OWNEDFACTORYANDTHREADS_H_
#define WEBRTC_SDK_ANDROID_SRC_JNI_PC_OWNEDFACTORYANDTHREADS_H_
#include <jni.h>
#include <memory>
#include <utility>
#include "webrtc/api/peerconnectioninterface.h"
#include "webrtc/rtc_base/thread.h"
using cricket::WebRtcVideoDecoderFactory;
using cricket::WebRtcVideoEncoderFactory;
using rtc::Thread;
namespace webrtc {
namespace jni {
PeerConnectionFactoryInterface* factoryFromJava(jlong j_p);
// Helper struct for working around the fact that CreatePeerConnectionFactory()
// comes in two flavors: either entirely automagical (constructing its own
// threads and deleting them on teardown, but no external codec factory support)
// or entirely manual (requires caller to delete threads after factory
// teardown). This struct takes ownership of its ctor's arguments to present a
// single thing for Java to hold and eventually free.
class OwnedFactoryAndThreads {
public:
OwnedFactoryAndThreads(std::unique_ptr<Thread> network_thread,
std::unique_ptr<Thread> worker_thread,
std::unique_ptr<Thread> signaling_thread,
WebRtcVideoEncoderFactory* encoder_factory,
WebRtcVideoDecoderFactory* decoder_factory,
rtc::NetworkMonitorFactory* network_monitor_factory,
PeerConnectionFactoryInterface* factory)
: network_thread_(std::move(network_thread)),
worker_thread_(std::move(worker_thread)),
signaling_thread_(std::move(signaling_thread)),
encoder_factory_(encoder_factory),
decoder_factory_(decoder_factory),
network_monitor_factory_(network_monitor_factory),
factory_(factory) {}
~OwnedFactoryAndThreads();
PeerConnectionFactoryInterface* factory() { return factory_; }
Thread* signaling_thread() { return signaling_thread_.get(); }
Thread* worker_thread() { return worker_thread_.get(); }
WebRtcVideoEncoderFactory* encoder_factory() { return encoder_factory_; }
WebRtcVideoDecoderFactory* decoder_factory() { return decoder_factory_; }
rtc::NetworkMonitorFactory* network_monitor_factory() {
return network_monitor_factory_;
}
void clear_network_monitor_factory() { network_monitor_factory_ = nullptr; }
void InvokeJavaCallbacksOnFactoryThreads();
private:
void JavaCallbackOnFactoryThreads();
const std::unique_ptr<Thread> network_thread_;
const std::unique_ptr<Thread> worker_thread_;
const std::unique_ptr<Thread> signaling_thread_;
WebRtcVideoEncoderFactory* encoder_factory_;
WebRtcVideoDecoderFactory* decoder_factory_;
rtc::NetworkMonitorFactory* network_monitor_factory_;
PeerConnectionFactoryInterface* factory_; // Const after ctor except dtor.
};
} // namespace jni
} // namespace webrtc
#endif // WEBRTC_SDK_ANDROID_SRC_JNI_PC_OWNEDFACTORYANDTHREADS_H_

View File

@ -0,0 +1,427 @@
/*
* Copyright 2013 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
// Lifecycle notes: objects are owned where they will be called; in other words
// FooObservers are owned by C++-land, and user-callable objects (e.g.
// PeerConnection and VideoTrack) are owned by Java-land.
// When this file (or other files in this directory) allocates C++
// RefCountInterfaces it AddRef()s an artificial ref simulating the jlong held
// in Java-land, and then Release()s the ref in the respective free call.
// Sometimes this AddRef is implicit in the construction of a scoped_refptr<>
// which is then .release()d. Any persistent (non-local) references from C++ to
// Java must be global or weak (in which case they must be checked before use)!
//
// Exception notes: pretty much all JNI calls can throw Java exceptions, so each
// call through a JNIEnv* pointer needs to be followed by an ExceptionCheck()
// call. In this file this is done in CHECK_EXCEPTION, making for much easier
// debugging in case of failure (the alternative is to wait for control to
// return to the Java frame that called code in this file, at which point it's
// impossible to tell which JNI call broke).
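// As an illustration of the ownership convention above (a hedged sketch; Foo
// and both functions are hypothetical, not defined in this file):
//
//   jlong Foo_nativeCreate(JNIEnv* jni, jclass) {
//     rtc::scoped_refptr<FooInterface> foo = CreateFoo();
//     return jlongFromPointer(foo.release());  // release() keeps the
//                                              // artificial ref for Java.
//   }
//   void Foo_free(JNIEnv*, jclass, jlong j_p) {
//     CHECK_RELEASE(reinterpret_cast<FooInterface*>(j_p));  // Balances it.
//   }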
#include <limits>
#include <memory>
#include <utility>
#include "webrtc/api/mediaconstraintsinterface.h"
#include "webrtc/api/peerconnectioninterface.h"
#include "webrtc/api/rtpreceiverinterface.h"
#include "webrtc/api/rtpsenderinterface.h"
#include "webrtc/rtc_base/checks.h"
#include "webrtc/rtc_base/logging.h"
#include "webrtc/sdk/android/src/jni/classreferenceholder.h"
#include "webrtc/sdk/android/src/jni/jni_helpers.h"
#include "webrtc/sdk/android/src/jni/pc/java_native_conversion.h"
#include "webrtc/sdk/android/src/jni/pc/mediaconstraints_jni.h"
#include "webrtc/sdk/android/src/jni/pc/peerconnectionobserver_jni.h"
#include "webrtc/sdk/android/src/jni/pc/rtcstatscollectorcallbackwrapper.h"
#include "webrtc/sdk/android/src/jni/pc/sdpobserver_jni.h"
#include "webrtc/sdk/android/src/jni/pc/statsobserver_jni.h"
#include "webrtc/system_wrappers/include/trace.h"
namespace webrtc {
namespace jni {
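// Reads the |nativePeerConnection| field of the Java object and wraps the
// pointer; the returned scoped_refptr holds an extra reference for the
// duration of the JNI call.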
static rtc::scoped_refptr<PeerConnectionInterface> ExtractNativePC(
JNIEnv* jni,
jobject j_pc) {
jfieldID native_pc_id =
GetFieldID(jni, GetObjectClass(jni, j_pc), "nativePeerConnection", "J");
jlong j_p = GetLongField(jni, j_pc, native_pc_id);
return rtc::scoped_refptr<PeerConnectionInterface>(
reinterpret_cast<PeerConnectionInterface*>(j_p));
}
JNI_FUNCTION_DECLARATION(void,
PeerConnection_freeObserver,
JNIEnv*,
jclass,
jlong j_p) {
PeerConnectionObserverJni* p =
reinterpret_cast<PeerConnectionObserverJni*>(j_p);
delete p;
}
JNI_FUNCTION_DECLARATION(jobject,
PeerConnection_getLocalDescription,
JNIEnv* jni,
jobject j_pc) {
const SessionDescriptionInterface* sdp =
ExtractNativePC(jni, j_pc)->local_description();
return sdp ? NativeToJavaSessionDescription(jni, sdp) : NULL;
}
JNI_FUNCTION_DECLARATION(jobject,
PeerConnection_getRemoteDescription,
JNIEnv* jni,
jobject j_pc) {
const SessionDescriptionInterface* sdp =
ExtractNativePC(jni, j_pc)->remote_description();
return sdp ? NativeToJavaSessionDescription(jni, sdp) : NULL;
}
JNI_FUNCTION_DECLARATION(jobject,
PeerConnection_createDataChannel,
JNIEnv* jni,
jobject j_pc,
jstring j_label,
jobject j_init) {
DataChannelInit init = JavaToNativeDataChannelInit(jni, j_init);
rtc::scoped_refptr<DataChannelInterface> channel(
ExtractNativePC(jni, j_pc)->CreateDataChannel(
JavaToStdString(jni, j_label), &init));
// Mustn't pass channel.get() directly through NewObject to avoid reading its
// vararg parameter as 64-bit and reading memory that doesn't belong to the
// 32-bit parameter.
jlong nativeChannelPtr = jlongFromPointer(channel.get());
if (!nativeChannelPtr) {
LOG(LS_ERROR) << "Failed to create DataChannel";
return nullptr;
}
jclass j_data_channel_class = FindClass(jni, "org/webrtc/DataChannel");
jmethodID j_data_channel_ctor =
GetMethodID(jni, j_data_channel_class, "<init>", "(J)V");
jobject j_channel = jni->NewObject(j_data_channel_class, j_data_channel_ctor,
nativeChannelPtr);
CHECK_EXCEPTION(jni) << "error during NewObject";
// Channel is now owned by Java object, and will be freed from there.
int bumped_count = channel->AddRef();
RTC_CHECK(bumped_count == 2) << "Unexpected refcount";
return j_channel;
}
JNI_FUNCTION_DECLARATION(void,
PeerConnection_createOffer,
JNIEnv* jni,
jobject j_pc,
jobject j_observer,
jobject j_constraints) {
MediaConstraintsJni* constraints =
new MediaConstraintsJni(jni, j_constraints);
rtc::scoped_refptr<CreateSdpObserverJni> observer(
new rtc::RefCountedObject<CreateSdpObserverJni>(jni, j_observer,
constraints));
ExtractNativePC(jni, j_pc)->CreateOffer(observer, constraints);
}
JNI_FUNCTION_DECLARATION(void,
PeerConnection_createAnswer,
JNIEnv* jni,
jobject j_pc,
jobject j_observer,
jobject j_constraints) {
MediaConstraintsJni* constraints =
new MediaConstraintsJni(jni, j_constraints);
rtc::scoped_refptr<CreateSdpObserverJni> observer(
new rtc::RefCountedObject<CreateSdpObserverJni>(jni, j_observer,
constraints));
ExtractNativePC(jni, j_pc)->CreateAnswer(observer, constraints);
}
JNI_FUNCTION_DECLARATION(void,
PeerConnection_setLocalDescription,
JNIEnv* jni,
jobject j_pc,
jobject j_observer,
jobject j_sdp) {
rtc::scoped_refptr<SetSdpObserverJni> observer(
new rtc::RefCountedObject<SetSdpObserverJni>(jni, j_observer, nullptr));
ExtractNativePC(jni, j_pc)->SetLocalDescription(
observer, JavaToNativeSessionDescription(jni, j_sdp));
}
JNI_FUNCTION_DECLARATION(void,
PeerConnection_setRemoteDescription,
JNIEnv* jni,
jobject j_pc,
jobject j_observer,
jobject j_sdp) {
rtc::scoped_refptr<SetSdpObserverJni> observer(
new rtc::RefCountedObject<SetSdpObserverJni>(jni, j_observer, nullptr));
ExtractNativePC(jni, j_pc)->SetRemoteDescription(
observer, JavaToNativeSessionDescription(jni, j_sdp));
}
JNI_FUNCTION_DECLARATION(jboolean,
PeerConnection_nativeSetConfiguration,
JNIEnv* jni,
jobject j_pc,
jobject j_rtc_config,
jlong native_observer) {
  // The constraints are stored in the observer object, so they need to be
  // merged into the RTCConfiguration again.
PeerConnectionObserverJni* observer =
reinterpret_cast<PeerConnectionObserverJni*>(native_observer);
PeerConnectionInterface::RTCConfiguration rtc_config(
PeerConnectionInterface::RTCConfigurationType::kAggressive);
JavaToNativeRTCConfiguration(jni, j_rtc_config, &rtc_config);
CopyConstraintsIntoRtcConfiguration(observer->constraints(), &rtc_config);
return ExtractNativePC(jni, j_pc)->SetConfiguration(rtc_config);
}
JNI_FUNCTION_DECLARATION(jboolean,
PeerConnection_nativeAddIceCandidate,
JNIEnv* jni,
jobject j_pc,
jstring j_sdp_mid,
jint j_sdp_mline_index,
jstring j_candidate_sdp) {
std::string sdp_mid = JavaToStdString(jni, j_sdp_mid);
std::string sdp = JavaToStdString(jni, j_candidate_sdp);
std::unique_ptr<IceCandidateInterface> candidate(
CreateIceCandidate(sdp_mid, j_sdp_mline_index, sdp, nullptr));
return ExtractNativePC(jni, j_pc)->AddIceCandidate(candidate.get());
}
JNI_FUNCTION_DECLARATION(jboolean,
PeerConnection_nativeRemoveIceCandidates,
JNIEnv* jni,
jobject j_pc,
jobjectArray j_candidates) {
std::vector<cricket::Candidate> candidates;
size_t num_candidates = jni->GetArrayLength(j_candidates);
for (size_t i = 0; i < num_candidates; ++i) {
jobject j_candidate = jni->GetObjectArrayElement(j_candidates, i);
candidates.push_back(JavaToNativeCandidate(jni, j_candidate));
}
return ExtractNativePC(jni, j_pc)->RemoveIceCandidates(candidates);
}
JNI_FUNCTION_DECLARATION(jboolean,
PeerConnection_nativeAddLocalStream,
JNIEnv* jni,
jobject j_pc,
jlong native_stream) {
return ExtractNativePC(jni, j_pc)->AddStream(
reinterpret_cast<MediaStreamInterface*>(native_stream));
}
JNI_FUNCTION_DECLARATION(void,
PeerConnection_nativeRemoveLocalStream,
JNIEnv* jni,
jobject j_pc,
jlong native_stream) {
ExtractNativePC(jni, j_pc)->RemoveStream(
reinterpret_cast<MediaStreamInterface*>(native_stream));
}
JNI_FUNCTION_DECLARATION(jobject,
PeerConnection_nativeCreateSender,
JNIEnv* jni,
jobject j_pc,
jstring j_kind,
jstring j_stream_id) {
jclass j_rtp_sender_class = FindClass(jni, "org/webrtc/RtpSender");
jmethodID j_rtp_sender_ctor =
GetMethodID(jni, j_rtp_sender_class, "<init>", "(J)V");
std::string kind = JavaToStdString(jni, j_kind);
std::string stream_id = JavaToStdString(jni, j_stream_id);
rtc::scoped_refptr<RtpSenderInterface> sender =
ExtractNativePC(jni, j_pc)->CreateSender(kind, stream_id);
if (!sender.get()) {
return nullptr;
}
jlong nativeSenderPtr = jlongFromPointer(sender.get());
jobject j_sender =
jni->NewObject(j_rtp_sender_class, j_rtp_sender_ctor, nativeSenderPtr);
CHECK_EXCEPTION(jni) << "error during NewObject";
// Sender is now owned by the Java object, and will be freed from
// RtpSender.dispose(), called by PeerConnection.dispose() or getSenders().
sender->AddRef();
return j_sender;
}
JNI_FUNCTION_DECLARATION(jobject,
PeerConnection_nativeGetSenders,
JNIEnv* jni,
jobject j_pc) {
jclass j_array_list_class = FindClass(jni, "java/util/ArrayList");
jmethodID j_array_list_ctor =
GetMethodID(jni, j_array_list_class, "<init>", "()V");
jmethodID j_array_list_add =
GetMethodID(jni, j_array_list_class, "add", "(Ljava/lang/Object;)Z");
jobject j_senders = jni->NewObject(j_array_list_class, j_array_list_ctor);
CHECK_EXCEPTION(jni) << "error during NewObject";
jclass j_rtp_sender_class = FindClass(jni, "org/webrtc/RtpSender");
jmethodID j_rtp_sender_ctor =
GetMethodID(jni, j_rtp_sender_class, "<init>", "(J)V");
auto senders = ExtractNativePC(jni, j_pc)->GetSenders();
for (const auto& sender : senders) {
jlong nativeSenderPtr = jlongFromPointer(sender.get());
jobject j_sender =
jni->NewObject(j_rtp_sender_class, j_rtp_sender_ctor, nativeSenderPtr);
CHECK_EXCEPTION(jni) << "error during NewObject";
// Sender is now owned by the Java object, and will be freed from
// RtpSender.dispose(), called by PeerConnection.dispose() or getSenders().
sender->AddRef();
jni->CallBooleanMethod(j_senders, j_array_list_add, j_sender);
CHECK_EXCEPTION(jni) << "error during CallBooleanMethod";
}
return j_senders;
}
JNI_FUNCTION_DECLARATION(jobject,
PeerConnection_nativeGetReceivers,
JNIEnv* jni,
jobject j_pc) {
jclass j_array_list_class = FindClass(jni, "java/util/ArrayList");
jmethodID j_array_list_ctor =
GetMethodID(jni, j_array_list_class, "<init>", "()V");
jmethodID j_array_list_add =
GetMethodID(jni, j_array_list_class, "add", "(Ljava/lang/Object;)Z");
jobject j_receivers = jni->NewObject(j_array_list_class, j_array_list_ctor);
CHECK_EXCEPTION(jni) << "error during NewObject";
jclass j_rtp_receiver_class = FindClass(jni, "org/webrtc/RtpReceiver");
jmethodID j_rtp_receiver_ctor =
GetMethodID(jni, j_rtp_receiver_class, "<init>", "(J)V");
auto receivers = ExtractNativePC(jni, j_pc)->GetReceivers();
for (const auto& receiver : receivers) {
jlong nativeReceiverPtr = jlongFromPointer(receiver.get());
jobject j_receiver = jni->NewObject(j_rtp_receiver_class,
j_rtp_receiver_ctor, nativeReceiverPtr);
CHECK_EXCEPTION(jni) << "error during NewObject";
// Receiver is now owned by Java object, and will be freed from there.
receiver->AddRef();
jni->CallBooleanMethod(j_receivers, j_array_list_add, j_receiver);
CHECK_EXCEPTION(jni) << "error during CallBooleanMethod";
}
return j_receivers;
}
JNI_FUNCTION_DECLARATION(bool,
PeerConnection_nativeOldGetStats,
JNIEnv* jni,
jobject j_pc,
jobject j_observer,
jlong native_track) {
rtc::scoped_refptr<StatsObserverJni> observer(
new rtc::RefCountedObject<StatsObserverJni>(jni, j_observer));
return ExtractNativePC(jni, j_pc)->GetStats(
observer, reinterpret_cast<MediaStreamTrackInterface*>(native_track),
PeerConnectionInterface::kStatsOutputLevelStandard);
}
JNI_FUNCTION_DECLARATION(void,
PeerConnection_nativeNewGetStats,
JNIEnv* jni,
jobject j_pc,
jobject j_callback) {
rtc::scoped_refptr<RTCStatsCollectorCallbackWrapper> callback(
new rtc::RefCountedObject<RTCStatsCollectorCallbackWrapper>(jni,
j_callback));
ExtractNativePC(jni, j_pc)->GetStats(callback);
}
JNI_FUNCTION_DECLARATION(jboolean,
PeerConnection_setBitrate,
JNIEnv* jni,
jobject j_pc,
jobject j_min,
jobject j_current,
jobject j_max) {
PeerConnectionInterface::BitrateParameters params;
jclass j_integer_class = jni->FindClass("java/lang/Integer");
jmethodID int_value_id = GetMethodID(jni, j_integer_class, "intValue", "()I");
if (!IsNull(jni, j_min)) {
int min_value = jni->CallIntMethod(j_min, int_value_id);
params.min_bitrate_bps = rtc::Optional<int>(min_value);
}
if (!IsNull(jni, j_current)) {
int current_value = jni->CallIntMethod(j_current, int_value_id);
params.current_bitrate_bps = rtc::Optional<int>(current_value);
}
if (!IsNull(jni, j_max)) {
int max_value = jni->CallIntMethod(j_max, int_value_id);
params.max_bitrate_bps = rtc::Optional<int>(max_value);
}
return ExtractNativePC(jni, j_pc)->SetBitrate(params).ok();
}
JNI_FUNCTION_DECLARATION(bool,
PeerConnection_nativeStartRtcEventLog,
JNIEnv* jni,
jobject j_pc,
int file_descriptor,
int max_size_bytes) {
return ExtractNativePC(jni, j_pc)->StartRtcEventLog(file_descriptor,
max_size_bytes);
}
JNI_FUNCTION_DECLARATION(void,
PeerConnection_nativeStopRtcEventLog,
JNIEnv* jni,
jobject j_pc) {
ExtractNativePC(jni, j_pc)->StopRtcEventLog();
}
JNI_FUNCTION_DECLARATION(jobject,
PeerConnection_signalingState,
JNIEnv* jni,
jobject j_pc) {
PeerConnectionInterface::SignalingState state =
ExtractNativePC(jni, j_pc)->signaling_state();
return JavaEnumFromIndexAndClassName(jni, "PeerConnection$SignalingState",
state);
}
JNI_FUNCTION_DECLARATION(jobject,
PeerConnection_iceConnectionState,
JNIEnv* jni,
jobject j_pc) {
PeerConnectionInterface::IceConnectionState state =
ExtractNativePC(jni, j_pc)->ice_connection_state();
return JavaEnumFromIndexAndClassName(jni, "PeerConnection$IceConnectionState",
state);
}
JNI_FUNCTION_DECLARATION(jobject,
PeerConnection_iceGatheringState,
JNIEnv* jni,
jobject j_pc) {
PeerConnectionInterface::IceGatheringState state =
ExtractNativePC(jni, j_pc)->ice_gathering_state();
return JavaEnumFromIndexAndClassName(jni, "PeerConnection$IceGatheringState",
state);
}
JNI_FUNCTION_DECLARATION(void,
PeerConnection_close,
JNIEnv* jni,
jobject j_pc) {
ExtractNativePC(jni, j_pc)->Close();
}
} // namespace jni
} // namespace webrtc
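
Several functions above repeat the same null-check-then-intValue() dance for
boxed Integers. A hedged sketch of a helper that would fold the pattern
(JavaToNativeOptionalInt is hypothetical, not part of this change):

  rtc::Optional<int> JavaToNativeOptionalInt(JNIEnv* jni, jobject j_integer) {
    if (IsNull(jni, j_integer))
      return rtc::Optional<int>();
    jclass j_integer_class = jni->FindClass("java/lang/Integer");
    jmethodID int_value_id =
        GetMethodID(jni, j_integer_class, "intValue", "()I");
    int value = jni->CallIntMethod(j_integer, int_value_id);
    CHECK_EXCEPTION(jni) << "error during CallIntMethod";
    return rtc::Optional<int>(value);
  }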

View File

@ -0,0 +1,377 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include <memory>
#include <utility>
#include "webrtc/api/peerconnectioninterface.h"
#include "webrtc/media/base/mediaengine.h"
#include "webrtc/modules/utility/include/jvm_android.h"
#include "webrtc/rtc_base/event_tracer.h"
#include "webrtc/rtc_base/stringutils.h"
#include "webrtc/rtc_base/thread.h"
#include "webrtc/sdk/android/src/jni/jni_helpers.h"
#include "webrtc/sdk/android/src/jni/pc/androidnetworkmonitor_jni.h"
#include "webrtc/sdk/android/src/jni/pc/audio_jni.h"
#include "webrtc/sdk/android/src/jni/pc/java_native_conversion.h"
#include "webrtc/sdk/android/src/jni/pc/media_jni.h"
#include "webrtc/sdk/android/src/jni/pc/ownedfactoryandthreads.h"
#include "webrtc/sdk/android/src/jni/pc/peerconnectionobserver_jni.h"
#include "webrtc/sdk/android/src/jni/pc/video_jni.h"
#include "webrtc/system_wrappers/include/field_trial.h"
// Adding 'nogncheck' to disable the gn include headers check.
// We don't want to depend on 'system_wrappers:field_trial_default' because
// clients should be able to provide their own implementation.
#include "webrtc/system_wrappers/include/field_trial_default.h" // nogncheck
#include "webrtc/system_wrappers/include/trace.h"
namespace webrtc {
namespace jni {
// Note: Some of the video-specific PeerConnectionFactory methods are
// implemented in "video_jni.cc". This is done so that if an application
// doesn't need video support, it can just link with "null_video_jni.cc"
// instead of "video_jni.cc", which doesn't bring in the video-specific
// dependencies.
// Field trials initialization string
static char* field_trials_init_string = nullptr;
// Set in PeerConnectionFactory_initializeAndroidGlobals().
static bool factory_static_initialized = false;
static bool video_hw_acceleration_enabled = true;
JNI_FUNCTION_DECLARATION(jlong,
PeerConnectionFactory_nativeCreateObserver,
JNIEnv* jni,
jclass,
jobject j_observer) {
return (jlong) new PeerConnectionObserverJni(jni, j_observer);
}
JNI_FUNCTION_DECLARATION(void,
PeerConnectionFactory_nativeInitializeAndroidGlobals,
JNIEnv* jni,
jclass,
jobject context,
jboolean video_hw_acceleration) {
video_hw_acceleration_enabled = video_hw_acceleration;
if (!factory_static_initialized) {
JVM::Initialize(GetJVM());
factory_static_initialized = true;
}
}
JNI_FUNCTION_DECLARATION(void,
PeerConnectionFactory_initializeFieldTrials,
JNIEnv* jni,
jclass,
jstring j_trials_init_string) {
field_trials_init_string = NULL;
if (j_trials_init_string != NULL) {
const char* init_string =
jni->GetStringUTFChars(j_trials_init_string, NULL);
int init_string_length = jni->GetStringUTFLength(j_trials_init_string);
field_trials_init_string = new char[init_string_length + 1];
rtc::strcpyn(field_trials_init_string, init_string_length + 1, init_string);
jni->ReleaseStringUTFChars(j_trials_init_string, init_string);
LOG(LS_INFO) << "initializeFieldTrials: " << field_trials_init_string;
}
field_trial::InitFieldTrialsFromString(field_trials_init_string);
}
JNI_FUNCTION_DECLARATION(void,
PeerConnectionFactory_initializeInternalTracer,
JNIEnv* jni,
jclass) {
rtc::tracing::SetupInternalTracer();
}
JNI_FUNCTION_DECLARATION(jstring,
PeerConnectionFactory_nativeFieldTrialsFindFullName,
JNIEnv* jni,
jclass,
jstring j_name) {
return JavaStringFromStdString(
jni, field_trial::FindFullName(JavaToStdString(jni, j_name)));
}
JNI_FUNCTION_DECLARATION(jboolean,
PeerConnectionFactory_startInternalTracingCapture,
JNIEnv* jni,
jclass,
jstring j_event_tracing_filename) {
if (!j_event_tracing_filename)
return false;
const char* init_string =
jni->GetStringUTFChars(j_event_tracing_filename, NULL);
LOG(LS_INFO) << "Starting internal tracing to: " << init_string;
bool ret = rtc::tracing::StartInternalCapture(init_string);
jni->ReleaseStringUTFChars(j_event_tracing_filename, init_string);
return ret;
}
JNI_FUNCTION_DECLARATION(void,
PeerConnectionFactory_stopInternalTracingCapture,
JNIEnv* jni,
jclass) {
rtc::tracing::StopInternalCapture();
}
JNI_FUNCTION_DECLARATION(void,
PeerConnectionFactory_shutdownInternalTracer,
JNIEnv* jni,
jclass) {
rtc::tracing::ShutdownInternalTracer();
}
JNI_FUNCTION_DECLARATION(
jlong,
PeerConnectionFactory_nativeCreatePeerConnectionFactory,
JNIEnv* jni,
jclass,
jobject joptions,
jobject jencoder_factory,
jobject jdecoder_factory) {
// talk/ assumes pretty widely that the current Thread is ThreadManager'd, but
// ThreadManager only WrapCurrentThread()s the thread where it is first
// created. Since the semantics around when auto-wrapping happens in
// webrtc/rtc_base/ are convoluted, we simply wrap here to avoid having to
// think about ramifications of auto-wrapping there.
rtc::ThreadManager::Instance()->WrapCurrentThread();
Trace::CreateTrace();
std::unique_ptr<rtc::Thread> network_thread =
rtc::Thread::CreateWithSocketServer();
network_thread->SetName("network_thread", nullptr);
RTC_CHECK(network_thread->Start()) << "Failed to start thread";
std::unique_ptr<rtc::Thread> worker_thread = rtc::Thread::Create();
worker_thread->SetName("worker_thread", nullptr);
RTC_CHECK(worker_thread->Start()) << "Failed to start thread";
std::unique_ptr<rtc::Thread> signaling_thread = rtc::Thread::Create();
signaling_thread->SetName("signaling_thread", NULL);
RTC_CHECK(signaling_thread->Start()) << "Failed to start thread";
cricket::WebRtcVideoEncoderFactory* video_encoder_factory = nullptr;
cricket::WebRtcVideoDecoderFactory* video_decoder_factory = nullptr;
rtc::NetworkMonitorFactory* network_monitor_factory = nullptr;
auto audio_encoder_factory = CreateAudioEncoderFactory();
auto audio_decoder_factory = CreateAudioDecoderFactory();
PeerConnectionFactoryInterface::Options options;
bool has_options = joptions != NULL;
if (has_options) {
options = JavaToNativePeerConnectionFactoryOptions(jni, joptions);
}
if (video_hw_acceleration_enabled) {
video_encoder_factory = CreateVideoEncoderFactory(jni, jencoder_factory);
video_decoder_factory = CreateVideoDecoderFactory(jni, jdecoder_factory);
}
  // Create the network_monitor_factory unless the options are provided and
  // disable_network_monitor therein is set to true.
if (!(has_options && options.disable_network_monitor)) {
network_monitor_factory = new AndroidNetworkMonitorFactory();
rtc::NetworkMonitorFactory::SetFactory(network_monitor_factory);
}
AudioDeviceModule* adm = nullptr;
rtc::scoped_refptr<AudioMixer> audio_mixer = nullptr;
std::unique_ptr<CallFactoryInterface> call_factory(CreateCallFactory());
std::unique_ptr<RtcEventLogFactoryInterface> rtc_event_log_factory(
CreateRtcEventLogFactory());
std::unique_ptr<cricket::MediaEngineInterface> media_engine(CreateMediaEngine(
adm, audio_encoder_factory, audio_decoder_factory, video_encoder_factory,
video_decoder_factory, audio_mixer));
rtc::scoped_refptr<PeerConnectionFactoryInterface> factory(
CreateModularPeerConnectionFactory(
network_thread.get(), worker_thread.get(), signaling_thread.get(),
adm, audio_encoder_factory, audio_decoder_factory,
video_encoder_factory, video_decoder_factory, audio_mixer,
std::move(media_engine), std::move(call_factory),
std::move(rtc_event_log_factory)));
RTC_CHECK(factory) << "Failed to create the peer connection factory; "
<< "WebRTC/libjingle init likely failed on this device";
// TODO(honghaiz): Maybe put the options as the argument of
// CreatePeerConnectionFactory.
if (has_options) {
factory->SetOptions(options);
}
OwnedFactoryAndThreads* owned_factory = new OwnedFactoryAndThreads(
std::move(network_thread), std::move(worker_thread),
std::move(signaling_thread), video_encoder_factory, video_decoder_factory,
network_monitor_factory, factory.release());
owned_factory->InvokeJavaCallbacksOnFactoryThreads();
return jlongFromPointer(owned_factory);
}
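// Illustrative sketch of the resulting ownership: the Java
// PeerConnectionFactory keeps the returned jlong and passes it back into the
// native methods below.
//
//   jlong handle -> OwnedFactoryAndThreads
//                     |- network/worker/signaling rtc::Thread (owned)
//                     |- video encoder/decoder factories (if created above)
//                     |- network monitor factory (if created above)
//                     '- PeerConnectionFactoryInterface (released refptr)
//
// nativeFreeFactory below deletes the OwnedFactoryAndThreads and with it
// everything in this graph.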
JNI_FUNCTION_DECLARATION(void,
PeerConnectionFactory_nativeFreeFactory,
JNIEnv*,
jclass,
jlong j_p) {
delete reinterpret_cast<OwnedFactoryAndThreads*>(j_p);
  if (field_trials_init_string) {
    field_trial::InitFieldTrialsFromString(nullptr);
    delete[] field_trials_init_string;
    field_trials_init_string = nullptr;
  }
Trace::ReturnTrace();
}
JNI_FUNCTION_DECLARATION(void,
PeerConnectionFactory_nativeThreadsCallbacks,
JNIEnv*,
jclass,
jlong j_p) {
OwnedFactoryAndThreads* factory =
reinterpret_cast<OwnedFactoryAndThreads*>(j_p);
factory->InvokeJavaCallbacksOnFactoryThreads();
}
JNI_FUNCTION_DECLARATION(jlong,
PeerConnectionFactory_nativeCreateLocalMediaStream,
JNIEnv* jni,
jclass,
jlong native_factory,
jstring label) {
rtc::scoped_refptr<PeerConnectionFactoryInterface> factory(
factoryFromJava(native_factory));
rtc::scoped_refptr<MediaStreamInterface> stream(
factory->CreateLocalMediaStream(JavaToStdString(jni, label)));
return (jlong)stream.release();
}
JNI_FUNCTION_DECLARATION(jlong,
PeerConnectionFactory_nativeCreateAudioSource,
JNIEnv* jni,
jclass,
jlong native_factory,
jobject j_constraints) {
std::unique_ptr<MediaConstraintsJni> constraints(
new MediaConstraintsJni(jni, j_constraints));
rtc::scoped_refptr<PeerConnectionFactoryInterface> factory(
factoryFromJava(native_factory));
cricket::AudioOptions options;
CopyConstraintsIntoAudioOptions(constraints.get(), &options);
rtc::scoped_refptr<AudioSourceInterface> source(
factory->CreateAudioSource(options));
return (jlong)source.release();
}
JNI_FUNCTION_DECLARATION(jlong,
PeerConnectionFactory_nativeCreateAudioTrack,
JNIEnv* jni,
jclass,
jlong native_factory,
jstring id,
jlong native_source) {
rtc::scoped_refptr<PeerConnectionFactoryInterface> factory(
factoryFromJava(native_factory));
rtc::scoped_refptr<AudioTrackInterface> track(factory->CreateAudioTrack(
JavaToStdString(jni, id),
reinterpret_cast<AudioSourceInterface*>(native_source)));
return (jlong)track.release();
}
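// Starts an AEC (acoustic echo cancellation) dump. |file| is a raw file
// descriptor handed over from Java (typically unwrapped from a
// ParcelFileDescriptor); how it is consumed and closed is up to
// PeerConnectionFactoryInterface::StartAecDump().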
JNI_FUNCTION_DECLARATION(jboolean,
PeerConnectionFactory_nativeStartAecDump,
JNIEnv* jni,
jclass,
jlong native_factory,
jint file,
jint filesize_limit_bytes) {
rtc::scoped_refptr<PeerConnectionFactoryInterface> factory(
factoryFromJava(native_factory));
return factory->StartAecDump(file, filesize_limit_bytes);
}
JNI_FUNCTION_DECLARATION(void,
PeerConnectionFactory_nativeStopAecDump,
JNIEnv* jni,
jclass,
jlong native_factory) {
rtc::scoped_refptr<PeerConnectionFactoryInterface> factory(
factoryFromJava(native_factory));
factory->StopAecDump();
}
JNI_FUNCTION_DECLARATION(void,
PeerConnectionFactory_nativeSetOptions,
JNIEnv* jni,
jclass,
jlong native_factory,
jobject options) {
rtc::scoped_refptr<PeerConnectionFactoryInterface> factory(
factoryFromJava(native_factory));
PeerConnectionFactoryInterface::Options options_to_set =
JavaToNativePeerConnectionFactoryOptions(jni, options);
factory->SetOptions(options_to_set);
if (options_to_set.disable_network_monitor) {
OwnedFactoryAndThreads* owner =
reinterpret_cast<OwnedFactoryAndThreads*>(native_factory);
if (owner->network_monitor_factory()) {
rtc::NetworkMonitorFactory::ReleaseFactory(
owner->network_monitor_factory());
owner->clear_network_monitor_factory();
}
}
}
JNI_FUNCTION_DECLARATION(jlong,
PeerConnectionFactory_nativeCreatePeerConnection,
JNIEnv* jni,
jclass,
jlong factory,
jobject j_rtc_config,
jobject j_constraints,
jlong observer_p) {
rtc::scoped_refptr<PeerConnectionFactoryInterface> f(
reinterpret_cast<PeerConnectionFactoryInterface*>(
factoryFromJava(factory)));
PeerConnectionInterface::RTCConfiguration rtc_config(
PeerConnectionInterface::RTCConfigurationType::kAggressive);
JavaToNativeRTCConfiguration(jni, j_rtc_config, &rtc_config);
jclass j_rtc_config_class = GetObjectClass(jni, j_rtc_config);
jfieldID j_key_type_id = GetFieldID(jni, j_rtc_config_class, "keyType",
"Lorg/webrtc/PeerConnection$KeyType;");
jobject j_key_type = GetObjectField(jni, j_rtc_config, j_key_type_id);
// Generate non-default certificate.
rtc::KeyType key_type = JavaToNativeKeyType(jni, j_key_type);
if (key_type != rtc::KT_DEFAULT) {
rtc::scoped_refptr<rtc::RTCCertificate> certificate =
rtc::RTCCertificateGenerator::GenerateCertificate(
rtc::KeyParams(key_type), rtc::Optional<uint64_t>());
if (!certificate) {
LOG(LS_ERROR) << "Failed to generate certificate. KeyType: " << key_type;
return 0;
}
rtc_config.certificates.push_back(certificate);
}
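  // For rtc::KT_DEFAULT no certificate is pushed here; generation is left to
  // the PeerConnection itself, which uses its default key parameters.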
PeerConnectionObserverJni* observer =
reinterpret_cast<PeerConnectionObserverJni*>(observer_p);
observer->SetConstraints(new MediaConstraintsJni(jni, j_constraints));
CopyConstraintsIntoRtcConfiguration(observer->constraints(), &rtc_config);
rtc::scoped_refptr<PeerConnectionInterface> pc(
f->CreatePeerConnection(rtc_config, nullptr, nullptr, observer));
return (jlong)pc.release();
}
} // namespace jni
} // namespace webrtc


@ -0,0 +1,307 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "webrtc/sdk/android/src/jni/pc/peerconnectionobserver_jni.h"
#include <string>
#include "webrtc/sdk/android/src/jni/classreferenceholder.h"
#include "webrtc/sdk/android/src/jni/pc/java_native_conversion.h"
namespace webrtc {
namespace jni {
// Convenience, used since callbacks occur on the signaling thread, which may
// be a non-Java thread.
static JNIEnv* jni() {
return AttachCurrentThreadIfNeeded();
}
PeerConnectionObserverJni::PeerConnectionObserverJni(JNIEnv* jni,
jobject j_observer)
: j_observer_global_(jni, j_observer),
j_observer_class_(jni, GetObjectClass(jni, *j_observer_global_)),
j_media_stream_class_(jni, FindClass(jni, "org/webrtc/MediaStream")),
j_media_stream_ctor_(
GetMethodID(jni, *j_media_stream_class_, "<init>", "(J)V")),
j_audio_track_class_(jni, FindClass(jni, "org/webrtc/AudioTrack")),
j_audio_track_ctor_(
GetMethodID(jni, *j_audio_track_class_, "<init>", "(J)V")),
j_video_track_class_(jni, FindClass(jni, "org/webrtc/VideoTrack")),
j_video_track_ctor_(
GetMethodID(jni, *j_video_track_class_, "<init>", "(J)V")),
j_data_channel_class_(jni, FindClass(jni, "org/webrtc/DataChannel")),
j_data_channel_ctor_(
GetMethodID(jni, *j_data_channel_class_, "<init>", "(J)V")),
j_rtp_receiver_class_(jni, FindClass(jni, "org/webrtc/RtpReceiver")),
j_rtp_receiver_ctor_(
GetMethodID(jni, *j_rtp_receiver_class_, "<init>", "(J)V")) {}
PeerConnectionObserverJni::~PeerConnectionObserverJni() {
ScopedLocalRefFrame local_ref_frame(jni());
while (!remote_streams_.empty())
DisposeRemoteStream(remote_streams_.begin());
while (!rtp_receivers_.empty())
DisposeRtpReceiver(rtp_receivers_.begin());
}
void PeerConnectionObserverJni::OnIceCandidate(
const IceCandidateInterface* candidate) {
ScopedLocalRefFrame local_ref_frame(jni());
std::string sdp;
RTC_CHECK(candidate->ToString(&sdp)) << "got so far: " << sdp;
jclass candidate_class = FindClass(jni(), "org/webrtc/IceCandidate");
jmethodID ctor =
GetMethodID(jni(), candidate_class, "<init>",
"(Ljava/lang/String;ILjava/lang/String;Ljava/lang/String;)V");
jstring j_mid = JavaStringFromStdString(jni(), candidate->sdp_mid());
jstring j_sdp = JavaStringFromStdString(jni(), sdp);
jstring j_url = JavaStringFromStdString(jni(), candidate->candidate().url());
jobject j_candidate = jni()->NewObject(
candidate_class, ctor, j_mid, candidate->sdp_mline_index(), j_sdp, j_url);
CHECK_EXCEPTION(jni()) << "error during NewObject";
jmethodID m = GetMethodID(jni(), *j_observer_class_, "onIceCandidate",
"(Lorg/webrtc/IceCandidate;)V");
jni()->CallVoidMethod(*j_observer_global_, m, j_candidate);
CHECK_EXCEPTION(jni()) << "error during CallVoidMethod";
}
void PeerConnectionObserverJni::OnIceCandidatesRemoved(
const std::vector<cricket::Candidate>& candidates) {
ScopedLocalRefFrame local_ref_frame(jni());
jobjectArray candidates_array = NativeToJavaCandidateArray(jni(), candidates);
jmethodID m = GetMethodID(jni(), *j_observer_class_, "onIceCandidatesRemoved",
"([Lorg/webrtc/IceCandidate;)V");
jni()->CallVoidMethod(*j_observer_global_, m, candidates_array);
CHECK_EXCEPTION(jni()) << "Error during CallVoidMethod";
}
void PeerConnectionObserverJni::OnSignalingChange(
PeerConnectionInterface::SignalingState new_state) {
ScopedLocalRefFrame local_ref_frame(jni());
jmethodID m = GetMethodID(jni(), *j_observer_class_, "onSignalingChange",
"(Lorg/webrtc/PeerConnection$SignalingState;)V");
jobject new_state_enum = JavaEnumFromIndexAndClassName(
jni(), "PeerConnection$SignalingState", new_state);
jni()->CallVoidMethod(*j_observer_global_, m, new_state_enum);
CHECK_EXCEPTION(jni()) << "error during CallVoidMethod";
}
void PeerConnectionObserverJni::OnIceConnectionChange(
PeerConnectionInterface::IceConnectionState new_state) {
ScopedLocalRefFrame local_ref_frame(jni());
jmethodID m =
GetMethodID(jni(), *j_observer_class_, "onIceConnectionChange",
"(Lorg/webrtc/PeerConnection$IceConnectionState;)V");
jobject new_state_enum = JavaEnumFromIndexAndClassName(
jni(), "PeerConnection$IceConnectionState", new_state);
jni()->CallVoidMethod(*j_observer_global_, m, new_state_enum);
CHECK_EXCEPTION(jni()) << "error during CallVoidMethod";
}
void PeerConnectionObserverJni::OnIceConnectionReceivingChange(bool receiving) {
ScopedLocalRefFrame local_ref_frame(jni());
jmethodID m = GetMethodID(jni(), *j_observer_class_,
"onIceConnectionReceivingChange", "(Z)V");
jni()->CallVoidMethod(*j_observer_global_, m, receiving);
CHECK_EXCEPTION(jni()) << "error during CallVoidMethod";
}
void PeerConnectionObserverJni::OnIceGatheringChange(
PeerConnectionInterface::IceGatheringState new_state) {
ScopedLocalRefFrame local_ref_frame(jni());
jmethodID m = GetMethodID(jni(), *j_observer_class_, "onIceGatheringChange",
"(Lorg/webrtc/PeerConnection$IceGatheringState;)V");
jobject new_state_enum = JavaEnumFromIndexAndClassName(
jni(), "PeerConnection$IceGatheringState", new_state);
jni()->CallVoidMethod(*j_observer_global_, m, new_state_enum);
CHECK_EXCEPTION(jni()) << "error during CallVoidMethod";
}
void PeerConnectionObserverJni::OnAddStream(
rtc::scoped_refptr<MediaStreamInterface> stream) {
ScopedLocalRefFrame local_ref_frame(jni());
  // The stream may already be present in remote_streams_ if OnAddTrack was
  // called for it first; GetOrCreateJavaStream handles both cases.
jobject j_stream = GetOrCreateJavaStream(stream);
for (const auto& track : stream->GetAudioTracks()) {
    // Java AudioTrack holds one reference. Corresponding Release() is in
    // MediaStreamTrack_free, triggered by AudioTrack.dispose().
    track->AddRef();
    // j_audio_track_ctor_ was resolved with signature "(J)V", so only the
    // native pointer is passed.
    jobject j_track =
        jni()->NewObject(*j_audio_track_class_, j_audio_track_ctor_,
                         reinterpret_cast<jlong>(track.get()));
CHECK_EXCEPTION(jni()) << "error during NewObject";
jfieldID audio_tracks_id = GetFieldID(
jni(), *j_media_stream_class_, "audioTracks", "Ljava/util/LinkedList;");
jobject audio_tracks = GetObjectField(jni(), j_stream, audio_tracks_id);
jmethodID add = GetMethodID(jni(), GetObjectClass(jni(), audio_tracks),
"add", "(Ljava/lang/Object;)Z");
jboolean added = jni()->CallBooleanMethod(audio_tracks, add, j_track);
CHECK_EXCEPTION(jni()) << "error during CallBooleanMethod";
RTC_CHECK(added);
}
for (const auto& track : stream->GetVideoTracks()) {
    // Java VideoTrack holds one reference. Corresponding Release() is in
    // MediaStreamTrack_free, triggered by VideoTrack.dispose().
    track->AddRef();
    // j_video_track_ctor_ was resolved with signature "(J)V", so only the
    // native pointer is passed.
    jobject j_track =
        jni()->NewObject(*j_video_track_class_, j_video_track_ctor_,
                         reinterpret_cast<jlong>(track.get()));
CHECK_EXCEPTION(jni()) << "error during NewObject";
jfieldID video_tracks_id = GetFieldID(
jni(), *j_media_stream_class_, "videoTracks", "Ljava/util/LinkedList;");
jobject video_tracks = GetObjectField(jni(), j_stream, video_tracks_id);
jmethodID add = GetMethodID(jni(), GetObjectClass(jni(), video_tracks),
"add", "(Ljava/lang/Object;)Z");
jboolean added = jni()->CallBooleanMethod(video_tracks, add, j_track);
CHECK_EXCEPTION(jni()) << "error during CallBooleanMethod";
RTC_CHECK(added);
}
jmethodID m = GetMethodID(jni(), *j_observer_class_, "onAddStream",
"(Lorg/webrtc/MediaStream;)V");
jni()->CallVoidMethod(*j_observer_global_, m, j_stream);
CHECK_EXCEPTION(jni()) << "error during CallVoidMethod";
}
void PeerConnectionObserverJni::OnRemoveStream(
rtc::scoped_refptr<MediaStreamInterface> stream) {
ScopedLocalRefFrame local_ref_frame(jni());
NativeToJavaStreamsMap::iterator it = remote_streams_.find(stream);
  RTC_CHECK(it != remote_streams_.end())
      << "unexpected stream: " << std::hex
      << reinterpret_cast<uintptr_t>(stream.get());
jobject j_stream = it->second;
jmethodID m = GetMethodID(jni(), *j_observer_class_, "onRemoveStream",
"(Lorg/webrtc/MediaStream;)V");
jni()->CallVoidMethod(*j_observer_global_, m, j_stream);
CHECK_EXCEPTION(jni()) << "error during CallVoidMethod";
// Release the refptr reference so that DisposeRemoteStream can assert
// it removes the final reference.
stream = nullptr;
DisposeRemoteStream(it);
}
void PeerConnectionObserverJni::OnDataChannel(
rtc::scoped_refptr<DataChannelInterface> channel) {
ScopedLocalRefFrame local_ref_frame(jni());
jobject j_channel =
jni()->NewObject(*j_data_channel_class_, j_data_channel_ctor_,
jlongFromPointer(channel.get()));
CHECK_EXCEPTION(jni()) << "error during NewObject";
jmethodID m = GetMethodID(jni(), *j_observer_class_, "onDataChannel",
"(Lorg/webrtc/DataChannel;)V");
jni()->CallVoidMethod(*j_observer_global_, m, j_channel);
// Channel is now owned by Java object, and will be freed from
// DataChannel.dispose(). Important that this be done _after_ the
// CallVoidMethod above as Java code might call back into native code and be
// surprised to see a refcount of 2.
int bumped_count = channel->AddRef();
RTC_CHECK(bumped_count == 2) << "Unexpected refcount OnDataChannel";
CHECK_EXCEPTION(jni()) << "error during CallVoidMethod";
}
void PeerConnectionObserverJni::OnRenegotiationNeeded() {
ScopedLocalRefFrame local_ref_frame(jni());
jmethodID m =
GetMethodID(jni(), *j_observer_class_, "onRenegotiationNeeded", "()V");
jni()->CallVoidMethod(*j_observer_global_, m);
CHECK_EXCEPTION(jni()) << "error during CallVoidMethod";
}
void PeerConnectionObserverJni::OnAddTrack(
rtc::scoped_refptr<RtpReceiverInterface> receiver,
const std::vector<rtc::scoped_refptr<MediaStreamInterface>>& streams) {
ScopedLocalRefFrame local_ref_frame(jni());
jobject j_rtp_receiver =
jni()->NewObject(*j_rtp_receiver_class_, j_rtp_receiver_ctor_,
jlongFromPointer(receiver.get()));
CHECK_EXCEPTION(jni()) << "error during NewObject";
receiver->AddRef();
rtp_receivers_[receiver] = NewGlobalRef(jni(), j_rtp_receiver);
jobjectArray j_stream_array = NativeToJavaMediaStreamArray(jni(), streams);
jmethodID m =
GetMethodID(jni(), *j_observer_class_, "onAddTrack",
"(Lorg/webrtc/RtpReceiver;[Lorg/webrtc/MediaStream;)V");
jni()->CallVoidMethod(*j_observer_global_, m, j_rtp_receiver, j_stream_array);
CHECK_EXCEPTION(jni()) << "Error during CallVoidMethod";
}
void PeerConnectionObserverJni::SetConstraints(
MediaConstraintsJni* constraints) {
RTC_CHECK(!constraints_.get()) << "constraints already set!";
constraints_.reset(constraints);
}
void PeerConnectionObserverJni::DisposeRemoteStream(
const NativeToJavaStreamsMap::iterator& it) {
jobject j_stream = it->second;
remote_streams_.erase(it);
jni()->CallVoidMethod(
j_stream, GetMethodID(jni(), *j_media_stream_class_, "dispose", "()V"));
CHECK_EXCEPTION(jni()) << "error during MediaStream.dispose()";
DeleteGlobalRef(jni(), j_stream);
}
void PeerConnectionObserverJni::DisposeRtpReceiver(
const NativeToJavaRtpReceiverMap::iterator& it) {
jobject j_rtp_receiver = it->second;
rtp_receivers_.erase(it);
jni()->CallVoidMethod(
j_rtp_receiver,
GetMethodID(jni(), *j_rtp_receiver_class_, "dispose", "()V"));
CHECK_EXCEPTION(jni()) << "error during RtpReceiver.dispose()";
DeleteGlobalRef(jni(), j_rtp_receiver);
}
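// Both Dispose helpers above follow the same pattern: erase the map entry
// first, then call the Java object's dispose(), and finally drop the global
// reference that kept the Java object alive.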
// If the NativeToJavaStreamsMap contains the stream, return it.
// Otherwise, create a new Java MediaStream.
jobject PeerConnectionObserverJni::GetOrCreateJavaStream(
const rtc::scoped_refptr<MediaStreamInterface>& stream) {
NativeToJavaStreamsMap::iterator it = remote_streams_.find(stream);
if (it != remote_streams_.end()) {
return it->second;
}
// Java MediaStream holds one reference. Corresponding Release() is in
// MediaStream_free, triggered by MediaStream.dispose().
stream->AddRef();
jobject j_stream =
jni()->NewObject(*j_media_stream_class_, j_media_stream_ctor_,
reinterpret_cast<jlong>(stream.get()));
CHECK_EXCEPTION(jni()) << "error during NewObject";
remote_streams_[stream] = NewGlobalRef(jni(), j_stream);
return j_stream;
}
jobjectArray PeerConnectionObserverJni::NativeToJavaMediaStreamArray(
JNIEnv* jni,
const std::vector<rtc::scoped_refptr<MediaStreamInterface>>& streams) {
jobjectArray java_streams =
jni->NewObjectArray(streams.size(), *j_media_stream_class_, nullptr);
CHECK_EXCEPTION(jni) << "error during NewObjectArray";
for (size_t i = 0; i < streams.size(); ++i) {
jobject j_stream = GetOrCreateJavaStream(streams[i]);
jni->SetObjectArrayElement(java_streams, i, j_stream);
}
return java_streams;
}
} // namespace jni
} // namespace webrtc


@ -0,0 +1,95 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_SDK_ANDROID_SRC_JNI_PC_PEERCONNECTIONOBSERVER_JNI_H_
#define WEBRTC_SDK_ANDROID_SRC_JNI_PC_PEERCONNECTIONOBSERVER_JNI_H_
#include <map>
#include <memory>
#include <vector>
#include "webrtc/api/peerconnectioninterface.h"
#include "webrtc/sdk/android/src/jni/jni_helpers.h"
#include "webrtc/sdk/android/src/jni/pc/mediaconstraints_jni.h"
namespace webrtc {
namespace jni {
// Adapter between the C++ PeerConnectionObserver interface and the Java
// PeerConnection.Observer interface. Wraps an instance of the Java interface
// and dispatches C++ callbacks to Java.
class PeerConnectionObserverJni : public PeerConnectionObserver {
public:
PeerConnectionObserverJni(JNIEnv* jni, jobject j_observer);
virtual ~PeerConnectionObserverJni();
// Implementation of PeerConnectionObserver interface, which propagates
// the callbacks to the Java observer.
void OnIceCandidate(const IceCandidateInterface* candidate) override;
void OnIceCandidatesRemoved(
const std::vector<cricket::Candidate>& candidates) override;
void OnSignalingChange(
PeerConnectionInterface::SignalingState new_state) override;
void OnIceConnectionChange(
PeerConnectionInterface::IceConnectionState new_state) override;
void OnIceConnectionReceivingChange(bool receiving) override;
void OnIceGatheringChange(
PeerConnectionInterface::IceGatheringState new_state) override;
void OnAddStream(rtc::scoped_refptr<MediaStreamInterface> stream) override;
void OnRemoveStream(rtc::scoped_refptr<MediaStreamInterface> stream) override;
void OnDataChannel(rtc::scoped_refptr<DataChannelInterface> channel) override;
void OnRenegotiationNeeded() override;
void OnAddTrack(rtc::scoped_refptr<RtpReceiverInterface> receiver,
const std::vector<rtc::scoped_refptr<MediaStreamInterface>>&
streams) override;
void SetConstraints(MediaConstraintsJni* constraints);
const MediaConstraintsJni* constraints() { return constraints_.get(); }
private:
typedef std::map<MediaStreamInterface*, jobject> NativeToJavaStreamsMap;
typedef std::map<RtpReceiverInterface*, jobject> NativeToJavaRtpReceiverMap;
void DisposeRemoteStream(const NativeToJavaStreamsMap::iterator& it);
void DisposeRtpReceiver(const NativeToJavaRtpReceiverMap::iterator& it);
// If the NativeToJavaStreamsMap contains the stream, return it.
// Otherwise, create a new Java MediaStream.
jobject GetOrCreateJavaStream(
const rtc::scoped_refptr<MediaStreamInterface>& stream);
// Converts array of streams, creating or re-using Java streams as necessary.
jobjectArray NativeToJavaMediaStreamArray(
JNIEnv* jni,
const std::vector<rtc::scoped_refptr<MediaStreamInterface>>& streams);
const ScopedGlobalRef<jobject> j_observer_global_;
const ScopedGlobalRef<jclass> j_observer_class_;
const ScopedGlobalRef<jclass> j_media_stream_class_;
const jmethodID j_media_stream_ctor_;
const ScopedGlobalRef<jclass> j_audio_track_class_;
const jmethodID j_audio_track_ctor_;
const ScopedGlobalRef<jclass> j_video_track_class_;
const jmethodID j_video_track_ctor_;
const ScopedGlobalRef<jclass> j_data_channel_class_;
const jmethodID j_data_channel_ctor_;
const ScopedGlobalRef<jclass> j_rtp_receiver_class_;
const jmethodID j_rtp_receiver_ctor_;
// C++ -> Java remote streams. The stored jobects are global refs and must be
// manually deleted upon removal. Use DisposeRemoteStream().
NativeToJavaStreamsMap remote_streams_;
NativeToJavaRtpReceiverMap rtp_receivers_;
std::unique_ptr<MediaConstraintsJni> constraints_;
};
} // namespace jni
} // namespace webrtc
#endif // WEBRTC_SDK_ANDROID_SRC_JNI_PC_PEERCONNECTIONOBSERVER_JNI_H_


@ -0,0 +1,267 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "webrtc/sdk/android/src/jni/pc/rtcstatscollectorcallbackwrapper.h"
#include <string>
#include <vector>
#include "webrtc/sdk/android/src/jni/classreferenceholder.h"
namespace webrtc {
namespace jni {
RTCStatsCollectorCallbackWrapper::RTCStatsCollectorCallbackWrapper(
JNIEnv* jni,
jobject j_callback)
: j_callback_global_(jni, j_callback),
j_callback_class_(jni, GetObjectClass(jni, j_callback)),
j_stats_report_class_(FindClass(jni, "org/webrtc/RTCStatsReport")),
j_stats_report_ctor_(GetMethodID(jni,
j_stats_report_class_,
"<init>",
"(JLjava/util/Map;)V")),
j_stats_class_(FindClass(jni, "org/webrtc/RTCStats")),
j_stats_ctor_(GetMethodID(
jni,
j_stats_class_,
"<init>",
"(JLjava/lang/String;Ljava/lang/String;Ljava/util/Map;)V")),
j_linked_hash_map_class_(FindClass(jni, "java/util/LinkedHashMap")),
j_linked_hash_map_ctor_(
GetMethodID(jni, j_linked_hash_map_class_, "<init>", "()V")),
j_linked_hash_map_put_(GetMethodID(
jni,
j_linked_hash_map_class_,
"put",
"(Ljava/lang/Object;Ljava/lang/Object;)Ljava/lang/Object;")),
j_boolean_class_(FindClass(jni, "java/lang/Boolean")),
j_boolean_ctor_(GetMethodID(jni, j_boolean_class_, "<init>", "(Z)V")),
j_integer_class_(FindClass(jni, "java/lang/Integer")),
j_integer_ctor_(GetMethodID(jni, j_integer_class_, "<init>", "(I)V")),
j_long_class_(FindClass(jni, "java/lang/Long")),
j_long_ctor_(GetMethodID(jni, j_long_class_, "<init>", "(J)V")),
j_big_integer_class_(FindClass(jni, "java/math/BigInteger")),
j_big_integer_ctor_(GetMethodID(jni,
j_big_integer_class_,
"<init>",
"(Ljava/lang/String;)V")),
j_double_class_(FindClass(jni, "java/lang/Double")),
j_double_ctor_(GetMethodID(jni, j_double_class_, "<init>", "(D)V")),
j_string_class_(FindClass(jni, "java/lang/String")) {}
void RTCStatsCollectorCallbackWrapper::OnStatsDelivered(
const rtc::scoped_refptr<const RTCStatsReport>& report) {
JNIEnv* jni = AttachCurrentThreadIfNeeded();
ScopedLocalRefFrame local_ref_frame(jni);
jobject j_report = ReportToJava(jni, report);
jmethodID m = GetMethodID(jni, *j_callback_class_, "onStatsDelivered",
"(Lorg/webrtc/RTCStatsReport;)V");
jni->CallVoidMethod(*j_callback_global_, m, j_report);
CHECK_EXCEPTION(jni) << "error during CallVoidMethod";
}
jobject RTCStatsCollectorCallbackWrapper::ReportToJava(
JNIEnv* jni,
const rtc::scoped_refptr<const RTCStatsReport>& report) {
jobject j_stats_map =
jni->NewObject(j_linked_hash_map_class_, j_linked_hash_map_ctor_);
CHECK_EXCEPTION(jni) << "error during NewObject";
for (const RTCStats& stats : *report) {
// Create a local reference frame for each RTCStats, since there is a
// maximum number of references that can be created in one frame.
ScopedLocalRefFrame local_ref_frame(jni);
jstring j_id = JavaStringFromStdString(jni, stats.id());
jobject j_stats = StatsToJava(jni, stats);
jni->CallObjectMethod(j_stats_map, j_linked_hash_map_put_, j_id, j_stats);
CHECK_EXCEPTION(jni) << "error during CallObjectMethod";
}
jobject j_report = jni->NewObject(j_stats_report_class_, j_stats_report_ctor_,
report->timestamp_us(), j_stats_map);
CHECK_EXCEPTION(jni) << "error during NewObject";
return j_report;
}
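// The Java object graph built above is, roughly:
//   RTCStatsReport(timestampUs, LinkedHashMap<id, RTCStats>)
//     '- RTCStats(timestampUs, type, id, LinkedHashMap<name, Object>)
// with each member value boxed by MemberToJava below.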
jobject RTCStatsCollectorCallbackWrapper::StatsToJava(JNIEnv* jni,
const RTCStats& stats) {
jstring j_type = JavaStringFromStdString(jni, stats.type());
jstring j_id = JavaStringFromStdString(jni, stats.id());
  jobject j_members =
      jni->NewObject(j_linked_hash_map_class_, j_linked_hash_map_ctor_);
  CHECK_EXCEPTION(jni) << "error during NewObject";
for (const RTCStatsMemberInterface* member : stats.Members()) {
if (!member->is_defined()) {
continue;
}
// Create a local reference frame for each member as well.
ScopedLocalRefFrame local_ref_frame(jni);
jstring j_name = JavaStringFromStdString(jni, member->name());
jobject j_member = MemberToJava(jni, member);
jni->CallObjectMethod(j_members, j_linked_hash_map_put_, j_name, j_member);
CHECK_EXCEPTION(jni) << "error during CallObjectMethod";
}
jobject j_stats =
jni->NewObject(j_stats_class_, j_stats_ctor_, stats.timestamp_us(),
j_type, j_id, j_members);
CHECK_EXCEPTION(jni) << "error during NewObject";
return j_stats;
}
jobject RTCStatsCollectorCallbackWrapper::MemberToJava(
JNIEnv* jni,
const RTCStatsMemberInterface* member) {
switch (member->type()) {
case RTCStatsMemberInterface::kBool: {
jobject value = jni->NewObject(j_boolean_class_, j_boolean_ctor_,
*member->cast_to<RTCStatsMember<bool>>());
CHECK_EXCEPTION(jni) << "error during NewObject";
return value;
}
case RTCStatsMemberInterface::kInt32: {
jobject value =
jni->NewObject(j_integer_class_, j_integer_ctor_,
*member->cast_to<RTCStatsMember<int32_t>>());
CHECK_EXCEPTION(jni) << "error during NewObject";
return value;
}
case RTCStatsMemberInterface::kUint32: {
jobject value =
jni->NewObject(j_long_class_, j_long_ctor_,
(jlong)*member->cast_to<RTCStatsMember<uint32_t>>());
CHECK_EXCEPTION(jni) << "error during NewObject";
return value;
}
case RTCStatsMemberInterface::kInt64: {
jobject value =
jni->NewObject(j_long_class_, j_long_ctor_,
*member->cast_to<RTCStatsMember<int64_t>>());
CHECK_EXCEPTION(jni) << "error during NewObject";
return value;
}
case RTCStatsMemberInterface::kUint64: {
jobject value =
jni->NewObject(j_big_integer_class_, j_big_integer_ctor_,
JavaStringFromStdString(jni, member->ValueToString()));
CHECK_EXCEPTION(jni) << "error during NewObject";
return value;
}
case RTCStatsMemberInterface::kDouble: {
jobject value =
jni->NewObject(j_double_class_, j_double_ctor_,
*member->cast_to<RTCStatsMember<double>>());
CHECK_EXCEPTION(jni) << "error during NewObject";
return value;
}
case RTCStatsMemberInterface::kString: {
return JavaStringFromStdString(
jni, *member->cast_to<RTCStatsMember<std::string>>());
}
case RTCStatsMemberInterface::kSequenceBool: {
const std::vector<bool>& values =
*member->cast_to<RTCStatsMember<std::vector<bool>>>();
jobjectArray j_values =
jni->NewObjectArray(values.size(), j_boolean_class_, nullptr);
CHECK_EXCEPTION(jni) << "error during NewObjectArray";
for (size_t i = 0; i < values.size(); ++i) {
jobject value =
jni->NewObject(j_boolean_class_, j_boolean_ctor_, values[i]);
jni->SetObjectArrayElement(j_values, i, value);
CHECK_EXCEPTION(jni) << "error during SetObjectArrayElement";
}
return j_values;
}
case RTCStatsMemberInterface::kSequenceInt32: {
const std::vector<int32_t>& values =
*member->cast_to<RTCStatsMember<std::vector<int32_t>>>();
jobjectArray j_values =
jni->NewObjectArray(values.size(), j_integer_class_, nullptr);
CHECK_EXCEPTION(jni) << "error during NewObjectArray";
for (size_t i = 0; i < values.size(); ++i) {
jobject value =
jni->NewObject(j_integer_class_, j_integer_ctor_, values[i]);
jni->SetObjectArrayElement(j_values, i, value);
CHECK_EXCEPTION(jni) << "error during SetObjectArrayElement";
}
return j_values;
}
case RTCStatsMemberInterface::kSequenceUint32: {
const std::vector<uint32_t>& values =
*member->cast_to<RTCStatsMember<std::vector<uint32_t>>>();
jobjectArray j_values =
jni->NewObjectArray(values.size(), j_long_class_, nullptr);
CHECK_EXCEPTION(jni) << "error during NewObjectArray";
for (size_t i = 0; i < values.size(); ++i) {
jobject value = jni->NewObject(j_long_class_, j_long_ctor_, values[i]);
jni->SetObjectArrayElement(j_values, i, value);
CHECK_EXCEPTION(jni) << "error during SetObjectArrayElement";
}
return j_values;
}
case RTCStatsMemberInterface::kSequenceInt64: {
const std::vector<int64_t>& values =
*member->cast_to<RTCStatsMember<std::vector<int64_t>>>();
jobjectArray j_values =
jni->NewObjectArray(values.size(), j_long_class_, nullptr);
CHECK_EXCEPTION(jni) << "error during NewObjectArray";
for (size_t i = 0; i < values.size(); ++i) {
jobject value = jni->NewObject(j_long_class_, j_long_ctor_, values[i]);
jni->SetObjectArrayElement(j_values, i, value);
CHECK_EXCEPTION(jni) << "error during SetObjectArrayElement";
}
return j_values;
}
case RTCStatsMemberInterface::kSequenceUint64: {
const std::vector<uint64_t>& values =
*member->cast_to<RTCStatsMember<std::vector<uint64_t>>>();
jobjectArray j_values =
jni->NewObjectArray(values.size(), j_big_integer_class_, nullptr);
CHECK_EXCEPTION(jni) << "error during NewObjectArray";
for (size_t i = 0; i < values.size(); ++i) {
jobject value = jni->NewObject(
j_big_integer_class_, j_big_integer_ctor_,
JavaStringFromStdString(jni, rtc::ToString(values[i])));
jni->SetObjectArrayElement(j_values, i, value);
CHECK_EXCEPTION(jni) << "error during SetObjectArrayElement";
}
return j_values;
}
case RTCStatsMemberInterface::kSequenceDouble: {
const std::vector<double>& values =
*member->cast_to<RTCStatsMember<std::vector<double>>>();
jobjectArray j_values =
jni->NewObjectArray(values.size(), j_double_class_, nullptr);
CHECK_EXCEPTION(jni) << "error during NewObjectArray";
for (size_t i = 0; i < values.size(); ++i) {
jobject value =
jni->NewObject(j_double_class_, j_double_ctor_, values[i]);
jni->SetObjectArrayElement(j_values, i, value);
CHECK_EXCEPTION(jni) << "error during SetObjectArrayElement";
}
return j_values;
}
case RTCStatsMemberInterface::kSequenceString: {
const std::vector<std::string>& values =
*member->cast_to<RTCStatsMember<std::vector<std::string>>>();
jobjectArray j_values =
jni->NewObjectArray(values.size(), j_string_class_, nullptr);
CHECK_EXCEPTION(jni) << "error during NewObjectArray";
for (size_t i = 0; i < values.size(); ++i) {
jni->SetObjectArrayElement(j_values, i,
JavaStringFromStdString(jni, values[i]));
CHECK_EXCEPTION(jni) << "error during SetObjectArrayElement";
}
return j_values;
}
}
RTC_NOTREACHED();
return nullptr;
}
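// For reference, the boxing scheme implemented above:
//   bool -> Boolean, int32 -> Integer, uint32 -> Long (widened),
//   int64 -> Long, uint64 -> BigInteger (via its decimal string),
//   double -> Double, string -> String; sequences map to arrays with the
//   same element boxing. The widening and BigInteger detours are needed
//   because Java has no unsigned integral types.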
} // namespace jni
} // namespace webrtc


@ -0,0 +1,64 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_SDK_ANDROID_SRC_JNI_PC_RTCSTATSCOLLECTORCALLBACKWRAPPER_H_
#define WEBRTC_SDK_ANDROID_SRC_JNI_PC_RTCSTATSCOLLECTORCALLBACKWRAPPER_H_
#include <jni.h>
#include "webrtc/api/peerconnectioninterface.h"
#include "webrtc/sdk/android/src/jni/jni_helpers.h"
namespace webrtc {
namespace jni {
// Adapter for a Java RTCStatsCollectorCallback presenting a C++
// RTCStatsCollectorCallback and dispatching the callback from C++ back to
// Java.
class RTCStatsCollectorCallbackWrapper : public RTCStatsCollectorCallback {
public:
RTCStatsCollectorCallbackWrapper(JNIEnv* jni, jobject j_callback);
void OnStatsDelivered(
const rtc::scoped_refptr<const RTCStatsReport>& report) override;
private:
// Helper functions for converting C++ RTCStatsReport to Java equivalent.
jobject ReportToJava(JNIEnv* jni,
const rtc::scoped_refptr<const RTCStatsReport>& report);
jobject StatsToJava(JNIEnv* jni, const RTCStats& stats);
jobject MemberToJava(JNIEnv* jni, const RTCStatsMemberInterface* member);
const ScopedGlobalRef<jobject> j_callback_global_;
const ScopedGlobalRef<jclass> j_callback_class_;
const jclass j_stats_report_class_;
const jmethodID j_stats_report_ctor_;
const jclass j_stats_class_;
const jmethodID j_stats_ctor_;
const jclass j_linked_hash_map_class_;
const jmethodID j_linked_hash_map_ctor_;
const jmethodID j_linked_hash_map_put_;
const jclass j_boolean_class_;
const jmethodID j_boolean_ctor_;
const jclass j_integer_class_;
const jmethodID j_integer_ctor_;
const jclass j_long_class_;
const jmethodID j_long_ctor_;
const jclass j_big_integer_class_;
const jmethodID j_big_integer_ctor_;
const jclass j_double_class_;
const jmethodID j_double_ctor_;
const jclass j_string_class_;
};
} // namespace jni
} // namespace webrtc
#endif // WEBRTC_SDK_ANDROID_SRC_JNI_PC_RTCSTATSCOLLECTORCALLBACKWRAPPER_H_


@ -0,0 +1,96 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "webrtc/api/rtpreceiverinterface.h"
#include "webrtc/sdk/android/src/jni/jni_helpers.h"
#include "webrtc/sdk/android/src/jni/pc/java_native_conversion.h"
#include "webrtc/sdk/android/src/jni/pc/rtpreceiverobserver_jni.h"
namespace webrtc {
namespace jni {
JNI_FUNCTION_DECLARATION(jlong,
RtpReceiver_nativeGetTrack,
JNIEnv* jni,
jclass,
jlong j_rtp_receiver_pointer,
jlong j_track_pointer) {
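  // Note: |j_track_pointer| is not used by this implementation; it appears to
  // exist only to mirror the Java-side native method signature.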
return jlongFromPointer(
reinterpret_cast<RtpReceiverInterface*>(j_rtp_receiver_pointer)
->track()
.release());
}
JNI_FUNCTION_DECLARATION(jboolean,
RtpReceiver_nativeSetParameters,
JNIEnv* jni,
jclass,
jlong j_rtp_receiver_pointer,
jobject j_parameters) {
if (IsNull(jni, j_parameters)) {
return false;
}
RtpParameters parameters;
JavaToNativeRtpParameters(jni, j_parameters, &parameters);
return reinterpret_cast<RtpReceiverInterface*>(j_rtp_receiver_pointer)
->SetParameters(parameters);
}
JNI_FUNCTION_DECLARATION(jobject,
RtpReceiver_nativeGetParameters,
JNIEnv* jni,
jclass,
jlong j_rtp_receiver_pointer) {
RtpParameters parameters =
reinterpret_cast<RtpReceiverInterface*>(j_rtp_receiver_pointer)
->GetParameters();
return NativeToJavaRtpParameters(jni, parameters);
}
JNI_FUNCTION_DECLARATION(jstring,
RtpReceiver_nativeId,
JNIEnv* jni,
jclass,
jlong j_rtp_receiver_pointer) {
return JavaStringFromStdString(
jni,
reinterpret_cast<RtpReceiverInterface*>(j_rtp_receiver_pointer)->id());
}
JNI_FUNCTION_DECLARATION(jlong,
RtpReceiver_nativeSetObserver,
JNIEnv* jni,
jclass,
jlong j_rtp_receiver_pointer,
jobject j_observer) {
  RtpReceiverObserverJni* rtp_receiver_observer =
      new RtpReceiverObserverJni(jni, j_observer);
  reinterpret_cast<RtpReceiverInterface*>(j_rtp_receiver_pointer)
      ->SetObserver(rtp_receiver_observer);
  return jlongFromPointer(rtp_receiver_observer);
}
JNI_FUNCTION_DECLARATION(void,
RtpReceiver_nativeUnsetObserver,
JNIEnv* jni,
jclass,
jlong j_rtp_receiver_pointer,
jlong j_observer_pointer) {
reinterpret_cast<RtpReceiverInterface*>(j_rtp_receiver_pointer)
->SetObserver(nullptr);
RtpReceiverObserverJni* observer =
reinterpret_cast<RtpReceiverObserverJni*>(j_observer_pointer);
  // delete on a null pointer is a no-op, so no null check is needed.
  delete observer;
}
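// Lifetime contract for the two functions above: nativeSetObserver returns
// the heap-allocated observer to Java as a jlong, and Java must pass the same
// value back to nativeUnsetObserver, which detaches and deletes it.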
} // namespace jni
} // namespace webrtc


@ -0,0 +1,34 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "webrtc/sdk/android/src/jni/pc/rtpreceiverobserver_jni.h"
#include "webrtc/sdk/android/src/jni/pc/java_native_conversion.h"
namespace webrtc {
namespace jni {
void RtpReceiverObserverJni::OnFirstPacketReceived(
cricket::MediaType media_type) {
JNIEnv* const jni = AttachCurrentThreadIfNeeded();
jmethodID j_on_first_packet_received_mid = GetMethodID(
jni, GetObjectClass(jni, *j_observer_global_), "onFirstPacketReceived",
"(Lorg/webrtc/MediaStreamTrack$MediaType;)V");
  // Get the Java version of the media type.
  jobject j_media_type = NativeToJavaMediaType(jni, media_type);
  // Trigger the callback function.
  jni->CallVoidMethod(*j_observer_global_, j_on_first_packet_received_mid,
                      j_media_type);
CHECK_EXCEPTION(jni) << "error during CallVoidMethod";
}
} // namespace jni
} // namespace webrtc


@ -0,0 +1,39 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_SDK_ANDROID_SRC_JNI_PC_RTPRECEIVEROBSERVER_JNI_H_
#define WEBRTC_SDK_ANDROID_SRC_JNI_PC_RTPRECEIVEROBSERVER_JNI_H_
#include "webrtc/api/rtpreceiverinterface.h"
#include "webrtc/sdk/android/src/jni/jni_helpers.h"
namespace webrtc {
namespace jni {
// Adapter between the C++ RtpReceiverObserverInterface and the Java
// RtpReceiver.Observer interface. Wraps an instance of the Java interface and
// dispatches C++ callbacks to Java.
class RtpReceiverObserverJni : public RtpReceiverObserverInterface {
public:
RtpReceiverObserverJni(JNIEnv* jni, jobject j_observer)
: j_observer_global_(jni, j_observer) {}
~RtpReceiverObserverJni() override {}
void OnFirstPacketReceived(cricket::MediaType media_type) override;
private:
const ScopedGlobalRef<jobject> j_observer_global_;
};
} // namespace jni
} // namespace webrtc
#endif // WEBRTC_SDK_ANDROID_SRC_JNI_PC_RTPRECEIVEROBSERVER_JNI_H_


@ -0,0 +1,86 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "webrtc/api/rtpsenderinterface.h"
#include "webrtc/sdk/android/src/jni/jni_helpers.h"
#include "webrtc/sdk/android/src/jni/pc/java_native_conversion.h"
namespace webrtc {
namespace jni {
JNI_FUNCTION_DECLARATION(jboolean,
RtpSender_nativeSetTrack,
JNIEnv* jni,
jclass,
jlong j_rtp_sender_pointer,
jlong j_track_pointer) {
return reinterpret_cast<RtpSenderInterface*>(j_rtp_sender_pointer)
->SetTrack(reinterpret_cast<MediaStreamTrackInterface*>(j_track_pointer));
}
JNI_FUNCTION_DECLARATION(jlong,
RtpSender_nativeGetTrack,
JNIEnv* jni,
jclass,
jlong j_rtp_sender_pointer) {
return jlongFromPointer(
reinterpret_cast<RtpSenderInterface*>(j_rtp_sender_pointer)
->track()
.release());
}
JNI_FUNCTION_DECLARATION(jlong,
RtpSender_nativeGetDtmfSender,
JNIEnv* jni,
jclass,
jlong j_rtp_sender_pointer) {
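  // GetDtmfSender() is null for senders that cannot send DTMF (e.g. video
  // senders); releasing a null refptr makes this return 0 to Java.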
return jlongFromPointer(
reinterpret_cast<RtpSenderInterface*>(j_rtp_sender_pointer)
->GetDtmfSender()
.release());
}
JNI_FUNCTION_DECLARATION(jboolean,
RtpSender_nativeSetParameters,
JNIEnv* jni,
jclass,
jlong j_rtp_sender_pointer,
jobject j_parameters) {
if (IsNull(jni, j_parameters)) {
return false;
}
RtpParameters parameters;
JavaToNativeRtpParameters(jni, j_parameters, &parameters);
return reinterpret_cast<RtpSenderInterface*>(j_rtp_sender_pointer)
->SetParameters(parameters);
}
JNI_FUNCTION_DECLARATION(jobject,
RtpSender_nativeGetParameters,
JNIEnv* jni,
jclass,
jlong j_rtp_sender_pointer) {
RtpParameters parameters =
reinterpret_cast<RtpSenderInterface*>(j_rtp_sender_pointer)
->GetParameters();
return NativeToJavaRtpParameters(jni, parameters);
}
JNI_FUNCTION_DECLARATION(jstring,
RtpSender_nativeId,
JNIEnv* jni,
jclass,
jlong j_rtp_sender_pointer) {
return JavaStringFromStdString(
jni, reinterpret_cast<RtpSenderInterface*>(j_rtp_sender_pointer)->id());
}
} // namespace jni
} // namespace webrtc


@ -0,0 +1,109 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_SDK_ANDROID_SRC_JNI_PC_SDPOBSERVER_JNI_H_
#define WEBRTC_SDK_ANDROID_SRC_JNI_PC_SDPOBSERVER_JNI_H_
#include <memory>
#include <string>
#include "webrtc/api/peerconnectioninterface.h"
#include "webrtc/sdk/android/src/jni/jni_helpers.h"
#include "webrtc/sdk/android/src/jni/pc/mediaconstraints_jni.h"
namespace webrtc {
namespace jni {
// Adapter presenting a C++ CreateSessionDescriptionObserver or
// SetSessionDescriptionObserver on behalf of a Java SdpObserver and
// dispatching the callbacks from C++ back to Java.
template <class T> // T is one of {Create,Set}SessionDescriptionObserver.
class SdpObserverJni : public T {
public:
SdpObserverJni(JNIEnv* jni,
jobject j_observer,
MediaConstraintsJni* constraints)
: constraints_(constraints),
j_observer_global_(jni, j_observer),
j_observer_class_(jni, GetObjectClass(jni, j_observer)) {}
virtual ~SdpObserverJni() {}
  // Can't be marked override: this overload exists only on the
  // SetSessionDescriptionObserver instantiation of the template.
virtual void OnSuccess() {
ScopedLocalRefFrame local_ref_frame(jni());
jmethodID m = GetMethodID(jni(), *j_observer_class_, "onSetSuccess", "()V");
jni()->CallVoidMethod(*j_observer_global_, m);
CHECK_EXCEPTION(jni()) << "error during CallVoidMethod";
}
  // Can't be marked override: this overload exists only on the
  // CreateSessionDescriptionObserver instantiation of the template.
virtual void OnSuccess(SessionDescriptionInterface* desc) {
ScopedLocalRefFrame local_ref_frame(jni());
jmethodID m = GetMethodID(jni(), *j_observer_class_, "onCreateSuccess",
"(Lorg/webrtc/SessionDescription;)V");
jobject j_sdp = NativeToJavaSessionDescription(jni(), desc);
jni()->CallVoidMethod(*j_observer_global_, m, j_sdp);
CHECK_EXCEPTION(jni()) << "error during CallVoidMethod";
// OnSuccess transfers ownership of the description (there's a TODO to make
// it use unique_ptr...).
delete desc;
}
protected:
// Common implementation for failure of Set & Create types, distinguished by
// |op| being "Set" or "Create".
void DoOnFailure(const std::string& op, const std::string& error) {
jmethodID m = GetMethodID(jni(), *j_observer_class_, "on" + op + "Failure",
"(Ljava/lang/String;)V");
jstring j_error_string = JavaStringFromStdString(jni(), error);
jni()->CallVoidMethod(*j_observer_global_, m, j_error_string);
CHECK_EXCEPTION(jni()) << "error during CallVoidMethod";
}
JNIEnv* jni() { return AttachCurrentThreadIfNeeded(); }
private:
std::unique_ptr<MediaConstraintsJni> constraints_;
const ScopedGlobalRef<jobject> j_observer_global_;
const ScopedGlobalRef<jclass> j_observer_class_;
};
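// The template above is instantiated exactly twice, below. Failures are
// routed through DoOnFailure() with |op| set to "Create" or "Set", so the
// Java callbacks onCreateFailure/onSetFailure are resolved by name.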
class CreateSdpObserverJni
: public SdpObserverJni<CreateSessionDescriptionObserver> {
public:
CreateSdpObserverJni(JNIEnv* jni,
jobject j_observer,
MediaConstraintsJni* constraints)
: SdpObserverJni(jni, j_observer, constraints) {}
void OnFailure(const std::string& error) override {
ScopedLocalRefFrame local_ref_frame(jni());
SdpObserverJni::DoOnFailure(std::string("Create"), error);
}
};
class SetSdpObserverJni : public SdpObserverJni<SetSessionDescriptionObserver> {
public:
SetSdpObserverJni(JNIEnv* jni,
jobject j_observer,
MediaConstraintsJni* constraints)
: SdpObserverJni(jni, j_observer, constraints) {}
void OnFailure(const std::string& error) override {
ScopedLocalRefFrame local_ref_frame(jni());
SdpObserverJni::DoOnFailure(std::string("Set"), error);
}
};
} // namespace jni
} // namespace webrtc
#endif // WEBRTC_SDK_ANDROID_SRC_JNI_PC_SDPOBSERVER_JNI_H_


@ -0,0 +1,85 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "webrtc/sdk/android/src/jni/pc/statsobserver_jni.h"
#include "webrtc/sdk/android/src/jni/classreferenceholder.h"
namespace webrtc {
namespace jni {
// Convenience, used since callbacks occur on the signaling thread, which may
// be a non-Java thread.
static JNIEnv* jni() {
return AttachCurrentThreadIfNeeded();
}
StatsObserverJni::StatsObserverJni(JNIEnv* jni, jobject j_observer)
: j_observer_global_(jni, j_observer),
j_observer_class_(jni, GetObjectClass(jni, j_observer)),
j_stats_report_class_(jni, FindClass(jni, "org/webrtc/StatsReport")),
j_stats_report_ctor_(GetMethodID(jni,
*j_stats_report_class_,
"<init>",
"(Ljava/lang/String;Ljava/lang/String;D"
"[Lorg/webrtc/StatsReport$Value;)V")),
j_value_class_(jni, FindClass(jni, "org/webrtc/StatsReport$Value")),
j_value_ctor_(GetMethodID(jni,
*j_value_class_,
"<init>",
"(Ljava/lang/String;Ljava/lang/String;)V")) {}
void StatsObserverJni::OnComplete(const StatsReports& reports) {
ScopedLocalRefFrame local_ref_frame(jni());
jobjectArray j_reports = ReportsToJava(jni(), reports);
jmethodID m = GetMethodID(jni(), *j_observer_class_, "onComplete",
"([Lorg/webrtc/StatsReport;)V");
jni()->CallVoidMethod(*j_observer_global_, m, j_reports);
CHECK_EXCEPTION(jni()) << "error during CallVoidMethod";
}
jobjectArray StatsObserverJni::ReportsToJava(JNIEnv* jni,
const StatsReports& reports) {
jobjectArray reports_array =
jni->NewObjectArray(reports.size(), *j_stats_report_class_, NULL);
int i = 0;
for (const auto* report : reports) {
ScopedLocalRefFrame local_ref_frame(jni);
jstring j_id = JavaStringFromStdString(jni, report->id()->ToString());
jstring j_type = JavaStringFromStdString(jni, report->TypeToString());
jobjectArray j_values = ValuesToJava(jni, report->values());
jobject j_report =
jni->NewObject(*j_stats_report_class_, j_stats_report_ctor_, j_id,
j_type, report->timestamp(), j_values);
jni->SetObjectArrayElement(reports_array, i++, j_report);
}
return reports_array;
}
jobjectArray StatsObserverJni::ValuesToJava(JNIEnv* jni,
const StatsReport::Values& values) {
jobjectArray j_values =
jni->NewObjectArray(values.size(), *j_value_class_, NULL);
int i = 0;
for (const auto& it : values) {
ScopedLocalRefFrame local_ref_frame(jni);
// Should we use the '.name' enum value here instead of converting the
// name to a string?
jstring j_name = JavaStringFromStdString(jni, it.second->display_name());
jstring j_value = JavaStringFromStdString(jni, it.second->ToString());
jobject j_element_value =
jni->NewObject(*j_value_class_, j_value_ctor_, j_name, j_value);
jni->SetObjectArrayElement(j_values, i++, j_element_value);
}
return j_values;
}
} // namespace jni
} // namespace webrtc


@ -0,0 +1,44 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_SDK_ANDROID_SRC_JNI_PC_STATSOBSERVER_JNI_H_
#define WEBRTC_SDK_ANDROID_SRC_JNI_PC_STATSOBSERVER_JNI_H_
#include "webrtc/api/peerconnectioninterface.h"
#include "webrtc/sdk/android/src/jni/jni_helpers.h"
namespace webrtc {
namespace jni {
// Adapter for a Java StatsObserver presenting a C++ StatsObserver and
// dispatching the callback from C++ back to Java.
class StatsObserverJni : public StatsObserver {
public:
StatsObserverJni(JNIEnv* jni, jobject j_observer);
void OnComplete(const StatsReports& reports) override;
private:
jobjectArray ReportsToJava(JNIEnv* jni, const StatsReports& reports);
jobjectArray ValuesToJava(JNIEnv* jni, const StatsReport::Values& values);
const ScopedGlobalRef<jobject> j_observer_global_;
const ScopedGlobalRef<jclass> j_observer_class_;
const ScopedGlobalRef<jclass> j_stats_report_class_;
const jmethodID j_stats_report_ctor_;
const ScopedGlobalRef<jclass> j_value_class_;
const jmethodID j_value_ctor_;
};
} // namespace jni
} // namespace webrtc
#endif // WEBRTC_SDK_ANDROID_SRC_JNI_PC_STATSOBSERVER_JNI_H_


@ -0,0 +1,136 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include <jni.h>
#include "webrtc/api/videosourceproxy.h"
#include "webrtc/media/engine/webrtcvideodecoderfactory.h"
#include "webrtc/media/engine/webrtcvideoencoderfactory.h"
#include "webrtc/rtc_base/logging.h"
#include "webrtc/sdk/android/src/jni/androidmediadecoder_jni.h"
#include "webrtc/sdk/android/src/jni/androidmediaencoder_jni.h"
#include "webrtc/sdk/android/src/jni/androidvideotracksource.h"
#include "webrtc/sdk/android/src/jni/classreferenceholder.h"
#include "webrtc/sdk/android/src/jni/pc/ownedfactoryandthreads.h"
#include "webrtc/sdk/android/src/jni/surfacetexturehelper_jni.h"
#include "webrtc/sdk/android/src/jni/videodecoderfactorywrapper.h"
#include "webrtc/sdk/android/src/jni/videoencoderfactorywrapper.h"
namespace webrtc {
namespace jni {
// TODO(sakal): Remove this once MediaCodecVideoDecoder/Encoder are no longer
// used and all applications inject their own codecs.
// Note: because these are process-wide statics, the bookkeeping below is
// incorrect if more than one PeerConnectionFactory is created with different
// injected codec factories.
static bool use_media_codec_encoder_factory;
static bool use_media_codec_decoder_factory;
cricket::WebRtcVideoEncoderFactory* CreateVideoEncoderFactory(
JNIEnv* jni,
jobject j_encoder_factory) {
use_media_codec_encoder_factory = j_encoder_factory == nullptr;
if (use_media_codec_encoder_factory) {
return new MediaCodecVideoEncoderFactory();
} else {
return new VideoEncoderFactoryWrapper(jni, j_encoder_factory);
}
}
cricket::WebRtcVideoDecoderFactory* CreateVideoDecoderFactory(
JNIEnv* jni,
jobject j_decoder_factory) {
use_media_codec_decoder_factory = j_decoder_factory == nullptr;
if (use_media_codec_decoder_factory) {
return new MediaCodecVideoDecoderFactory();
} else {
return new VideoDecoderFactoryWrapper(jni, j_decoder_factory);
}
}
jobject GetJavaSurfaceTextureHelper(
const rtc::scoped_refptr<SurfaceTextureHelper>& surface_texture_helper) {
return surface_texture_helper
? surface_texture_helper->GetJavaSurfaceTextureHelper()
: nullptr;
}
JNI_FUNCTION_DECLARATION(jlong,
PeerConnectionFactory_nativeCreateVideoSource,
JNIEnv* jni,
jclass,
jlong native_factory,
jobject j_surface_texture_helper,
jboolean is_screencast) {
OwnedFactoryAndThreads* factory =
reinterpret_cast<OwnedFactoryAndThreads*>(native_factory);
rtc::scoped_refptr<AndroidVideoTrackSource> source(
new rtc::RefCountedObject<AndroidVideoTrackSource>(
factory->signaling_thread(), jni, j_surface_texture_helper,
is_screencast));
rtc::scoped_refptr<VideoTrackSourceProxy> proxy_source =
VideoTrackSourceProxy::Create(factory->signaling_thread(),
factory->worker_thread(), source);
return (jlong)proxy_source.release();
}
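// The proxy returned above marshals VideoTrackSourceInterface calls onto the
// signaling and worker threads, so the handle may be driven from any Java
// thread; nativeCreateVideoTrack below casts it back to a
// VideoTrackSourceInterface*.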
JNI_FUNCTION_DECLARATION(jlong,
PeerConnectionFactory_nativeCreateVideoTrack,
JNIEnv* jni,
jclass,
jlong native_factory,
jstring id,
jlong native_source) {
rtc::scoped_refptr<PeerConnectionFactoryInterface> factory(
factoryFromJava(native_factory));
rtc::scoped_refptr<VideoTrackInterface> track(factory->CreateVideoTrack(
JavaToStdString(jni, id),
reinterpret_cast<VideoTrackSourceInterface*>(native_source)));
return (jlong)track.release();
}
JNI_FUNCTION_DECLARATION(
void,
PeerConnectionFactory_nativeSetVideoHwAccelerationOptions,
JNIEnv* jni,
jclass,
jlong native_factory,
jobject local_egl_context,
jobject remote_egl_context) {
OwnedFactoryAndThreads* owned_factory =
reinterpret_cast<OwnedFactoryAndThreads*>(native_factory);
jclass j_eglbase14_context_class =
FindClass(jni, "org/webrtc/EglBase14$Context");
MediaCodecVideoEncoderFactory* encoder_factory =
static_cast<MediaCodecVideoEncoderFactory*>(
owned_factory->encoder_factory());
if (use_media_codec_encoder_factory && encoder_factory &&
jni->IsInstanceOf(local_egl_context, j_eglbase14_context_class)) {
LOG(LS_INFO) << "Set EGL context for HW encoding.";
encoder_factory->SetEGLContext(jni, local_egl_context);
}
MediaCodecVideoDecoderFactory* decoder_factory =
static_cast<MediaCodecVideoDecoderFactory*>(
owned_factory->decoder_factory());
if (use_media_codec_decoder_factory && decoder_factory) {
LOG(LS_INFO) << "Set EGL context for HW decoding.";
decoder_factory->SetEGLContext(jni, remote_egl_context);
}
}
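// Note that an EGL context is only forwarded when the corresponding
// MediaCodec-based factory is in use (tracked by the statics above); an
// injected factory is presumably expected to manage its own EGL state.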
} // namespace jni
} // namespace webrtc


@ -0,0 +1,42 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_SDK_ANDROID_SRC_JNI_PC_VIDEO_JNI_H_
#define WEBRTC_SDK_ANDROID_SRC_JNI_PC_VIDEO_JNI_H_
#include <jni.h>
#include "webrtc/rtc_base/scoped_ref_ptr.h"
namespace cricket {
class WebRtcVideoEncoderFactory;
class WebRtcVideoDecoderFactory;
} // namespace cricket
namespace webrtc {
namespace jni {
class SurfaceTextureHelper;
cricket::WebRtcVideoEncoderFactory* CreateVideoEncoderFactory(
JNIEnv* jni,
jobject j_encoder_factory);
cricket::WebRtcVideoDecoderFactory* CreateVideoDecoderFactory(
JNIEnv* jni,
jobject j_decoder_factory);
jobject GetJavaSurfaceTextureHelper(
const rtc::scoped_refptr<SurfaceTextureHelper>& surface_texture_helper);
} // namespace jni
} // namespace webrtc
#endif // WEBRTC_SDK_ANDROID_SRC_JNI_PC_VIDEO_JNI_H_


@ -0,0 +1,85 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "webrtc/sdk/android/src/jni/surfacetexturehelper_jni.h"
#include "webrtc/rtc_base/bind.h"
#include "webrtc/rtc_base/checks.h"
#include "webrtc/rtc_base/logging.h"
#include "webrtc/sdk/android/src/jni/classreferenceholder.h"
namespace webrtc {
namespace jni {
rtc::scoped_refptr<SurfaceTextureHelper> SurfaceTextureHelper::create(
JNIEnv* jni,
const char* thread_name,
jobject j_egl_context) {
jobject j_surface_texture_helper = jni->CallStaticObjectMethod(
FindClass(jni, "org/webrtc/SurfaceTextureHelper"),
GetStaticMethodID(jni, FindClass(jni, "org/webrtc/SurfaceTextureHelper"),
"create",
"(Ljava/lang/String;Lorg/webrtc/EglBase$Context;)"
"Lorg/webrtc/SurfaceTextureHelper;"),
jni->NewStringUTF(thread_name), j_egl_context);
CHECK_EXCEPTION(jni)
<< "error during initialization of Java SurfaceTextureHelper";
if (IsNull(jni, j_surface_texture_helper))
return nullptr;
return new rtc::RefCountedObject<SurfaceTextureHelper>(
jni, j_surface_texture_helper);
}
SurfaceTextureHelper::SurfaceTextureHelper(JNIEnv* jni,
jobject j_surface_texture_helper)
: j_surface_texture_helper_(jni, j_surface_texture_helper),
j_return_texture_method_(
GetMethodID(jni,
FindClass(jni, "org/webrtc/SurfaceTextureHelper"),
"returnTextureFrame",
"()V")) {
CHECK_EXCEPTION(jni) << "error during initialization of SurfaceTextureHelper";
}
SurfaceTextureHelper::~SurfaceTextureHelper() {
LOG(LS_INFO) << "SurfaceTextureHelper dtor";
JNIEnv* jni = AttachCurrentThreadIfNeeded();
jni->CallVoidMethod(
*j_surface_texture_helper_,
GetMethodID(jni, FindClass(jni, "org/webrtc/SurfaceTextureHelper"),
"dispose", "()V"));
CHECK_EXCEPTION(jni) << "error during SurfaceTextureHelper.dispose()";
}
jobject SurfaceTextureHelper::GetJavaSurfaceTextureHelper() const {
return *j_surface_texture_helper_;
}
void SurfaceTextureHelper::ReturnTextureFrame() const {
JNIEnv* jni = AttachCurrentThreadIfNeeded();
jni->CallVoidMethod(*j_surface_texture_helper_, j_return_texture_method_);
CHECK_EXCEPTION(jni)
<< "error during SurfaceTextureHelper.returnTextureFrame()";
}
rtc::scoped_refptr<VideoFrameBuffer> SurfaceTextureHelper::CreateTextureFrame(
int width,
int height,
const NativeHandleImpl& native_handle) {
return new rtc::RefCountedObject<AndroidTextureBuffer>(
width, height, native_handle, *j_surface_texture_helper_,
rtc::Bind(&SurfaceTextureHelper::ReturnTextureFrame, this));
}
} // namespace jni
} // namespace webrtc
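One lifetime detail worth spelling out, hedged because it rests on rtc::Bind's handling of ref-counted targets: the rtc::Bind in CreateTextureFrame() takes a reference to the helper, so every outstanding AndroidTextureBuffer keeps the SurfaceTextureHelper, and with it the Java peer, alive until that buffer's release callback has run.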

View File

@ -0,0 +1,70 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_SDK_ANDROID_SRC_JNI_SURFACETEXTUREHELPER_JNI_H_
#define WEBRTC_SDK_ANDROID_SRC_JNI_SURFACETEXTUREHELPER_JNI_H_
#include <jni.h>
#include "webrtc/api/video/video_frame_buffer.h"
#include "webrtc/rtc_base/refcount.h"
#include "webrtc/rtc_base/scoped_ref_ptr.h"
#include "webrtc/sdk/android/src/jni/jni_helpers.h"
#include "webrtc/sdk/android/src/jni/native_handle_impl.h"
namespace webrtc {
namespace jni {
// Helper class to create and synchronize access to an Android SurfaceTexture.
// It is used for creating VideoFrameBuffers from a SurfaceTexture when
// the SurfaceTexture has been updated.
// When the VideoFrameBuffer is released, this class returns the buffer to the
// Java SurfaceTextureHelper so it can be updated safely. The VideoFrameBuffer
// can be released on an arbitrary thread.
// SurfaceTextureHelper is reference counted to make sure that it is not
// destroyed while a VideoFrameBuffer is in use.
// This class is the C++ counterpart of the Java class SurfaceTextureHelper.
// It owns the corresponding Java object, and calls the Java dispose()
// method when destroyed.
// Usage:
// 1. Create an instance of this class.
// 2. Get the Java SurfaceTextureHelper with GetJavaSurfaceTextureHelper().
// 3. Register a listener to the Java SurfaceListener and start producing
// new buffers.
// 4. Call CreateTextureFrame to wrap the Java texture in a VideoFrameBuffer.
class SurfaceTextureHelper : public rtc::RefCountInterface {
public:
// Might return null if creating the Java SurfaceTextureHelper fails.
static rtc::scoped_refptr<SurfaceTextureHelper> create(
JNIEnv* jni, const char* thread_name, jobject j_egl_context);
jobject GetJavaSurfaceTextureHelper() const;
rtc::scoped_refptr<VideoFrameBuffer> CreateTextureFrame(
int width,
int height,
const NativeHandleImpl& native_handle);
// May be called on arbitrary thread.
void ReturnTextureFrame() const;
protected:
~SurfaceTextureHelper();
SurfaceTextureHelper(JNIEnv* jni, jobject j_surface_texture_helper);
private:
const ScopedGlobalRef<jobject> j_surface_texture_helper_;
const jmethodID j_return_texture_method_;
};
} // namespace jni
} // namespace webrtc
#endif // WEBRTC_SDK_ANDROID_SRC_JNI_SURFACETEXTUREHELPER_JNI_H_
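A minimal editorial sketch of the usage steps listed above, not part of this change; |jni|, |j_egl_context|, and |handle| are assumed to be a valid JNIEnv*, an org.webrtc EGL context reference, and a NativeHandleImpl for an updated texture:

rtc::scoped_refptr<SurfaceTextureHelper> helper =
    SurfaceTextureHelper::create(jni, "CaptureThread", j_egl_context);  // step 1
if (helper) {
  jobject j_helper = helper->GetJavaSurfaceTextureHelper();  // step 2
  // Step 3 happens on the Java side: attach a listener and produce frames.
  rtc::scoped_refptr<VideoFrameBuffer> buffer =
      helper->CreateTextureFrame(640, 480, handle);  // step 4
}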

View File

@ -0,0 +1,198 @@
/*
* Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include <jni.h>
#include "webrtc/api/video/video_frame.h"
#include "webrtc/media/base/videosinkinterface.h"
#include "webrtc/sdk/android/src/jni/classreferenceholder.h"
#include "webrtc/sdk/android/src/jni/jni_helpers.h"
#include "webrtc/sdk/android/src/jni/native_handle_impl.h"
namespace webrtc {
namespace jni {
// Wrapper dispatching rtc::VideoSinkInterface to a Java VideoRenderer
// instance.
class JavaVideoRendererWrapper : public rtc::VideoSinkInterface<VideoFrame> {
public:
JavaVideoRendererWrapper(JNIEnv* jni, jobject j_callbacks)
: j_callbacks_(jni, j_callbacks),
j_render_frame_id_(
GetMethodID(jni,
GetObjectClass(jni, j_callbacks),
"renderFrame",
"(Lorg/webrtc/VideoRenderer$I420Frame;)V")),
j_frame_class_(jni,
FindClass(jni, "org/webrtc/VideoRenderer$I420Frame")),
j_i420_frame_ctor_id_(GetMethodID(jni,
*j_frame_class_,
"<init>",
"(III[I[Ljava/nio/ByteBuffer;J)V")),
j_texture_frame_ctor_id_(
GetMethodID(jni, *j_frame_class_, "<init>", "(IIII[FJ)V")),
j_byte_buffer_class_(jni, FindClass(jni, "java/nio/ByteBuffer")) {
CHECK_EXCEPTION(jni);
}
virtual ~JavaVideoRendererWrapper() {}
void OnFrame(const VideoFrame& video_frame) override {
ScopedLocalRefFrame local_ref_frame(jni());
jobject j_frame = nullptr;  // Ensure a defined value on all paths below.
if (video_frame.video_frame_buffer()->type() ==
VideoFrameBuffer::Type::kNative) {
AndroidVideoFrameBuffer* android_buffer =
static_cast<AndroidVideoFrameBuffer*>(
video_frame.video_frame_buffer().get());
switch (android_buffer->android_type()) {
case AndroidVideoFrameBuffer::AndroidType::kTextureBuffer:
j_frame = ToJavaTextureFrame(&video_frame);
break;
case AndroidVideoFrameBuffer::AndroidType::kJavaBuffer:
j_frame = static_cast<AndroidVideoBuffer*>(android_buffer)
->ToJavaI420Frame(jni(), video_frame.rotation());
break;
default:
RTC_NOTREACHED();
}
} else {
j_frame = ToJavaI420Frame(&video_frame);
}
// |j_callbacks_| is responsible for releasing |j_frame| with
// VideoRenderer.renderFrameDone().
jni()->CallVoidMethod(*j_callbacks_, j_render_frame_id_, j_frame);
CHECK_EXCEPTION(jni());
}
private:
// Make a shallow copy of |frame| to be handed over to Java. The Java side
// takes ownership of the copy and must release it with
// VideoRenderer.releaseNativeFrame().
static jlong javaShallowCopy(const VideoFrame* frame) {
return jlongFromPointer(new VideoFrame(*frame));
}
// Return a VideoRenderer.I420Frame referring to the data in |frame|.
jobject ToJavaI420Frame(const VideoFrame* frame) {
jintArray strides = jni()->NewIntArray(3);
jint* strides_array = jni()->GetIntArrayElements(strides, NULL);
rtc::scoped_refptr<I420BufferInterface> i420_buffer =
frame->video_frame_buffer()->ToI420();
strides_array[0] = i420_buffer->StrideY();
strides_array[1] = i420_buffer->StrideU();
strides_array[2] = i420_buffer->StrideV();
jni()->ReleaseIntArrayElements(strides, strides_array, 0);
jobjectArray planes = jni()->NewObjectArray(3, *j_byte_buffer_class_, NULL);
jobject y_buffer = jni()->NewDirectByteBuffer(
const_cast<uint8_t*>(i420_buffer->DataY()),
i420_buffer->StrideY() * i420_buffer->height());
size_t chroma_height = i420_buffer->ChromaHeight();
jobject u_buffer =
jni()->NewDirectByteBuffer(const_cast<uint8_t*>(i420_buffer->DataU()),
i420_buffer->StrideU() * chroma_height);
jobject v_buffer =
jni()->NewDirectByteBuffer(const_cast<uint8_t*>(i420_buffer->DataV()),
i420_buffer->StrideV() * chroma_height);
jni()->SetObjectArrayElement(planes, 0, y_buffer);
jni()->SetObjectArrayElement(planes, 1, u_buffer);
jni()->SetObjectArrayElement(planes, 2, v_buffer);
return jni()->NewObject(*j_frame_class_, j_i420_frame_ctor_id_,
frame->width(), frame->height(),
static_cast<int>(frame->rotation()), strides,
planes, javaShallowCopy(frame));
}
// Return a VideoRenderer.I420Frame referring to the texture object in |frame|.
jobject ToJavaTextureFrame(const VideoFrame* frame) {
NativeHandleImpl handle =
static_cast<AndroidTextureBuffer*>(frame->video_frame_buffer().get())
->native_handle_impl();
jfloatArray sampling_matrix = handle.sampling_matrix.ToJava(jni());
return jni()->NewObject(
*j_frame_class_, j_texture_frame_ctor_id_, frame->width(),
frame->height(), static_cast<int>(frame->rotation()),
handle.oes_texture_id, sampling_matrix, javaShallowCopy(frame));
}
JNIEnv* jni() { return AttachCurrentThreadIfNeeded(); }
ScopedGlobalRef<jobject> j_callbacks_;
jmethodID j_render_frame_id_;
ScopedGlobalRef<jclass> j_frame_class_;
jmethodID j_i420_frame_ctor_id_;
jmethodID j_texture_frame_ctor_id_;
ScopedGlobalRef<jclass> j_byte_buffer_class_;
};
JNI_FUNCTION_DECLARATION(void,
VideoRenderer_freeWrappedVideoRenderer,
JNIEnv*,
jclass,
jlong j_p) {
delete reinterpret_cast<JavaVideoRendererWrapper*>(j_p);
}
JNI_FUNCTION_DECLARATION(void,
VideoRenderer_releaseNativeFrame,
JNIEnv* jni,
jclass,
jlong j_frame_ptr) {
delete reinterpret_cast<const VideoFrame*>(j_frame_ptr);
}
JNI_FUNCTION_DECLARATION(jlong,
VideoRenderer_nativeWrapVideoRenderer,
JNIEnv* jni,
jclass,
jobject j_callbacks) {
std::unique_ptr<JavaVideoRendererWrapper> renderer(
new JavaVideoRendererWrapper(jni, j_callbacks));
return (jlong)renderer.release();
}
JNI_FUNCTION_DECLARATION(void,
VideoRenderer_nativeCopyPlane,
JNIEnv* jni,
jclass,
jobject j_src_buffer,
jint width,
jint height,
jint src_stride,
jobject j_dst_buffer,
jint dst_stride) {
size_t src_size = jni->GetDirectBufferCapacity(j_src_buffer);
size_t dst_size = jni->GetDirectBufferCapacity(j_dst_buffer);
RTC_CHECK(src_stride >= width) << "Wrong source stride " << src_stride;
RTC_CHECK(dst_stride >= width) << "Wrong destination stride " << dst_stride;
RTC_CHECK(src_size >= src_stride * height)
<< "Insufficient source buffer capacity " << src_size;
RTC_CHECK(dst_size >= dst_stride * height)
<< "Insufficient destination buffer capacity " << dst_size;
uint8_t* src =
reinterpret_cast<uint8_t*>(jni->GetDirectBufferAddress(j_src_buffer));
uint8_t* dst =
reinterpret_cast<uint8_t*>(jni->GetDirectBufferAddress(j_dst_buffer));
if (src_stride == dst_stride) {
memcpy(dst, src, src_stride * height);
} else {
for (int i = 0; i < height; i++) {
memcpy(dst, src, width);
src += src_stride;
dst += dst_stride;
}
}
}
} // namespace jni
} // namespace webrtc
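As an aside, the row-by-row loop in nativeCopyPlane() above matches a stock libyuv helper; a hedged one-line equivalent, assuming libyuv's "libyuv/planar_functions.h" is available (it is linked elsewhere in WebRTC):

// Copies |height| rows of |width| bytes, honoring both strides.
libyuv::CopyPlane(src, src_stride, dst, dst_stride, width, height);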

View File

@ -0,0 +1,46 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "webrtc/sdk/android/src/jni/videodecoderfactorywrapper.h"
#include "webrtc/api/video_codecs/video_decoder.h"
#include "webrtc/common_types.h"
#include "webrtc/rtc_base/logging.h"
#include "webrtc/sdk/android/src/jni/videodecoderwrapper.h"
namespace webrtc {
namespace jni {
VideoDecoderFactoryWrapper::VideoDecoderFactoryWrapper(JNIEnv* jni,
jobject decoder_factory)
: decoder_factory_(jni, decoder_factory) {
jclass decoder_factory_class = jni->GetObjectClass(*decoder_factory_);
create_decoder_method_ =
jni->GetMethodID(decoder_factory_class, "createDecoder",
"(Ljava/lang/String;)Lorg/webrtc/VideoDecoder;");
}
VideoDecoder* VideoDecoderFactoryWrapper::CreateVideoDecoder(
VideoCodecType type) {
JNIEnv* jni = AttachCurrentThreadIfNeeded();
ScopedLocalRefFrame local_ref_frame(jni);
const char* type_payload = CodecTypeToPayloadString(type);
jstring name = jni->NewStringUTF(type_payload);
jobject decoder =
jni->CallObjectMethod(*decoder_factory_, create_decoder_method_, name);
return decoder != nullptr ? new VideoDecoderWrapper(jni, decoder) : nullptr;
}
void VideoDecoderFactoryWrapper::DestroyVideoDecoder(VideoDecoder* decoder) {
delete decoder;
}
} // namespace jni
} // namespace webrtc
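An editorial usage sketch, not part of this change; |jni| and |j_factory| (a Java org.webrtc.VideoDecoderFactory reference) are assumed:

VideoDecoderFactoryWrapper factory(jni, j_factory);
VideoDecoder* decoder = factory.CreateVideoDecoder(kVideoCodecVP8);
if (decoder) {
  // ... InitDecode()/Decode() as with any webrtc::VideoDecoder ...
  factory.DestroyVideoDecoder(decoder);  // the wrapper allocated it with new
}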

View File

@ -0,0 +1,41 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_SDK_ANDROID_SRC_JNI_VIDEODECODERFACTORYWRAPPER_H_
#define WEBRTC_SDK_ANDROID_SRC_JNI_VIDEODECODERFACTORYWRAPPER_H_
#include <jni.h>
#include "webrtc/media/engine/webrtcvideodecoderfactory.h"
#include "webrtc/sdk/android/src/jni/jni_helpers.h"
namespace webrtc {
namespace jni {
// Wrapper for Java VideoDecoderFactory class. Delegates method calls through
// JNI and wraps the decoder inside VideoDecoderWrapper.
class VideoDecoderFactoryWrapper : public cricket::WebRtcVideoDecoderFactory {
public:
VideoDecoderFactoryWrapper(JNIEnv* jni, jobject decoder_factory);
// The caller takes ownership of the returned object, which should be released
// by calling DestroyVideoDecoder().
VideoDecoder* CreateVideoDecoder(VideoCodecType type) override;
void DestroyVideoDecoder(VideoDecoder* decoder) override;
private:
const ScopedGlobalRef<jobject> decoder_factory_;
jmethodID create_decoder_method_;
};
} // namespace jni
} // namespace webrtc
#endif // WEBRTC_SDK_ANDROID_SRC_JNI_VIDEODECODERFACTORYWRAPPER_H_

View File

@ -0,0 +1,315 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "webrtc/sdk/android/src/jni/videodecoderwrapper.h"
#include "webrtc/api/video/video_frame.h"
#include "webrtc/modules/video_coding/include/video_codec_interface.h"
#include "webrtc/modules/video_coding/utility/vp8_header_parser.h"
#include "webrtc/modules/video_coding/utility/vp9_uncompressed_header_parser.h"
#include "webrtc/rtc_base/logging.h"
#include "webrtc/rtc_base/timeutils.h"
#include "webrtc/sdk/android/src/jni/classreferenceholder.h"
namespace webrtc {
namespace jni {
VideoDecoderWrapper::VideoDecoderWrapper(JNIEnv* jni, jobject decoder)
: android_video_buffer_factory_(jni),
decoder_(jni, decoder),
encoded_image_class_(jni, FindClass(jni, "org/webrtc/EncodedImage")),
frame_type_class_(jni,
FindClass(jni, "org/webrtc/EncodedImage$FrameType")),
settings_class_(jni, FindClass(jni, "org/webrtc/VideoDecoder$Settings")),
video_frame_class_(jni, FindClass(jni, "org/webrtc/VideoFrame")),
video_codec_status_class_(jni,
FindClass(jni, "org/webrtc/VideoCodecStatus")),
integer_class_(jni, jni->FindClass("java/lang/Integer")) {
encoded_image_constructor_ =
jni->GetMethodID(*encoded_image_class_, "<init>",
"(Ljava/nio/ByteBuffer;IIJLorg/webrtc/"
"EncodedImage$FrameType;IZLjava/lang/Integer;)V");
settings_constructor_ =
jni->GetMethodID(*settings_class_, "<init>", "(III)V");
empty_frame_field_ = jni->GetStaticFieldID(
*frame_type_class_, "EmptyFrame", "Lorg/webrtc/EncodedImage$FrameType;");
video_frame_key_field_ =
jni->GetStaticFieldID(*frame_type_class_, "VideoFrameKey",
"Lorg/webrtc/EncodedImage$FrameType;");
video_frame_delta_field_ =
jni->GetStaticFieldID(*frame_type_class_, "VideoFrameDelta",
"Lorg/webrtc/EncodedImage$FrameType;");
video_frame_get_timestamp_ns_method_ =
jni->GetMethodID(*video_frame_class_, "getTimestampNs", "()J");
jclass decoder_class = jni->GetObjectClass(decoder);
init_decode_method_ =
jni->GetMethodID(decoder_class, "initDecode",
"(Lorg/webrtc/VideoDecoder$Settings;Lorg/webrtc/"
"VideoDecoder$Callback;)Lorg/webrtc/VideoCodecStatus;");
release_method_ = jni->GetMethodID(decoder_class, "release",
"()Lorg/webrtc/VideoCodecStatus;");
decode_method_ = jni->GetMethodID(decoder_class, "decode",
"(Lorg/webrtc/EncodedImage;Lorg/webrtc/"
"VideoDecoder$DecodeInfo;)Lorg/webrtc/"
"VideoCodecStatus;");
get_prefers_late_decoding_method_ =
jni->GetMethodID(decoder_class, "getPrefersLateDecoding", "()Z");
get_implementation_name_method_ = jni->GetMethodID(
decoder_class, "getImplementationName", "()Ljava/lang/String;");
get_number_method_ =
jni->GetMethodID(*video_codec_status_class_, "getNumber", "()I");
integer_constructor_ = jni->GetMethodID(*integer_class_, "<init>", "(I)V");
int_value_method_ = jni->GetMethodID(*integer_class_, "intValue", "()I");
initialized_ = false;
// QP parsing starts enabled; it is disabled once the decoder provides QP
// values itself.
qp_parsing_enabled_ = true;
}
int32_t VideoDecoderWrapper::InitDecode(const VideoCodec* codec_settings,
int32_t number_of_cores) {
JNIEnv* jni = AttachCurrentThreadIfNeeded();
ScopedLocalRefFrame local_ref_frame(jni);
codec_settings_ = *codec_settings;
number_of_cores_ = number_of_cores;
return InitDecodeInternal(jni);
}
int32_t VideoDecoderWrapper::InitDecodeInternal(JNIEnv* jni) {
jobject settings =
jni->NewObject(*settings_class_, settings_constructor_, number_of_cores_,
codec_settings_.width, codec_settings_.height);
jclass callback_class =
FindClass(jni, "org/webrtc/VideoDecoderWrapperCallback");
jmethodID callback_constructor =
jni->GetMethodID(callback_class, "<init>", "(J)V");
jobject callback = jni->NewObject(callback_class, callback_constructor,
jlongFromPointer(this));
jobject ret =
jni->CallObjectMethod(*decoder_, init_decode_method_, settings, callback);
if (jni->CallIntMethod(ret, get_number_method_) == WEBRTC_VIDEO_CODEC_OK) {
initialized_ = true;
}
// The decoder was reinitialized so re-enable the QP parsing in case it stops
// providing QP values.
qp_parsing_enabled_ = true;
return HandleReturnCode(jni, ret);
}
int32_t VideoDecoderWrapper::Decode(
const EncodedImage& input_image,
bool missing_frames,
const RTPFragmentationHeader* fragmentation,
const CodecSpecificInfo* codec_specific_info,
int64_t render_time_ms) {
if (!initialized_) {
// Most likely initializing the codec failed.
return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE;
}
JNIEnv* jni = AttachCurrentThreadIfNeeded();
ScopedLocalRefFrame local_ref_frame(jni);
FrameExtraInfo frame_extra_info;
frame_extra_info.capture_time_ns =
input_image.capture_time_ms_ * rtc::kNumNanosecsPerMillisec;
frame_extra_info.timestamp_rtp = input_image._timeStamp;
frame_extra_info.qp =
qp_parsing_enabled_ ? ParseQP(input_image) : rtc::Optional<uint8_t>();
frame_extra_infos_.push_back(frame_extra_info);
jobject jinput_image =
ConvertEncodedImageToJavaEncodedImage(jni, input_image);
jobject ret =
jni->CallObjectMethod(*decoder_, decode_method_, jinput_image, nullptr);
return HandleReturnCode(jni, ret);
}
int32_t VideoDecoderWrapper::RegisterDecodeCompleteCallback(
DecodedImageCallback* callback) {
callback_ = callback;
return WEBRTC_VIDEO_CODEC_OK;
}
int32_t VideoDecoderWrapper::Release() {
JNIEnv* jni = AttachCurrentThreadIfNeeded();
ScopedLocalRefFrame local_ref_frame(jni);
jobject ret = jni->CallObjectMethod(*decoder_, release_method_);
frame_extra_infos_.clear();
initialized_ = false;
return HandleReturnCode(jni, ret);
}
bool VideoDecoderWrapper::PrefersLateDecoding() const {
JNIEnv* jni = AttachCurrentThreadIfNeeded();
return jni->CallBooleanMethod(*decoder_, get_prefers_late_decoding_method_);
}
const char* VideoDecoderWrapper::ImplementationName() const {
JNIEnv* jni = AttachCurrentThreadIfNeeded();
ScopedLocalRefFrame local_ref_frame(jni);
jstring jname = reinterpret_cast<jstring>(
jni->CallObjectMethod(*decoder_, get_implementation_name_method_));
// Cache the converted string; returning c_str() of a temporary std::string
// would hand the caller a dangling pointer.
implementation_name_ = JavaToStdString(jni, jname);
return implementation_name_.c_str();
}
void VideoDecoderWrapper::OnDecodedFrame(JNIEnv* jni,
jobject jframe,
jobject jdecode_time_ms,
jobject jqp) {
const jlong capture_time_ns =
jni->CallLongMethod(jframe, video_frame_get_timestamp_ns_method_);
FrameExtraInfo frame_extra_info;
do {
if (frame_extra_infos_.empty()) {
LOG(LS_WARNING) << "Java decoder produced an unexpected frame.";
return;
}
frame_extra_info = frame_extra_infos_.front();
frame_extra_infos_.pop_front();
// The decoder might drop frames, so iterate through the queue until we
// find a matching timestamp.
} while (frame_extra_info.capture_time_ns != capture_time_ns);
VideoFrame frame = android_video_buffer_factory_.CreateFrame(
jni, jframe, frame_extra_info.timestamp_rtp);
rtc::Optional<int32_t> decoding_time_ms;
if (jdecode_time_ms != nullptr) {
decoding_time_ms = rtc::Optional<int32_t>(
jni->CallIntMethod(jdecode_time_ms, int_value_method_));
}
rtc::Optional<uint8_t> qp;
if (jqp != nullptr) {
qp = rtc::Optional<uint8_t>(jni->CallIntMethod(jqp, int_value_method_));
// The decoder provides QP values itself, no need to parse the bitstream.
qp_parsing_enabled_ = false;
} else {
qp = frame_extra_info.qp;
// The decoder doesn't provide QP values, ensure bitstream parsing is
// enabled.
qp_parsing_enabled_ = true;
}
callback_->Decoded(frame, decoding_time_ms, qp);
}
jobject VideoDecoderWrapper::ConvertEncodedImageToJavaEncodedImage(
JNIEnv* jni,
const EncodedImage& image) {
jobject buffer = jni->NewDirectByteBuffer(image._buffer, image._length);
jfieldID frame_type_field;
switch (image._frameType) {
case kEmptyFrame:
frame_type_field = empty_frame_field_;
break;
case kVideoFrameKey:
frame_type_field = video_frame_key_field_;
break;
case kVideoFrameDelta:
frame_type_field = video_frame_delta_field_;
break;
default:
RTC_NOTREACHED();
return nullptr;
}
jobject frame_type =
jni->GetStaticObjectField(*frame_type_class_, frame_type_field);
jobject qp = nullptr;
if (image.qp_ != -1) {
qp = jni->NewObject(*integer_class_, integer_constructor_, image.qp_);
}
return jni->NewObject(
*encoded_image_class_, encoded_image_constructor_, buffer,
static_cast<jint>(image._encodedWidth),
static_cast<jint>(image._encodedHeight),
static_cast<jlong>(image.capture_time_ms_ * rtc::kNumNanosecsPerMillisec),
frame_type, static_cast<jint>(image.rotation_), image._completeFrame, qp);
}
int32_t VideoDecoderWrapper::HandleReturnCode(JNIEnv* jni, jobject code) {
int32_t value = jni->CallIntMethod(code, get_number_method_);
if (value < 0) { // Any errors are represented by negative values.
// Reset the codec.
if (Release() == WEBRTC_VIDEO_CODEC_OK) {
InitDecodeInternal(jni);
}
LOG(LS_WARNING) << "Falling back to software decoder.";
return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE;
} else {
return value;
}
}
rtc::Optional<uint8_t> VideoDecoderWrapper::ParseQP(
const EncodedImage& input_image) {
if (input_image.qp_ != -1) {
return rtc::Optional<uint8_t>(input_image.qp_);
}
rtc::Optional<uint8_t> qp;
switch (codec_settings_.codecType) {
case kVideoCodecVP8: {
int qp_int;
if (vp8::GetQp(input_image._buffer, input_image._length, &qp_int)) {
qp = rtc::Optional<uint8_t>(qp_int);
}
break;
}
case kVideoCodecVP9: {
int qp_int;
if (vp9::GetQp(input_image._buffer, input_image._length, &qp_int)) {
qp = rtc::Optional<uint8_t>(qp_int);
}
break;
}
case kVideoCodecH264: {
h264_bitstream_parser_.ParseBitstream(input_image._buffer,
input_image._length);
int qp_int;
if (h264_bitstream_parser_.GetLastSliceQp(&qp_int)) {
qp = rtc::Optional<uint8_t>(qp_int);
}
break;
}
default:
break; // Default is to not provide QP.
}
return qp;
}
JNI_FUNCTION_DECLARATION(void,
VideoDecoderWrapperCallback_nativeOnDecodedFrame,
JNIEnv* jni,
jclass,
jlong jnative_decoder,
jobject jframe,
jobject jdecode_time_ms,
jobject jqp) {
VideoDecoderWrapper* native_decoder =
reinterpret_cast<VideoDecoderWrapper*>(jnative_decoder);
native_decoder->OnDecodedFrame(jni, jframe, jdecode_time_ms, jqp);
}
} // namespace jni
} // namespace webrtc
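The timestamp-matching loop in OnDecodedFrame() deserves a small standalone illustration; this is a reduced editorial sketch, not the production logic:

// Bookkeeping for frames the decoder dropped is discarded until the front
// entry matches the decoded frame's capture time.
std::deque<int64_t> pending = {100, 200, 300};
const int64_t decoded_capture_time_ns = 300;  // the decoder dropped 100, 200
while (!pending.empty() && pending.front() != decoded_capture_time_ns)
  pending.pop_front();
// pending.front() == 300: its extra info (RTP timestamp, QP) is now usable.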

View File

@ -0,0 +1,121 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_SDK_ANDROID_SRC_JNI_VIDEODECODERWRAPPER_H_
#define WEBRTC_SDK_ANDROID_SRC_JNI_VIDEODECODERWRAPPER_H_
#include <jni.h>
#include <deque>
#include <string>
#include "webrtc/api/video_codecs/video_decoder.h"
#include "webrtc/common_video/h264/h264_bitstream_parser.h"
#include "webrtc/sdk/android/src/jni/jni_helpers.h"
#include "webrtc/sdk/android/src/jni/native_handle_impl.h"
namespace webrtc {
namespace jni {
// Wraps a Java decoder and delegates all calls to it. Passes
// VideoDecoderWrapperCallback to the decoder on InitDecode. Wraps received
// frames in AndroidVideoBuffer.
class VideoDecoderWrapper : public VideoDecoder {
public:
VideoDecoderWrapper(JNIEnv* jni, jobject decoder);
int32_t InitDecode(const VideoCodec* codec_settings,
int32_t number_of_cores) override;
int32_t Decode(const EncodedImage& input_image,
bool missing_frames,
const RTPFragmentationHeader* fragmentation,
const CodecSpecificInfo* codec_specific_info,
int64_t render_time_ms) override;
int32_t RegisterDecodeCompleteCallback(
DecodedImageCallback* callback) override;
int32_t Release() override;
// Returns true if the decoder prefers to decode frames late.
// That is, it cannot decode an infinite number of frames before the decoded
// frame is consumed.
bool PrefersLateDecoding() const override;
const char* ImplementationName() const override;
// Wraps the frame in an AndroidVideoBuffer and passes it to the callback.
void OnDecodedFrame(JNIEnv* jni,
jobject jframe,
jobject jdecode_time_ms,
jobject jqp);
private:
struct FrameExtraInfo {
uint64_t capture_time_ns; // Used as an identifier of the frame.
uint32_t timestamp_rtp;
rtc::Optional<uint8_t> qp;
};
int32_t InitDecodeInternal(JNIEnv* jni);
// Takes Java VideoCodecStatus, handles it and returns WEBRTC_VIDEO_CODEC_*
// status code.
int32_t HandleReturnCode(JNIEnv* jni, jobject code);
rtc::Optional<uint8_t> ParseQP(const EncodedImage& input_image);
VideoCodec codec_settings_;
int32_t number_of_cores_;
bool initialized_;
AndroidVideoBufferFactory android_video_buffer_factory_;
std::deque<FrameExtraInfo> frame_extra_infos_;
bool qp_parsing_enabled_;
H264BitstreamParser h264_bitstream_parser_;
DecodedImageCallback* callback_;
// Cached by ImplementationName() so the returned C string stays valid.
mutable std::string implementation_name_;
const ScopedGlobalRef<jobject> decoder_;
const ScopedGlobalRef<jclass> encoded_image_class_;
const ScopedGlobalRef<jclass> frame_type_class_;
const ScopedGlobalRef<jclass> settings_class_;
const ScopedGlobalRef<jclass> video_frame_class_;
const ScopedGlobalRef<jclass> video_codec_status_class_;
const ScopedGlobalRef<jclass> integer_class_;
jmethodID encoded_image_constructor_;
jmethodID settings_constructor_;
jfieldID empty_frame_field_;
jfieldID video_frame_key_field_;
jfieldID video_frame_delta_field_;
jmethodID video_frame_get_timestamp_ns_method_;
jmethodID init_decode_method_;
jmethodID release_method_;
jmethodID decode_method_;
jmethodID get_prefers_late_decoding_method_;
jmethodID get_implementation_name_method_;
jmethodID get_number_method_;
jmethodID integer_constructor_;
jmethodID int_value_method_;
jobject ConvertEncodedImageToJavaEncodedImage(JNIEnv* jni,
const EncodedImage& image);
};
} // namespace jni
} // namespace webrtc
#endif // WEBRTC_SDK_ANDROID_SRC_JNI_VIDEODECODERWRAPPER_H_

View File

@ -0,0 +1,103 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "webrtc/sdk/android/src/jni/videoencoderfactorywrapper.h"
#include "webrtc/api/video_codecs/video_encoder.h"
#include "webrtc/common_types.h"
#include "webrtc/rtc_base/logging.h"
#include "webrtc/sdk/android/src/jni/classreferenceholder.h"
#include "webrtc/sdk/android/src/jni/videoencoderwrapper.h"
namespace webrtc {
namespace jni {
VideoEncoderFactoryWrapper::VideoEncoderFactoryWrapper(JNIEnv* jni,
jobject encoder_factory)
: video_codec_info_class_(jni, FindClass(jni, "org/webrtc/VideoCodecInfo")),
hash_map_class_(jni, jni->FindClass("java/util/HashMap")),
encoder_factory_(jni, encoder_factory) {
jclass encoder_factory_class = jni->GetObjectClass(*encoder_factory_);
create_encoder_method_ = jni->GetMethodID(
encoder_factory_class, "createEncoder",
"(Lorg/webrtc/VideoCodecInfo;)Lorg/webrtc/VideoEncoder;");
get_supported_codecs_method_ =
jni->GetMethodID(encoder_factory_class, "getSupportedCodecs",
"()[Lorg/webrtc/VideoCodecInfo;");
video_codec_info_constructor_ =
jni->GetMethodID(*video_codec_info_class_, "<init>",
"(ILjava/lang/String;Ljava/util/Map;)V");
payload_field_ = jni->GetFieldID(*video_codec_info_class_, "payload", "I");
name_field_ =
jni->GetFieldID(*video_codec_info_class_, "name", "Ljava/lang/String;");
params_field_ =
jni->GetFieldID(*video_codec_info_class_, "params", "Ljava/util/Map;");
hash_map_constructor_ = jni->GetMethodID(*hash_map_class_, "<init>", "()V");
put_method_ = jni->GetMethodID(
*hash_map_class_, "put",
"(Ljava/lang/Object;Ljava/lang/Object;)Ljava/lang/Object;");
supported_codecs_ = GetSupportedCodecs(jni);
}
VideoEncoder* VideoEncoderFactoryWrapper::CreateVideoEncoder(
const cricket::VideoCodec& codec) {
JNIEnv* jni = AttachCurrentThreadIfNeeded();
ScopedLocalRefFrame local_ref_frame(jni);
jobject j_codec_info = ToJavaCodecInfo(jni, codec);
jobject encoder = jni->CallObjectMethod(*encoder_factory_,
create_encoder_method_, j_codec_info);
return encoder != nullptr ? new VideoEncoderWrapper(jni, encoder) : nullptr;
}
jobject VideoEncoderFactoryWrapper::ToJavaCodecInfo(
JNIEnv* jni,
const cricket::VideoCodec& codec) {
jobject j_params = jni->NewObject(*hash_map_class_, hash_map_constructor_);
for (auto const& param : codec.params) {
jni->CallObjectMethod(j_params, put_method_,
JavaStringFromStdString(jni, param.first),
JavaStringFromStdString(jni, param.second));
}
return jni->NewObject(*video_codec_info_class_, video_codec_info_constructor_,
codec.id, JavaStringFromStdString(jni, codec.name),
j_params);
}
std::vector<cricket::VideoCodec> VideoEncoderFactoryWrapper::GetSupportedCodecs(
JNIEnv* jni) const {
const jobjectArray j_supported_codecs = static_cast<jobjectArray>(
jni->CallObjectMethod(*encoder_factory_, get_supported_codecs_method_));
const jsize supported_codecs_count = jni->GetArrayLength(j_supported_codecs);
std::vector<cricket::VideoCodec> supported_codecs;
supported_codecs.resize(supported_codecs_count);
for (jsize i = 0; i < supported_codecs_count; i++) {
jobject j_supported_codec =
jni->GetObjectArrayElement(j_supported_codecs, i);
int payload = jni->GetIntField(j_supported_codec, payload_field_);
jobject j_params = jni->GetObjectField(j_supported_codec, params_field_);
jstring j_name = static_cast<jstring>(
jni->GetObjectField(j_supported_codec, name_field_));
supported_codecs[i] =
cricket::VideoCodec(payload, JavaToStdString(jni, j_name));
supported_codecs[i].params = JavaToStdMapStrings(jni, j_params);
}
return supported_codecs;
}
void VideoEncoderFactoryWrapper::DestroyVideoEncoder(VideoEncoder* encoder) {
delete encoder;
}
} // namespace jni
} // namespace webrtc
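An editorial sketch of driving the wrapper, not part of this change; |jni| and |j_factory| (a Java org.webrtc.VideoEncoderFactory reference) are assumed:

VideoEncoderFactoryWrapper factory(jni, j_factory);
for (const cricket::VideoCodec& codec : factory.supported_codecs())
  LOG(LS_INFO) << "Java encoder factory offers: " << codec.name;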

View File

@ -0,0 +1,65 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_SDK_ANDROID_SRC_JNI_VIDEOENCODERFACTORYWRAPPER_H_
#define WEBRTC_SDK_ANDROID_SRC_JNI_VIDEOENCODERFACTORYWRAPPER_H_
#include <jni.h>
#include <vector>
#include "webrtc/media/engine/webrtcvideoencoderfactory.h"
#include "webrtc/sdk/android/src/jni/jni_helpers.h"
namespace webrtc {
namespace jni {
// Wrapper for Java VideoEncoderFactory class. Delegates method calls through
// JNI and wraps the encoder inside VideoEncoderWrapper.
class VideoEncoderFactoryWrapper : public cricket::WebRtcVideoEncoderFactory {
public:
VideoEncoderFactoryWrapper(JNIEnv* jni, jobject encoder_factory);
// The caller takes ownership of the returned object, which should be released
// by calling DestroyVideoEncoder().
VideoEncoder* CreateVideoEncoder(const cricket::VideoCodec& codec) override;
// Returns a list of supported codecs in order of preference.
const std::vector<cricket::VideoCodec>& supported_codecs() const override {
return supported_codecs_;
}
void DestroyVideoEncoder(VideoEncoder* encoder) override;
private:
std::vector<cricket::VideoCodec> GetSupportedCodecs(JNIEnv* jni) const;
jobject ToJavaCodecInfo(JNIEnv* jni, const cricket::VideoCodec& codec);
const ScopedGlobalRef<jclass> video_codec_info_class_;
const ScopedGlobalRef<jclass> hash_map_class_;
const ScopedGlobalRef<jobject> encoder_factory_;
jmethodID create_encoder_method_;
jmethodID get_supported_codecs_method_;
jmethodID video_codec_info_constructor_;
jfieldID payload_field_;
jfieldID name_field_;
jfieldID params_field_;
jmethodID hash_map_constructor_;
jmethodID put_method_;
std::vector<cricket::VideoCodec> supported_codecs_;
};
} // namespace jni
} // namespace webrtc
#endif // WEBRTC_SDK_ANDROID_SRC_JNI_VIDEOENCODERFACTORYWRAPPER_H_

View File

@ -0,0 +1,490 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "webrtc/sdk/android/src/jni/videoencoderwrapper.h"
#include <utility>
#include "webrtc/common_video/h264/h264_common.h"
#include "webrtc/modules/include/module_common_types.h"
#include "webrtc/modules/video_coding/include/video_codec_interface.h"
#include "webrtc/modules/video_coding/include/video_error_codes.h"
#include "webrtc/modules/video_coding/utility/vp8_header_parser.h"
#include "webrtc/modules/video_coding/utility/vp9_uncompressed_header_parser.h"
#include "webrtc/rtc_base/logging.h"
#include "webrtc/rtc_base/random.h"
#include "webrtc/rtc_base/timeutils.h"
#include "webrtc/sdk/android/src/jni/classreferenceholder.h"
namespace webrtc {
namespace jni {
static const int kMaxJavaEncoderResets = 3;
VideoEncoderWrapper::VideoEncoderWrapper(JNIEnv* jni, jobject j_encoder)
: encoder_(jni, j_encoder),
settings_class_(jni, FindClass(jni, "org/webrtc/VideoEncoder$Settings")),
encode_info_class_(jni,
FindClass(jni, "org/webrtc/VideoEncoder$EncodeInfo")),
frame_type_class_(jni,
FindClass(jni, "org/webrtc/EncodedImage$FrameType")),
bitrate_allocation_class_(
jni,
FindClass(jni, "org/webrtc/VideoEncoder$BitrateAllocation")),
int_array_class_(jni, jni->FindClass("[I")),
video_frame_factory_(jni) {
jclass encoder_class = FindClass(jni, "org/webrtc/VideoEncoder");
init_encode_method_ =
jni->GetMethodID(encoder_class, "initEncode",
"(Lorg/webrtc/VideoEncoder$Settings;Lorg/webrtc/"
"VideoEncoder$Callback;)Lorg/webrtc/VideoCodecStatus;");
release_method_ = jni->GetMethodID(encoder_class, "release",
"()Lorg/webrtc/VideoCodecStatus;");
encode_method_ = jni->GetMethodID(
encoder_class, "encode",
"(Lorg/webrtc/VideoFrame;Lorg/webrtc/"
"VideoEncoder$EncodeInfo;)Lorg/webrtc/VideoCodecStatus;");
set_channel_parameters_method_ =
jni->GetMethodID(encoder_class, "setChannelParameters",
"(SJ)Lorg/webrtc/VideoCodecStatus;");
set_rate_allocation_method_ =
jni->GetMethodID(encoder_class, "setRateAllocation",
"(Lorg/webrtc/VideoEncoder$BitrateAllocation;I)Lorg/"
"webrtc/VideoCodecStatus;");
get_scaling_settings_method_ =
jni->GetMethodID(encoder_class, "getScalingSettings",
"()Lorg/webrtc/VideoEncoder$ScalingSettings;");
get_implementation_name_method_ = jni->GetMethodID(
encoder_class, "getImplementationName", "()Ljava/lang/String;");
settings_constructor_ =
jni->GetMethodID(*settings_class_, "<init>", "(IIIIIZ)V");
encode_info_constructor_ = jni->GetMethodID(
*encode_info_class_, "<init>", "([Lorg/webrtc/EncodedImage$FrameType;)V");
frame_type_from_native_method_ =
jni->GetStaticMethodID(*frame_type_class_, "fromNative",
"(I)Lorg/webrtc/EncodedImage$FrameType;");
bitrate_allocation_constructor_ =
jni->GetMethodID(*bitrate_allocation_class_, "<init>", "([[I)V");
jclass video_codec_status_class =
FindClass(jni, "org/webrtc/VideoCodecStatus");
get_number_method_ =
jni->GetMethodID(video_codec_status_class, "getNumber", "()I");
jclass integer_class = jni->FindClass("java/lang/Integer");
int_value_method_ = jni->GetMethodID(integer_class, "intValue", "()I");
jclass scaling_settings_class =
FindClass(jni, "org/webrtc/VideoEncoder$ScalingSettings");
scaling_settings_on_field_ =
jni->GetFieldID(scaling_settings_class, "on", "Z");
scaling_settings_low_field_ =
jni->GetFieldID(scaling_settings_class, "low", "Ljava/lang/Integer;");
scaling_settings_high_field_ =
jni->GetFieldID(scaling_settings_class, "high", "Ljava/lang/Integer;");
implementation_name_ = GetImplementationName(jni);
encoder_queue_ = rtc::TaskQueue::Current();
initialized_ = false;
num_resets_ = 0;
Random random(rtc::TimeMicros());
picture_id_ = random.Rand<uint16_t>() & 0x7FFF;
tl0_pic_idx_ = random.Rand<uint8_t>();
}
int32_t VideoEncoderWrapper::InitEncode(const VideoCodec* codec_settings,
int32_t number_of_cores,
size_t max_payload_size) {
JNIEnv* jni = AttachCurrentThreadIfNeeded();
ScopedLocalRefFrame local_ref_frame(jni);
number_of_cores_ = number_of_cores;
codec_settings_ = *codec_settings;
num_resets_ = 0;
return InitEncodeInternal(jni);
}
int32_t VideoEncoderWrapper::InitEncodeInternal(JNIEnv* jni) {
bool automatic_resize_on;
switch (codec_settings_.codecType) {
case kVideoCodecVP8:
automatic_resize_on = codec_settings_.VP8()->automaticResizeOn;
break;
case kVideoCodecVP9:
automatic_resize_on = codec_settings_.VP9()->automaticResizeOn;
break;
default:
automatic_resize_on = true;
}
jobject settings =
jni->NewObject(*settings_class_, settings_constructor_, number_of_cores_,
codec_settings_.width, codec_settings_.height,
codec_settings_.startBitrate, codec_settings_.maxFramerate,
automatic_resize_on);
jclass callback_class =
FindClass(jni, "org/webrtc/VideoEncoderWrapperCallback");
jmethodID callback_constructor =
jni->GetMethodID(callback_class, "<init>", "(J)V");
jobject callback = jni->NewObject(callback_class, callback_constructor,
jlongFromPointer(this));
jobject ret =
jni->CallObjectMethod(*encoder_, init_encode_method_, settings, callback);
if (jni->CallIntMethod(ret, get_number_method_) == WEBRTC_VIDEO_CODEC_OK) {
initialized_ = true;
}
return HandleReturnCode(jni, ret);
}
int32_t VideoEncoderWrapper::RegisterEncodeCompleteCallback(
EncodedImageCallback* callback) {
callback_ = callback;
return WEBRTC_VIDEO_CODEC_OK;
}
int32_t VideoEncoderWrapper::Release() {
JNIEnv* jni = AttachCurrentThreadIfNeeded();
ScopedLocalRefFrame local_ref_frame(jni);
jobject ret = jni->CallObjectMethod(*encoder_, release_method_);
frame_extra_infos_.clear();
initialized_ = false;
return HandleReturnCode(jni, ret);
}
int32_t VideoEncoderWrapper::Encode(
const VideoFrame& frame,
const CodecSpecificInfo* /* codec_specific_info */,
const std::vector<FrameType>* frame_types) {
if (!initialized_) {
// Most likely initializing the codec failed.
return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE;
}
JNIEnv* jni = AttachCurrentThreadIfNeeded();
ScopedLocalRefFrame local_ref_frame(jni);
// Construct encode info.
jobjectArray j_frame_types =
jni->NewObjectArray(frame_types->size(), *frame_type_class_, nullptr);
for (size_t i = 0; i < frame_types->size(); ++i) {
jobject j_frame_type = jni->CallStaticObjectMethod(
*frame_type_class_, frame_type_from_native_method_,
static_cast<jint>((*frame_types)[i]));
jni->SetObjectArrayElement(j_frame_types, i, j_frame_type);
}
jobject encode_info = jni->NewObject(*encode_info_class_,
encode_info_constructor_, j_frame_types);
FrameExtraInfo info;
info.capture_time_ns = frame.timestamp_us() * rtc::kNumNanosecsPerMicrosec;
info.timestamp_rtp = frame.timestamp();
frame_extra_infos_.push_back(info);
jobject ret = jni->CallObjectMethod(
*encoder_, encode_method_, video_frame_factory_.ToJavaFrame(jni, frame),
encode_info);
return HandleReturnCode(jni, ret);
}
int32_t VideoEncoderWrapper::SetChannelParameters(uint32_t packet_loss,
int64_t rtt) {
JNIEnv* jni = AttachCurrentThreadIfNeeded();
ScopedLocalRefFrame local_ref_frame(jni);
jobject ret = jni->CallObjectMethod(*encoder_, set_channel_parameters_method_,
(jshort)packet_loss, (jlong)rtt);
return HandleReturnCode(jni, ret);
}
int32_t VideoEncoderWrapper::SetRateAllocation(
const BitrateAllocation& allocation,
uint32_t framerate) {
JNIEnv* jni = AttachCurrentThreadIfNeeded();
ScopedLocalRefFrame local_ref_frame(jni);
jobject j_bitrate_allocation = ToJavaBitrateAllocation(jni, allocation);
jobject ret = jni->CallObjectMethod(*encoder_, set_rate_allocation_method_,
j_bitrate_allocation, (jint)framerate);
return HandleReturnCode(jni, ret);
}
VideoEncoderWrapper::ScalingSettings VideoEncoderWrapper::GetScalingSettings()
const {
JNIEnv* jni = AttachCurrentThreadIfNeeded();
ScopedLocalRefFrame local_ref_frame(jni);
jobject j_scaling_settings =
jni->CallObjectMethod(*encoder_, get_scaling_settings_method_);
bool on =
jni->GetBooleanField(j_scaling_settings, scaling_settings_on_field_);
jobject j_low =
jni->GetObjectField(j_scaling_settings, scaling_settings_low_field_);
jobject j_high =
jni->GetObjectField(j_scaling_settings, scaling_settings_high_field_);
if (j_low != nullptr || j_high != nullptr) {
RTC_DCHECK(j_low != nullptr);
RTC_DCHECK(j_high != nullptr);
int low = jni->CallIntMethod(j_low, int_value_method_);
int high = jni->CallIntMethod(j_high, int_value_method_);
return ScalingSettings(on, low, high);
} else {
return ScalingSettings(on);
}
}
const char* VideoEncoderWrapper::ImplementationName() const {
return implementation_name_.c_str();
}
void VideoEncoderWrapper::OnEncodedFrame(JNIEnv* jni,
jobject j_buffer,
jint encoded_width,
jint encoded_height,
jlong capture_time_ns,
jint frame_type,
jint rotation,
jboolean complete_frame,
jobject j_qp) {
const uint8_t* buffer =
static_cast<uint8_t*>(jni->GetDirectBufferAddress(j_buffer));
const size_t buffer_size = jni->GetDirectBufferCapacity(j_buffer);
std::vector<uint8_t> buffer_copy(buffer_size);
memcpy(buffer_copy.data(), buffer, buffer_size);
int qp = -1;
if (j_qp != nullptr) {
qp = jni->CallIntMethod(j_qp, int_value_method_);
}
encoder_queue_->PostTask(
[
this, task_buffer = std::move(buffer_copy), qp, encoded_width,
encoded_height, capture_time_ns, frame_type, rotation, complete_frame
]() {
FrameExtraInfo frame_extra_info;
do {
if (frame_extra_infos_.empty()) {
LOG(LS_WARNING)
<< "Java encoder produced an unexpected frame with timestamp: "
<< capture_time_ns;
return;
}
frame_extra_info = frame_extra_infos_.front();
frame_extra_infos_.pop_front();
// The encoder might drop frames, so iterate through the queue until
// we find a matching timestamp.
} while (frame_extra_info.capture_time_ns != capture_time_ns);
RTPFragmentationHeader header = ParseFragmentationHeader(task_buffer);
EncodedImage frame(const_cast<uint8_t*>(task_buffer.data()),
task_buffer.size(), task_buffer.size());
frame._encodedWidth = encoded_width;
frame._encodedHeight = encoded_height;
frame._timeStamp = frame_extra_info.timestamp_rtp;
frame.capture_time_ms_ = capture_time_ns / rtc::kNumNanosecsPerMillisec;
frame._frameType = (FrameType)frame_type;
frame.rotation_ = (VideoRotation)rotation;
frame._completeFrame = complete_frame;
if (qp == -1) {
frame.qp_ = ParseQp(task_buffer);
} else {
frame.qp_ = qp;
}
CodecSpecificInfo info(ParseCodecSpecificInfo(frame));
callback_->OnEncodedImage(frame, &info, &header);
});
}
int32_t VideoEncoderWrapper::HandleReturnCode(JNIEnv* jni, jobject code) {
int32_t value = jni->CallIntMethod(code, get_number_method_);
if (value < 0) { // Any errors are represented by negative values.
// Try resetting the codec.
if (++num_resets_ <= kMaxJavaEncoderResets &&
Release() == WEBRTC_VIDEO_CODEC_OK) {
LOG(LS_WARNING) << "Reset Java encoder: " << num_resets_;
return InitEncodeInternal(jni);
}
LOG(LS_WARNING) << "Falling back to software decoder.";
return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE;
} else {
return value;
}
}
RTPFragmentationHeader VideoEncoderWrapper::ParseFragmentationHeader(
const std::vector<uint8_t>& buffer) {
RTPFragmentationHeader header;
if (codec_settings_.codecType == kVideoCodecH264) {
h264_bitstream_parser_.ParseBitstream(buffer.data(), buffer.size());
// For H.264 search for start codes.
const std::vector<H264::NaluIndex> nalu_idxs =
H264::FindNaluIndices(buffer.data(), buffer.size());
if (nalu_idxs.empty()) {
LOG(LS_ERROR) << "No H.264 start code found!";
// Stream uint8_t values as integers; streaming them directly would print
// raw characters.
LOG(LS_ERROR) << "First bytes: " << static_cast<int>(buffer[0]) << " "
<< static_cast<int>(buffer[1]) << " " << static_cast<int>(buffer[2]) << " "
<< static_cast<int>(buffer[3]) << " " << static_cast<int>(buffer[4]) << " "
<< static_cast<int>(buffer[5]);
}
header.VerifyAndAllocateFragmentationHeader(nalu_idxs.size());
for (size_t i = 0; i < nalu_idxs.size(); i++) {
header.fragmentationOffset[i] = nalu_idxs[i].payload_start_offset;
header.fragmentationLength[i] = nalu_idxs[i].payload_size;
header.fragmentationPlType[i] = 0;
header.fragmentationTimeDiff[i] = 0;
}
} else {
// Generate a header describing a single fragment.
header.VerifyAndAllocateFragmentationHeader(1);
header.fragmentationOffset[0] = 0;
header.fragmentationLength[0] = buffer.size();
header.fragmentationPlType[0] = 0;
header.fragmentationTimeDiff[0] = 0;
}
return header;
}
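// Editorial note (an assumption about the input framing, not part of this
// change): the H.264 branch above expects an Annex B stream, e.g. a key
// frame laid out as
//   00 00 00 01 <SPS> 00 00 00 01 <PPS> 00 00 00 01 <IDR slice>
// with one fragmentation entry per NALU payload, start codes excluded.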
int VideoEncoderWrapper::ParseQp(const std::vector<uint8_t>& buffer) {
int qp;
bool success;
switch (codec_settings_.codecType) {
case kVideoCodecVP8:
success = vp8::GetQp(buffer.data(), buffer.size(), &qp);
break;
case kVideoCodecVP9:
success = vp9::GetQp(buffer.data(), buffer.size(), &qp);
break;
case kVideoCodecH264:
success = h264_bitstream_parser_.GetLastSliceQp(&qp);
break;
default: // Default is to not provide QP.
success = false;
break;
}
return success ? qp : -1; // -1 means unknown QP.
}
CodecSpecificInfo VideoEncoderWrapper::ParseCodecSpecificInfo(
const EncodedImage& frame) {
const bool key_frame = frame._frameType == kVideoFrameKey;
CodecSpecificInfo info;
memset(&info, 0, sizeof(info));
info.codecType = codec_settings_.codecType;
info.codec_name = implementation_name_.c_str();
switch (codec_settings_.codecType) {
case kVideoCodecVP8:
info.codecSpecific.VP8.pictureId = picture_id_;
info.codecSpecific.VP8.nonReference = false;
info.codecSpecific.VP8.simulcastIdx = 0;
info.codecSpecific.VP8.temporalIdx = kNoTemporalIdx;
info.codecSpecific.VP8.layerSync = false;
info.codecSpecific.VP8.tl0PicIdx = kNoTl0PicIdx;
info.codecSpecific.VP8.keyIdx = kNoKeyIdx;
break;
case kVideoCodecVP9:
if (key_frame) {
gof_idx_ = 0;
}
info.codecSpecific.VP9.picture_id = picture_id_;
info.codecSpecific.VP9.inter_pic_predicted = !key_frame;
info.codecSpecific.VP9.flexible_mode = false;
info.codecSpecific.VP9.ss_data_available = key_frame;
info.codecSpecific.VP9.tl0_pic_idx = tl0_pic_idx_++;
info.codecSpecific.VP9.temporal_idx = kNoTemporalIdx;
info.codecSpecific.VP9.spatial_idx = kNoSpatialIdx;
info.codecSpecific.VP9.temporal_up_switch = true;
info.codecSpecific.VP9.inter_layer_predicted = false;
info.codecSpecific.VP9.gof_idx =
static_cast<uint8_t>(gof_idx_++ % gof_.num_frames_in_gof);
info.codecSpecific.VP9.num_spatial_layers = 1;
info.codecSpecific.VP9.spatial_layer_resolution_present = false;
if (info.codecSpecific.VP9.ss_data_available) {
info.codecSpecific.VP9.spatial_layer_resolution_present = true;
info.codecSpecific.VP9.width[0] = frame._encodedWidth;
info.codecSpecific.VP9.height[0] = frame._encodedHeight;
info.codecSpecific.VP9.gof.CopyGofInfoVP9(gof_);
}
break;
default:
break;
}
picture_id_ = (picture_id_ + 1) & 0x7FFF;
return info;
}
jobject VideoEncoderWrapper::ToJavaBitrateAllocation(
JNIEnv* jni,
const BitrateAllocation& allocation) {
jobjectArray j_allocation_array = jni->NewObjectArray(
kMaxSpatialLayers, *int_array_class_, nullptr /* initial */);
for (int spatial_i = 0; spatial_i < kMaxSpatialLayers; ++spatial_i) {
jintArray j_array_spatial_layer = jni->NewIntArray(kMaxTemporalStreams);
jint* array_spatial_layer =
jni->GetIntArrayElements(j_array_spatial_layer, nullptr /* isCopy */);
for (int temporal_i = 0; temporal_i < kMaxTemporalStreams; ++temporal_i) {
array_spatial_layer[temporal_i] =
allocation.GetBitrate(spatial_i, temporal_i);
}
jni->ReleaseIntArrayElements(j_array_spatial_layer, array_spatial_layer,
JNI_COMMIT);
jni->SetObjectArrayElement(j_allocation_array, spatial_i,
j_array_spatial_layer);
}
return jni->NewObject(*bitrate_allocation_class_,
bitrate_allocation_constructor_, j_allocation_array);
}
std::string VideoEncoderWrapper::GetImplementationName(JNIEnv* jni) const {
jstring jname = reinterpret_cast<jstring>(
jni->CallObjectMethod(*encoder_, get_implementation_name_method_));
return JavaToStdString(jni, jname);
}
JNI_FUNCTION_DECLARATION(void,
VideoEncoderWrapperCallback_nativeOnEncodedFrame,
JNIEnv* jni,
jclass,
jlong j_native_encoder,
jobject buffer,
jint encoded_width,
jint encoded_height,
jlong capture_time_ns,
jint frame_type,
jint rotation,
jboolean complete_frame,
jobject qp) {
VideoEncoderWrapper* native_encoder =
reinterpret_cast<VideoEncoderWrapper*>(j_native_encoder);
native_encoder->OnEncodedFrame(jni, buffer, encoded_width, encoded_height,
capture_time_ns, frame_type, rotation,
complete_frame, qp);
}
} // namespace jni
} // namespace webrtc
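Finally, an editorial sketch of the structure ToJavaBitrateAllocation() flattens, hedged since it assumes the webrtc::BitrateAllocation API of this era (SetBitrate/GetBitrate in bps):

BitrateAllocation allocation;
allocation.SetBitrate(0 /* spatial */, 0 /* temporal */, 300000 /* bps */);
allocation.SetBitrate(0, 1, 150000);
// The Java side receives a kMaxSpatialLayers x kMaxTemporalStreams int[][];
// entries never set read back as 0 via GetBitrate().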

Some files were not shown because too many files have changed in this diff.