Moving src/webrtc into src/.

In order to eliminate the WebRTC Subtree mirror in Chromium, 
WebRTC is moving the content of the src/webrtc directory up
to the src/ directory.

NOPRESUBMIT=true
NOTREECHECKS=true
NOTRY=true
TBR=tommi@webrtc.org

Bug: chromium:611808
Change-Id: Iac59c5b51b950f174119565bac87955a7994bc38
Reviewed-on: https://webrtc-review.googlesource.com/1560
Commit-Queue: Mirko Bonadei <mbonadei@webrtc.org>
Reviewed-by: Henrik Kjellander <kjellander@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#19845}
Mirko Bonadei authored 2017-09-15 06:15:48 +02:00; committed by Commit Bot
parent 6674846b4a, commit bb547203bf
4576 changed files with 1092 additions and 1196 deletions

@@ -0,0 +1,60 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
/** An implementation of CapturerObserver that forwards all calls from Java to the C layer. */
class AndroidVideoTrackSourceObserver implements VideoCapturer.CapturerObserver {
// Pointer to VideoTrackSourceProxy proxying AndroidVideoTrackSource.
private final long nativeSource;
public AndroidVideoTrackSourceObserver(long nativeSource) {
this.nativeSource = nativeSource;
}
@Override
public void onCapturerStarted(boolean success) {
nativeCapturerStarted(nativeSource, success);
}
@Override
public void onCapturerStopped() {
nativeCapturerStopped(nativeSource);
}
@Override
public void onByteBufferFrameCaptured(
byte[] data, int width, int height, int rotation, long timeStamp) {
nativeOnByteBufferFrameCaptured(
nativeSource, data, data.length, width, height, rotation, timeStamp);
}
@Override
public void onTextureFrameCaptured(int width, int height, int oesTextureId,
float[] transformMatrix, int rotation, long timestamp) {
nativeOnTextureFrameCaptured(
nativeSource, width, height, oesTextureId, transformMatrix, rotation, timestamp);
}
@Override
public void onFrameCaptured(VideoFrame frame) {
nativeOnFrameCaptured(nativeSource, frame.getBuffer().getWidth(), frame.getBuffer().getHeight(),
frame.getRotation(), frame.getTimestampNs(), frame.getBuffer());
}
private native void nativeCapturerStarted(long nativeSource, boolean success);
private native void nativeCapturerStopped(long nativeSource);
private native void nativeOnByteBufferFrameCaptured(long nativeSource, byte[] data, int length,
int width, int height, int rotation, long timeStamp);
private native void nativeOnTextureFrameCaptured(long nativeSource, int width, int height,
int oesTextureId, float[] transformMatrix, int rotation, long timestamp);
private native void nativeOnFrameCaptured(long nativeSource, int width, int height, int rotation,
long timestampNs, VideoFrame.Buffer frame);
}
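
A hedged illustration, not part of this commit: the observer above wraps a raw pointer to the native VideoTrackSourceProxy and is then handed to a capturer through VideoCapturer.initialize(), whose signature appears in CameraCapturer later in this diff. The pointer value and the surrounding objects below are placeholders supplied by the caller.

  // Sketch only: wiring the observer into a capturer (package org.webrtc).
  static void attachNativeSource(VideoCapturer capturer, SurfaceTextureHelper helper,
      android.content.Context appContext, long nativeVideoTrackSourcePointer) {
    // The long must be a valid native VideoTrackSourceProxy pointer provided by the C++ layer.
    VideoCapturer.CapturerObserver observer =
        new AndroidVideoTrackSourceObserver(nativeVideoTrackSourcePointer);
    capturer.initialize(helper, appContext, observer);
    capturer.startCapture(1280 /* width */, 720 /* height */, 30 /* framerate */);
  }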

@@ -0,0 +1,38 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
/** BitrateAdjuster that tracks bitrate and framerate but does not adjust them. */
class BaseBitrateAdjuster implements BitrateAdjuster {
protected int targetBitrateBps = 0;
protected int targetFps = 0;
@Override
public void setTargets(int targetBitrateBps, int targetFps) {
this.targetBitrateBps = targetBitrateBps;
this.targetFps = targetFps;
}
@Override
public void reportEncodedFrame(int size) {
// No op.
}
@Override
public int getAdjustedBitrateBps() {
return targetBitrateBps;
}
@Override
public int getAdjustedFramerate() {
return targetFps;
}
}

@@ -0,0 +1,31 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
/** Object that adjusts the bitrate of a hardware codec. */
interface BitrateAdjuster {
/**
* Sets the target bitrate in bits per second and framerate in frames per second.
*/
void setTargets(int targetBitrateBps, int targetFps);
/**
* Reports that a frame of the given size has been encoded. Returns true if the bitrate should
* be adjusted.
*/
void reportEncodedFrame(int size);
/** Gets the current bitrate. */
int getAdjustedBitrateBps();
/** Gets the current framerate. */
int getAdjustedFramerate();
}
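
A hedged usage sketch, not part of this commit: a hardware encoder wrapper would push new targets when the sender reconfigures, report every encoded frame, and read the adjusted values back before programming the codec. DynamicBitrateAdjuster, added later in this diff, is one such implementation.

  // Sketch only: hypothetical call sequence against the BitrateAdjuster interface.
  static int adjustedBitrateAfterFrame(byte[] encodedFrame) {
    BitrateAdjuster adjuster = new DynamicBitrateAdjuster();
    adjuster.setTargets(1000000 /* 1 Mbps */, 30 /* fps */);
    // Called for every frame that comes back from the hardware codec.
    adjuster.reportEncodedFrame(encodedFrame.length);
    // Use the adjusted values, not the raw targets, when (re)configuring the codec.
    return adjuster.getAdjustedBitrateBps();
  }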

@@ -0,0 +1,369 @@
/*
* Copyright 2016 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import android.content.Context;
import android.media.MediaRecorder;
import android.os.Handler;
import android.os.SystemClock;
import android.view.Surface;
import android.view.WindowManager;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.List;
import java.util.concurrent.TimeUnit;
import org.webrtc.CameraEnumerationAndroid.CaptureFormat;
@SuppressWarnings("deprecation")
class Camera1Session implements CameraSession {
private static final String TAG = "Camera1Session";
private static final int NUMBER_OF_CAPTURE_BUFFERS = 3;
private static final Histogram camera1StartTimeMsHistogram =
Histogram.createCounts("WebRTC.Android.Camera1.StartTimeMs", 1, 10000, 50);
private static final Histogram camera1StopTimeMsHistogram =
Histogram.createCounts("WebRTC.Android.Camera1.StopTimeMs", 1, 10000, 50);
private static final Histogram camera1ResolutionHistogram = Histogram.createEnumeration(
"WebRTC.Android.Camera1.Resolution", CameraEnumerationAndroid.COMMON_RESOLUTIONS.size());
private static enum SessionState { RUNNING, STOPPED }
private final boolean videoFrameEmitTrialEnabled;
private final Handler cameraThreadHandler;
private final Events events;
private final boolean captureToTexture;
private final Context applicationContext;
private final SurfaceTextureHelper surfaceTextureHelper;
private final int cameraId;
private final android.hardware.Camera camera;
private final android.hardware.Camera.CameraInfo info;
private final CaptureFormat captureFormat;
// Used only for stats. Only used on the camera thread.
private final long constructionTimeNs; // Construction time of this class.
private SessionState state;
private boolean firstFrameReported = false;
public static void create(final CreateSessionCallback callback, final Events events,
final boolean captureToTexture, final Context applicationContext,
final SurfaceTextureHelper surfaceTextureHelper, final MediaRecorder mediaRecorder,
final int cameraId, final int width, final int height, final int framerate) {
final long constructionTimeNs = System.nanoTime();
Logging.d(TAG, "Open camera " + cameraId);
events.onCameraOpening();
final android.hardware.Camera camera;
try {
camera = android.hardware.Camera.open(cameraId);
} catch (RuntimeException e) {
callback.onFailure(FailureType.ERROR, e.getMessage());
return;
}
try {
camera.setPreviewTexture(surfaceTextureHelper.getSurfaceTexture());
} catch (IOException e) {
camera.release();
callback.onFailure(FailureType.ERROR, e.getMessage());
return;
}
final android.hardware.Camera.CameraInfo info = new android.hardware.Camera.CameraInfo();
android.hardware.Camera.getCameraInfo(cameraId, info);
final android.hardware.Camera.Parameters parameters = camera.getParameters();
final CaptureFormat captureFormat =
findClosestCaptureFormat(parameters, width, height, framerate);
final Size pictureSize = findClosestPictureSize(parameters, width, height);
updateCameraParameters(camera, parameters, captureFormat, pictureSize, captureToTexture);
if (!captureToTexture) {
final int frameSize = captureFormat.frameSize();
for (int i = 0; i < NUMBER_OF_CAPTURE_BUFFERS; ++i) {
final ByteBuffer buffer = ByteBuffer.allocateDirect(frameSize);
camera.addCallbackBuffer(buffer.array());
}
}
// Calculate orientation manually and send it as CVO instead.
camera.setDisplayOrientation(0 /* degrees */);
callback.onDone(
new Camera1Session(events, captureToTexture, applicationContext, surfaceTextureHelper,
mediaRecorder, cameraId, camera, info, captureFormat, constructionTimeNs));
}
private static void updateCameraParameters(android.hardware.Camera camera,
android.hardware.Camera.Parameters parameters, CaptureFormat captureFormat, Size pictureSize,
boolean captureToTexture) {
final List<String> focusModes = parameters.getSupportedFocusModes();
parameters.setPreviewFpsRange(captureFormat.framerate.min, captureFormat.framerate.max);
parameters.setPreviewSize(captureFormat.width, captureFormat.height);
parameters.setPictureSize(pictureSize.width, pictureSize.height);
if (!captureToTexture) {
parameters.setPreviewFormat(captureFormat.imageFormat);
}
if (parameters.isVideoStabilizationSupported()) {
parameters.setVideoStabilization(true);
}
if (focusModes.contains(android.hardware.Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO)) {
parameters.setFocusMode(android.hardware.Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
}
camera.setParameters(parameters);
}
private static CaptureFormat findClosestCaptureFormat(
android.hardware.Camera.Parameters parameters, int width, int height, int framerate) {
// Find closest supported format for |width| x |height| @ |framerate|.
final List<CaptureFormat.FramerateRange> supportedFramerates =
Camera1Enumerator.convertFramerates(parameters.getSupportedPreviewFpsRange());
Logging.d(TAG, "Available fps ranges: " + supportedFramerates);
final CaptureFormat.FramerateRange fpsRange =
CameraEnumerationAndroid.getClosestSupportedFramerateRange(supportedFramerates, framerate);
final Size previewSize = CameraEnumerationAndroid.getClosestSupportedSize(
Camera1Enumerator.convertSizes(parameters.getSupportedPreviewSizes()), width, height);
CameraEnumerationAndroid.reportCameraResolution(camera1ResolutionHistogram, previewSize);
return new CaptureFormat(previewSize.width, previewSize.height, fpsRange);
}
private static Size findClosestPictureSize(
android.hardware.Camera.Parameters parameters, int width, int height) {
return CameraEnumerationAndroid.getClosestSupportedSize(
Camera1Enumerator.convertSizes(parameters.getSupportedPictureSizes()), width, height);
}
private Camera1Session(Events events, boolean captureToTexture, Context applicationContext,
SurfaceTextureHelper surfaceTextureHelper, MediaRecorder mediaRecorder, int cameraId,
android.hardware.Camera camera, android.hardware.Camera.CameraInfo info,
CaptureFormat captureFormat, long constructionTimeNs) {
Logging.d(TAG, "Create new camera1 session on camera " + cameraId);
videoFrameEmitTrialEnabled =
PeerConnectionFactory.fieldTrialsFindFullName(PeerConnectionFactory.VIDEO_FRAME_EMIT_TRIAL)
.equals(PeerConnectionFactory.TRIAL_ENABLED);
this.cameraThreadHandler = new Handler();
this.events = events;
this.captureToTexture = captureToTexture;
this.applicationContext = applicationContext;
this.surfaceTextureHelper = surfaceTextureHelper;
this.cameraId = cameraId;
this.camera = camera;
this.info = info;
this.captureFormat = captureFormat;
this.constructionTimeNs = constructionTimeNs;
startCapturing();
if (mediaRecorder != null) {
camera.unlock();
mediaRecorder.setCamera(camera);
}
}
@Override
public void stop() {
Logging.d(TAG, "Stop camera1 session on camera " + cameraId);
checkIsOnCameraThread();
if (state != SessionState.STOPPED) {
final long stopStartTime = System.nanoTime();
stopInternal();
final int stopTimeMs = (int) TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - stopStartTime);
camera1StopTimeMsHistogram.addSample(stopTimeMs);
}
}
private void startCapturing() {
Logging.d(TAG, "Start capturing");
checkIsOnCameraThread();
state = SessionState.RUNNING;
camera.setErrorCallback(new android.hardware.Camera.ErrorCallback() {
@Override
public void onError(int error, android.hardware.Camera camera) {
String errorMessage;
if (error == android.hardware.Camera.CAMERA_ERROR_SERVER_DIED) {
errorMessage = "Camera server died!";
} else {
errorMessage = "Camera error: " + error;
}
Logging.e(TAG, errorMessage);
stopInternal();
if (error == android.hardware.Camera.CAMERA_ERROR_EVICTED) {
events.onCameraDisconnected(Camera1Session.this);
} else {
events.onCameraError(Camera1Session.this, errorMessage);
}
}
});
if (captureToTexture) {
listenForTextureFrames();
} else {
listenForBytebufferFrames();
}
try {
camera.startPreview();
} catch (RuntimeException e) {
stopInternal();
events.onCameraError(this, e.getMessage());
}
}
private void stopInternal() {
Logging.d(TAG, "Stop internal");
checkIsOnCameraThread();
if (state == SessionState.STOPPED) {
Logging.d(TAG, "Camera is already stopped");
return;
}
state = SessionState.STOPPED;
surfaceTextureHelper.stopListening();
// Note: stopPreview or other driver code might deadlock. Deadlock in
// android.hardware.Camera._stopPreview(Native Method) has been observed on
// Nexus 5 (hammerhead), OS version LMY48I.
camera.stopPreview();
camera.release();
events.onCameraClosed(this);
Logging.d(TAG, "Stop done");
}
private void listenForTextureFrames() {
surfaceTextureHelper.startListening(new SurfaceTextureHelper.OnTextureFrameAvailableListener() {
@Override
public void onTextureFrameAvailable(
int oesTextureId, float[] transformMatrix, long timestampNs) {
checkIsOnCameraThread();
if (state != SessionState.RUNNING) {
Logging.d(TAG, "Texture frame captured but camera is no longer running.");
surfaceTextureHelper.returnTextureFrame();
return;
}
if (!firstFrameReported) {
final int startTimeMs =
(int) TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - constructionTimeNs);
camera1StartTimeMsHistogram.addSample(startTimeMs);
firstFrameReported = true;
}
int rotation = getFrameOrientation();
if (info.facing == android.hardware.Camera.CameraInfo.CAMERA_FACING_FRONT) {
// Undo the mirror that the OS "helps" us with.
// http://developer.android.com/reference/android/hardware/Camera.html#setDisplayOrientation(int)
transformMatrix = RendererCommon.multiplyMatrices(
transformMatrix, RendererCommon.horizontalFlipMatrix());
}
if (videoFrameEmitTrialEnabled) {
final VideoFrame.Buffer buffer =
surfaceTextureHelper.createTextureBuffer(captureFormat.width, captureFormat.height,
RendererCommon.convertMatrixToAndroidGraphicsMatrix(transformMatrix));
final VideoFrame frame = new VideoFrame(buffer, rotation, timestampNs);
events.onFrameCaptured(Camera1Session.this, frame);
frame.release();
} else {
events.onTextureFrameCaptured(Camera1Session.this, captureFormat.width,
captureFormat.height, oesTextureId, transformMatrix, rotation, timestampNs);
}
}
});
}
private void listenForBytebufferFrames() {
camera.setPreviewCallbackWithBuffer(new android.hardware.Camera.PreviewCallback() {
@Override
public void onPreviewFrame(final byte[] data, android.hardware.Camera callbackCamera) {
checkIsOnCameraThread();
if (callbackCamera != camera) {
Logging.e(TAG, "Callback from a different camera. This should never happen.");
return;
}
if (state != SessionState.RUNNING) {
Logging.d(TAG, "Bytebuffer frame captured but camera is no longer running.");
return;
}
final long captureTimeNs = TimeUnit.MILLISECONDS.toNanos(SystemClock.elapsedRealtime());
if (!firstFrameReported) {
final int startTimeMs =
(int) TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - constructionTimeNs);
camera1StartTimeMsHistogram.addSample(startTimeMs);
firstFrameReported = true;
}
if (videoFrameEmitTrialEnabled) {
VideoFrame.Buffer frameBuffer = new NV21Buffer(data, captureFormat.width,
captureFormat.height, () -> cameraThreadHandler.post(() -> {
if (state == SessionState.RUNNING) {
camera.addCallbackBuffer(data);
}
}));
final VideoFrame frame =
new VideoFrame(frameBuffer, getFrameOrientation(), captureTimeNs);
events.onFrameCaptured(Camera1Session.this, frame);
frame.release();
} else {
events.onByteBufferFrameCaptured(Camera1Session.this, data, captureFormat.width,
captureFormat.height, getFrameOrientation(), captureTimeNs);
camera.addCallbackBuffer(data);
}
}
});
}
private int getDeviceOrientation() {
int orientation = 0;
WindowManager wm = (WindowManager) applicationContext.getSystemService(Context.WINDOW_SERVICE);
switch (wm.getDefaultDisplay().getRotation()) {
case Surface.ROTATION_90:
orientation = 90;
break;
case Surface.ROTATION_180:
orientation = 180;
break;
case Surface.ROTATION_270:
orientation = 270;
break;
case Surface.ROTATION_0:
default:
orientation = 0;
break;
}
return orientation;
}
private int getFrameOrientation() {
int rotation = getDeviceOrientation();
if (info.facing == android.hardware.Camera.CameraInfo.CAMERA_FACING_BACK) {
rotation = 360 - rotation;
}
return (info.orientation + rotation) % 360;
}
private void checkIsOnCameraThread() {
if (Thread.currentThread() != cameraThreadHandler.getLooper().getThread()) {
throw new IllegalStateException("Wrong thread");
}
}
}
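
A short worked example, not from this commit, of the orientation arithmetic in getFrameOrientation() above: the device rotation is mirrored for back-facing sensors and then added to the sensor's mounting orientation modulo 360.

  // Sketch only: getFrameOrientation() pulled out as a standalone helper with example values.
  static int frameOrientation(
      int sensorOrientationDegrees, int deviceRotationDegrees, boolean frontFacing) {
    int rotation = deviceRotationDegrees;
    if (!frontFacing) {
      // Back-facing sensors rotate the opposite way relative to the display.
      rotation = 360 - rotation;
    }
    return (sensorOrientationDegrees + rotation) % 360;
  }
  // frameOrientation(90, 90, false)  -> (90 + 270) % 360 = 0
  // frameOrientation(270, 90, true)  -> (270 + 90) % 360 = 0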

@@ -0,0 +1,480 @@
/*
* Copyright 2016 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import android.annotation.TargetApi;
import android.content.Context;
import android.graphics.Matrix;
import android.graphics.SurfaceTexture;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CameraManager;
import android.hardware.camera2.CameraMetadata;
import android.hardware.camera2.CaptureFailure;
import android.hardware.camera2.CaptureRequest;
import android.media.MediaRecorder;
import android.os.Handler;
import android.util.Range;
import android.view.Surface;
import android.view.WindowManager;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.TimeUnit;
import org.webrtc.CameraEnumerationAndroid.CaptureFormat;
@TargetApi(21)
class Camera2Session implements CameraSession {
private static final String TAG = "Camera2Session";
private static final Histogram camera2StartTimeMsHistogram =
Histogram.createCounts("WebRTC.Android.Camera2.StartTimeMs", 1, 10000, 50);
private static final Histogram camera2StopTimeMsHistogram =
Histogram.createCounts("WebRTC.Android.Camera2.StopTimeMs", 1, 10000, 50);
private static final Histogram camera2ResolutionHistogram = Histogram.createEnumeration(
"WebRTC.Android.Camera2.Resolution", CameraEnumerationAndroid.COMMON_RESOLUTIONS.size());
private static enum SessionState { RUNNING, STOPPED }
private final boolean videoFrameEmitTrialEnabled;
private final Handler cameraThreadHandler;
private final CreateSessionCallback callback;
private final Events events;
private final Context applicationContext;
private final CameraManager cameraManager;
private final SurfaceTextureHelper surfaceTextureHelper;
private final Surface mediaRecorderSurface;
private final String cameraId;
private final int width;
private final int height;
private final int framerate;
// Initialized at start
private CameraCharacteristics cameraCharacteristics;
private int cameraOrientation;
private boolean isCameraFrontFacing;
private int fpsUnitFactor;
private CaptureFormat captureFormat;
// Initialized when camera opens
private CameraDevice cameraDevice;
private Surface surface;
// Initialized when capture session is created
private CameraCaptureSession captureSession;
// State
private SessionState state = SessionState.RUNNING;
private boolean firstFrameReported = false;
// Used only for stats. Only used on the camera thread.
private final long constructionTimeNs; // Construction time of this class.
private class CameraStateCallback extends CameraDevice.StateCallback {
private String getErrorDescription(int errorCode) {
switch (errorCode) {
case CameraDevice.StateCallback.ERROR_CAMERA_DEVICE:
return "Camera device has encountered a fatal error.";
case CameraDevice.StateCallback.ERROR_CAMERA_DISABLED:
return "Camera device could not be opened due to a device policy.";
case CameraDevice.StateCallback.ERROR_CAMERA_IN_USE:
return "Camera device is in use already.";
case CameraDevice.StateCallback.ERROR_CAMERA_SERVICE:
return "Camera service has encountered a fatal error.";
case CameraDevice.StateCallback.ERROR_MAX_CAMERAS_IN_USE:
return "Camera device could not be opened because"
+ " there are too many other open camera devices.";
default:
return "Unknown camera error: " + errorCode;
}
}
@Override
public void onDisconnected(CameraDevice camera) {
checkIsOnCameraThread();
final boolean startFailure = (captureSession == null) && (state != SessionState.STOPPED);
state = SessionState.STOPPED;
stopInternal();
if (startFailure) {
callback.onFailure(FailureType.DISCONNECTED, "Camera disconnected / evicted.");
} else {
events.onCameraDisconnected(Camera2Session.this);
}
}
@Override
public void onError(CameraDevice camera, int errorCode) {
checkIsOnCameraThread();
reportError(getErrorDescription(errorCode));
}
@Override
public void onOpened(CameraDevice camera) {
checkIsOnCameraThread();
Logging.d(TAG, "Camera opened.");
cameraDevice = camera;
final SurfaceTexture surfaceTexture = surfaceTextureHelper.getSurfaceTexture();
surfaceTexture.setDefaultBufferSize(captureFormat.width, captureFormat.height);
surface = new Surface(surfaceTexture);
List<Surface> surfaces = new ArrayList<Surface>();
surfaces.add(surface);
if (mediaRecorderSurface != null) {
Logging.d(TAG, "Add MediaRecorder surface to capture session.");
surfaces.add(mediaRecorderSurface);
}
try {
camera.createCaptureSession(surfaces, new CaptureSessionCallback(), cameraThreadHandler);
} catch (CameraAccessException e) {
reportError("Failed to create capture session. " + e);
return;
}
}
@Override
public void onClosed(CameraDevice camera) {
checkIsOnCameraThread();
Logging.d(TAG, "Camera device closed.");
events.onCameraClosed(Camera2Session.this);
}
}
private class CaptureSessionCallback extends CameraCaptureSession.StateCallback {
@Override
public void onConfigureFailed(CameraCaptureSession session) {
checkIsOnCameraThread();
session.close();
reportError("Failed to configure capture session.");
}
@Override
public void onConfigured(CameraCaptureSession session) {
checkIsOnCameraThread();
Logging.d(TAG, "Camera capture session configured.");
captureSession = session;
try {
/*
* The viable options for video capture requests are:
* TEMPLATE_PREVIEW: High frame rate is given priority over the highest-quality
* post-processing.
* TEMPLATE_RECORD: Stable frame rate is used, and post-processing is set for recording
* quality.
*/
final CaptureRequest.Builder captureRequestBuilder =
cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_RECORD);
// Set auto exposure fps range.
captureRequestBuilder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE,
new Range<Integer>(captureFormat.framerate.min / fpsUnitFactor,
captureFormat.framerate.max / fpsUnitFactor));
captureRequestBuilder.set(
CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON);
captureRequestBuilder.set(CaptureRequest.CONTROL_AE_LOCK, false);
chooseStabilizationMode(captureRequestBuilder);
chooseFocusMode(captureRequestBuilder);
captureRequestBuilder.addTarget(surface);
if (mediaRecorderSurface != null) {
Logging.d(TAG, "Add MediaRecorder surface to CaptureRequest.Builder");
captureRequestBuilder.addTarget(mediaRecorderSurface);
}
session.setRepeatingRequest(
captureRequestBuilder.build(), new CameraCaptureCallback(), cameraThreadHandler);
} catch (CameraAccessException e) {
reportError("Failed to start capture request. " + e);
return;
}
surfaceTextureHelper.startListening(
new SurfaceTextureHelper.OnTextureFrameAvailableListener() {
@Override
public void onTextureFrameAvailable(
int oesTextureId, float[] transformMatrix, long timestampNs) {
checkIsOnCameraThread();
if (state != SessionState.RUNNING) {
Logging.d(TAG, "Texture frame captured but camera is no longer running.");
surfaceTextureHelper.returnTextureFrame();
return;
}
if (!firstFrameReported) {
firstFrameReported = true;
final int startTimeMs =
(int) TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - constructionTimeNs);
camera2StartTimeMsHistogram.addSample(startTimeMs);
}
int rotation = getFrameOrientation();
if (isCameraFrontFacing) {
// Undo the mirror that the OS "helps" us with.
// http://developer.android.com/reference/android/hardware/Camera.html#setDisplayOrientation(int)
transformMatrix = RendererCommon.multiplyMatrices(
transformMatrix, RendererCommon.horizontalFlipMatrix());
}
// Undo camera orientation - we report it as rotation instead.
transformMatrix =
RendererCommon.rotateTextureMatrix(transformMatrix, -cameraOrientation);
if (videoFrameEmitTrialEnabled) {
VideoFrame.Buffer buffer = surfaceTextureHelper.createTextureBuffer(
captureFormat.width, captureFormat.height,
RendererCommon.convertMatrixToAndroidGraphicsMatrix(transformMatrix));
final VideoFrame frame = new VideoFrame(buffer, rotation, timestampNs);
events.onFrameCaptured(Camera2Session.this, frame);
frame.release();
} else {
events.onTextureFrameCaptured(Camera2Session.this, captureFormat.width,
captureFormat.height, oesTextureId, transformMatrix, rotation, timestampNs);
}
}
});
Logging.d(TAG, "Camera device successfully started.");
callback.onDone(Camera2Session.this);
}
// Prefers optical stabilization over software stabilization if available. Only enables one of
// the stabilization modes at a time because having both enabled can cause strange results.
private void chooseStabilizationMode(CaptureRequest.Builder captureRequestBuilder) {
final int[] availableOpticalStabilization = cameraCharacteristics.get(
CameraCharacteristics.LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION);
if (availableOpticalStabilization != null) {
for (int mode : availableOpticalStabilization) {
if (mode == CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE_ON) {
captureRequestBuilder.set(CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE,
CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE_ON);
captureRequestBuilder.set(CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE,
CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE_OFF);
Logging.d(TAG, "Using optical stabilization.");
return;
}
}
}
// If no optical mode is available, try software.
final int[] availableVideoStabilization = cameraCharacteristics.get(
CameraCharacteristics.CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES);
for (int mode : availableVideoStabilization) {
if (mode == CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE_ON) {
captureRequestBuilder.set(CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE,
CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE_ON);
captureRequestBuilder.set(CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE,
CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE_OFF);
Logging.d(TAG, "Using video stabilization.");
return;
}
}
Logging.d(TAG, "Stabilization not available.");
}
private void chooseFocusMode(CaptureRequest.Builder captureRequestBuilder) {
final int[] availableFocusModes =
cameraCharacteristics.get(CameraCharacteristics.CONTROL_AF_AVAILABLE_MODES);
for (int mode : availableFocusModes) {
if (mode == CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_VIDEO) {
captureRequestBuilder.set(
CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_VIDEO);
Logging.d(TAG, "Using continuous video auto-focus.");
return;
}
}
Logging.d(TAG, "Auto-focus is not available.");
}
}
private class CameraCaptureCallback extends CameraCaptureSession.CaptureCallback {
@Override
public void onCaptureFailed(
CameraCaptureSession session, CaptureRequest request, CaptureFailure failure) {
Logging.d(TAG, "Capture failed: " + failure);
}
}
public static void create(CreateSessionCallback callback, Events events,
Context applicationContext, CameraManager cameraManager,
SurfaceTextureHelper surfaceTextureHelper, MediaRecorder mediaRecorder, String cameraId,
int width, int height, int framerate) {
new Camera2Session(callback, events, applicationContext, cameraManager, surfaceTextureHelper,
mediaRecorder, cameraId, width, height, framerate);
}
private Camera2Session(CreateSessionCallback callback, Events events, Context applicationContext,
CameraManager cameraManager, SurfaceTextureHelper surfaceTextureHelper,
MediaRecorder mediaRecorder, String cameraId, int width, int height, int framerate) {
Logging.d(TAG, "Create new camera2 session on camera " + cameraId);
videoFrameEmitTrialEnabled =
PeerConnectionFactory.fieldTrialsFindFullName(PeerConnectionFactory.VIDEO_FRAME_EMIT_TRIAL)
.equals(PeerConnectionFactory.TRIAL_ENABLED);
constructionTimeNs = System.nanoTime();
this.cameraThreadHandler = new Handler();
this.callback = callback;
this.events = events;
this.applicationContext = applicationContext;
this.cameraManager = cameraManager;
this.surfaceTextureHelper = surfaceTextureHelper;
this.mediaRecorderSurface = (mediaRecorder != null) ? mediaRecorder.getSurface() : null;
this.cameraId = cameraId;
this.width = width;
this.height = height;
this.framerate = framerate;
start();
}
private void start() {
checkIsOnCameraThread();
Logging.d(TAG, "start");
try {
cameraCharacteristics = cameraManager.getCameraCharacteristics(cameraId);
} catch (final CameraAccessException e) {
reportError("getCameraCharacteristics(): " + e.getMessage());
return;
}
cameraOrientation = cameraCharacteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
isCameraFrontFacing = cameraCharacteristics.get(CameraCharacteristics.LENS_FACING)
== CameraMetadata.LENS_FACING_FRONT;
findCaptureFormat();
openCamera();
}
private void findCaptureFormat() {
checkIsOnCameraThread();
Range<Integer>[] fpsRanges =
cameraCharacteristics.get(CameraCharacteristics.CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES);
fpsUnitFactor = Camera2Enumerator.getFpsUnitFactor(fpsRanges);
List<CaptureFormat.FramerateRange> framerateRanges =
Camera2Enumerator.convertFramerates(fpsRanges, fpsUnitFactor);
List<Size> sizes = Camera2Enumerator.getSupportedSizes(cameraCharacteristics);
Logging.d(TAG, "Available preview sizes: " + sizes);
Logging.d(TAG, "Available fps ranges: " + framerateRanges);
if (framerateRanges.isEmpty() || sizes.isEmpty()) {
reportError("No supported capture formats.");
return;
}
final CaptureFormat.FramerateRange bestFpsRange =
CameraEnumerationAndroid.getClosestSupportedFramerateRange(framerateRanges, framerate);
final Size bestSize = CameraEnumerationAndroid.getClosestSupportedSize(sizes, width, height);
CameraEnumerationAndroid.reportCameraResolution(camera2ResolutionHistogram, bestSize);
captureFormat = new CaptureFormat(bestSize.width, bestSize.height, bestFpsRange);
Logging.d(TAG, "Using capture format: " + captureFormat);
}
private void openCamera() {
checkIsOnCameraThread();
Logging.d(TAG, "Opening camera " + cameraId);
events.onCameraOpening();
try {
cameraManager.openCamera(cameraId, new CameraStateCallback(), cameraThreadHandler);
} catch (CameraAccessException e) {
reportError("Failed to open camera: " + e);
return;
}
}
@Override
public void stop() {
Logging.d(TAG, "Stop camera2 session on camera " + cameraId);
checkIsOnCameraThread();
if (state != SessionState.STOPPED) {
final long stopStartTime = System.nanoTime();
state = SessionState.STOPPED;
stopInternal();
final int stopTimeMs = (int) TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - stopStartTime);
camera2StopTimeMsHistogram.addSample(stopTimeMs);
}
}
private void stopInternal() {
Logging.d(TAG, "Stop internal");
checkIsOnCameraThread();
surfaceTextureHelper.stopListening();
if (captureSession != null) {
captureSession.close();
captureSession = null;
}
if (surface != null) {
surface.release();
surface = null;
}
if (cameraDevice != null) {
cameraDevice.close();
cameraDevice = null;
}
Logging.d(TAG, "Stop done");
}
private void reportError(String error) {
checkIsOnCameraThread();
Logging.e(TAG, "Error: " + error);
final boolean startFailure = (captureSession == null) && (state != SessionState.STOPPED);
state = SessionState.STOPPED;
stopInternal();
if (startFailure) {
callback.onFailure(FailureType.ERROR, error);
} else {
events.onCameraError(this, error);
}
}
private int getDeviceOrientation() {
int orientation = 0;
WindowManager wm = (WindowManager) applicationContext.getSystemService(Context.WINDOW_SERVICE);
switch (wm.getDefaultDisplay().getRotation()) {
case Surface.ROTATION_90:
orientation = 90;
break;
case Surface.ROTATION_180:
orientation = 180;
break;
case Surface.ROTATION_270:
orientation = 270;
break;
case Surface.ROTATION_0:
default:
orientation = 0;
break;
}
return orientation;
}
private int getFrameOrientation() {
int rotation = getDeviceOrientation();
if (!isCameraFrontFacing) {
rotation = 360 - rotation;
}
return (cameraOrientation + rotation) % 360;
}
private void checkIsOnCameraThread() {
if (Thread.currentThread() != cameraThreadHandler.getLooper().getThread()) {
throw new IllegalStateException("Wrong thread");
}
}
}
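
A note on the CONTROL_AE_TARGET_FPS_RANGE division above, with a minimal sketch; the unit convention is an assumption based on Camera2Enumerator, which is not part of this excerpt. CaptureFormat stores framerates in thousandths of a frame per second (e.g. 30000 for 30 fps), while the Camera2 HAL may advertise its AE ranges either in plain fps or already multiplied by 1000; fpsUnitFactor converts between the two.

  // Sketch only: converting a stored CaptureFormat range back to the units the HAL expects.
  // Uses android.util.Range, as imported by Camera2Session above.
  static Range<Integer> toAeTargetFpsRange(
      CameraEnumerationAndroid.CaptureFormat format, int fpsUnitFactor) {
    return new Range<Integer>(
        format.framerate.min / fpsUnitFactor, format.framerate.max / fpsUnitFactor);
  }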

@@ -0,0 +1,590 @@
/*
* Copyright 2016 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import android.content.Context;
import android.media.MediaRecorder;
import android.os.Handler;
import android.os.Looper;
import java.util.Arrays;
@SuppressWarnings("deprecation")
abstract class CameraCapturer implements CameraVideoCapturer {
enum SwitchState {
IDLE, // No switch requested.
PENDING, // Waiting for previous capture session to open.
IN_PROGRESS, // Waiting for new switched capture session to start.
}
enum MediaRecorderState {
IDLE, // No media recording update (add or remove) requested.
IDLE_TO_ACTIVE, // Waiting for new capture session with added MediaRecorder surface to start.
ACTIVE_TO_IDLE, // Waiting for new capture session with removed MediaRecorder surface to start.
ACTIVE, // MediaRecorder was successfully added to camera pipeline.
}
private static final String TAG = "CameraCapturer";
private final static int MAX_OPEN_CAMERA_ATTEMPTS = 3;
private final static int OPEN_CAMERA_DELAY_MS = 500;
private final static int OPEN_CAMERA_TIMEOUT = 10000;
private final CameraEnumerator cameraEnumerator;
private final CameraEventsHandler eventsHandler;
private final Handler uiThreadHandler;
private final CameraSession.CreateSessionCallback createSessionCallback =
new CameraSession.CreateSessionCallback() {
@Override
public void onDone(CameraSession session) {
checkIsOnCameraThread();
Logging.d(TAG,
"Create session done. Switch state: " + switchState
+ ". MediaRecorder state: " + mediaRecorderState);
uiThreadHandler.removeCallbacks(openCameraTimeoutRunnable);
synchronized (stateLock) {
capturerObserver.onCapturerStarted(true /* success */);
sessionOpening = false;
currentSession = session;
cameraStatistics = new CameraStatistics(surfaceHelper, eventsHandler);
firstFrameObserved = false;
stateLock.notifyAll();
if (switchState == SwitchState.IN_PROGRESS) {
if (switchEventsHandler != null) {
switchEventsHandler.onCameraSwitchDone(cameraEnumerator.isFrontFacing(cameraName));
switchEventsHandler = null;
}
switchState = SwitchState.IDLE;
} else if (switchState == SwitchState.PENDING) {
switchState = SwitchState.IDLE;
switchCameraInternal(switchEventsHandler);
}
if (mediaRecorderState == MediaRecorderState.IDLE_TO_ACTIVE
|| mediaRecorderState == MediaRecorderState.ACTIVE_TO_IDLE) {
if (mediaRecorderEventsHandler != null) {
mediaRecorderEventsHandler.onMediaRecorderSuccess();
mediaRecorderEventsHandler = null;
}
if (mediaRecorderState == MediaRecorderState.IDLE_TO_ACTIVE) {
mediaRecorderState = MediaRecorderState.ACTIVE;
} else {
mediaRecorderState = MediaRecorderState.IDLE;
}
}
}
}
@Override
public void onFailure(CameraSession.FailureType failureType, String error) {
checkIsOnCameraThread();
uiThreadHandler.removeCallbacks(openCameraTimeoutRunnable);
synchronized (stateLock) {
capturerObserver.onCapturerStarted(false /* success */);
openAttemptsRemaining--;
if (openAttemptsRemaining <= 0) {
Logging.w(TAG, "Opening camera failed, passing: " + error);
sessionOpening = false;
stateLock.notifyAll();
if (switchState != SwitchState.IDLE) {
if (switchEventsHandler != null) {
switchEventsHandler.onCameraSwitchError(error);
switchEventsHandler = null;
}
switchState = SwitchState.IDLE;
}
if (mediaRecorderState != MediaRecorderState.IDLE) {
if (mediaRecorderEventsHandler != null) {
mediaRecorderEventsHandler.onMediaRecorderError(error);
mediaRecorderEventsHandler = null;
}
mediaRecorderState = MediaRecorderState.IDLE;
}
if (failureType == CameraSession.FailureType.DISCONNECTED) {
eventsHandler.onCameraDisconnected();
} else {
eventsHandler.onCameraError(error);
}
} else {
Logging.w(TAG, "Opening camera failed, retry: " + error);
createSessionInternal(OPEN_CAMERA_DELAY_MS, null /* mediaRecorder */);
}
}
}
};
private final CameraSession.Events cameraSessionEventsHandler = new CameraSession.Events() {
@Override
public void onCameraOpening() {
checkIsOnCameraThread();
synchronized (stateLock) {
if (currentSession != null) {
Logging.w(TAG, "onCameraOpening while session was open.");
return;
}
eventsHandler.onCameraOpening(cameraName);
}
}
@Override
public void onCameraError(CameraSession session, String error) {
checkIsOnCameraThread();
synchronized (stateLock) {
if (session != currentSession) {
Logging.w(TAG, "onCameraError from another session: " + error);
return;
}
eventsHandler.onCameraError(error);
stopCapture();
}
}
@Override
public void onCameraDisconnected(CameraSession session) {
checkIsOnCameraThread();
synchronized (stateLock) {
if (session != currentSession) {
Logging.w(TAG, "onCameraDisconnected from another session.");
return;
}
eventsHandler.onCameraDisconnected();
stopCapture();
}
}
@Override
public void onCameraClosed(CameraSession session) {
checkIsOnCameraThread();
synchronized (stateLock) {
if (session != currentSession && currentSession != null) {
Logging.d(TAG, "onCameraClosed from another session.");
return;
}
eventsHandler.onCameraClosed();
}
}
@Override
public void onFrameCaptured(CameraSession session, VideoFrame frame) {
checkIsOnCameraThread();
synchronized (stateLock) {
if (session != currentSession) {
Logging.w(TAG, "onFrameCaptured from another session.");
return;
}
if (!firstFrameObserved) {
eventsHandler.onFirstFrameAvailable();
firstFrameObserved = true;
}
cameraStatistics.addFrame();
capturerObserver.onFrameCaptured(frame);
}
}
@Override
public void onByteBufferFrameCaptured(
CameraSession session, byte[] data, int width, int height, int rotation, long timestamp) {
checkIsOnCameraThread();
synchronized (stateLock) {
if (session != currentSession) {
Logging.w(TAG, "onByteBufferFrameCaptured from another session.");
return;
}
if (!firstFrameObserved) {
eventsHandler.onFirstFrameAvailable();
firstFrameObserved = true;
}
cameraStatistics.addFrame();
capturerObserver.onByteBufferFrameCaptured(data, width, height, rotation, timestamp);
}
}
@Override
public void onTextureFrameCaptured(CameraSession session, int width, int height,
int oesTextureId, float[] transformMatrix, int rotation, long timestamp) {
checkIsOnCameraThread();
synchronized (stateLock) {
if (session != currentSession) {
Logging.w(TAG, "onTextureFrameCaptured from another session.");
surfaceHelper.returnTextureFrame();
return;
}
if (!firstFrameObserved) {
eventsHandler.onFirstFrameAvailable();
firstFrameObserved = true;
}
cameraStatistics.addFrame();
capturerObserver.onTextureFrameCaptured(
width, height, oesTextureId, transformMatrix, rotation, timestamp);
}
}
};
private final Runnable openCameraTimeoutRunnable = new Runnable() {
@Override
public void run() {
eventsHandler.onCameraError("Camera failed to start within timeout.");
}
};
// Initialized on initialize
// -------------------------
private Handler cameraThreadHandler;
private Context applicationContext;
private CapturerObserver capturerObserver;
private SurfaceTextureHelper surfaceHelper;
private final Object stateLock = new Object();
private boolean sessionOpening; /* guarded by stateLock */
private CameraSession currentSession; /* guarded by stateLock */
private String cameraName; /* guarded by stateLock */
private int width; /* guarded by stateLock */
private int height; /* guarded by stateLock */
private int framerate; /* guarded by stateLock */
private int openAttemptsRemaining; /* guarded by stateLock */
private SwitchState switchState = SwitchState.IDLE; /* guarded by stateLock */
private CameraSwitchHandler switchEventsHandler; /* guarded by stateLock */
// Valid from onDone call until stopCapture, otherwise null.
private CameraStatistics cameraStatistics; /* guarded by stateLock */
private boolean firstFrameObserved; /* guarded by stateLock */
// Variables used on camera thread - do not require stateLock synchronization.
private MediaRecorderState mediaRecorderState = MediaRecorderState.IDLE;
private MediaRecorderHandler mediaRecorderEventsHandler;
public CameraCapturer(
String cameraName, CameraEventsHandler eventsHandler, CameraEnumerator cameraEnumerator) {
if (eventsHandler == null) {
eventsHandler = new CameraEventsHandler() {
@Override
public void onCameraError(String errorDescription) {}
@Override
public void onCameraDisconnected() {}
@Override
public void onCameraFreezed(String errorDescription) {}
@Override
public void onCameraOpening(String cameraName) {}
@Override
public void onFirstFrameAvailable() {}
@Override
public void onCameraClosed() {}
};
}
this.eventsHandler = eventsHandler;
this.cameraEnumerator = cameraEnumerator;
this.cameraName = cameraName;
uiThreadHandler = new Handler(Looper.getMainLooper());
final String[] deviceNames = cameraEnumerator.getDeviceNames();
if (deviceNames.length == 0) {
throw new RuntimeException("No cameras attached.");
}
if (!Arrays.asList(deviceNames).contains(this.cameraName)) {
throw new IllegalArgumentException(
"Camera name " + this.cameraName + " does not match any known camera device.");
}
}
@Override
public void initialize(SurfaceTextureHelper surfaceTextureHelper, Context applicationContext,
CapturerObserver capturerObserver) {
this.applicationContext = applicationContext;
this.capturerObserver = capturerObserver;
this.surfaceHelper = surfaceTextureHelper;
this.cameraThreadHandler =
surfaceTextureHelper == null ? null : surfaceTextureHelper.getHandler();
}
@Override
public void startCapture(int width, int height, int framerate) {
Logging.d(TAG, "startCapture: " + width + "x" + height + "@" + framerate);
if (applicationContext == null) {
throw new RuntimeException("CameraCapturer must be initialized before calling startCapture.");
}
synchronized (stateLock) {
if (sessionOpening || currentSession != null) {
Logging.w(TAG, "Session already open");
return;
}
this.width = width;
this.height = height;
this.framerate = framerate;
sessionOpening = true;
openAttemptsRemaining = MAX_OPEN_CAMERA_ATTEMPTS;
createSessionInternal(0, null /* mediaRecorder */);
}
}
private void createSessionInternal(int delayMs, final MediaRecorder mediaRecorder) {
uiThreadHandler.postDelayed(openCameraTimeoutRunnable, delayMs + OPEN_CAMERA_TIMEOUT);
cameraThreadHandler.postDelayed(new Runnable() {
@Override
public void run() {
createCameraSession(createSessionCallback, cameraSessionEventsHandler, applicationContext,
surfaceHelper, mediaRecorder, cameraName, width, height, framerate);
}
}, delayMs);
}
@Override
public void stopCapture() {
Logging.d(TAG, "Stop capture");
synchronized (stateLock) {
while (sessionOpening) {
Logging.d(TAG, "Stop capture: Waiting for session to open");
ThreadUtils.waitUninterruptibly(stateLock);
}
if (currentSession != null) {
Logging.d(TAG, "Stop capture: Nulling session");
cameraStatistics.release();
cameraStatistics = null;
final CameraSession oldSession = currentSession;
cameraThreadHandler.post(new Runnable() {
@Override
public void run() {
oldSession.stop();
}
});
currentSession = null;
capturerObserver.onCapturerStopped();
} else {
Logging.d(TAG, "Stop capture: No session open");
}
}
Logging.d(TAG, "Stop capture done");
}
@Override
public void changeCaptureFormat(int width, int height, int framerate) {
Logging.d(TAG, "changeCaptureFormat: " + width + "x" + height + "@" + framerate);
synchronized (stateLock) {
stopCapture();
startCapture(width, height, framerate);
}
}
@Override
public void dispose() {
Logging.d(TAG, "dispose");
stopCapture();
}
@Override
public void switchCamera(final CameraSwitchHandler switchEventsHandler) {
Logging.d(TAG, "switchCamera");
cameraThreadHandler.post(new Runnable() {
@Override
public void run() {
switchCameraInternal(switchEventsHandler);
}
});
}
@Override
public void addMediaRecorderToCamera(
final MediaRecorder mediaRecorder, final MediaRecorderHandler mediaRecoderEventsHandler) {
Logging.d(TAG, "addMediaRecorderToCamera");
cameraThreadHandler.post(new Runnable() {
@Override
public void run() {
updateMediaRecorderInternal(mediaRecorder, mediaRecoderEventsHandler);
}
});
}
@Override
public void removeMediaRecorderFromCamera(final MediaRecorderHandler mediaRecoderEventsHandler) {
Logging.d(TAG, "removeMediaRecorderFromCamera");
cameraThreadHandler.post(new Runnable() {
@Override
public void run() {
updateMediaRecorderInternal(null /* mediaRecorder */, mediaRecoderEventsHandler);
}
});
}
@Override
public boolean isScreencast() {
return false;
}
public void printStackTrace() {
Thread cameraThread = null;
if (cameraThreadHandler != null) {
cameraThread = cameraThreadHandler.getLooper().getThread();
}
if (cameraThread != null) {
StackTraceElement[] cameraStackTrace = cameraThread.getStackTrace();
if (cameraStackTrace.length > 0) {
Logging.d(TAG, "CameraCapturer stack trace:");
for (StackTraceElement traceElem : cameraStackTrace) {
Logging.d(TAG, traceElem.toString());
}
}
}
}
private void reportCameraSwitchError(String error, CameraSwitchHandler switchEventsHandler) {
Logging.e(TAG, error);
if (switchEventsHandler != null) {
switchEventsHandler.onCameraSwitchError(error);
}
}
private void switchCameraInternal(final CameraSwitchHandler switchEventsHandler) {
Logging.d(TAG, "switchCamera internal");
final String[] deviceNames = cameraEnumerator.getDeviceNames();
if (deviceNames.length < 2) {
if (switchEventsHandler != null) {
switchEventsHandler.onCameraSwitchError("No camera to switch to.");
}
return;
}
synchronized (stateLock) {
if (switchState != SwitchState.IDLE) {
reportCameraSwitchError("Camera switch already in progress.", switchEventsHandler);
return;
}
if (mediaRecorderState != MediaRecorderState.IDLE) {
reportCameraSwitchError("switchCamera: media recording is active", switchEventsHandler);
return;
}
if (!sessionOpening && currentSession == null) {
reportCameraSwitchError("switchCamera: camera is not running.", switchEventsHandler);
return;
}
this.switchEventsHandler = switchEventsHandler;
if (sessionOpening) {
switchState = SwitchState.PENDING;
return;
} else {
switchState = SwitchState.IN_PROGRESS;
}
Logging.d(TAG, "switchCamera: Stopping session");
cameraStatistics.release();
cameraStatistics = null;
final CameraSession oldSession = currentSession;
cameraThreadHandler.post(new Runnable() {
@Override
public void run() {
oldSession.stop();
}
});
currentSession = null;
int cameraNameIndex = Arrays.asList(deviceNames).indexOf(cameraName);
cameraName = deviceNames[(cameraNameIndex + 1) % deviceNames.length];
sessionOpening = true;
openAttemptsRemaining = 1;
createSessionInternal(0, null /* mediaRecorder */);
}
Logging.d(TAG, "switchCamera done");
}
private void reportUpdateMediaRecorderError(
String error, MediaRecorderHandler mediaRecoderEventsHandler) {
checkIsOnCameraThread();
Logging.e(TAG, error);
if (mediaRecoderEventsHandler != null) {
mediaRecoderEventsHandler.onMediaRecorderError(error);
}
}
private void updateMediaRecorderInternal(
MediaRecorder mediaRecorder, MediaRecorderHandler mediaRecoderEventsHandler) {
checkIsOnCameraThread();
boolean addMediaRecorder = (mediaRecorder != null);
Logging.d(TAG,
"updateMediaRecoderInternal internal. State: " + mediaRecorderState
+ ". Switch state: " + switchState + ". Add MediaRecorder: " + addMediaRecorder);
synchronized (stateLock) {
if ((addMediaRecorder && mediaRecorderState != MediaRecorderState.IDLE)
|| (!addMediaRecorder && mediaRecorderState != MediaRecorderState.ACTIVE)) {
reportUpdateMediaRecorderError(
"Incorrect state for MediaRecorder update.", mediaRecoderEventsHandler);
return;
}
if (switchState != SwitchState.IDLE) {
reportUpdateMediaRecorderError(
"MediaRecorder update while camera is switching.", mediaRecoderEventsHandler);
return;
}
if (currentSession == null) {
reportUpdateMediaRecorderError(
"MediaRecorder update while camera is closed.", mediaRecoderEventsHandler);
return;
}
if (sessionOpening) {
reportUpdateMediaRecorderError(
"MediaRecorder update while camera is still opening.", mediaRecoderEventsHandler);
return;
}
this.mediaRecorderEventsHandler = mediaRecoderEventsHandler;
mediaRecorderState =
addMediaRecorder ? MediaRecorderState.IDLE_TO_ACTIVE : MediaRecorderState.ACTIVE_TO_IDLE;
Logging.d(TAG, "updateMediaRecorder: Stopping session");
cameraStatistics.release();
cameraStatistics = null;
final CameraSession oldSession = currentSession;
cameraThreadHandler.post(new Runnable() {
@Override
public void run() {
oldSession.stop();
}
});
currentSession = null;
sessionOpening = true;
openAttemptsRemaining = 1;
createSessionInternal(0, mediaRecorder);
}
Logging.d(TAG, "updateMediaRecorderInternal done");
}
private void checkIsOnCameraThread() {
if (Thread.currentThread() != cameraThreadHandler.getLooper().getThread()) {
Logging.e(TAG, "Check is on camera thread failed.");
throw new RuntimeException("Not on camera thread.");
}
}
protected String getCameraName() {
synchronized (stateLock) {
return cameraName;
}
}
abstract protected void createCameraSession(
CameraSession.CreateSessionCallback createSessionCallback, CameraSession.Events events,
Context applicationContext, SurfaceTextureHelper surfaceTextureHelper,
MediaRecorder mediaRecoder, String cameraName, int width, int height, int framerate);
}
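
A hedged sketch of typical client-side use, not part of this commit: concrete subclasses (Camera1Capturer/Camera2Capturer in the SDK, not shown here) supply createCameraSession(), and clients normally obtain a capturer through a CameraEnumerator. The createCapturer() call below is assumed from the public SDK API rather than taken from this excerpt.

  // Sketch only: create, initialize and start a camera capturer.
  static CameraVideoCapturer startFirstCamera(android.content.Context appContext,
      SurfaceTextureHelper helper, VideoCapturer.CapturerObserver observer) {
    CameraEnumerator enumerator = new Camera2Enumerator(appContext);
    String deviceName = enumerator.getDeviceNames()[0];
    CameraVideoCapturer capturer = enumerator.createCapturer(deviceName, null /* eventsHandler */);
    capturer.initialize(helper, appContext, observer);
    capturer.startCapture(1280, 720, 30);
    return capturer;
  }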

@@ -0,0 +1,42 @@
/*
* Copyright 2016 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
interface CameraSession {
enum FailureType { ERROR, DISCONNECTED }
// Callbacks are fired on the camera thread.
public interface CreateSessionCallback {
void onDone(CameraSession session);
void onFailure(FailureType failureType, String error);
}
// Events are fired on the camera thread.
public interface Events {
void onCameraOpening();
void onCameraError(CameraSession session, String error);
void onCameraDisconnected(CameraSession session);
void onCameraClosed(CameraSession session);
void onFrameCaptured(CameraSession session, VideoFrame frame);
// The old way of passing frames. Will be removed eventually.
void onByteBufferFrameCaptured(
CameraSession session, byte[] data, int width, int height, int rotation, long timestamp);
void onTextureFrameCaptured(CameraSession session, int width, int height, int oesTextureId,
float[] transformMatrix, int rotation, long timestamp);
}
/**
* Stops the capture. Waits until no more calls to the capture observer will be made.
*/
void stop();
}
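
A minimal sketch, not part of this commit, of what the contract above requires from an implementation (for instance a fake session used in a test): invoke the creation callback and all Events methods on the camera thread, and report onCameraClosed() once stop() completes. Camera1Session and Camera2Session above are the real implementations.

  // Sketch only: the smallest shape of a CameraSession honoring the interface above.
  class FakeCameraSession implements CameraSession {
    private final CameraSession.Events events;
    private boolean stopped;
    FakeCameraSession(CameraSession.CreateSessionCallback callback, CameraSession.Events events) {
      this.events = events;
      events.onCameraOpening();
      callback.onDone(this);
    }
    @Override
    public void stop() {
      if (!stopped) {
        stopped = true;
        events.onCameraClosed(this);
      }
    }
  }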

@@ -0,0 +1,95 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
/**
* BitrateAdjuster that tracks the bandwidth produced by an encoder and dynamically adjusts the
* bitrate. Used for hardware codecs that pay attention to framerate but still deviate from the
* target bitrate by unacceptable margins.
*/
class DynamicBitrateAdjuster extends BaseBitrateAdjuster {
// Change the bitrate at most once every three seconds.
private static final double BITRATE_ADJUSTMENT_SEC = 3.0;
// Maximum bitrate adjustment scale - no more than 4 times.
private static final double BITRATE_ADJUSTMENT_MAX_SCALE = 4;
// Amount of adjustment steps to reach maximum scale.
private static final int BITRATE_ADJUSTMENT_STEPS = 20;
private static final double BITS_PER_BYTE = 8.0;
// How far the codec has deviated above (or below) the target bitrate (tracked in bytes).
private double deviationBytes = 0;
private double timeSinceLastAdjustmentMs = 0;
private int bitrateAdjustmentScaleExp = 0;
@Override
public void setTargets(int targetBitrateBps, int targetFps) {
if (this.targetBitrateBps > 0 && targetBitrateBps < this.targetBitrateBps) {
// Rescale the accumulator level if the accumulator max decreases
deviationBytes = deviationBytes * targetBitrateBps / this.targetBitrateBps;
}
super.setTargets(targetBitrateBps, targetFps);
}
@Override
public void reportEncodedFrame(int size) {
if (targetFps == 0) {
return;
}
// Accumulate the difference between actual and expected frame sizes.
double expectedBytesPerFrame = (targetBitrateBps / BITS_PER_BYTE) / targetFps;
deviationBytes += (size - expectedBytesPerFrame);
timeSinceLastAdjustmentMs += 1000.0 / targetFps;
// Adjust the bitrate when the encoder accumulates one second's worth of data in excess or
// shortfall of the target.
double deviationThresholdBytes = targetBitrateBps / BITS_PER_BYTE;
// Cap the deviation, i.e., don't let it grow beyond some level to avoid using too old data for
// bitrate adjustment. This also prevents taking more than 3 "steps" in a given 3-second cycle.
double deviationCap = BITRATE_ADJUSTMENT_SEC * deviationThresholdBytes;
deviationBytes = Math.min(deviationBytes, deviationCap);
deviationBytes = Math.max(deviationBytes, -deviationCap);
// Do bitrate adjustment every 3 seconds if actual encoder bitrate deviates too much
// from the target value.
if (timeSinceLastAdjustmentMs <= 1000 * BITRATE_ADJUSTMENT_SEC) {
return;
}
if (deviationBytes > deviationThresholdBytes) {
// Encoder generates too high bitrate - need to reduce the scale.
int bitrateAdjustmentInc = (int) (deviationBytes / deviationThresholdBytes + 0.5);
bitrateAdjustmentScaleExp -= bitrateAdjustmentInc;
// Don't let the adjustment scale drop below -BITRATE_ADJUSTMENT_STEPS.
// This sets a minimum exponent of -1 (bitrateAdjustmentScaleExp / BITRATE_ADJUSTMENT_STEPS).
bitrateAdjustmentScaleExp = Math.max(bitrateAdjustmentScaleExp, -BITRATE_ADJUSTMENT_STEPS);
deviationBytes = deviationThresholdBytes;
} else if (deviationBytes < -deviationThresholdBytes) {
// Encoder generates too low bitrate - need to increase the scale.
int bitrateAdjustmentInc = (int) (-deviationBytes / deviationThresholdBytes + 0.5);
bitrateAdjustmentScaleExp += bitrateAdjustmentInc;
// Don't let the adjustment scale exceed BITRATE_ADJUSTMENT_STEPS.
// This sets a maximum exponent of 1 (bitrateAdjustmentScaleExp / BITRATE_ADJUSTMENT_STEPS).
bitrateAdjustmentScaleExp = Math.min(bitrateAdjustmentScaleExp, BITRATE_ADJUSTMENT_STEPS);
deviationBytes = -deviationThresholdBytes;
}
timeSinceLastAdjustmentMs = 0;
}
@Override
public int getAdjustedBitrateBps() {
return (int) (targetBitrateBps
* Math.pow(BITRATE_ADJUSTMENT_MAX_SCALE,
(double) bitrateAdjustmentScaleExp / BITRATE_ADJUSTMENT_STEPS));
}
}
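
A worked example, not part of this commit, of the numbers above: each exponent step scales the output by 4^(1/20) ~= 1.07, and the exponent is clamped to +/-20 steps, so the adjusted bitrate always stays within [target / 4, target * 4].

  // Sketch only: sustained overshoot lowers the adjusted bitrate below the target.
  static int adjustedBitrateUnderOvershoot() {
    DynamicBitrateAdjuster adjuster = new DynamicBitrateAdjuster();
    adjuster.setTargets(1000000 /* 1 Mbps */, 30 /* fps */);
    // Expected frame size is (1000000 / 8) / 30 ~= 4167 bytes; reporting ~8000-byte frames for a
    // bit over three seconds accumulates enough excess to step the scale exponent down.
    for (int i = 0; i < 100; i++) {
      adjuster.reportEncodedFrame(8000);
    }
    return adjuster.getAdjustedBitrateBps(); // Below the 1 Mbps target after the overshoot.
  }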

@@ -0,0 +1,313 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import android.graphics.Canvas;
import android.graphics.Rect;
import android.graphics.SurfaceTexture;
import android.view.Surface;
import android.view.SurfaceHolder;
import javax.microedition.khronos.egl.EGL10;
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.egl.EGLContext;
import javax.microedition.khronos.egl.EGLDisplay;
import javax.microedition.khronos.egl.EGLSurface;
/**
 * Holds EGL state and utility methods for handling an EGL 1.0 EGLContext, an EGLDisplay,
* and an EGLSurface.
*/
class EglBase10 extends EglBase {
// This constant is taken from EGL14.EGL_CONTEXT_CLIENT_VERSION.
private static final int EGL_CONTEXT_CLIENT_VERSION = 0x3098;
private final EGL10 egl;
private EGLContext eglContext;
private EGLConfig eglConfig;
private EGLDisplay eglDisplay;
private EGLSurface eglSurface = EGL10.EGL_NO_SURFACE;
// EGL wrapper for an actual EGLContext.
public static class Context extends EglBase.Context {
private final EGLContext eglContext;
public Context(EGLContext eglContext) {
this.eglContext = eglContext;
}
}
// Create a new context with the specified config type, sharing data with sharedContext.
public EglBase10(Context sharedContext, int[] configAttributes) {
this.egl = (EGL10) EGLContext.getEGL();
eglDisplay = getEglDisplay();
eglConfig = getEglConfig(eglDisplay, configAttributes);
eglContext = createEglContext(sharedContext, eglDisplay, eglConfig);
}
@Override
public void createSurface(Surface surface) {
    /*
     * Wrap the Surface in a SurfaceHolder because eglCreateWindowSurface did not accept a plain
     * Surface object until API 17. Older versions simply call SurfaceHolder.getSurface(), so that
     * is the only method that needs a real implementation.
     */
class FakeSurfaceHolder implements SurfaceHolder {
private final Surface surface;
FakeSurfaceHolder(Surface surface) {
this.surface = surface;
}
@Override
public void addCallback(Callback callback) {}
@Override
public void removeCallback(Callback callback) {}
@Override
public boolean isCreating() {
return false;
}
@Deprecated
@Override
public void setType(int i) {}
@Override
public void setFixedSize(int i, int i2) {}
@Override
public void setSizeFromLayout() {}
@Override
public void setFormat(int i) {}
@Override
public void setKeepScreenOn(boolean b) {}
@Override
public Canvas lockCanvas() {
return null;
}
@Override
public Canvas lockCanvas(Rect rect) {
return null;
}
@Override
public void unlockCanvasAndPost(Canvas canvas) {}
@Override
public Rect getSurfaceFrame() {
return null;
}
@Override
public Surface getSurface() {
return surface;
}
}
createSurfaceInternal(new FakeSurfaceHolder(surface));
}
// Create EGLSurface from the Android SurfaceTexture.
@Override
public void createSurface(SurfaceTexture surfaceTexture) {
createSurfaceInternal(surfaceTexture);
}
// Create EGLSurface from either a SurfaceHolder or a SurfaceTexture.
private void createSurfaceInternal(Object nativeWindow) {
if (!(nativeWindow instanceof SurfaceHolder) && !(nativeWindow instanceof SurfaceTexture)) {
throw new IllegalStateException("Input must be either a SurfaceHolder or SurfaceTexture");
}
checkIsNotReleased();
if (eglSurface != EGL10.EGL_NO_SURFACE) {
throw new RuntimeException("Already has an EGLSurface");
}
int[] surfaceAttribs = {EGL10.EGL_NONE};
eglSurface = egl.eglCreateWindowSurface(eglDisplay, eglConfig, nativeWindow, surfaceAttribs);
if (eglSurface == EGL10.EGL_NO_SURFACE) {
throw new RuntimeException(
"Failed to create window surface: 0x" + Integer.toHexString(egl.eglGetError()));
}
}
// Create dummy 1x1 pixel buffer surface so the context can be made current.
@Override
public void createDummyPbufferSurface() {
createPbufferSurface(1, 1);
}
@Override
public void createPbufferSurface(int width, int height) {
checkIsNotReleased();
if (eglSurface != EGL10.EGL_NO_SURFACE) {
throw new RuntimeException("Already has an EGLSurface");
}
int[] surfaceAttribs = {EGL10.EGL_WIDTH, width, EGL10.EGL_HEIGHT, height, EGL10.EGL_NONE};
eglSurface = egl.eglCreatePbufferSurface(eglDisplay, eglConfig, surfaceAttribs);
if (eglSurface == EGL10.EGL_NO_SURFACE) {
throw new RuntimeException("Failed to create pixel buffer surface with size " + width + "x"
+ height + ": 0x" + Integer.toHexString(egl.eglGetError()));
}
}
@Override
public org.webrtc.EglBase.Context getEglBaseContext() {
return new EglBase10.Context(eglContext);
}
@Override
public boolean hasSurface() {
return eglSurface != EGL10.EGL_NO_SURFACE;
}
@Override
public int surfaceWidth() {
    final int[] widthArray = new int[1];
egl.eglQuerySurface(eglDisplay, eglSurface, EGL10.EGL_WIDTH, widthArray);
return widthArray[0];
}
@Override
public int surfaceHeight() {
    final int[] heightArray = new int[1];
egl.eglQuerySurface(eglDisplay, eglSurface, EGL10.EGL_HEIGHT, heightArray);
return heightArray[0];
}
@Override
public void releaseSurface() {
if (eglSurface != EGL10.EGL_NO_SURFACE) {
egl.eglDestroySurface(eglDisplay, eglSurface);
eglSurface = EGL10.EGL_NO_SURFACE;
}
}
private void checkIsNotReleased() {
if (eglDisplay == EGL10.EGL_NO_DISPLAY || eglContext == EGL10.EGL_NO_CONTEXT
|| eglConfig == null) {
throw new RuntimeException("This object has been released");
}
}
@Override
public void release() {
checkIsNotReleased();
releaseSurface();
detachCurrent();
egl.eglDestroyContext(eglDisplay, eglContext);
egl.eglTerminate(eglDisplay);
eglContext = EGL10.EGL_NO_CONTEXT;
eglDisplay = EGL10.EGL_NO_DISPLAY;
eglConfig = null;
}
@Override
public void makeCurrent() {
checkIsNotReleased();
if (eglSurface == EGL10.EGL_NO_SURFACE) {
throw new RuntimeException("No EGLSurface - can't make current");
}
synchronized (EglBase.lock) {
if (!egl.eglMakeCurrent(eglDisplay, eglSurface, eglSurface, eglContext)) {
throw new RuntimeException(
"eglMakeCurrent failed: 0x" + Integer.toHexString(egl.eglGetError()));
}
}
}
// Detach the current EGL context, so that it can be made current on another thread.
@Override
public void detachCurrent() {
synchronized (EglBase.lock) {
if (!egl.eglMakeCurrent(
eglDisplay, EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_CONTEXT)) {
throw new RuntimeException(
"eglDetachCurrent failed: 0x" + Integer.toHexString(egl.eglGetError()));
}
}
}
@Override
public void swapBuffers() {
checkIsNotReleased();
if (eglSurface == EGL10.EGL_NO_SURFACE) {
throw new RuntimeException("No EGLSurface - can't swap buffers");
}
synchronized (EglBase.lock) {
egl.eglSwapBuffers(eglDisplay, eglSurface);
}
}
@Override
public void swapBuffers(long timeStampNs) {
// Setting presentation time is not supported for EGL 1.0.
swapBuffers();
}
// Return an EGLDisplay, or die trying.
private EGLDisplay getEglDisplay() {
EGLDisplay eglDisplay = egl.eglGetDisplay(EGL10.EGL_DEFAULT_DISPLAY);
if (eglDisplay == EGL10.EGL_NO_DISPLAY) {
throw new RuntimeException(
"Unable to get EGL10 display: 0x" + Integer.toHexString(egl.eglGetError()));
}
int[] version = new int[2];
if (!egl.eglInitialize(eglDisplay, version)) {
throw new RuntimeException(
"Unable to initialize EGL10: 0x" + Integer.toHexString(egl.eglGetError()));
}
return eglDisplay;
}
// Return an EGLConfig, or die trying.
private EGLConfig getEglConfig(EGLDisplay eglDisplay, int[] configAttributes) {
EGLConfig[] configs = new EGLConfig[1];
int[] numConfigs = new int[1];
if (!egl.eglChooseConfig(eglDisplay, configAttributes, configs, configs.length, numConfigs)) {
throw new RuntimeException(
"eglChooseConfig failed: 0x" + Integer.toHexString(egl.eglGetError()));
}
if (numConfigs[0] <= 0) {
throw new RuntimeException("Unable to find any matching EGL config");
}
final EGLConfig eglConfig = configs[0];
if (eglConfig == null) {
throw new RuntimeException("eglChooseConfig returned null");
}
return eglConfig;
}
  // Return an EGLContext, or die trying.
private EGLContext createEglContext(
Context sharedContext, EGLDisplay eglDisplay, EGLConfig eglConfig) {
if (sharedContext != null && sharedContext.eglContext == EGL10.EGL_NO_CONTEXT) {
throw new RuntimeException("Invalid sharedContext");
}
int[] contextAttributes = {EGL_CONTEXT_CLIENT_VERSION, 2, EGL10.EGL_NONE};
EGLContext rootContext =
sharedContext == null ? EGL10.EGL_NO_CONTEXT : sharedContext.eglContext;
final EGLContext eglContext;
synchronized (EglBase.lock) {
eglContext = egl.eglCreateContext(eglDisplay, eglConfig, rootContext, contextAttributes);
}
if (eglContext == EGL10.EGL_NO_CONTEXT) {
throw new RuntimeException(
"Failed to create EGL context: 0x" + Integer.toHexString(egl.eglGetError()));
}
return eglContext;
}
}

View File

@ -0,0 +1,266 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import android.annotation.TargetApi;
import android.graphics.SurfaceTexture;
import android.opengl.EGL14;
import android.opengl.EGLConfig;
import android.opengl.EGLContext;
import android.opengl.EGLDisplay;
import android.opengl.EGLExt;
import android.opengl.EGLSurface;
import android.view.Surface;
/**
* Holds EGL state and utility methods for handling an EGL14 EGLContext, an EGLDisplay,
* and an EGLSurface.
*/
@TargetApi(18)
class EglBase14 extends EglBase {
private static final String TAG = "EglBase14";
private static final int EGLExt_SDK_VERSION = android.os.Build.VERSION_CODES.JELLY_BEAN_MR2;
private static final int CURRENT_SDK_VERSION = android.os.Build.VERSION.SDK_INT;
private EGLContext eglContext;
private EGLConfig eglConfig;
private EGLDisplay eglDisplay;
private EGLSurface eglSurface = EGL14.EGL_NO_SURFACE;
// EGL 1.4 is supported from API 17. But EGLExt that is used for setting presentation
// time stamp on a surface is supported from 18 so we require 18.
public static boolean isEGL14Supported() {
Logging.d(TAG,
"SDK version: " + CURRENT_SDK_VERSION
+ ". isEGL14Supported: " + (CURRENT_SDK_VERSION >= EGLExt_SDK_VERSION));
return (CURRENT_SDK_VERSION >= EGLExt_SDK_VERSION);
}
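  // Illustrative usage (not part of this file; configAttributes stands in for one of the EglBase
  // config arrays, e.g. the CONFIG_RECORDABLE used by HardwareVideoEncoder):
  //   EglBase eglBase = EglBase14.isEGL14Supported()
  //       ? new EglBase14(/* sharedContext= */ null, configAttributes)
  //       : new EglBase10(/* sharedContext= */ null, configAttributes);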
public static class Context extends EglBase.Context {
private final android.opengl.EGLContext egl14Context;
public Context(android.opengl.EGLContext eglContext) {
this.egl14Context = eglContext;
}
}
// Create a new context with the specified config type, sharing data with sharedContext.
// |sharedContext| may be null.
public EglBase14(EglBase14.Context sharedContext, int[] configAttributes) {
eglDisplay = getEglDisplay();
eglConfig = getEglConfig(eglDisplay, configAttributes);
eglContext = createEglContext(sharedContext, eglDisplay, eglConfig);
}
// Create EGLSurface from the Android Surface.
@Override
public void createSurface(Surface surface) {
createSurfaceInternal(surface);
}
// Create EGLSurface from the Android SurfaceTexture.
@Override
public void createSurface(SurfaceTexture surfaceTexture) {
createSurfaceInternal(surfaceTexture);
}
// Create EGLSurface from either Surface or SurfaceTexture.
private void createSurfaceInternal(Object surface) {
if (!(surface instanceof Surface) && !(surface instanceof SurfaceTexture)) {
throw new IllegalStateException("Input must be either a Surface or SurfaceTexture");
}
checkIsNotReleased();
if (eglSurface != EGL14.EGL_NO_SURFACE) {
throw new RuntimeException("Already has an EGLSurface");
}
int[] surfaceAttribs = {EGL14.EGL_NONE};
eglSurface = EGL14.eglCreateWindowSurface(eglDisplay, eglConfig, surface, surfaceAttribs, 0);
if (eglSurface == EGL14.EGL_NO_SURFACE) {
throw new RuntimeException(
"Failed to create window surface: 0x" + Integer.toHexString(EGL14.eglGetError()));
}
}
@Override
public void createDummyPbufferSurface() {
createPbufferSurface(1, 1);
}
@Override
public void createPbufferSurface(int width, int height) {
checkIsNotReleased();
if (eglSurface != EGL14.EGL_NO_SURFACE) {
throw new RuntimeException("Already has an EGLSurface");
}
int[] surfaceAttribs = {EGL14.EGL_WIDTH, width, EGL14.EGL_HEIGHT, height, EGL14.EGL_NONE};
eglSurface = EGL14.eglCreatePbufferSurface(eglDisplay, eglConfig, surfaceAttribs, 0);
if (eglSurface == EGL14.EGL_NO_SURFACE) {
throw new RuntimeException("Failed to create pixel buffer surface with size " + width + "x"
+ height + ": 0x" + Integer.toHexString(EGL14.eglGetError()));
}
}
@Override
public Context getEglBaseContext() {
return new EglBase14.Context(eglContext);
}
@Override
public boolean hasSurface() {
return eglSurface != EGL14.EGL_NO_SURFACE;
}
@Override
public int surfaceWidth() {
    final int[] widthArray = new int[1];
EGL14.eglQuerySurface(eglDisplay, eglSurface, EGL14.EGL_WIDTH, widthArray, 0);
return widthArray[0];
}
@Override
public int surfaceHeight() {
    final int[] heightArray = new int[1];
EGL14.eglQuerySurface(eglDisplay, eglSurface, EGL14.EGL_HEIGHT, heightArray, 0);
return heightArray[0];
}
@Override
public void releaseSurface() {
if (eglSurface != EGL14.EGL_NO_SURFACE) {
EGL14.eglDestroySurface(eglDisplay, eglSurface);
eglSurface = EGL14.EGL_NO_SURFACE;
}
}
private void checkIsNotReleased() {
if (eglDisplay == EGL14.EGL_NO_DISPLAY || eglContext == EGL14.EGL_NO_CONTEXT
|| eglConfig == null) {
throw new RuntimeException("This object has been released");
}
}
@Override
public void release() {
checkIsNotReleased();
releaseSurface();
detachCurrent();
EGL14.eglDestroyContext(eglDisplay, eglContext);
EGL14.eglReleaseThread();
EGL14.eglTerminate(eglDisplay);
eglContext = EGL14.EGL_NO_CONTEXT;
eglDisplay = EGL14.EGL_NO_DISPLAY;
eglConfig = null;
}
@Override
public void makeCurrent() {
checkIsNotReleased();
if (eglSurface == EGL14.EGL_NO_SURFACE) {
throw new RuntimeException("No EGLSurface - can't make current");
}
synchronized (EglBase.lock) {
if (!EGL14.eglMakeCurrent(eglDisplay, eglSurface, eglSurface, eglContext)) {
throw new RuntimeException(
"eglMakeCurrent failed: 0x" + Integer.toHexString(EGL14.eglGetError()));
}
}
}
// Detach the current EGL context, so that it can be made current on another thread.
@Override
public void detachCurrent() {
synchronized (EglBase.lock) {
if (!EGL14.eglMakeCurrent(
eglDisplay, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_CONTEXT)) {
throw new RuntimeException(
"eglDetachCurrent failed: 0x" + Integer.toHexString(EGL14.eglGetError()));
}
}
}
@Override
public void swapBuffers() {
checkIsNotReleased();
if (eglSurface == EGL14.EGL_NO_SURFACE) {
throw new RuntimeException("No EGLSurface - can't swap buffers");
}
synchronized (EglBase.lock) {
EGL14.eglSwapBuffers(eglDisplay, eglSurface);
}
}
@Override
public void swapBuffers(long timeStampNs) {
checkIsNotReleased();
if (eglSurface == EGL14.EGL_NO_SURFACE) {
throw new RuntimeException("No EGLSurface - can't swap buffers");
}
synchronized (EglBase.lock) {
// See
// https://android.googlesource.com/platform/frameworks/native/+/tools_r22.2/opengl/specs/EGL_ANDROID_presentation_time.txt
EGLExt.eglPresentationTimeANDROID(eglDisplay, eglSurface, timeStampNs);
EGL14.eglSwapBuffers(eglDisplay, eglSurface);
}
}
// Return an EGLDisplay, or die trying.
private static EGLDisplay getEglDisplay() {
EGLDisplay eglDisplay = EGL14.eglGetDisplay(EGL14.EGL_DEFAULT_DISPLAY);
if (eglDisplay == EGL14.EGL_NO_DISPLAY) {
throw new RuntimeException(
"Unable to get EGL14 display: 0x" + Integer.toHexString(EGL14.eglGetError()));
}
int[] version = new int[2];
if (!EGL14.eglInitialize(eglDisplay, version, 0, version, 1)) {
throw new RuntimeException(
"Unable to initialize EGL14: 0x" + Integer.toHexString(EGL14.eglGetError()));
}
return eglDisplay;
}
// Return an EGLConfig, or die trying.
private static EGLConfig getEglConfig(EGLDisplay eglDisplay, int[] configAttributes) {
EGLConfig[] configs = new EGLConfig[1];
int[] numConfigs = new int[1];
if (!EGL14.eglChooseConfig(
eglDisplay, configAttributes, 0, configs, 0, configs.length, numConfigs, 0)) {
throw new RuntimeException(
"eglChooseConfig failed: 0x" + Integer.toHexString(EGL14.eglGetError()));
}
if (numConfigs[0] <= 0) {
throw new RuntimeException("Unable to find any matching EGL config");
}
final EGLConfig eglConfig = configs[0];
if (eglConfig == null) {
throw new RuntimeException("eglChooseConfig returned null");
}
return eglConfig;
}
  // Return an EGLContext, or die trying.
private static EGLContext createEglContext(
EglBase14.Context sharedContext, EGLDisplay eglDisplay, EGLConfig eglConfig) {
if (sharedContext != null && sharedContext.egl14Context == EGL14.EGL_NO_CONTEXT) {
throw new RuntimeException("Invalid sharedContext");
}
int[] contextAttributes = {EGL14.EGL_CONTEXT_CLIENT_VERSION, 2, EGL14.EGL_NONE};
EGLContext rootContext =
sharedContext == null ? EGL14.EGL_NO_CONTEXT : sharedContext.egl14Context;
final EGLContext eglContext;
synchronized (EglBase.lock) {
eglContext = EGL14.eglCreateContext(eglDisplay, eglConfig, rootContext, contextAttributes, 0);
}
if (eglContext == EGL14.EGL_NO_CONTEXT) {
throw new RuntimeException(
"Failed to create EGL context: 0x" + Integer.toHexString(EGL14.eglGetError()));
}
return eglContext;
}
}

View File

@ -0,0 +1,30 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
/**
* BitrateAdjuster that adjusts the bitrate to compensate for changes in the framerate. Used with
* hardware codecs that assume the framerate never changes.
*/
class FramerateBitrateAdjuster extends BaseBitrateAdjuster {
private static final int INITIAL_FPS = 30;
@Override
public void setTargets(int targetBitrateBps, int targetFps) {
if (this.targetFps == 0) {
// Framerate-based bitrate adjustment always initializes to the same framerate.
targetFps = INITIAL_FPS;
}
super.setTargets(targetBitrateBps, targetFps);
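    // Scale the target bitrate so the bits spent per frame stay constant even though the codec is
    // always configured for INITIAL_FPS. For example (illustrative numbers), a 500 kbps target at
    // 15 fps becomes a 1 Mbps target at the assumed 30 fps.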
    // Multiply before dividing so integer arithmetic does not truncate the scale factor.
    this.targetBitrateBps = this.targetBitrateBps * INITIAL_FPS / this.targetFps;
}
}

View File

@ -0,0 +1,700 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import android.annotation.TargetApi;
import android.media.MediaCodec;
import android.media.MediaCodecInfo.CodecCapabilities;
import android.media.MediaFormat;
import android.os.SystemClock;
import android.view.Surface;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.concurrent.BlockingDeque;
import java.util.concurrent.LinkedBlockingDeque;
import java.util.concurrent.TimeUnit;
import org.webrtc.ThreadUtils.ThreadChecker;
/** Android hardware video decoder. */
@TargetApi(16)
@SuppressWarnings("deprecation") // Cannot support API 16 without using deprecated methods.
class HardwareVideoDecoder
implements VideoDecoder, SurfaceTextureHelper.OnTextureFrameAvailableListener {
private static final String TAG = "HardwareVideoDecoder";
// TODO(magjed): Use MediaFormat.KEY_* constants when part of the public API.
private static final String MEDIA_FORMAT_KEY_STRIDE = "stride";
private static final String MEDIA_FORMAT_KEY_SLICE_HEIGHT = "slice-height";
private static final String MEDIA_FORMAT_KEY_CROP_LEFT = "crop-left";
private static final String MEDIA_FORMAT_KEY_CROP_RIGHT = "crop-right";
private static final String MEDIA_FORMAT_KEY_CROP_TOP = "crop-top";
private static final String MEDIA_FORMAT_KEY_CROP_BOTTOM = "crop-bottom";
// MediaCodec.release() occasionally hangs. Release stops waiting and reports failure after
// this timeout.
private static final int MEDIA_CODEC_RELEASE_TIMEOUT_MS = 5000;
// WebRTC queues input frames quickly in the beginning on the call. Wait for input buffers with a
// long timeout (500 ms) to prevent this from causing the codec to return an error.
private static final int DEQUEUE_INPUT_TIMEOUT_US = 500000;
// Dequeuing an output buffer will block until a buffer is available (up to 100 milliseconds).
// If this timeout is exceeded, the output thread will unblock and check if the decoder is still
// running. If it is, it will block on dequeue again. Otherwise, it will stop and release the
// MediaCodec.
private static final int DEQUEUE_OUTPUT_BUFFER_TIMEOUT_US = 100000;
private final String codecName;
private final VideoCodecType codecType;
private static class FrameInfo {
final long decodeStartTimeMs;
final int rotation;
FrameInfo(long decodeStartTimeMs, int rotation) {
this.decodeStartTimeMs = decodeStartTimeMs;
this.rotation = rotation;
}
}
private final BlockingDeque<FrameInfo> frameInfos;
private int colorFormat;
// Output thread runs a loop which polls MediaCodec for decoded output buffers. It reformats
// those buffers into VideoFrames and delivers them to the callback. Variable is set on decoder
// thread and is immutable while the codec is running.
private Thread outputThread;
// Checker that ensures work is run on the output thread.
private ThreadChecker outputThreadChecker;
// Checker that ensures work is run on the decoder thread. The decoder thread is owned by the
// caller and must be used to call initDecode, decode, and release.
private ThreadChecker decoderThreadChecker;
private volatile boolean running = false;
private volatile Exception shutdownException = null;
// Prevents the decoder from being released before all output buffers have been released.
private final Object activeOutputBuffersLock = new Object();
private int activeOutputBuffers = 0; // Guarded by activeOutputBuffersLock
// Dimensions (width, height, stride, and sliceHeight) may be accessed by either the decode thread
// or the output thread. Accesses should be protected with this lock.
private final Object dimensionLock = new Object();
private int width;
private int height;
private int stride;
private int sliceHeight;
// Whether the decoder has finished the first frame. The codec may not change output dimensions
// after delivering the first frame. Only accessed on the output thread while the decoder is
// running.
private boolean hasDecodedFirstFrame;
// Whether the decoder has seen a key frame. The first frame must be a key frame. Only accessed
// on the decoder thread.
private boolean keyFrameRequired;
private final EglBase.Context sharedContext;
// Valid and immutable while the decoder is running.
private SurfaceTextureHelper surfaceTextureHelper;
private Surface surface = null;
private static class DecodedTextureMetadata {
final int width;
final int height;
final int rotation;
final long presentationTimestampUs;
final Integer decodeTimeMs;
DecodedTextureMetadata(
int width, int height, int rotation, long presentationTimestampUs, Integer decodeTimeMs) {
this.width = width;
this.height = height;
this.rotation = rotation;
this.presentationTimestampUs = presentationTimestampUs;
this.decodeTimeMs = decodeTimeMs;
}
}
// Metadata for the last frame rendered to the texture. Only accessed on the texture helper's
// thread.
private DecodedTextureMetadata renderedTextureMetadata;
// Decoding proceeds asynchronously. This callback returns decoded frames to the caller. Valid
// and immutable while the decoder is running.
private Callback callback;
// Valid and immutable while the decoder is running.
private MediaCodec codec = null;
HardwareVideoDecoder(
String codecName, VideoCodecType codecType, int colorFormat, EglBase.Context sharedContext) {
if (!isSupportedColorFormat(colorFormat)) {
throw new IllegalArgumentException("Unsupported color format: " + colorFormat);
}
this.codecName = codecName;
this.codecType = codecType;
this.colorFormat = colorFormat;
this.sharedContext = sharedContext;
this.frameInfos = new LinkedBlockingDeque<>();
}
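  // Illustrative construction (not part of this file; the codec name is a made-up example and is
  // normally obtained by enumerating MediaCodec implementations):
  //   VideoDecoder decoder = new HardwareVideoDecoder("OMX.example.h264.decoder",
  //       VideoCodecType.H264, CodecCapabilities.COLOR_FormatYUV420Planar, /* sharedContext= */ null);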
@Override
public VideoCodecStatus initDecode(Settings settings, Callback callback) {
this.decoderThreadChecker = new ThreadChecker();
this.callback = callback;
if (sharedContext != null) {
surfaceTextureHelper = SurfaceTextureHelper.create("decoder-texture-thread", sharedContext);
surface = new Surface(surfaceTextureHelper.getSurfaceTexture());
surfaceTextureHelper.startListening(this);
}
return initDecodeInternal(settings.width, settings.height);
}
// Internal variant is used when restarting the codec due to reconfiguration.
private VideoCodecStatus initDecodeInternal(int width, int height) {
decoderThreadChecker.checkIsOnValidThread();
Logging.d(TAG, "initDecodeInternal");
if (outputThread != null) {
Logging.e(TAG, "initDecodeInternal called while the codec is already running");
return VideoCodecStatus.ERROR;
}
// Note: it is not necessary to initialize dimensions under the lock, since the output thread
// is not running.
this.width = width;
this.height = height;
stride = width;
sliceHeight = height;
hasDecodedFirstFrame = false;
keyFrameRequired = true;
try {
codec = MediaCodec.createByCodecName(codecName);
} catch (IOException | IllegalArgumentException e) {
Logging.e(TAG, "Cannot create media decoder " + codecName);
return VideoCodecStatus.ERROR;
}
try {
MediaFormat format = MediaFormat.createVideoFormat(codecType.mimeType(), width, height);
if (sharedContext == null) {
format.setInteger(MediaFormat.KEY_COLOR_FORMAT, colorFormat);
}
codec.configure(format, surface, null, 0);
codec.start();
} catch (IllegalStateException e) {
Logging.e(TAG, "initDecode failed", e);
release();
return VideoCodecStatus.ERROR;
}
running = true;
outputThread = createOutputThread();
outputThread.start();
Logging.d(TAG, "initDecodeInternal done");
return VideoCodecStatus.OK;
}
@Override
public VideoCodecStatus decode(EncodedImage frame, DecodeInfo info) {
decoderThreadChecker.checkIsOnValidThread();
if (codec == null || callback == null) {
Logging.d(TAG, "decode uninitalized, codec: " + codec + ", callback: " + callback);
return VideoCodecStatus.UNINITIALIZED;
}
if (frame.buffer == null) {
Logging.e(TAG, "decode() - no input data");
return VideoCodecStatus.ERR_PARAMETER;
}
int size = frame.buffer.remaining();
if (size == 0) {
Logging.e(TAG, "decode() - input buffer empty");
return VideoCodecStatus.ERR_PARAMETER;
}
// Load dimensions from shared memory under the dimension lock.
int width, height;
synchronized (dimensionLock) {
width = this.width;
height = this.height;
}
// Check if the resolution changed and reset the codec if necessary.
if (frame.encodedWidth * frame.encodedHeight > 0
&& (frame.encodedWidth != width || frame.encodedHeight != height)) {
VideoCodecStatus status = reinitDecode(frame.encodedWidth, frame.encodedHeight);
if (status != VideoCodecStatus.OK) {
return status;
}
}
if (keyFrameRequired) {
// Need to process a key frame first.
if (frame.frameType != EncodedImage.FrameType.VideoFrameKey) {
Logging.e(TAG, "decode() - key frame required first");
return VideoCodecStatus.ERROR;
}
if (!frame.completeFrame) {
Logging.e(TAG, "decode() - complete frame required first");
return VideoCodecStatus.ERROR;
}
}
int index;
try {
index = codec.dequeueInputBuffer(DEQUEUE_INPUT_TIMEOUT_US);
} catch (IllegalStateException e) {
Logging.e(TAG, "dequeueInputBuffer failed", e);
return VideoCodecStatus.ERROR;
}
if (index < 0) {
// Decoder is falling behind. No input buffers available.
// The decoder can't simply drop frames; it might lose a key frame.
Logging.e(TAG, "decode() - no HW buffers available; decoder falling behind");
return VideoCodecStatus.ERROR;
}
ByteBuffer buffer;
try {
buffer = codec.getInputBuffers()[index];
} catch (IllegalStateException e) {
Logging.e(TAG, "getInputBuffers failed", e);
return VideoCodecStatus.ERROR;
}
if (buffer.capacity() < size) {
Logging.e(TAG, "decode() - HW buffer too small");
return VideoCodecStatus.ERROR;
}
buffer.put(frame.buffer);
frameInfos.offer(new FrameInfo(SystemClock.elapsedRealtime(), frame.rotation));
try {
codec.queueInputBuffer(index, 0 /* offset */, size,
TimeUnit.NANOSECONDS.toMicros(frame.captureTimeNs), 0 /* flags */);
} catch (IllegalStateException e) {
Logging.e(TAG, "queueInputBuffer failed", e);
frameInfos.pollLast();
return VideoCodecStatus.ERROR;
}
if (keyFrameRequired) {
keyFrameRequired = false;
}
return VideoCodecStatus.OK;
}
@Override
public boolean getPrefersLateDecoding() {
return true;
}
@Override
public String getImplementationName() {
return "HardwareVideoDecoder: " + codecName;
}
@Override
public VideoCodecStatus release() {
// TODO(sakal): This is not called on the correct thread but is still called synchronously.
// Re-enable the check once this is called on the correct thread.
// decoderThreadChecker.checkIsOnValidThread();
Logging.d(TAG, "release");
VideoCodecStatus status = releaseInternal();
if (surface != null) {
surface.release();
surface = null;
surfaceTextureHelper.stopListening();
surfaceTextureHelper.dispose();
surfaceTextureHelper = null;
}
callback = null;
frameInfos.clear();
return status;
}
// Internal variant is used when restarting the codec due to reconfiguration.
private VideoCodecStatus releaseInternal() {
if (!running) {
Logging.d(TAG, "release: Decoder is not running.");
return VideoCodecStatus.OK;
}
try {
// The outputThread actually stops and releases the codec once running is false.
running = false;
if (!ThreadUtils.joinUninterruptibly(outputThread, MEDIA_CODEC_RELEASE_TIMEOUT_MS)) {
// Log an exception to capture the stack trace and turn it into a TIMEOUT error.
Logging.e(TAG, "Media decoder release timeout", new RuntimeException());
return VideoCodecStatus.TIMEOUT;
}
if (shutdownException != null) {
// Log the exception and turn it into an error. Wrap the exception in a new exception to
// capture both the output thread's stack trace and this thread's stack trace.
Logging.e(TAG, "Media decoder release error", new RuntimeException(shutdownException));
shutdownException = null;
return VideoCodecStatus.ERROR;
}
} finally {
codec = null;
outputThread = null;
}
return VideoCodecStatus.OK;
}
private VideoCodecStatus reinitDecode(int newWidth, int newHeight) {
decoderThreadChecker.checkIsOnValidThread();
VideoCodecStatus status = releaseInternal();
if (status != VideoCodecStatus.OK) {
return status;
}
return initDecodeInternal(newWidth, newHeight);
}
private Thread createOutputThread() {
return new Thread("HardwareVideoDecoder.outputThread") {
@Override
public void run() {
outputThreadChecker = new ThreadChecker();
while (running) {
deliverDecodedFrame();
}
releaseCodecOnOutputThread();
}
};
}
private void deliverDecodedFrame() {
outputThreadChecker.checkIsOnValidThread();
try {
MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
// Block until an output buffer is available (up to 100 milliseconds). If the timeout is
// exceeded, deliverDecodedFrame() will be called again on the next iteration of the output
// thread's loop. Blocking here prevents the output thread from busy-waiting while the codec
// is idle.
int result = codec.dequeueOutputBuffer(info, DEQUEUE_OUTPUT_BUFFER_TIMEOUT_US);
if (result == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
reformat(codec.getOutputFormat());
return;
}
if (result < 0) {
Logging.v(TAG, "dequeueOutputBuffer returned " + result);
return;
}
FrameInfo frameInfo = frameInfos.poll();
Integer decodeTimeMs = null;
int rotation = 0;
if (frameInfo != null) {
decodeTimeMs = (int) (SystemClock.elapsedRealtime() - frameInfo.decodeStartTimeMs);
rotation = frameInfo.rotation;
}
hasDecodedFirstFrame = true;
if (surfaceTextureHelper != null) {
deliverTextureFrame(result, info, rotation, decodeTimeMs);
} else {
deliverByteFrame(result, info, rotation, decodeTimeMs);
}
} catch (IllegalStateException e) {
Logging.e(TAG, "deliverDecodedFrame failed", e);
}
}
private void deliverTextureFrame(final int index, final MediaCodec.BufferInfo info,
final int rotation, final Integer decodeTimeMs) {
// Load dimensions from shared memory under the dimension lock.
final int width, height;
synchronized (dimensionLock) {
width = this.width;
height = this.height;
}
surfaceTextureHelper.getHandler().post(new Runnable() {
@Override
public void run() {
renderedTextureMetadata = new DecodedTextureMetadata(
width, height, rotation, info.presentationTimeUs, decodeTimeMs);
codec.releaseOutputBuffer(index, true);
}
});
}
@Override
public void onTextureFrameAvailable(int oesTextureId, float[] transformMatrix, long timestampNs) {
VideoFrame.TextureBuffer oesBuffer = surfaceTextureHelper.createTextureBuffer(
renderedTextureMetadata.width, renderedTextureMetadata.height,
RendererCommon.convertMatrixToAndroidGraphicsMatrix(transformMatrix));
VideoFrame frame = new VideoFrame(oesBuffer, renderedTextureMetadata.rotation,
renderedTextureMetadata.presentationTimestampUs * 1000);
callback.onDecodedFrame(frame, renderedTextureMetadata.decodeTimeMs, null /* qp */);
frame.release();
}
private void deliverByteFrame(
int result, MediaCodec.BufferInfo info, int rotation, Integer decodeTimeMs) {
// Load dimensions from shared memory under the dimension lock.
int width, height, stride, sliceHeight;
synchronized (dimensionLock) {
width = this.width;
height = this.height;
stride = this.stride;
sliceHeight = this.sliceHeight;
}
// Output must be at least width * height bytes for Y channel, plus (width / 2) * (height / 2)
// bytes for each of the U and V channels.
if (info.size < width * height * 3 / 2) {
Logging.e(TAG, "Insufficient output buffer size: " + info.size);
return;
}
if (info.size < stride * height * 3 / 2 && sliceHeight == height && stride > width) {
// Some codecs (Exynos) report an incorrect stride. Correct it here.
// Expected size == stride * height * 3 / 2. A bit of algebra gives the correct stride as
// 2 * size / (3 * height).
stride = info.size * 2 / (height * 3);
}
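    // For example (illustrative numbers): if a 640x480 frame arrives with info.size == 460800
    // bytes but the codec reported stride == 704, the corrected stride is
    // 2 * 460800 / (3 * 480) = 640.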
ByteBuffer buffer = codec.getOutputBuffers()[result];
buffer.position(info.offset);
buffer.limit(info.offset + info.size);
buffer = buffer.slice();
final VideoFrame.Buffer frameBuffer;
if (colorFormat == CodecCapabilities.COLOR_FormatYUV420Planar) {
if (sliceHeight % 2 == 0) {
frameBuffer = wrapI420Buffer(buffer, result, stride, sliceHeight, width, height);
} else {
// WebRTC rounds chroma plane size conversions up so we have to repeat the last row.
frameBuffer = copyI420Buffer(buffer, result, stride, sliceHeight, width, height);
}
} else {
// All other supported color formats are NV12.
frameBuffer = wrapNV12Buffer(buffer, result, stride, sliceHeight, width, height);
}
long presentationTimeNs = info.presentationTimeUs * 1000;
VideoFrame frame = new VideoFrame(frameBuffer, rotation, presentationTimeNs);
// Note that qp is parsed on the C++ side.
callback.onDecodedFrame(frame, decodeTimeMs, null /* qp */);
frame.release();
}
private VideoFrame.Buffer wrapNV12Buffer(ByteBuffer buffer, int outputBufferIndex, int stride,
int sliceHeight, int width, int height) {
synchronized (activeOutputBuffersLock) {
activeOutputBuffers++;
}
return new NV12Buffer(width, height, stride, sliceHeight, buffer, () -> {
codec.releaseOutputBuffer(outputBufferIndex, false);
synchronized (activeOutputBuffersLock) {
activeOutputBuffers--;
activeOutputBuffersLock.notifyAll();
}
});
}
private VideoFrame.Buffer copyI420Buffer(ByteBuffer buffer, int outputBufferIndex, int stride,
int sliceHeight, int width, int height) {
final int uvStride = stride / 2;
final int yPos = 0;
final int uPos = yPos + stride * sliceHeight;
final int uEnd = uPos + uvStride * (sliceHeight / 2);
final int vPos = uPos + uvStride * sliceHeight / 2;
final int vEnd = vPos + uvStride * (sliceHeight / 2);
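    // Example plane offsets (illustrative numbers): with stride == 640 and sliceHeight == 480,
    // uvStride == 320, uPos == 307200, uEnd == vPos == 384000 and vEnd == 460800.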
VideoFrame.I420Buffer frameBuffer = I420BufferImpl.allocate(width, height);
ByteBuffer dataY = frameBuffer.getDataY();
dataY.position(0); // Ensure we are in the beginning.
buffer.position(yPos);
buffer.limit(uPos);
dataY.put(buffer);
dataY.position(0); // Go back to beginning.
ByteBuffer dataU = frameBuffer.getDataU();
dataU.position(0); // Ensure we are in the beginning.
buffer.position(uPos);
buffer.limit(uEnd);
dataU.put(buffer);
if (sliceHeight % 2 != 0) {
buffer.position(uEnd - uvStride); // Repeat the last row.
dataU.put(buffer);
}
dataU.position(0); // Go back to beginning.
    ByteBuffer dataV = frameBuffer.getDataV();
dataV.position(0); // Ensure we are in the beginning.
buffer.position(vPos);
buffer.limit(vEnd);
dataV.put(buffer);
if (sliceHeight % 2 != 0) {
buffer.position(vEnd - uvStride); // Repeat the last row.
dataV.put(buffer);
}
dataV.position(0); // Go back to beginning.
codec.releaseOutputBuffer(outputBufferIndex, false);
return frameBuffer;
}
private VideoFrame.Buffer wrapI420Buffer(ByteBuffer buffer, int outputBufferIndex, int stride,
int sliceHeight, int width, int height) {
final int uvStride = stride / 2;
final int yPos = 0;
final int uPos = yPos + stride * sliceHeight;
final int uEnd = uPos + uvStride * (sliceHeight / 2);
final int vPos = uPos + uvStride * sliceHeight / 2;
final int vEnd = vPos + uvStride * (sliceHeight / 2);
synchronized (activeOutputBuffersLock) {
activeOutputBuffers++;
}
Runnable releaseCallback = () -> {
codec.releaseOutputBuffer(outputBufferIndex, false);
synchronized (activeOutputBuffersLock) {
activeOutputBuffers--;
activeOutputBuffersLock.notifyAll();
}
};
buffer.position(yPos);
buffer.limit(uPos);
ByteBuffer dataY = buffer.slice();
buffer.position(uPos);
buffer.limit(uEnd);
ByteBuffer dataU = buffer.slice();
buffer.position(vPos);
buffer.limit(vEnd);
ByteBuffer dataV = buffer.slice();
return new I420BufferImpl(
width, height, dataY, stride, dataU, uvStride, dataV, uvStride, releaseCallback);
}
private void reformat(MediaFormat format) {
outputThreadChecker.checkIsOnValidThread();
Logging.d(TAG, "Decoder format changed: " + format.toString());
final int newWidth;
final int newHeight;
if (format.containsKey(MEDIA_FORMAT_KEY_CROP_LEFT)
&& format.containsKey(MEDIA_FORMAT_KEY_CROP_RIGHT)
&& format.containsKey(MEDIA_FORMAT_KEY_CROP_BOTTOM)
&& format.containsKey(MEDIA_FORMAT_KEY_CROP_TOP)) {
newWidth = 1 + format.getInteger(MEDIA_FORMAT_KEY_CROP_RIGHT)
- format.getInteger(MEDIA_FORMAT_KEY_CROP_LEFT);
newHeight = 1 + format.getInteger(MEDIA_FORMAT_KEY_CROP_BOTTOM)
- format.getInteger(MEDIA_FORMAT_KEY_CROP_TOP);
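      // The crop values are inclusive, e.g. crop-left == 0 and crop-right == 639 describe a
      // 640-pixel-wide frame.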
} else {
newWidth = format.getInteger(MediaFormat.KEY_WIDTH);
newHeight = format.getInteger(MediaFormat.KEY_HEIGHT);
}
// Compare to existing width, height, and save values under the dimension lock.
synchronized (dimensionLock) {
if (hasDecodedFirstFrame && (width != newWidth || height != newHeight)) {
stopOnOutputThread(new RuntimeException("Unexpected size change. Configured " + width + "*"
+ height + ". New " + newWidth + "*" + newHeight));
return;
}
width = newWidth;
height = newHeight;
}
// Note: texture mode ignores colorFormat. Hence, if the texture helper is non-null, skip
// color format updates.
if (surfaceTextureHelper == null && format.containsKey(MediaFormat.KEY_COLOR_FORMAT)) {
colorFormat = format.getInteger(MediaFormat.KEY_COLOR_FORMAT);
Logging.d(TAG, "Color: 0x" + Integer.toHexString(colorFormat));
if (!isSupportedColorFormat(colorFormat)) {
stopOnOutputThread(new IllegalStateException("Unsupported color format: " + colorFormat));
return;
}
}
// Save stride and sliceHeight under the dimension lock.
synchronized (dimensionLock) {
if (format.containsKey(MEDIA_FORMAT_KEY_STRIDE)) {
stride = format.getInteger(MEDIA_FORMAT_KEY_STRIDE);
}
if (format.containsKey(MEDIA_FORMAT_KEY_SLICE_HEIGHT)) {
sliceHeight = format.getInteger(MEDIA_FORMAT_KEY_SLICE_HEIGHT);
}
Logging.d(TAG, "Frame stride and slice height: " + stride + " x " + sliceHeight);
stride = Math.max(width, stride);
sliceHeight = Math.max(height, sliceHeight);
}
}
private void releaseCodecOnOutputThread() {
outputThreadChecker.checkIsOnValidThread();
Logging.d(TAG, "Releasing MediaCodec on output thread");
waitOutputBuffersReleasedOnOutputThread();
try {
codec.stop();
} catch (Exception e) {
Logging.e(TAG, "Media decoder stop failed", e);
}
try {
codec.release();
} catch (Exception e) {
Logging.e(TAG, "Media decoder release failed", e);
// Propagate exceptions caught during release back to the main thread.
shutdownException = e;
}
Logging.d(TAG, "Release on output thread done");
}
private void waitOutputBuffersReleasedOnOutputThread() {
outputThreadChecker.checkIsOnValidThread();
synchronized (activeOutputBuffersLock) {
while (activeOutputBuffers > 0) {
Logging.d(TAG, "Waiting for all frames to be released.");
try {
activeOutputBuffersLock.wait();
} catch (InterruptedException e) {
Logging.e(TAG, "Interrupted while waiting for output buffers to be released.", e);
return;
}
}
}
}
private void stopOnOutputThread(Exception e) {
outputThreadChecker.checkIsOnValidThread();
running = false;
shutdownException = e;
}
private boolean isSupportedColorFormat(int colorFormat) {
for (int supported : MediaCodecUtils.DECODER_COLOR_FORMATS) {
if (supported == colorFormat) {
return true;
}
}
return false;
}
}

View File

@ -0,0 +1,581 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import android.annotation.TargetApi;
import android.graphics.Matrix;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;
import android.opengl.GLES20;
import android.os.Bundle;
import android.view.Surface;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.Deque;
import java.util.Map;
import java.util.concurrent.LinkedBlockingDeque;
import java.util.concurrent.TimeUnit;
/** Android hardware video encoder. */
@TargetApi(19)
@SuppressWarnings("deprecation") // Cannot support API level 19 without using deprecated methods.
class HardwareVideoEncoder implements VideoEncoder {
private static final String TAG = "HardwareVideoEncoder";
// Bitrate modes - should be in sync with OMX_VIDEO_CONTROLRATETYPE defined
// in OMX_Video.h
private static final int VIDEO_ControlRateConstant = 2;
// Key associated with the bitrate control mode value (above). Not present as a MediaFormat
// constant until API level 21.
private static final String KEY_BITRATE_MODE = "bitrate-mode";
  // Values of MediaCodecInfo.CodecProfileLevel.AVCProfileHigh and AVCLevel3, respectively.
  private static final int VIDEO_AVC_PROFILE_HIGH = 8;
  private static final int VIDEO_AVC_LEVEL_3 = 0x100;
private static final int MAX_VIDEO_FRAMERATE = 30;
// See MAX_ENCODER_Q_SIZE in androidmediaencoder_jni.cc.
private static final int MAX_ENCODER_Q_SIZE = 2;
private static final int MEDIA_CODEC_RELEASE_TIMEOUT_MS = 5000;
private static final int DEQUEUE_OUTPUT_BUFFER_TIMEOUT_US = 100000;
private final String codecName;
private final VideoCodecType codecType;
private final int colorFormat;
private final Map<String, String> params;
private final ColorFormat inputColorFormat;
// Base interval for generating key frames.
private final int keyFrameIntervalSec;
// Interval at which to force a key frame. Used to reduce color distortions caused by some
// Qualcomm video encoders.
private final long forcedKeyFrameNs;
// Presentation timestamp of the last requested (or forced) key frame.
private long lastKeyFrameNs;
private final BitrateAdjuster bitrateAdjuster;
private int adjustedBitrate;
// A queue of EncodedImage.Builders that correspond to frames in the codec. These builders are
// pre-populated with all the information that can't be sent through MediaCodec.
private final Deque<EncodedImage.Builder> outputBuilders;
// Thread that delivers encoded frames to the user callback.
private Thread outputThread;
// Whether the encoder is running. Volatile so that the output thread can watch this value and
// exit when the encoder stops.
private volatile boolean running = false;
// Any exception thrown during shutdown. The output thread releases the MediaCodec and uses this
// value to send exceptions thrown during release back to the encoder thread.
private volatile Exception shutdownException = null;
// Surface objects for texture-mode encoding.
// EGL context shared with the application. Used to access texture inputs.
private EglBase14.Context textureContext;
// EGL base wrapping the shared texture context. Holds hooks to both the shared context and the
// input surface. Making this base current allows textures from the context to be drawn onto the
// surface.
private EglBase14 textureEglBase;
// Input surface for the codec. The encoder will draw input textures onto this surface.
private Surface textureInputSurface;
// Drawer used to draw input textures onto the codec's input surface.
private GlRectDrawer textureDrawer;
private MediaCodec codec;
private Callback callback;
private boolean automaticResizeOn;
private int width;
private int height;
// Contents of the last observed config frame output by the MediaCodec. Used by H.264.
private ByteBuffer configBuffer = null;
/**
* Creates a new HardwareVideoEncoder with the given codecName, codecType, colorFormat, key frame
* intervals, and bitrateAdjuster.
*
   * @param codecName the hardware codec implementation to use
   * @param codecType the type of the given video codec (e.g. VP8, VP9, or H264)
   * @param colorFormat color format used by the input buffer
   * @param params additional codec-specific settings, such as the H.264 profile-level-id
   * @param keyFrameIntervalSec interval in seconds between key frames; used to initialize the codec
   * @param forceKeyFrameIntervalMs interval at which to force a key frame if one is not requested;
   *     used to reduce distortion caused by some codec implementations
   * @param bitrateAdjuster algorithm used to correct codec implementations that do not produce the
   *     desired bitrates
   * @param textureContext shared EGL context used for texture-mode encoding, or null to encode from
   *     byte buffers
* @throws IllegalArgumentException if colorFormat is unsupported
*/
public HardwareVideoEncoder(String codecName, VideoCodecType codecType, int colorFormat,
Map<String, String> params, int keyFrameIntervalSec, int forceKeyFrameIntervalMs,
BitrateAdjuster bitrateAdjuster, EglBase14.Context textureContext) {
this.codecName = codecName;
this.codecType = codecType;
this.colorFormat = colorFormat;
this.params = params;
if (textureContext == null) {
this.inputColorFormat = ColorFormat.valueOf(colorFormat);
} else {
// ColorFormat copies bytes between buffers. It is not used in texture mode.
this.inputColorFormat = null;
}
this.keyFrameIntervalSec = keyFrameIntervalSec;
this.forcedKeyFrameNs = TimeUnit.MILLISECONDS.toNanos(forceKeyFrameIntervalMs);
this.bitrateAdjuster = bitrateAdjuster;
this.outputBuilders = new LinkedBlockingDeque<>();
this.textureContext = textureContext;
}
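  // Illustrative construction (not part of this file; the codec name and parameter values are
  // made-up examples, normally supplied by the encoder factory, and params is a Map of
  // codec-specific settings):
  //   VideoEncoder encoder = new HardwareVideoEncoder("OMX.example.h264.encoder",
  //       VideoCodecType.H264, MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar, params,
  //       /* keyFrameIntervalSec= */ 20, /* forceKeyFrameIntervalMs= */ 0, new BaseBitrateAdjuster(),
  //       /* textureContext= */ null);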
@Override
public VideoCodecStatus initEncode(Settings settings, Callback callback) {
automaticResizeOn = settings.automaticResizeOn;
return initEncodeInternal(
settings.width, settings.height, settings.startBitrate, settings.maxFramerate, callback);
}
private VideoCodecStatus initEncodeInternal(
int width, int height, int bitrateKbps, int fps, Callback callback) {
Logging.d(
TAG, "initEncode: " + width + " x " + height + ". @ " + bitrateKbps + "kbps. Fps: " + fps);
this.width = width;
this.height = height;
if (bitrateKbps != 0 && fps != 0) {
bitrateAdjuster.setTargets(bitrateKbps * 1000, fps);
}
adjustedBitrate = bitrateAdjuster.getAdjustedBitrateBps();
this.callback = callback;
lastKeyFrameNs = -1;
try {
codec = MediaCodec.createByCodecName(codecName);
} catch (IOException | IllegalArgumentException e) {
Logging.e(TAG, "Cannot create media encoder " + codecName);
return VideoCodecStatus.ERROR;
}
try {
MediaFormat format = MediaFormat.createVideoFormat(codecType.mimeType(), width, height);
format.setInteger(MediaFormat.KEY_BIT_RATE, adjustedBitrate);
format.setInteger(KEY_BITRATE_MODE, VIDEO_ControlRateConstant);
format.setInteger(MediaFormat.KEY_COLOR_FORMAT, colorFormat);
format.setInteger(MediaFormat.KEY_FRAME_RATE, bitrateAdjuster.getAdjustedFramerate());
format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, keyFrameIntervalSec);
if (codecType == VideoCodecType.H264) {
String profileLevelId = params.get(VideoCodecInfo.H264_FMTP_PROFILE_LEVEL_ID);
if (profileLevelId == null) {
profileLevelId = VideoCodecInfo.H264_CONSTRAINED_BASELINE_3_1;
}
switch (profileLevelId) {
case VideoCodecInfo.H264_CONSTRAINED_HIGH_3_1:
format.setInteger("profile", VIDEO_AVC_PROFILE_HIGH);
format.setInteger("level", VIDEO_AVC_LEVEL_3);
break;
case VideoCodecInfo.H264_CONSTRAINED_BASELINE_3_1:
break;
default:
Logging.w(TAG, "Unknown profile level id: " + profileLevelId);
}
}
Logging.d(TAG, "Format: " + format);
codec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
if (textureContext != null) {
// Texture mode.
textureEglBase = new EglBase14(textureContext, EglBase.CONFIG_RECORDABLE);
textureInputSurface = codec.createInputSurface();
textureEglBase.createSurface(textureInputSurface);
textureDrawer = new GlRectDrawer();
}
codec.start();
} catch (IllegalStateException e) {
Logging.e(TAG, "initEncode failed", e);
release();
return VideoCodecStatus.ERROR;
}
running = true;
outputThread = createOutputThread();
outputThread.start();
return VideoCodecStatus.OK;
}
@Override
public VideoCodecStatus release() {
try {
if (outputThread == null) {
return VideoCodecStatus.OK;
}
// The outputThread actually stops and releases the codec once running is false.
running = false;
if (!ThreadUtils.joinUninterruptibly(outputThread, MEDIA_CODEC_RELEASE_TIMEOUT_MS)) {
Logging.e(TAG, "Media encoder release timeout");
return VideoCodecStatus.TIMEOUT;
}
if (shutdownException != null) {
// Log the exception and turn it into an error.
Logging.e(TAG, "Media encoder release exception", shutdownException);
return VideoCodecStatus.ERROR;
}
} finally {
codec = null;
outputThread = null;
outputBuilders.clear();
if (textureDrawer != null) {
textureDrawer.release();
textureDrawer = null;
}
if (textureEglBase != null) {
textureEglBase.release();
textureEglBase = null;
}
if (textureInputSurface != null) {
textureInputSurface.release();
textureInputSurface = null;
}
}
return VideoCodecStatus.OK;
}
@Override
public VideoCodecStatus encode(VideoFrame videoFrame, EncodeInfo encodeInfo) {
if (codec == null) {
return VideoCodecStatus.UNINITIALIZED;
}
// If input resolution changed, restart the codec with the new resolution.
int frameWidth = videoFrame.getBuffer().getWidth();
int frameHeight = videoFrame.getBuffer().getHeight();
if (frameWidth != width || frameHeight != height) {
VideoCodecStatus status = resetCodec(frameWidth, frameHeight);
if (status != VideoCodecStatus.OK) {
return status;
}
}
if (outputBuilders.size() > MAX_ENCODER_Q_SIZE) {
// Too many frames in the encoder. Drop this frame.
Logging.e(TAG, "Dropped frame, encoder queue full");
return VideoCodecStatus.OK; // See webrtc bug 2887.
}
boolean requestedKeyFrame = false;
for (EncodedImage.FrameType frameType : encodeInfo.frameTypes) {
if (frameType == EncodedImage.FrameType.VideoFrameKey) {
requestedKeyFrame = true;
}
}
if (requestedKeyFrame || shouldForceKeyFrame(videoFrame.getTimestampNs())) {
requestKeyFrame(videoFrame.getTimestampNs());
}
VideoFrame.Buffer videoFrameBuffer = videoFrame.getBuffer();
// Number of bytes in the video buffer. Y channel is sampled at one byte per pixel; U and V are
// subsampled at one byte per four pixels.
int bufferSize = videoFrameBuffer.getHeight() * videoFrameBuffer.getWidth() * 3 / 2;
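    // For example (illustrative numbers), a 640x480 I420 frame needs 640 * 480 * 3 / 2 = 460800
    // bytes.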
EncodedImage.Builder builder = EncodedImage.builder()
.setCaptureTimeNs(videoFrame.getTimestampNs())
.setCompleteFrame(true)
.setEncodedWidth(videoFrame.getBuffer().getWidth())
.setEncodedHeight(videoFrame.getBuffer().getHeight())
.setRotation(videoFrame.getRotation());
outputBuilders.offer(builder);
if (textureContext != null) {
if (!(videoFrameBuffer instanceof VideoFrame.TextureBuffer)) {
Logging.e(TAG, "Cannot encode non-texture buffer in texture mode");
return VideoCodecStatus.ERROR;
}
VideoFrame.TextureBuffer textureBuffer = (VideoFrame.TextureBuffer) videoFrameBuffer;
return encodeTextureBuffer(videoFrame, textureBuffer);
} else {
if (videoFrameBuffer instanceof VideoFrame.TextureBuffer) {
Logging.w(TAG, "Encoding texture buffer in byte mode; this may be inefficient");
}
return encodeByteBuffer(videoFrame, videoFrameBuffer, bufferSize);
}
}
private VideoCodecStatus encodeTextureBuffer(
VideoFrame videoFrame, VideoFrame.TextureBuffer textureBuffer) {
Matrix matrix = textureBuffer.getTransformMatrix();
float[] transformationMatrix = RendererCommon.convertMatrixFromAndroidGraphicsMatrix(matrix);
try {
textureEglBase.makeCurrent();
// TODO(perkj): glClear() shouldn't be necessary since every pixel is covered anyway,
// but it's a workaround for bug webrtc:5147.
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
switch (textureBuffer.getType()) {
case OES:
textureDrawer.drawOes(textureBuffer.getTextureId(), transformationMatrix, width, height,
0, 0, width, height);
break;
case RGB:
textureDrawer.drawRgb(textureBuffer.getTextureId(), transformationMatrix, width, height,
0, 0, width, height);
break;
}
textureEglBase.swapBuffers(videoFrame.getTimestampNs());
} catch (RuntimeException e) {
Logging.e(TAG, "encodeTexture failed", e);
// Keep the output builders in sync with buffers in the codec.
outputBuilders.pollLast();
return VideoCodecStatus.ERROR;
}
return VideoCodecStatus.OK;
}
private VideoCodecStatus encodeByteBuffer(
VideoFrame videoFrame, VideoFrame.Buffer videoFrameBuffer, int bufferSize) {
// Frame timestamp rounded to the nearest microsecond.
long presentationTimestampUs = (videoFrame.getTimestampNs() + 500) / 1000;
// No timeout. Don't block for an input buffer, drop frames if the encoder falls behind.
int index;
try {
index = codec.dequeueInputBuffer(0 /* timeout */);
} catch (IllegalStateException e) {
Logging.e(TAG, "dequeueInputBuffer failed", e);
return VideoCodecStatus.FALLBACK_SOFTWARE;
}
if (index == -1) {
// Encoder is falling behind. No input buffers available. Drop the frame.
Logging.e(TAG, "Dropped frame, no input buffers available");
return VideoCodecStatus.OK; // See webrtc bug 2887.
}
ByteBuffer buffer;
try {
buffer = codec.getInputBuffers()[index];
} catch (IllegalStateException e) {
Logging.e(TAG, "getInputBuffers failed", e);
return VideoCodecStatus.ERROR;
}
VideoFrame.I420Buffer i420 = videoFrameBuffer.toI420();
inputColorFormat.fillBufferFromI420(buffer, i420);
i420.release();
try {
codec.queueInputBuffer(
index, 0 /* offset */, bufferSize, presentationTimestampUs, 0 /* flags */);
} catch (IllegalStateException e) {
Logging.e(TAG, "queueInputBuffer failed", e);
// Keep the output builders in sync with buffers in the codec.
outputBuilders.pollLast();
// IllegalStateException thrown when the codec is in the wrong state.
return VideoCodecStatus.ERROR;
}
return VideoCodecStatus.OK;
}
@Override
public VideoCodecStatus setChannelParameters(short packetLoss, long roundTripTimeMs) {
// No op.
return VideoCodecStatus.OK;
}
@Override
public VideoCodecStatus setRateAllocation(BitrateAllocation bitrateAllocation, int framerate) {
if (framerate > MAX_VIDEO_FRAMERATE) {
framerate = MAX_VIDEO_FRAMERATE;
}
bitrateAdjuster.setTargets(bitrateAllocation.getSum(), framerate);
return updateBitrate();
}
@Override
public ScalingSettings getScalingSettings() {
return new ScalingSettings(automaticResizeOn);
}
@Override
public String getImplementationName() {
return "HardwareVideoEncoder: " + codecName;
}
private VideoCodecStatus resetCodec(int newWidth, int newHeight) {
VideoCodecStatus status = release();
if (status != VideoCodecStatus.OK) {
return status;
}
// Zero bitrate and framerate indicate not to change the targets.
return initEncodeInternal(newWidth, newHeight, 0, 0, callback);
}
private boolean shouldForceKeyFrame(long presentationTimestampNs) {
return forcedKeyFrameNs > 0 && presentationTimestampNs > lastKeyFrameNs + forcedKeyFrameNs;
}
private void requestKeyFrame(long presentationTimestampNs) {
// Ideally MediaCodec would honor BUFFER_FLAG_SYNC_FRAME so we could
// indicate this in queueInputBuffer() below and guarantee _this_ frame
// be encoded as a key frame, but sadly that flag is ignored. Instead,
// we request a key frame "soon".
try {
Bundle b = new Bundle();
b.putInt(MediaCodec.PARAMETER_KEY_REQUEST_SYNC_FRAME, 0);
codec.setParameters(b);
} catch (IllegalStateException e) {
Logging.e(TAG, "requestKeyFrame failed", e);
return;
}
lastKeyFrameNs = presentationTimestampNs;
}
private Thread createOutputThread() {
return new Thread() {
@Override
public void run() {
while (running) {
deliverEncodedImage();
}
releaseCodecOnOutputThread();
}
};
}
private void deliverEncodedImage() {
try {
MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
int index = codec.dequeueOutputBuffer(info, DEQUEUE_OUTPUT_BUFFER_TIMEOUT_US);
if (index < 0) {
return;
}
ByteBuffer codecOutputBuffer = codec.getOutputBuffers()[index];
codecOutputBuffer.position(info.offset);
codecOutputBuffer.limit(info.offset + info.size);
if ((info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
Logging.d(TAG, "Config frame generated. Offset: " + info.offset + ". Size: " + info.size);
configBuffer = ByteBuffer.allocateDirect(info.size);
configBuffer.put(codecOutputBuffer);
} else {
bitrateAdjuster.reportEncodedFrame(info.size);
if (adjustedBitrate != bitrateAdjuster.getAdjustedBitrateBps()) {
updateBitrate();
}
ByteBuffer frameBuffer;
boolean isKeyFrame = (info.flags & MediaCodec.BUFFER_FLAG_SYNC_FRAME) != 0;
if (isKeyFrame && codecType == VideoCodecType.H264) {
Logging.d(TAG,
"Prepending config frame of size " + configBuffer.capacity()
+ " to output buffer with offset " + info.offset + ", size " + info.size);
// For H.264 key frame prepend SPS and PPS NALs at the start.
frameBuffer = ByteBuffer.allocateDirect(info.size + configBuffer.capacity());
configBuffer.rewind();
frameBuffer.put(configBuffer);
} else {
frameBuffer = ByteBuffer.allocateDirect(info.size);
}
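// Example: an 8000-byte H.264 key frame with a 30-byte SPS/PPS config buffer is delivered
// as one 8030-byte buffer, config data first, followed by the frame payload.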
frameBuffer.put(codecOutputBuffer);
frameBuffer.rewind();
EncodedImage.FrameType frameType = EncodedImage.FrameType.VideoFrameDelta;
if (isKeyFrame) {
Logging.d(TAG, "Sync frame generated");
frameType = EncodedImage.FrameType.VideoFrameKey;
}
EncodedImage.Builder builder = outputBuilders.poll();
builder.setBuffer(frameBuffer).setFrameType(frameType);
// TODO(mellem): Set codec-specific info.
callback.onEncodedFrame(builder.createEncodedImage(), new CodecSpecificInfo());
}
codec.releaseOutputBuffer(index, false);
} catch (IllegalStateException e) {
Logging.e(TAG, "deliverOutput failed", e);
}
}
private void releaseCodecOnOutputThread() {
Logging.d(TAG, "Releasing MediaCodec on output thread");
try {
codec.stop();
} catch (Exception e) {
Logging.e(TAG, "Media encoder stop failed", e);
}
try {
codec.release();
} catch (Exception e) {
Logging.e(TAG, "Media encoder release failed", e);
// Propagate exceptions caught during release back to the main thread.
shutdownException = e;
}
Logging.d(TAG, "Release on output thread done");
}
private VideoCodecStatus updateBitrate() {
adjustedBitrate = bitrateAdjuster.getAdjustedBitrateBps();
try {
Bundle params = new Bundle();
params.putInt(MediaCodec.PARAMETER_KEY_VIDEO_BITRATE, adjustedBitrate);
codec.setParameters(params);
return VideoCodecStatus.OK;
} catch (IllegalStateException e) {
Logging.e(TAG, "updateBitrate failed", e);
return VideoCodecStatus.ERROR;
}
}
/**
* Enumeration of supported color formats used for MediaCodec's input.
*/
private enum ColorFormat {
I420 {
@Override
void fillBufferFromI420(ByteBuffer buffer, VideoFrame.I420Buffer i420) {
buffer.put(i420.getDataY());
buffer.put(i420.getDataU());
buffer.put(i420.getDataV());
}
},
NV12 {
@Override
void fillBufferFromI420(ByteBuffer buffer, VideoFrame.I420Buffer i420) {
buffer.put(i420.getDataY());
// Interleave the bytes from the U and V portions, starting with U.
ByteBuffer u = i420.getDataU();
ByteBuffer v = i420.getDataV();
while (u.hasRemaining() && v.hasRemaining()) {
buffer.put(u.get());
buffer.put(v.get());
}
}
};
abstract void fillBufferFromI420(ByteBuffer buffer, VideoFrame.I420Buffer i420);
static ColorFormat valueOf(int colorFormat) {
switch (colorFormat) {
case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar:
return I420;
case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar:
case MediaCodecInfo.CodecCapabilities.COLOR_QCOM_FormatYUV420SemiPlanar:
case MediaCodecUtils.COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m:
return NV12;
default:
throw new IllegalArgumentException("Unsupported colorFormat: " + colorFormat);
}
}
}
}

View File

@ -0,0 +1,44 @@
/*
* Copyright 2016 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
/**
* Class for holding the native pointer of a histogram. Since there is no way to destroy a
* histogram, please don't create unnecessary instances of this object. This class is thread safe.
*
* Usage example:
* private static final Histogram someMetricHistogram =
* Histogram.createCounts("WebRTC.Video.SomeMetric", 1, 10000, 50);
* someMetricHistogram.addSample(someVariable);
*/
class Histogram {
private final long handle;
private Histogram(long handle) {
this.handle = handle;
}
public static Histogram createCounts(String name, int min, int max, int bucketCount) {
return new Histogram(nativeCreateCounts(name, min, max, bucketCount));
}
public static Histogram createEnumeration(String name, int max) {
return new Histogram(nativeCreateEnumeration(name, max));
}
public void addSample(int sample) {
nativeAddSample(handle, sample);
}
private static native long nativeCreateCounts(String name, int min, int max, int bucketCount);
private static native long nativeCreateEnumeration(String name, int max);
private static native void nativeAddSample(long handle, int sample);
}
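For comparison with the counts example in the class comment, a minimal sketch of the enumeration variant. The metric name, the range, and the wrapper class are illustrative only, and the sketch assumes it lives in the org.webrtc package since Histogram is package-private:

class ConnectionTypeStats {
  // Enumerated metric; the max value follows the semantics of the native enumeration histogram.
  private static final Histogram connectionTypeHistogram =
      Histogram.createEnumeration("WebRTC.Example.ConnectionType", 5);

  static void reportConnectionType(int typeIndex) {
    connectionTypeHistogram.addSample(typeIndex);
  }
}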

View File

@ -0,0 +1,140 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import java.nio.ByteBuffer;
import org.webrtc.VideoFrame.I420Buffer;
/** Implementation of an I420 VideoFrame buffer. */
class I420BufferImpl implements VideoFrame.I420Buffer {
private final int width;
private final int height;
private final ByteBuffer dataY;
private final ByteBuffer dataU;
private final ByteBuffer dataV;
private final int strideY;
private final int strideU;
private final int strideV;
private final Runnable releaseCallback;
private final Object refCountLock = new Object();
private int refCount;
/** Constructs an I420Buffer backed by existing data. */
I420BufferImpl(int width, int height, ByteBuffer dataY, int strideY, ByteBuffer dataU,
int strideU, ByteBuffer dataV, int strideV, Runnable releaseCallback) {
this.width = width;
this.height = height;
this.dataY = dataY;
this.dataU = dataU;
this.dataV = dataV;
this.strideY = strideY;
this.strideU = strideU;
this.strideV = strideV;
this.releaseCallback = releaseCallback;
this.refCount = 1;
}
/** Allocates an empty I420Buffer suitable for an image of the given dimensions. */
static I420BufferImpl allocate(int width, int height) {
int chromaHeight = (height + 1) / 2;
int strideUV = (width + 1) / 2;
int yPos = 0;
int uPos = yPos + width * height;
int vPos = uPos + strideUV * chromaHeight;
ByteBuffer buffer = ByteBuffer.allocateDirect(width * height + 2 * strideUV * chromaHeight);
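// For example, a 640x480 allocation gives chromaHeight = 240, strideUV = 320,
// uPos = 307200, vPos = 384000, and a total capacity of 460800 bytes.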
buffer.position(yPos);
buffer.limit(uPos);
ByteBuffer dataY = buffer.slice();
buffer.position(uPos);
buffer.limit(vPos);
ByteBuffer dataU = buffer.slice();
buffer.position(vPos);
buffer.limit(vPos + strideUV * chromaHeight);
ByteBuffer dataV = buffer.slice();
return new I420BufferImpl(width, height, dataY, width, dataU, strideUV, dataV, strideUV, null);
}
@Override
public int getWidth() {
return width;
}
@Override
public int getHeight() {
return height;
}
@Override
public ByteBuffer getDataY() {
return dataY;
}
@Override
public ByteBuffer getDataU() {
return dataU;
}
@Override
public ByteBuffer getDataV() {
return dataV;
}
@Override
public int getStrideY() {
return strideY;
}
@Override
public int getStrideU() {
return strideU;
}
@Override
public int getStrideV() {
return strideV;
}
@Override
public I420Buffer toI420() {
retain();
return this;
}
@Override
public void retain() {
synchronized (refCountLock) {
++refCount;
}
}
@Override
public void release() {
synchronized (refCountLock) {
if (--refCount == 0 && releaseCallback != null) {
releaseCallback.run();
}
}
}
@Override
public VideoFrame.Buffer cropAndScale(
int cropX, int cropY, int cropWidth, int cropHeight, int scaleWidth, int scaleHeight) {
return VideoFrame.cropAndScaleI420(
this, cropX, cropY, cropWidth, cropHeight, scaleWidth, scaleHeight);
}
}
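A hedged usage sketch: dimensions are illustrative, and wrapping the buffer in a VideoFrame assumes the public (Buffer, rotation, timestampNs) constructor of org.webrtc.VideoFrame:

// Allocate, fill, wrap, and release an I420 buffer.
I420BufferImpl buffer = I420BufferImpl.allocate(640, 480);
// ... write pixel data into buffer.getDataY()/getDataU()/getDataV(), honoring the strides ...
VideoFrame frame = new VideoFrame(buffer, 0 /* rotation */, 0 /* timestampNs */);
// Releasing the frame drops the last reference and runs the buffer's release callback, if any.
frame.release();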

View File

@ -0,0 +1,18 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
/** Class with static JNI helper functions that are used in many places. */
class JniCommon {
/** Functions to increment/decrement an rtc::RefCountInterface pointer. */
static native void nativeAddRef(long nativeRefCountedPointer);
static native void nativeReleaseRef(long nativeRefCountedPointer);
}

View File

@ -0,0 +1,78 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import android.annotation.TargetApi;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaCodecInfo.CodecCapabilities;
/** Container class for static constants and helpers used with MediaCodec. */
@TargetApi(18)
class MediaCodecUtils {
private static final String TAG = "MediaCodecUtils";
// Prefixes for supported hardware encoder/decoder component names.
static final String EXYNOS_PREFIX = "OMX.Exynos.";
static final String INTEL_PREFIX = "OMX.Intel.";
static final String NVIDIA_PREFIX = "OMX.Nvidia.";
static final String QCOM_PREFIX = "OMX.qcom.";
// NV12 color format supported by QCOM codec, but not declared in MediaCodec -
// see /hardware/qcom/media/mm-core/inc/OMX_QCOMExtns.h
static final int COLOR_QCOM_FORMATYVU420PackedSemiPlanar32m4ka = 0x7FA30C01;
static final int COLOR_QCOM_FORMATYVU420PackedSemiPlanar16m4ka = 0x7FA30C02;
static final int COLOR_QCOM_FORMATYVU420PackedSemiPlanar64x32Tile2m8ka = 0x7FA30C03;
static final int COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m = 0x7FA30C04;
// Color formats supported by hardware decoder - in order of preference.
static final int[] DECODER_COLOR_FORMATS = new int[] {CodecCapabilities.COLOR_FormatYUV420Planar,
CodecCapabilities.COLOR_FormatYUV420SemiPlanar,
CodecCapabilities.COLOR_QCOM_FormatYUV420SemiPlanar,
MediaCodecUtils.COLOR_QCOM_FORMATYVU420PackedSemiPlanar32m4ka,
MediaCodecUtils.COLOR_QCOM_FORMATYVU420PackedSemiPlanar16m4ka,
MediaCodecUtils.COLOR_QCOM_FORMATYVU420PackedSemiPlanar64x32Tile2m8ka,
MediaCodecUtils.COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m};
// Color formats supported by hardware encoder - in order of preference.
static final int[] ENCODER_COLOR_FORMATS = {
MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar,
MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar,
MediaCodecInfo.CodecCapabilities.COLOR_QCOM_FormatYUV420SemiPlanar,
MediaCodecUtils.COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m};
// Color formats supported by texture mode encoding - in order of preference.
static final int[] TEXTURE_COLOR_FORMATS = {MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface};
static Integer selectColorFormat(int[] supportedColorFormats, CodecCapabilities capabilities) {
for (int supportedColorFormat : supportedColorFormats) {
for (int codecColorFormat : capabilities.colorFormats) {
if (codecColorFormat == supportedColorFormat) {
return codecColorFormat;
}
}
}
return null;
}
static boolean codecSupportsType(MediaCodecInfo info, VideoCodecType type) {
for (String mimeType : info.getSupportedTypes()) {
if (type.mimeType().equals(mimeType)) {
return true;
}
}
return false;
}
private MediaCodecUtils() {
// This class should not be instantiated.
}
}
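A hedged sketch of how these helpers combine when probing a codec; the helper method below is illustrative and not part of this change:

// Returns the preferred encoder color format for H.264 on |info|, or null if unsupported.
static Integer chooseH264EncoderColorFormat(MediaCodecInfo info) {
  if (!MediaCodecUtils.codecSupportsType(info, VideoCodecType.H264)) {
    return null;
  }
  return MediaCodecUtils.selectColorFormat(MediaCodecUtils.ENCODER_COLOR_FORMATS,
      info.getCapabilitiesForType(VideoCodecType.H264.mimeType()));
}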

View File

@ -0,0 +1,83 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import java.nio.ByteBuffer;
public class NV12Buffer implements VideoFrame.Buffer {
private final int width;
private final int height;
private final int stride;
private final int sliceHeight;
private final ByteBuffer buffer;
private final Runnable releaseCallback;
private final Object refCountLock = new Object();
private int refCount;
public NV12Buffer(int width, int height, int stride, int sliceHeight, ByteBuffer buffer,
Runnable releaseCallback) {
this.width = width;
this.height = height;
this.stride = stride;
this.sliceHeight = sliceHeight;
this.buffer = buffer;
this.releaseCallback = releaseCallback;
refCount = 1;
}
@Override
public int getWidth() {
return width;
}
@Override
public int getHeight() {
return height;
}
@Override
public VideoFrame.I420Buffer toI420() {
return (VideoFrame.I420Buffer) cropAndScale(0, 0, width, height, width, height);
}
@Override
public void retain() {
synchronized (refCountLock) {
++refCount;
}
}
@Override
public void release() {
synchronized (refCountLock) {
if (--refCount == 0 && releaseCallback != null) {
releaseCallback.run();
}
}
}
@Override
public VideoFrame.Buffer cropAndScale(
int cropX, int cropY, int cropWidth, int cropHeight, int scaleWidth, int scaleHeight) {
I420BufferImpl newBuffer = I420BufferImpl.allocate(scaleWidth, scaleHeight);
nativeCropAndScale(cropX, cropY, cropWidth, cropHeight, scaleWidth, scaleHeight, buffer, width,
height, stride, sliceHeight, newBuffer.getDataY(), newBuffer.getStrideY(),
newBuffer.getDataU(), newBuffer.getStrideU(), newBuffer.getDataV(), newBuffer.getStrideV());
return newBuffer;
}
private static native void nativeCropAndScale(int cropX, int cropY, int cropWidth, int cropHeight,
int scaleWidth, int scaleHeight, ByteBuffer src, int srcWidth, int srcHeight, int srcStride,
int srcSliceHeight, ByteBuffer dstY, int dstStrideY, ByteBuffer dstU, int dstStrideU,
ByteBuffer dstV, int dstStrideV);
}

View File

@ -0,0 +1,77 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import java.nio.ByteBuffer;
public class NV21Buffer implements VideoFrame.Buffer {
private final byte[] data;
private final int width;
private final int height;
private final Runnable releaseCallback;
private final Object refCountLock = new Object();
private int refCount = 1;
public NV21Buffer(byte[] data, int width, int height, Runnable releaseCallback) {
this.data = data;
this.width = width;
this.height = height;
this.releaseCallback = releaseCallback;
}
@Override
public int getWidth() {
return width;
}
@Override
public int getHeight() {
return height;
}
@Override
public VideoFrame.I420Buffer toI420() {
// Cropping converts the frame to I420. Just crop and scale to the whole image.
return (VideoFrame.I420Buffer) cropAndScale(0 /* cropX */, 0 /* cropY */, width /* cropWidth */,
height /* cropHeight */, width /* scaleWidth */, height /* scaleHeight */);
}
@Override
public void retain() {
synchronized (refCountLock) {
++refCount;
}
}
@Override
public void release() {
synchronized (refCountLock) {
if (--refCount == 0 && releaseCallback != null) {
releaseCallback.run();
}
}
}
@Override
public VideoFrame.Buffer cropAndScale(
int cropX, int cropY, int cropWidth, int cropHeight, int scaleWidth, int scaleHeight) {
I420BufferImpl newBuffer = I420BufferImpl.allocate(scaleWidth, scaleHeight);
nativeCropAndScale(cropX, cropY, cropWidth, cropHeight, scaleWidth, scaleHeight, data, width,
height, newBuffer.getDataY(), newBuffer.getStrideY(), newBuffer.getDataU(),
newBuffer.getStrideU(), newBuffer.getDataV(), newBuffer.getStrideV());
return newBuffer;
}
private static native void nativeCropAndScale(int cropX, int cropY, int cropWidth, int cropHeight,
int scaleWidth, int scaleHeight, byte[] src, int srcWidth, int srcHeight, ByteBuffer dstY,
int dstStrideY, ByteBuffer dstU, int dstStrideU, ByteBuffer dstV, int dstStrideV);
}
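A hedged sketch of wrapping a camera preview callback's NV21 bytes; the Camera object and the buffer-recycling callback are illustrative assumptions, not part of this change:

// Wraps |nv21Data| without copying; the byte array is handed back to the camera on release.
static VideoFrame.Buffer wrapPreviewFrame(
    final byte[] nv21Data, int width, int height, final android.hardware.Camera camera) {
  return new NV21Buffer(nv21Data, width, height, new Runnable() {
    @Override
    public void run() {
      camera.addCallbackBuffer(nv21Data);
    }
  });
}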

View File

@ -0,0 +1,139 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import android.graphics.Matrix;
import java.nio.ByteBuffer;
/**
* Android texture buffer backed by a SurfaceTextureHelper's texture. The buffer calls
* |releaseCallback| when it is released.
*/
class TextureBufferImpl implements VideoFrame.TextureBuffer {
private final int width;
private final int height;
private final Type type;
private final int id;
private final Matrix transformMatrix;
private final SurfaceTextureHelper surfaceTextureHelper;
private final Runnable releaseCallback;
private final Object refCountLock = new Object();
private int refCount;
public TextureBufferImpl(int width, int height, Type type, int id, Matrix transformMatrix,
SurfaceTextureHelper surfaceTextureHelper, Runnable releaseCallback) {
this.width = width;
this.height = height;
this.type = type;
this.id = id;
this.transformMatrix = transformMatrix;
this.surfaceTextureHelper = surfaceTextureHelper;
this.releaseCallback = releaseCallback;
this.refCount = 1; // Creator implicitly holds a reference.
}
@Override
public VideoFrame.TextureBuffer.Type getType() {
return type;
}
@Override
public int getTextureId() {
return id;
}
@Override
public Matrix getTransformMatrix() {
return transformMatrix;
}
@Override
public int getWidth() {
return width;
}
@Override
public int getHeight() {
return height;
}
@Override
public VideoFrame.I420Buffer toI420() {
if (type == Type.RGB) {
throw new RuntimeException("toI420 for RGB frames not implemented yet");
}
// SurfaceTextureHelper requires a stride that is divisible by 8. Round width up.
// See SurfaceTextureHelper for details on the size and format.
int stride = ((width + 7) / 8) * 8;
int uvHeight = (height + 1) / 2;
// Due to the layout used by SurfaceTextureHelper, vPos + stride * uvHeight would overrun the
// buffer. Add one row at the bottom to compensate for this. There will never be data in the
// extra row, but now other code does not have to deal with v stride * v height exceeding the
// buffer's capacity.
int size = stride * (height + uvHeight + 1);
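// For example, a 638x480 texture rounds the stride up to 640; uvHeight = 240,
// so size = 640 * (480 + 240 + 1) = 461440 bytes.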
ByteBuffer buffer = ByteBuffer.allocateDirect(size);
surfaceTextureHelper.textureToYUV(buffer, width, height, stride, id,
RendererCommon.convertMatrixFromAndroidGraphicsMatrix(transformMatrix));
int yPos = 0;
int uPos = yPos + stride * height;
// Rows of U and V alternate in the buffer, so V data starts after the first row of U.
int vPos = uPos + stride / 2;
buffer.position(yPos);
buffer.limit(yPos + stride * height);
ByteBuffer dataY = buffer.slice();
buffer.position(uPos);
buffer.limit(uPos + stride * uvHeight);
ByteBuffer dataU = buffer.slice();
buffer.position(vPos);
buffer.limit(vPos + stride * uvHeight);
ByteBuffer dataV = buffer.slice();
// SurfaceTextureHelper uses the same stride for Y, U, and V data.
return new I420BufferImpl(width, height, dataY, stride, dataU, stride, dataV, stride, null);
}
@Override
public void retain() {
synchronized (refCountLock) {
++refCount;
}
}
@Override
public void release() {
synchronized (refCountLock) {
if (--refCount == 0 && releaseCallback != null) {
releaseCallback.run();
}
}
}
@Override
public VideoFrame.Buffer cropAndScale(
int cropX, int cropY, int cropWidth, int cropHeight, int scaleWidth, int scaleHeight) {
retain();
Matrix newMatrix = new Matrix(transformMatrix);
newMatrix.postScale(cropWidth / (float) width, cropHeight / (float) height);
newMatrix.postTranslate(cropX / (float) width, cropY / (float) height);
return new TextureBufferImpl(
scaleWidth, scaleHeight, type, id, newMatrix, surfaceTextureHelper, new Runnable() {
@Override
public void run() {
release();
}
});
}
}

View File

@ -0,0 +1,28 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
/** Enumeration of supported video codec types. */
enum VideoCodecType {
VP8("video/x-vnd.on2.vp8"),
VP9("video/x-vnd.on2.vp9"),
H264("video/avc");
private final String mimeType;
private VideoCodecType(String mimeType) {
this.mimeType = mimeType;
}
String mimeType() {
return mimeType;
}
}
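The mime type maps directly onto Android's MediaFormat. A hedged one-line sketch (dimensions illustrative, android.media.MediaFormat assumed to be imported):

MediaFormat format = MediaFormat.createVideoFormat(VideoCodecType.VP8.mimeType(), 640, 480);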

View File

@ -0,0 +1,30 @@
/*
* Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
/**
* VideoDecoder callback that calls VideoDecoderWrapper.OnDecodedFrame for the decoded frames.
*/
class VideoDecoderWrapperCallback implements VideoDecoder.Callback {
private final long nativeDecoder;
public VideoDecoderWrapperCallback(long nativeDecoder) {
this.nativeDecoder = nativeDecoder;
}
@Override
public void onDecodedFrame(VideoFrame frame, Integer decodeTimeMs, Integer qp) {
nativeOnDecodedFrame(nativeDecoder, frame, decodeTimeMs, qp);
}
private static native void nativeOnDecodedFrame(
long nativeDecoder, VideoFrame frame, Integer decodeTimeMs, Integer qp);
}

View File

@ -0,0 +1,35 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import java.nio.ByteBuffer;
/**
* VideoEncoder callback that calls VideoEncoderWrapper.OnEncodedFrame for the Encoded frames.
*/
class VideoEncoderWrapperCallback implements VideoEncoder.Callback {
private final long nativeEncoder;
public VideoEncoderWrapperCallback(long nativeEncoder) {
this.nativeEncoder = nativeEncoder;
}
@Override
public void onEncodedFrame(EncodedImage frame, VideoEncoder.CodecSpecificInfo info) {
nativeOnEncodedFrame(nativeEncoder, frame.buffer, frame.encodedWidth, frame.encodedHeight,
frame.captureTimeNs, frame.frameType.getNative(), frame.rotation, frame.completeFrame,
frame.qp);
}
private static native void nativeOnEncodedFrame(long nativeEncoder, ByteBuffer buffer,
int encodedWidth, int encodedHeight, long captureTimeNs, int frameType, int rotation,
boolean completeFrame, Integer qp);
}

View File

@ -0,0 +1,106 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import java.nio.ByteBuffer;
/**
* This class wraps a webrtc::I420BufferInterface into a VideoFrame.I420Buffer.
*/
class WrappedNativeI420Buffer implements VideoFrame.I420Buffer {
private final int width;
private final int height;
private final ByteBuffer dataY;
private final int strideY;
private final ByteBuffer dataU;
private final int strideU;
private final ByteBuffer dataV;
private final int strideV;
private final long nativeBuffer;
WrappedNativeI420Buffer(int width, int height, ByteBuffer dataY, int strideY, ByteBuffer dataU,
int strideU, ByteBuffer dataV, int strideV, long nativeBuffer) {
this.width = width;
this.height = height;
this.dataY = dataY;
this.strideY = strideY;
this.dataU = dataU;
this.strideU = strideU;
this.dataV = dataV;
this.strideV = strideV;
this.nativeBuffer = nativeBuffer;
retain();
}
@Override
public int getWidth() {
return width;
}
@Override
public int getHeight() {
return height;
}
@Override
public ByteBuffer getDataY() {
return dataY;
}
@Override
public ByteBuffer getDataU() {
return dataU;
}
@Override
public ByteBuffer getDataV() {
return dataV;
}
@Override
public int getStrideY() {
return strideY;
}
@Override
public int getStrideU() {
return strideU;
}
@Override
public int getStrideV() {
return strideV;
}
@Override
public VideoFrame.I420Buffer toI420() {
retain();
return this;
}
@Override
public void retain() {
JniCommon.nativeAddRef(nativeBuffer);
}
@Override
public void release() {
JniCommon.nativeReleaseRef(nativeBuffer);
}
@Override
public VideoFrame.Buffer cropAndScale(
int cropX, int cropY, int cropWidth, int cropHeight, int scaleWidth, int scaleHeight) {
return VideoFrame.cropAndScaleI420(
this, cropX, cropY, cropWidth, cropHeight, scaleWidth, scaleHeight);
}
}

View File

@ -0,0 +1,223 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import android.opengl.GLES11Ext;
import android.opengl.GLES20;
import java.nio.ByteBuffer;
import java.nio.FloatBuffer;
/**
* Class for converting OES textures to a YUV ByteBuffer. It should be constructed on a thread with
* an active EGL context, and only be used from that thread.
*/
class YuvConverter {
// Vertex coordinates in Normalized Device Coordinates, i.e.
// (-1, -1) is bottom-left and (1, 1) is top-right.
private static final FloatBuffer DEVICE_RECTANGLE = GlUtil.createFloatBuffer(new float[] {
-1.0f, -1.0f, // Bottom left.
1.0f, -1.0f, // Bottom right.
-1.0f, 1.0f, // Top left.
1.0f, 1.0f, // Top right.
});
// Texture coordinates - (0, 0) is bottom-left and (1, 1) is top-right.
private static final FloatBuffer TEXTURE_RECTANGLE = GlUtil.createFloatBuffer(new float[] {
0.0f, 0.0f, // Bottom left.
1.0f, 0.0f, // Bottom right.
0.0f, 1.0f, // Top left.
1.0f, 1.0f // Top right.
});
// clang-format off
private static final String VERTEX_SHADER =
"varying vec2 interp_tc;\n"
+ "attribute vec4 in_pos;\n"
+ "attribute vec4 in_tc;\n"
+ "\n"
+ "uniform mat4 texMatrix;\n"
+ "\n"
+ "void main() {\n"
+ " gl_Position = in_pos;\n"
+ " interp_tc = (texMatrix * in_tc).xy;\n"
+ "}\n";
private static final String FRAGMENT_SHADER =
"#extension GL_OES_EGL_image_external : require\n"
+ "precision mediump float;\n"
+ "varying vec2 interp_tc;\n"
+ "\n"
+ "uniform samplerExternalOES oesTex;\n"
// Difference in texture coordinate corresponding to one
// sub-pixel in the x direction.
+ "uniform vec2 xUnit;\n"
// Color conversion coefficients, including constant term
+ "uniform vec4 coeffs;\n"
+ "\n"
+ "void main() {\n"
// Since the alpha read from the texture is always 1, this could
// be written as a mat4 x vec4 multiply. However, that seems to
// give a worse framerate, possibly because the additional
// multiplies by 1.0 consume resources. TODO(nisse): Could also
// try to do it as a vec3 x mat3x4, followed by an add in of a
// constant vector.
+ " gl_FragColor.r = coeffs.a + dot(coeffs.rgb,\n"
+ " texture2D(oesTex, interp_tc - 1.5 * xUnit).rgb);\n"
+ " gl_FragColor.g = coeffs.a + dot(coeffs.rgb,\n"
+ " texture2D(oesTex, interp_tc - 0.5 * xUnit).rgb);\n"
+ " gl_FragColor.b = coeffs.a + dot(coeffs.rgb,\n"
+ " texture2D(oesTex, interp_tc + 0.5 * xUnit).rgb);\n"
+ " gl_FragColor.a = coeffs.a + dot(coeffs.rgb,\n"
+ " texture2D(oesTex, interp_tc + 1.5 * xUnit).rgb);\n"
+ "}\n";
// clang-format on
private final GlTextureFrameBuffer textureFrameBuffer;
private final GlShader shader;
private final int texMatrixLoc;
private final int xUnitLoc;
private final int coeffsLoc;
private final ThreadUtils.ThreadChecker threadChecker = new ThreadUtils.ThreadChecker();
private boolean released = false;
/**
* This class should be constructed on a thread that has an active EGL context.
*/
public YuvConverter() {
threadChecker.checkIsOnValidThread();
textureFrameBuffer = new GlTextureFrameBuffer(GLES20.GL_RGBA);
shader = new GlShader(VERTEX_SHADER, FRAGMENT_SHADER);
shader.useProgram();
texMatrixLoc = shader.getUniformLocation("texMatrix");
xUnitLoc = shader.getUniformLocation("xUnit");
coeffsLoc = shader.getUniformLocation("coeffs");
GLES20.glUniform1i(shader.getUniformLocation("oesTex"), 0);
GlUtil.checkNoGLES2Error("Initialize fragment shader uniform values.");
// Initialize vertex shader attributes.
shader.setVertexAttribArray("in_pos", 2, DEVICE_RECTANGLE);
// If the width is not a multiple of 4 pixels, the texture
// will be scaled up slightly and clipped at the right border.
shader.setVertexAttribArray("in_tc", 2, TEXTURE_RECTANGLE);
}
public void convert(ByteBuffer buf, int width, int height, int stride, int srcTextureId,
float[] transformMatrix) {
threadChecker.checkIsOnValidThread();
if (released) {
throw new IllegalStateException("YuvConverter.convert called on released object");
}
// We draw into a buffer laid out like
//
// +---------+
// | |
// | Y |
// | |
// | |
// +----+----+
// | U | V |
// | | |
// +----+----+
//
// In memory, we use the same stride for all of Y, U and V. The
// U data starts at offset |height| * |stride| from the Y data,
// and the V data starts at offset |stride/2| from the U
// data, with rows of U and V data alternating.
//
// Now, it would have made sense to allocate a pixel buffer with
// a single byte per pixel (EGL10.EGL_COLOR_BUFFER_TYPE,
// EGL10.EGL_LUMINANCE_BUFFER), but that seems to be
// unsupported by devices. So do the following hack: Allocate an
// RGBA buffer, of width |stride|/4. To render each of these
// large pixels, sample the texture at 4 different x coordinates
// and store the results in the four components.
//
// Since the V data needs to start on a boundary of such a
// larger pixel, it is not sufficient that |stride| is even, it
// has to be a multiple of 8 pixels.
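// Example (illustrative values): width = 640, height = 480, stride = 640 gives
// frameBufferWidth = stride / 4 = 160 RGBA pixels per row, uv_height = 240,
// total_height = 720, and a required capacity of stride * total_height = 460800 bytes.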
if (stride % 8 != 0) {
throw new IllegalArgumentException("Invalid stride, must be a multiple of 8");
}
if (stride < width) {
throw new IllegalArgumentException("Invalid stride, must >= width");
}
int y_width = (width + 3) / 4;
int uv_width = (width + 7) / 8;
int uv_height = (height + 1) / 2;
int total_height = height + uv_height;
int size = stride * total_height;
if (buf.capacity() < size) {
throw new IllegalArgumentException("YuvConverter.convert called with too small buffer");
}
// Produce a frame buffer starting at top-left corner, not
// bottom-left.
transformMatrix =
RendererCommon.multiplyMatrices(transformMatrix, RendererCommon.verticalFlipMatrix());
final int frameBufferWidth = stride / 4;
final int frameBufferHeight = total_height;
textureFrameBuffer.setSize(frameBufferWidth, frameBufferHeight);
// Bind our framebuffer.
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, textureFrameBuffer.getFrameBufferId());
GlUtil.checkNoGLES2Error("glBindFramebuffer");
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, srcTextureId);
GLES20.glUniformMatrix4fv(texMatrixLoc, 1, false, transformMatrix, 0);
// Draw Y
GLES20.glViewport(0, 0, y_width, height);
// Matrix * (1;0;0;0) / width. Note that opengl uses column major order.
GLES20.glUniform2f(xUnitLoc, transformMatrix[0] / width, transformMatrix[1] / width);
// Y'UV444 to RGB888, see
// https://en.wikipedia.org/wiki/YUV#Y.27UV444_to_RGB888_conversion.
// We use the ITU-R coefficients for U and V.
GLES20.glUniform4f(coeffsLoc, 0.299f, 0.587f, 0.114f, 0.0f);
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
// Draw U
GLES20.glViewport(0, height, uv_width, uv_height);
// Matrix * (1;0;0;0) / (width / 2). Note that opengl uses column major order.
GLES20.glUniform2f(
xUnitLoc, 2.0f * transformMatrix[0] / width, 2.0f * transformMatrix[1] / width);
GLES20.glUniform4f(coeffsLoc, -0.169f, -0.331f, 0.499f, 0.5f);
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
// Draw V
GLES20.glViewport(stride / 8, height, uv_width, uv_height);
GLES20.glUniform4f(coeffsLoc, 0.499f, -0.418f, -0.0813f, 0.5f);
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
GLES20.glReadPixels(
0, 0, frameBufferWidth, frameBufferHeight, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, buf);
GlUtil.checkNoGLES2Error("YuvConverter.convert");
// Restore normal framebuffer.
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
// Unbind texture. Reportedly needed on some devices to get
// the texture updated from the camera.
GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, 0);
}
public void release() {
threadChecker.checkIsOnValidThread();
released = true;
shader.release();
textureFrameBuffer.release();
}
}