Implement org.webrtc.VideoEncoder using the android MediaCodec.

BUG=webrtc:7760

Change-Id: I22134fe616d5c5b77148c80f01f1ea1119ae786c
Reviewed-on: https://chromium-review.googlesource.com/526074
Reviewed-by: Sami Kalliomäki <sakal@webrtc.org>
Reviewed-by: Peter Thatcher <pthatcher@webrtc.org>
Commit-Queue: Bjorn Mellem <mellem@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#18573}
This commit is contained in:
Bjorn Mellem
2017-06-12 09:21:03 -07:00
committed by Commit Bot
parent 7be7883a01
commit 5c4eebb62b
9 changed files with 748 additions and 14 deletions

View File

@ -193,6 +193,7 @@ android_library("libjingle_peerconnection_java") {
"api/org/webrtc/SurfaceViewRenderer.java", "api/org/webrtc/SurfaceViewRenderer.java",
"api/org/webrtc/VideoCapturer.java", "api/org/webrtc/VideoCapturer.java",
"api/org/webrtc/VideoCodecInfo.java", "api/org/webrtc/VideoCodecInfo.java",
"api/org/webrtc/VideoCodecStatus.java",
"api/org/webrtc/VideoDecoder.java", "api/org/webrtc/VideoDecoder.java",
"api/org/webrtc/VideoEncoder.java", "api/org/webrtc/VideoEncoder.java",
"api/org/webrtc/VideoFileRenderer.java", "api/org/webrtc/VideoFileRenderer.java",
@ -201,12 +202,17 @@ android_library("libjingle_peerconnection_java") {
"api/org/webrtc/VideoSource.java", "api/org/webrtc/VideoSource.java",
"api/org/webrtc/VideoTrack.java", "api/org/webrtc/VideoTrack.java",
"src/java/org/webrtc/AndroidVideoTrackSourceObserver.java", "src/java/org/webrtc/AndroidVideoTrackSourceObserver.java",
"src/java/org/webrtc/BitrateAdjuster.java",
"src/java/org/webrtc/BaseBitrateAdjuster.java",
"src/java/org/webrtc/Camera1Session.java", "src/java/org/webrtc/Camera1Session.java",
"src/java/org/webrtc/Camera2Session.java", "src/java/org/webrtc/Camera2Session.java",
"src/java/org/webrtc/CameraCapturer.java", "src/java/org/webrtc/CameraCapturer.java",
"src/java/org/webrtc/CameraSession.java", "src/java/org/webrtc/CameraSession.java",
"src/java/org/webrtc/DynamicBitrateAdjuster.java",
"src/java/org/webrtc/EglBase10.java", "src/java/org/webrtc/EglBase10.java",
"src/java/org/webrtc/EglBase14.java", "src/java/org/webrtc/EglBase14.java",
"src/java/org/webrtc/FramerateBitrateAdjuster.java",
"src/java/org/webrtc/HardwareVideoEncoder.java",
"src/java/org/webrtc/Histogram.java", "src/java/org/webrtc/Histogram.java",
"src/java/org/webrtc/YuvConverter.java", "src/java/org/webrtc/YuvConverter.java",
] ]

View File

@ -46,11 +46,11 @@ public class EncodedImage {
this.qp = qp; this.qp = qp;
} }
public Builder builder() { public static Builder builder() {
return new Builder(); return new Builder();
} }
public class Builder { public static class Builder {
private ByteBuffer buffer; private ByteBuffer buffer;
private int encodedWidth; private int encodedWidth;
private int encodedHeight; private int encodedHeight;

View File

@ -0,0 +1,41 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
/**
* Status codes reported by video encoding/decoding components. This should be kept in sync with
* video_error_codes.h.
*/
public enum VideoCodecStatus {
  REQUEST_SLI(2),
  NO_OUTPUT(1),
  OK(0),
  ERROR(-1),
  LEVEL_EXCEEDED(-2),
  MEMORY(-3),
  ERR_PARAMETER(-4),
  ERR_SIZE(-5),
  TIMEOUT(-6),
  UNINITIALIZED(-7),
  ERR_REQUEST_SLI(-12),
  FALLBACK_SOFTWARE(-13),
  TARGET_BITRATE_OVERSHOOT(-14);

  // Numeric code matching the corresponding WEBRTC_VIDEO_CODEC_* constant in
  // video_error_codes.h.
  private final int value;

  VideoCodecStatus(int value) {
    this.value = value;
  }

  /** Returns the numeric status code understood by the native layer. */
  public int getNumber() {
    return value;
  }
}

View File

@ -18,9 +18,17 @@ public interface VideoEncoder {
/** Settings passed to the encoder by WebRTC. */ /** Settings passed to the encoder by WebRTC. */
public class Settings { public class Settings {
public final int numberOfCores; public final int numberOfCores;
public final int width;
public final int height;
public final int startBitrate; // Kilobits per second.
public final int maxFramerate;
public Settings(int numberOfCores) { public Settings(int numberOfCores, int width, int height, int startBitrate, int maxFramerate) {
this.numberOfCores = numberOfCores; this.numberOfCores = numberOfCores;
this.width = width;
this.height = height;
this.startBitrate = startBitrate;
this.maxFramerate = maxFramerate;
} }
} }
@ -49,23 +57,23 @@ public interface VideoEncoder {
*/ */
public class BitrateAllocation { public class BitrateAllocation {
// First index is the spatial layer and second the temporal layer. // First index is the spatial layer and second the temporal layer.
public final long[][] bitratesBbs; public final int[][] bitratesBbs;
/** /**
* Initializes the allocation with a two dimensional array of bitrates. The first index of the * Initializes the allocation with a two dimensional array of bitrates. The first index of the
* array is the spatial layer and the second index in the temporal layer. * array is the spatial layer and the second index in the temporal layer.
*/ */
public BitrateAllocation(long[][] bitratesBbs) { public BitrateAllocation(int[][] bitratesBbs) {
this.bitratesBbs = bitratesBbs; this.bitratesBbs = bitratesBbs;
} }
/** /**
* Gets the total bitrate allocated for all layers. * Gets the total bitrate allocated for all layers.
*/ */
public long getSum() { public int getSum() {
long sum = 0; int sum = 0;
for (long[] spatialLayer : bitratesBbs) { for (int[] spatialLayer : bitratesBbs) {
for (long bitrate : spatialLayer) { for (int bitrate : spatialLayer) {
sum += bitrate; sum += bitrate;
} }
} }
@ -101,24 +109,24 @@ public interface VideoEncoder {
/** /**
* Initializes the encoding process. Call before any calls to encode. * Initializes the encoding process. Call before any calls to encode.
*/ */
void initEncode(Settings settings, Callback encodeCallback); VideoCodecStatus initEncode(Settings settings, Callback encodeCallback);
/** /**
* Releases the encoder. No more calls to encode will be made after this call. * Releases the encoder. No more calls to encode will be made after this call.
*/ */
void release(); VideoCodecStatus release();
/** /**
* Requests the encoder to encode a frame. * Requests the encoder to encode a frame.
*/ */
void encode(VideoFrame frame, EncodeInfo info); VideoCodecStatus encode(VideoFrame frame, EncodeInfo info);
/** /**
* Informs the encoder of the packet loss and the round-trip time of the network. * Informs the encoder of the packet loss and the round-trip time of the network.
* *
* @param packetLoss How many packets are lost on average per 255 packets. * @param packetLoss How many packets are lost on average per 255 packets.
* @param roundTripTimeMs Round-trip time of the network in milliseconds. * @param roundTripTimeMs Round-trip time of the network in milliseconds.
*/ */
void setChannelParameters(short packetLoss, long roundTripTimeMs); VideoCodecStatus setChannelParameters(short packetLoss, long roundTripTimeMs);
/** Sets the bitrate allocation and the target framerate for the encoder. */ /** Sets the bitrate allocation and the target framerate for the encoder. */
void setRateAllocation(BitrateAllocation allocation, long framerate); VideoCodecStatus setRateAllocation(BitrateAllocation allocation, int framerate);
/** Any encoder that wants to use WebRTC provided quality scaler must implement this method. */ /** Any encoder that wants to use WebRTC provided quality scaler must implement this method. */
ScalingSettings getScalingSettings(); ScalingSettings getScalingSettings();
/** Should return a descriptive name for the implementation. */ /** Should return a descriptive name for the implementation. */

View File

@ -0,0 +1,38 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
/** BitrateAdjuster that tracks bitrate and framerate but does not adjust them. */
class BaseBitrateAdjuster implements BitrateAdjuster {
  // Most recently requested bitrate, in bits per second. Zero until setTargets is called.
  // Protected so that subclasses (e.g. DynamicBitrateAdjuster) can read and rescale it.
  protected int targetBitrateBps = 0;
  // Most recently requested framerate, in frames per second. Zero until setTargets is called.
  protected int targetFps = 0;

  /** Stores the targets verbatim; this base implementation applies no adjustment. */
  @Override
  public void setTargets(int targetBitrateBps, int targetFps) {
    this.targetBitrateBps = targetBitrateBps;
    this.targetFps = targetFps;
  }

  /** Ignored; this adjuster does not react to encoder output. */
  @Override
  public void reportEncodedFrame(int size) {
    // No op.
  }

  /** Returns the target bitrate unchanged. */
  @Override
  public int getAdjustedBitrateBps() {
    return targetBitrateBps;
  }

  /** Returns the target framerate unchanged. */
  @Override
  public int getAdjustedFramerate() {
    return targetFps;
  }
}

View File

@ -0,0 +1,31 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
/** Object that adjusts the bitrate of a hardware codec. */
interface BitrateAdjuster {
  /**
   * Sets the target bitrate in bits per second and framerate in frames per second.
   */
  void setTargets(int targetBitrateBps, int targetFps);

  /**
   * Reports that a frame of the given size (in bytes) has been encoded. Implementations may use
   * this feedback to revise the values returned by {@link #getAdjustedBitrateBps()} and
   * {@link #getAdjustedFramerate()}.
   */
  void reportEncodedFrame(int size);

  /** Gets the current (possibly adjusted) bitrate in bits per second. */
  int getAdjustedBitrateBps();

  /** Gets the current (possibly adjusted) framerate in frames per second. */
  int getAdjustedFramerate();
}

View File

@ -0,0 +1,95 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
/**
* BitrateAdjuster that tracks the bandwidth produced by an encoder and dynamically adjusts the
* bitrate. Used for hardware codecs that pay attention to framerate but still deviate from the
* target bitrate by unacceptable margins.
*/
class DynamicBitrateAdjuster extends BaseBitrateAdjuster {
  // Change the bitrate at most once every three seconds.
  private static final double BITRATE_ADJUSTMENT_SEC = 3.0;
  // Maximum bitrate adjustment scale - no more than 4 times.
  private static final double BITRATE_ADJUSTMENT_MAX_SCALE = 4;
  // Amount of adjustment steps to reach maximum scale.
  private static final int BITRATE_ADJUSTMENT_STEPS = 20;

  private static final double BITS_PER_BYTE = 8.0;

  // How far the codec has deviated above (or below) the target bitrate (tracked in bytes).
  private double deviationBytes = 0;
  // Estimated time since the last adjustment. Advanced by 1000/targetFps per reported frame, so
  // it is frame-count-derived rather than wall-clock time.
  private double timeSinceLastAdjustmentMs = 0;
  // Current adjustment exponent. The applied scale is
  // BITRATE_ADJUSTMENT_MAX_SCALE ^ (bitrateAdjustmentScaleExp / BITRATE_ADJUSTMENT_STEPS),
  // clamped to [-BITRATE_ADJUSTMENT_STEPS, BITRATE_ADJUSTMENT_STEPS].
  private int bitrateAdjustmentScaleExp = 0;

  /**
   * Updates the targets, proportionally shrinking the accumulated deviation when the bitrate
   * target drops so that stale excess does not dominate future adjustments.
   */
  @Override
  public void setTargets(int targetBitrateBps, int targetFps) {
    if (this.targetBitrateBps > 0 && targetBitrateBps < this.targetBitrateBps) {
      // Rescale the accumulator level if the accumulator max decreases
      deviationBytes = deviationBytes * targetBitrateBps / this.targetBitrateBps;
    }
    super.setTargets(targetBitrateBps, targetFps);
  }

  /**
   * Accumulates the difference between actual and expected encoded-frame sizes and, at most once
   * per BITRATE_ADJUSTMENT_SEC, steps the adjustment exponent up or down accordingly.
   */
  @Override
  public void reportEncodedFrame(int size) {
    if (targetFps == 0) {
      return;
    }

    // Accumulate the difference between actual and expected frame sizes.
    double expectedBytesPerFrame = (targetBitrateBps / BITS_PER_BYTE) / targetFps;
    deviationBytes += (size - expectedBytesPerFrame);
    timeSinceLastAdjustmentMs += 1000.0 / targetFps;

    // Adjust the bitrate when the encoder accumulates one second's worth of data in excess or
    // shortfall of the target.
    double deviationThresholdBytes = targetBitrateBps / BITS_PER_BYTE;

    // Cap the deviation, i.e., don't let it grow beyond some level to avoid using too old data for
    // bitrate adjustment. This also prevents taking more than 3 "steps" in a given 3-second cycle.
    double deviationCap = BITRATE_ADJUSTMENT_SEC * deviationThresholdBytes;
    deviationBytes = Math.min(deviationBytes, deviationCap);
    deviationBytes = Math.max(deviationBytes, -deviationCap);

    // Do bitrate adjustment every 3 seconds if actual encoder bitrate deviates too much
    // from the target value.
    if (timeSinceLastAdjustmentMs <= 1000 * BITRATE_ADJUSTMENT_SEC) {
      return;
    }

    if (deviationBytes > deviationThresholdBytes) {
      // Encoder generates too high bitrate - need to reduce the scale.
      int bitrateAdjustmentInc = (int) (deviationBytes / deviationThresholdBytes + 0.5);
      bitrateAdjustmentScaleExp -= bitrateAdjustmentInc;
      // Don't let the adjustment scale drop below -BITRATE_ADJUSTMENT_STEPS.
      // This sets a minimum exponent of -1 (bitrateAdjustmentScaleExp / BITRATE_ADJUSTMENT_STEPS).
      bitrateAdjustmentScaleExp = Math.max(bitrateAdjustmentScaleExp, -BITRATE_ADJUSTMENT_STEPS);
      deviationBytes = deviationThresholdBytes;
    } else if (deviationBytes < -deviationThresholdBytes) {
      // Encoder generates too low bitrate - need to increase the scale.
      int bitrateAdjustmentInc = (int) (-deviationBytes / deviationThresholdBytes + 0.5);
      bitrateAdjustmentScaleExp += bitrateAdjustmentInc;
      // Don't let the adjustment scale exceed BITRATE_ADJUSTMENT_STEPS.
      // This sets a maximum exponent of 1 (bitrateAdjustmentScaleExp / BITRATE_ADJUSTMENT_STEPS).
      bitrateAdjustmentScaleExp = Math.min(bitrateAdjustmentScaleExp, BITRATE_ADJUSTMENT_STEPS);
      deviationBytes = -deviationThresholdBytes;
    }
    timeSinceLastAdjustmentMs = 0;
  }

  /** Returns the target bitrate scaled by the exponentially-stepped adjustment factor. */
  @Override
  public int getAdjustedBitrateBps() {
    return (int) (targetBitrateBps
        * Math.pow(BITRATE_ADJUSTMENT_MAX_SCALE,
              (double) bitrateAdjustmentScaleExp / BITRATE_ADJUSTMENT_STEPS));
  }
}

View File

@ -0,0 +1,30 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
/**
* BitrateAdjuster that adjusts the bitrate to compensate for changes in the framerate. Used with
* hardware codecs that assume the framerate never changes.
*/
class FramerateBitrateAdjuster extends BaseBitrateAdjuster {
  // Framerate the codec is assumed to run at. The requested framerate is pinned to this value and
  // the bitrate is rescaled instead, keeping the per-frame byte budget correct.
  private static final int INITIAL_FPS = 30;

  /**
   * Stores the targets, compensating the bitrate for the difference between the requested
   * framerate and the fixed framerate the codec actually runs at.
   */
  @Override
  public void setTargets(int targetBitrateBps, int targetFps) {
    if (this.targetFps == 0) {
      // Framerate-based bitrate adjustment always initializes to the same framerate.
      targetFps = INITIAL_FPS;
    }
    super.setTargets(targetBitrateBps, targetFps);

    if (this.targetFps == 0) {
      // A zero framerate can still be requested after initialization; skip the scaling rather
      // than divide by zero. The stored bitrate is left unscaled.
      return;
    }

    // Multiply before dividing, in floating point. The previous integer expression
    // `INITIAL_FPS / targetFps` truncated to 0 for framerates above 30 (zeroing the bitrate) and
    // to 1 for framerates in (15, 30] (skipping the adjustment entirely).
    this.targetBitrateBps = (int) (this.targetBitrateBps * (double) INITIAL_FPS / this.targetFps);
  }
}

View File

@ -0,0 +1,485 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import android.annotation.TargetApi;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;
import android.os.Bundle;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Deque;
import java.util.Set;
import java.util.concurrent.LinkedBlockingDeque;
/** Android hardware video encoder. */
@TargetApi(19)
@SuppressWarnings("deprecation") // Cannot support API level 19 without using deprecated methods.
class HardwareVideoEncoder implements VideoEncoder {
  private static final String TAG = "HardwareVideoEncoder";

  // Bitrate modes - should be in sync with OMX_VIDEO_CONTROLRATETYPE defined
  // in OMX_Video.h
  private static final int VIDEO_ControlRateConstant = 2;
  // Key associated with the bitrate control mode value (above). Not present as a MediaFormat
  // constant until API level 21.
  private static final String KEY_BITRATE_MODE = "bitrate-mode";
  // NV12 color format supported by QCOM codec, but not declared in MediaCodec -
  // see /hardware/qcom/media/mm-core/inc/OMX_QCOMExtns.h
  private static final int COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m = 0x7FA30C04;

  private static final int MAX_VIDEO_FRAMERATE = 30;

  // See MAX_ENCODER_Q_SIZE in androidmediaencoder_jni.cc.
  private static final int MAX_ENCODER_Q_SIZE = 2;

  private static final int MEDIA_CODEC_RELEASE_TIMEOUT_MS = 5000;
  private static final int DEQUEUE_OUTPUT_BUFFER_TIMEOUT_US = 100000;

  // TODO(mellem): Maybe move mime types to the factory or a common location.
  private static final String VP8_MIME_TYPE = "video/x-vnd.on2.vp8";
  private static final String VP9_MIME_TYPE = "video/x-vnd.on2.vp9";
  private static final String H264_MIME_TYPE = "video/avc";
  private static final Set<String> SUPPORTED_MIME_TYPES =
      new HashSet<>(Arrays.asList(VP8_MIME_TYPE, VP9_MIME_TYPE, H264_MIME_TYPE));

  private final String codecName;
  private final String mimeType;
  private final int colorFormat;
  private final ColorFormat inputColorFormat;
  // Base interval for generating key frames.
  private final int keyFrameIntervalSec;
  // Interval at which to force a key frame. Used to reduce color distortions caused by some
  // Qualcomm video encoders.
  private final long forcedKeyFrameMs;
  // Presentation timestamp of the last requested (or forced) key frame.
  private long lastKeyFrameMs;

  private final BitrateAdjuster bitrateAdjuster;
  // Bitrate currently configured on the MediaCodec, as reported by the bitrateAdjuster.
  private int adjustedBitrate;

  // A queue of EncodedImage.Builders that correspond to frames in the codec. These builders are
  // pre-populated with all the information that can't be sent through MediaCodec. Offered on the
  // encode thread and polled on the output thread, hence the concurrent deque implementation.
  private final Deque<EncodedImage.Builder> outputBuilders;

  // Thread that delivers encoded frames to the user callback. Null until initEncode succeeds.
  private Thread outputThread;

  // Whether the encoder is running. Volatile so that the output thread can watch this value and
  // exit when the encoder stops.
  private volatile boolean running = false;
  // Any exception thrown during shutdown. The output thread releases the MediaCodec and uses this
  // value to send exceptions thrown during release back to the encoder thread.
  private volatile Exception shutdownException = null;

  private MediaCodec codec;
  private Callback callback;

  // Current encode resolution; updated by initEncodeInternal and compared against incoming frames
  // to detect resolution changes.
  private int width;
  private int height;

  // Contents of the last observed config frame output by the MediaCodec. Used by H.264.
  private ByteBuffer configBuffer = null;

  /**
   * Creates a new HardwareVideoEncoder with the given codecName, mimeType, colorFormat, key frame
   * intervals, and bitrateAdjuster.
   *
   * @param codecName the hardware codec implementation to use
   * @param mimeType MIME type of the codec's output; must be one of "video/x-vnd.on2.vp8",
   *     "video/x-vnd.on2.vp9", or "video/avc"
   * @param colorFormat color format used by the input buffer
   * @param keyFrameIntervalSec interval in seconds between key frames; used to initialize the codec
   * @param forceKeyFrameIntervalMs interval at which to force a key frame if one is not requested;
   *     used to reduce distortion caused by some codec implementations
   * @param bitrateAdjuster algorithm used to correct codec implementations that do not produce the
   *     desired bitrates
   * @throws IllegalArgumentException if either mimeType or colorFormat is unsupported
   */
  public HardwareVideoEncoder(String codecName, String mimeType, int colorFormat,
      int keyFrameIntervalSec, int forceKeyFrameIntervalMs, BitrateAdjuster bitrateAdjuster) {
    if (!SUPPORTED_MIME_TYPES.contains(mimeType)) {
      throw new IllegalArgumentException("Unsupported MIME type: " + mimeType);
    }
    this.codecName = codecName;
    this.mimeType = mimeType;
    this.colorFormat = colorFormat;
    // Throws IllegalArgumentException if colorFormat is unsupported.
    this.inputColorFormat = ColorFormat.valueOf(colorFormat);
    this.keyFrameIntervalSec = keyFrameIntervalSec;
    this.forcedKeyFrameMs = forceKeyFrameIntervalMs;
    this.bitrateAdjuster = bitrateAdjuster;
    this.outputBuilders = new LinkedBlockingDeque<>();
  }

  @Override
  public VideoCodecStatus initEncode(Settings settings, Callback callback) {
    return initEncodeInternal(
        settings.width, settings.height, settings.startBitrate, settings.maxFramerate, callback);
  }

  /**
   * Creates, configures, and starts the MediaCodec, then starts the output-delivery thread.
   * A bitrateKbps/fps of zero leaves the bitrate adjuster's current targets unchanged (used when
   * restarting the codec for a resolution change).
   */
  private VideoCodecStatus initEncodeInternal(
      int width, int height, int bitrateKbps, int fps, Callback callback) {
    Logging.d(
        TAG, "initEncode: " + width + " x " + height + ". @ " + bitrateKbps + "kbps. Fps: " + fps);
    this.width = width;
    this.height = height;
    if (bitrateKbps != 0 && fps != 0) {
      bitrateAdjuster.setTargets(bitrateKbps * 1000, fps);
    }
    adjustedBitrate = bitrateAdjuster.getAdjustedBitrateBps();

    this.callback = callback;

    lastKeyFrameMs = -1;

    codec = createCodecByName(codecName);
    if (codec == null) {
      Logging.e(TAG, "Cannot create media encoder " + codecName);
      return VideoCodecStatus.ERROR;
    }
    try {
      MediaFormat format = MediaFormat.createVideoFormat(mimeType, width, height);
      format.setInteger(MediaFormat.KEY_BIT_RATE, adjustedBitrate);
      format.setInteger(KEY_BITRATE_MODE, VIDEO_ControlRateConstant);
      format.setInteger(MediaFormat.KEY_COLOR_FORMAT, colorFormat);
      format.setInteger(MediaFormat.KEY_FRAME_RATE, bitrateAdjuster.getAdjustedFramerate());
      format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, keyFrameIntervalSec);
      Logging.d(TAG, "Format: " + format);
      codec.configure(format, null /* surface */, null /* crypto */, MediaCodec.CONFIGURE_FLAG_ENCODE);
      codec.start();
    } catch (IllegalStateException e) {
      Logging.e(TAG, "initEncode failed", e);
      release();
      return VideoCodecStatus.ERROR;
    }

    running = true;
    outputThread = createOutputThread();
    outputThread.start();

    return VideoCodecStatus.OK;
  }

  @Override
  public VideoCodecStatus release() {
    try {
      if (outputThread == null) {
        // The output thread was never started -- either the encoder was never initialized, or
        // initEncodeInternal failed part-way through and is calling release() itself. Joining a
        // null thread would throw NullPointerException, and there is no thread to release the
        // codec, so release it on the calling thread instead.
        if (codec != null) {
          releaseCodecOnOutputThread();
          if (shutdownException != null) {
            Logging.e(TAG, "Media encoder release exception", shutdownException);
            return VideoCodecStatus.ERROR;
          }
        }
        return VideoCodecStatus.OK;
      }
      // The outputThread actually stops and releases the codec once running is false.
      running = false;
      if (!ThreadUtils.joinUninterruptibly(outputThread, MEDIA_CODEC_RELEASE_TIMEOUT_MS)) {
        Logging.e(TAG, "Media encoder release timeout");
        return VideoCodecStatus.TIMEOUT;
      }
      if (shutdownException != null) {
        // Log the exception and turn it into an error.
        Logging.e(TAG, "Media encoder release exception", shutdownException);
        return VideoCodecStatus.ERROR;
      }
    } finally {
      codec = null;
      outputThread = null;
      outputBuilders.clear();
    }
    return VideoCodecStatus.OK;
  }

  @Override
  public VideoCodecStatus encode(VideoFrame videoFrame, EncodeInfo encodeInfo) {
    if (codec == null) {
      return VideoCodecStatus.UNINITIALIZED;
    }

    // If input resolution changed, restart the codec with the new resolution.
    int frameWidth = videoFrame.getWidth();
    int frameHeight = videoFrame.getHeight();
    if (frameWidth != width || frameHeight != height) {
      VideoCodecStatus status = resetCodec(frameWidth, frameHeight);
      if (status != VideoCodecStatus.OK) {
        return status;
      }
    }

    // No timeout. Don't block for an input buffer, drop frames if the encoder falls behind.
    int index;
    try {
      index = codec.dequeueInputBuffer(0 /* timeout */);
    } catch (IllegalStateException e) {
      Logging.e(TAG, "dequeueInputBuffer failed", e);
      return VideoCodecStatus.FALLBACK_SOFTWARE;
    }

    if (index == -1) {
      // Encoder is falling behind. No input buffers available. Drop the frame.
      Logging.e(TAG, "Dropped frame, no input buffers available");
      return VideoCodecStatus.OK; // See webrtc bug 2887.
    }
    if (outputBuilders.size() > MAX_ENCODER_Q_SIZE) {
      // Too many frames in the encoder. Drop this frame.
      Logging.e(TAG, "Dropped frame, encoder queue full");
      return VideoCodecStatus.OK; // See webrtc bug 2887.
    }

    // TODO(mellem): Add support for input surfaces and textures.
    ByteBuffer buffer;
    try {
      buffer = codec.getInputBuffers()[index];
    } catch (IllegalStateException e) {
      Logging.e(TAG, "getInputBuffers failed", e);
      return VideoCodecStatus.FALLBACK_SOFTWARE;
    }
    VideoFrame.I420Buffer i420 = videoFrame.getBuffer().toI420();
    inputColorFormat.fillBufferFromI420(buffer, i420);

    boolean requestedKeyFrame = false;
    for (EncodedImage.FrameType frameType : encodeInfo.frameTypes) {
      if (frameType == EncodedImage.FrameType.VideoFrameKey) {
        requestedKeyFrame = true;
      }
    }

    // Frame timestamp rounded to the nearest microsecond and millisecond.
    long presentationTimestampUs = (videoFrame.getTimestampNs() + 500) / 1000;
    long presentationTimestampMs = (presentationTimestampUs + 500) / 1000;
    if (requestedKeyFrame || shouldForceKeyFrame(presentationTimestampMs)) {
      requestKeyFrame(presentationTimestampMs);
    }

    // Number of bytes in the video buffer. Y channel is sampled at one byte per pixel; U and V are
    // subsampled at one byte per four pixels.
    int bufferSize = videoFrame.getBuffer().getHeight() * videoFrame.getBuffer().getWidth() * 3 / 2;
    // Queue the builder before queueing the input buffer so the output thread always finds a
    // matching builder for every buffer the codec holds.
    EncodedImage.Builder builder = EncodedImage.builder()
                                       .setTimeStampMs(presentationTimestampMs)
                                       .setCaptureTimeMs(presentationTimestampMs)
                                       .setCompleteFrame(true)
                                       .setEncodedWidth(videoFrame.getWidth())
                                       .setEncodedHeight(videoFrame.getHeight())
                                       .setRotation(videoFrame.getRotation());
    outputBuilders.offer(builder);
    try {
      codec.queueInputBuffer(
          index, 0 /* offset */, bufferSize, presentationTimestampUs, 0 /* flags */);
    } catch (IllegalStateException e) {
      Logging.e(TAG, "queueInputBuffer failed", e);
      // Keep the output builders in sync with buffers in the codec.
      outputBuilders.pollLast();
      // IllegalStateException thrown when the codec is in the wrong state.
      return VideoCodecStatus.FALLBACK_SOFTWARE;
    }
    return VideoCodecStatus.OK;
  }

  @Override
  public VideoCodecStatus setChannelParameters(short packetLoss, long roundTripTimeMs) {
    // No op.
    return VideoCodecStatus.OK;
  }

  @Override
  public VideoCodecStatus setRateAllocation(BitrateAllocation bitrateAllocation, int framerate) {
    if (framerate > MAX_VIDEO_FRAMERATE) {
      framerate = MAX_VIDEO_FRAMERATE;
    }
    bitrateAdjuster.setTargets(bitrateAllocation.getSum(), framerate);
    return updateBitrate();
  }

  @Override
  public ScalingSettings getScalingSettings() {
    // TODO(mellem): Implement scaling settings.
    return null;
  }

  @Override
  public String getImplementationName() {
    return "HardwareVideoEncoder: " + codecName;
  }

  /** Releases and re-initializes the codec at the new resolution, preserving bitrate targets. */
  private VideoCodecStatus resetCodec(int newWidth, int newHeight) {
    VideoCodecStatus status = release();
    if (status != VideoCodecStatus.OK) {
      return status;
    }
    // Zero bitrate and framerate indicate not to change the targets.
    return initEncodeInternal(newWidth, newHeight, 0, 0, callback);
  }

  /** Returns true if the periodic forced-key-frame interval has elapsed. */
  private boolean shouldForceKeyFrame(long presentationTimestampMs) {
    return forcedKeyFrameMs > 0 && presentationTimestampMs > lastKeyFrameMs + forcedKeyFrameMs;
  }

  private void requestKeyFrame(long presentationTimestampMs) {
    // Ideally MediaCodec would honor BUFFER_FLAG_SYNC_FRAME so we could
    // indicate this in queueInputBuffer() below and guarantee _this_ frame
    // be encoded as a key frame, but sadly that flag is ignored.  Instead,
    // we request a key frame "soon".
    try {
      Bundle b = new Bundle();
      b.putInt(MediaCodec.PARAMETER_KEY_REQUEST_SYNC_FRAME, 0);
      codec.setParameters(b);
    } catch (IllegalStateException e) {
      Logging.e(TAG, "requestKeyFrame failed", e);
      // Drop the request.  The output thread delivers frames as usual.
      return;
    }
    lastKeyFrameMs = presentationTimestampMs;
  }

  private Thread createOutputThread() {
    return new Thread() {
      @Override
      public void run() {
        // Drain encoder output until release() clears the running flag, then tear the codec down
        // on this thread.
        while (running) {
          deliverEncodedImage();
        }
        releaseCodecOnOutputThread();
      }
    };
  }

  /**
   * Dequeues one output buffer (waiting up to DEQUEUE_OUTPUT_BUFFER_TIMEOUT_US), captures config
   * frames into configBuffer, and delivers encoded frames to the registered callback.
   */
  private void deliverEncodedImage() {
    try {
      MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
      int index = codec.dequeueOutputBuffer(info, DEQUEUE_OUTPUT_BUFFER_TIMEOUT_US);
      if (index < 0) {
        return;
      }

      ByteBuffer codecOutputBuffer = codec.getOutputBuffers()[index];
      codecOutputBuffer.position(info.offset);
      codecOutputBuffer.limit(info.offset + info.size);

      if ((info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
        Logging.d(TAG, "Config frame generated. Offset: " + info.offset + ". Size: " + info.size);
        configBuffer = ByteBuffer.allocateDirect(info.size);
        configBuffer.put(codecOutputBuffer);
      } else {
        bitrateAdjuster.reportEncodedFrame(info.size);
        if (adjustedBitrate != bitrateAdjuster.getAdjustedBitrateBps()) {
          updateBitrate();
        }

        ByteBuffer frameBuffer;
        boolean isKeyFrame = (info.flags & MediaCodec.BUFFER_FLAG_SYNC_FRAME) != 0;
        if (isKeyFrame && mimeType.equals(H264_MIME_TYPE)) {
          // NOTE(review): assumes the codec emitted a BUFFER_FLAG_CODEC_CONFIG buffer (setting
          // configBuffer) before the first key frame -- confirm this holds for all H.264
          // encoders, otherwise this dereferences a null configBuffer.
          Logging.d(TAG,
              "Prepending config frame of size " + configBuffer.capacity()
                  + " to output buffer with offset " + info.offset + ", size " + info.size);
          // For H.264 key frame prepend SPS and PPS NALs at the start.
          frameBuffer = ByteBuffer.allocateDirect(info.size + configBuffer.capacity());
          configBuffer.rewind();
          frameBuffer.put(configBuffer);
        } else {
          frameBuffer = ByteBuffer.allocateDirect(info.size);
        }
        frameBuffer.put(codecOutputBuffer);
        frameBuffer.rewind();

        EncodedImage.FrameType frameType = EncodedImage.FrameType.VideoFrameDelta;
        if (isKeyFrame) {
          Logging.d(TAG, "Sync frame generated");
          frameType = EncodedImage.FrameType.VideoFrameKey;
        }
        // Pop the builder queued by encode() for this buffer and complete it.
        EncodedImage.Builder builder = outputBuilders.poll();
        builder.setBuffer(frameBuffer).setFrameType(frameType);
        // TODO(mellem): Set codec-specific info.
        callback.onEncodedFrame(builder.createEncodedImage(), new CodecSpecificInfo());
      }
      codec.releaseOutputBuffer(index, false);
    } catch (IllegalStateException e) {
      Logging.e(TAG, "deliverOutput failed", e);
    }
  }

  /** Stops and releases the MediaCodec, recording any release failure in shutdownException. */
  private void releaseCodecOnOutputThread() {
    Logging.d(TAG, "Releasing MediaCodec on output thread");
    try {
      codec.stop();
    } catch (Exception e) {
      Logging.e(TAG, "Media encoder stop failed", e);
    }
    try {
      codec.release();
    } catch (Exception e) {
      Logging.e(TAG, "Media encoder release failed", e);
      // Propagate exceptions caught during release back to the main thread.
      shutdownException = e;
    }
    Logging.d(TAG, "Release on output thread done");
  }

  /** Pushes the bitrate adjuster's current bitrate to the running MediaCodec. */
  private VideoCodecStatus updateBitrate() {
    adjustedBitrate = bitrateAdjuster.getAdjustedBitrateBps();
    try {
      Bundle params = new Bundle();
      params.putInt(MediaCodec.PARAMETER_KEY_VIDEO_BITRATE, adjustedBitrate);
      codec.setParameters(params);
      return VideoCodecStatus.OK;
    } catch (IllegalStateException e) {
      Logging.e(TAG, "updateBitrate failed", e);
      return VideoCodecStatus.ERROR;
    }
  }

  /** Creates a MediaCodec by name, returning null (rather than throwing) on failure. */
  private static MediaCodec createCodecByName(String codecName) {
    try {
      return MediaCodec.createByCodecName(codecName);
    } catch (IOException | IllegalArgumentException e) {
      Logging.e(TAG, "createCodecByName failed", e);
      return null;
    }
  }

  /**
   * Enumeration of supported color formats used for MediaCodec's input.  Each value knows how to
   * repack an I420 frame into the layout the codec expects.
   */
  private static enum ColorFormat {
    I420 {
      @Override
      void fillBufferFromI420(ByteBuffer buffer, VideoFrame.I420Buffer i420) {
        // Planar layout: all Y bytes, then all U, then all V.
        buffer.put(i420.getDataY());
        buffer.put(i420.getDataU());
        buffer.put(i420.getDataV());
      }
    },
    NV12 {
      @Override
      void fillBufferFromI420(ByteBuffer buffer, VideoFrame.I420Buffer i420) {
        buffer.put(i420.getDataY());

        // Interleave the bytes from the U and V portions, starting with U.
        // (Removed an unused local counter that was never read or incremented.)
        ByteBuffer u = i420.getDataU();
        ByteBuffer v = i420.getDataV();
        while (u.hasRemaining() && v.hasRemaining()) {
          buffer.put(u.get());
          buffer.put(v.get());
        }
      }
    };

    abstract void fillBufferFromI420(ByteBuffer buffer, VideoFrame.I420Buffer i420);

    /**
     * Maps a MediaCodecInfo color-format constant to the matching ColorFormat.
     *
     * @throws IllegalArgumentException if the constant is not one this encoder supports
     */
    static ColorFormat valueOf(int colorFormat) {
      switch (colorFormat) {
        case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar:
          return I420;
        case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar:
        case MediaCodecInfo.CodecCapabilities.COLOR_QCOM_FormatYUV420SemiPlanar:
        case COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m:
          return NV12;
        default:
          throw new IllegalArgumentException("Unsupported colorFormat: " + colorFormat);
      }
    }
  }
}