Add texture support to HardwareVideoEncoder.
HardwareVideoEncoderFactory can now take an EglBase.Context on creation. When it does, it creates video encoders in texture mode: it uses the COLOR_FormatSurface color format and passes the EglBase.Context on to the HardwareVideoEncoder. The HardwareVideoEncoder sets up an input surface for its codec and handles incoming frames by drawing them onto that surface.

BUG=webrtc:7760
R=pthatcher@webrtc.org, sakal@webrtc.org

Review-Url: https://codereview.webrtc.org/2977153003 .
Cr-Commit-Position: refs/heads/master@{#19083}
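A minimal usage sketch of the new factory API, mirroring the tests added in this CL; the EglBase14 construction comes from testInitializeUsingTextures, while the flag values below are illustrative assumptions:

    // Sharing an EGL context switches the factory (and the encoders it
    // creates) into texture mode: COLOR_FormatSurface plus a codec input
    // surface, instead of byte-buffer input.
    EglBase14 eglBase = new EglBase14(null, EglBase.CONFIG_PLAIN);
    HardwareVideoEncoderFactory textureFactory = new HardwareVideoEncoderFactory(
        eglBase.getEglBaseContext(), /* enableIntelVp8Encoder= */ true,
        /* enableH264HighProfile= */ false);

    // Passing a null context (or using the now-deprecated two-argument
    // constructor) keeps the old byte-buffer input path.
    HardwareVideoEncoderFactory byteBufferFactory = new HardwareVideoEncoderFactory(
        null, /* enableIntelVp8Encoder= */ true, /* enableH264HighProfile= */ false);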
HardwareVideoEncoderFactory.java

@@ -55,14 +55,28 @@ public class HardwareVideoEncoderFactory implements VideoEncoderFactory {
   private static final String H264_CONSTRAINED_HIGH_3_1 =
       H264_PROFILE_CONSTRAINED_HIGH + H264_LEVEL_3_1;

+  private final EglBase14.Context sharedContext;
   private final boolean enableIntelVp8Encoder;
   private final boolean enableH264HighProfile;

-  public HardwareVideoEncoderFactory(boolean enableIntelVp8Encoder, boolean enableH264HighProfile) {
+  public HardwareVideoEncoderFactory(
+      EglBase.Context sharedContext, boolean enableIntelVp8Encoder, boolean enableH264HighProfile) {
+    // Texture mode requires EglBase14.
+    if (sharedContext instanceof EglBase14.Context) {
+      this.sharedContext = (EglBase14.Context) sharedContext;
+    } else {
+      Logging.w(TAG, "No shared EglBase.Context. Encoders will not use texture mode.");
+      this.sharedContext = null;
+    }
     this.enableIntelVp8Encoder = enableIntelVp8Encoder;
     this.enableH264HighProfile = enableH264HighProfile;
   }

+  @Deprecated
+  public HardwareVideoEncoderFactory(boolean enableIntelVp8Encoder, boolean enableH264HighProfile) {
+    this(null, enableIntelVp8Encoder, enableH264HighProfile);
+  }
+
   @Override
   public VideoEncoder createEncoder(VideoCodecInfo input) {
     VideoCodecType type = VideoCodecType.valueOf(input.name);
@@ -74,11 +88,14 @@ public class HardwareVideoEncoderFactory implements VideoEncoderFactory {

     String codecName = info.getName();
     String mime = type.mimeType();
-    int colorFormat = MediaCodecUtils.selectColorFormat(
-        MediaCodecUtils.ENCODER_COLOR_FORMATS, info.getCapabilitiesForType(mime));
+    int colorFormat = MediaCodecUtils.selectColorFormat(sharedContext == null
+            ? MediaCodecUtils.ENCODER_COLOR_FORMATS
+            : MediaCodecUtils.TEXTURE_COLOR_FORMATS,
+        info.getCapabilitiesForType(mime));

     return new HardwareVideoEncoder(codecName, type, colorFormat, getKeyFrameIntervalSec(type),
-        getForcedKeyFrameIntervalMs(type, codecName), createBitrateAdjuster(type, codecName));
+        getForcedKeyFrameIntervalMs(type, codecName), createBitrateAdjuster(type, codecName),
+        sharedContext);
   }

   @Override
@@ -127,8 +144,10 @@ public class HardwareVideoEncoderFactory implements VideoEncoderFactory {
       return false;
     }
     // Check for a supported color format.
-    if (MediaCodecUtils.selectColorFormat(
-            MediaCodecUtils.ENCODER_COLOR_FORMATS, info.getCapabilitiesForType(type.mimeType()))
+    if (MediaCodecUtils.selectColorFormat(sharedContext == null
+                ? MediaCodecUtils.ENCODER_COLOR_FORMATS
+                : MediaCodecUtils.TEXTURE_COLOR_FORMATS,
+            info.getCapabilitiesForType(type.mimeType()))
         == null) {
       return false;
     }

RendererCommon.java

@@ -273,6 +273,36 @@ public class RendererCommon {
     return matrix;
   }

+  /** Converts android.graphics.Matrix to a float[16] matrix array. */
+  public static float[] convertMatrixFromAndroidGraphicsMatrix(android.graphics.Matrix matrix) {
+    float[] values = new float[9];
+    matrix.getValues(values);
+
+    // The android.graphics.Matrix looks like this:
+    // [x1 y1 w1]
+    // [x2 y2 w2]
+    // [x3 y3 w3]
+    // We want to construct a matrix that looks like this:
+    // [x1 y1 0 w1]
+    // [x2 y2 0 w2]
+    // [ 0  0 1  0]
+    // [x3 y3 0 w3]
+    // Since it is stored in column-major order, it looks like this:
+    // [x1 x2 0 x3
+    //  y1 y2 0 y3
+    //   0  0 1  0
+    //  w1 w2 0 w3]
+    // clang-format off
+    float[] matrix4x4 = {
+        values[0 * 3 + 0], values[1 * 3 + 0], 0, values[2 * 3 + 0],
+        values[0 * 3 + 1], values[1 * 3 + 1], 0, values[2 * 3 + 1],
+        0, 0, 1, 0,
+        values[0 * 3 + 2], values[1 * 3 + 2], 0, values[2 * 3 + 2],
+    };
+    // clang-format on
+    return matrix4x4;
+  }
+
   /**
    * Calculate display size based on scaling type, video aspect ratio, and maximum display size.
    */

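A quick sanity check of the layout documented above (hypothetical values, not part of this CL): a pure scale matrix should land on the diagonal of the 4x4 result.

    // Hypothetical example: scale by (2, 3).
    android.graphics.Matrix m = new android.graphics.Matrix();
    m.setScale(2f, 3f);
    float[] gl = RendererCommon.convertMatrixFromAndroidGraphicsMatrix(m);
    // Column-major 4x4: gl[0] == 2 (x scale), gl[5] == 3 (y scale),
    // gl[10] == 1, gl[15] == 1; all off-diagonal entries are 0.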
HardwareVideoEncoderTest.java

@@ -15,6 +15,8 @@ import static org.junit.Assert.assertTrue;

 import android.annotation.TargetApi;
 import android.graphics.Matrix;
+import android.opengl.GLES11Ext;
+import android.opengl.GLES20;
 import android.support.test.filters.SmallTest;
 import android.util.Log;
 import java.nio.ByteBuffer;
@@ -44,8 +46,25 @@ public class HardwareVideoEncoderTest {
       return;
     }
     VideoEncoder encoder = factory.createEncoder(supportedCodecs[0]);
-    assertEquals(encoder.initEncode(SETTINGS, null), VideoCodecStatus.OK);
-    assertEquals(encoder.release(), VideoCodecStatus.OK);
+    assertEquals(VideoCodecStatus.OK, encoder.initEncode(SETTINGS, null));
+    assertEquals(VideoCodecStatus.OK, encoder.release());
   }

+  @Test
+  @SmallTest
+  public void testInitializeUsingTextures() {
+    EglBase14 eglBase = new EglBase14(null, EglBase.CONFIG_PLAIN);
+    HardwareVideoEncoderFactory factory = new HardwareVideoEncoderFactory(
+        eglBase.getEglBaseContext(), ENABLE_INTEL_VP8_ENCODER, ENABLE_H264_HIGH_PROFILE);
+    VideoCodecInfo[] supportedCodecs = factory.getSupportedCodecs();
+    if (supportedCodecs.length == 0) {
+      Log.w(TAG, "No hardware encoding support, skipping testInitializeUsingTextures");
+      return;
+    }
+    VideoEncoder encoder = factory.createEncoder(supportedCodecs[0]);
+    assertEquals(VideoCodecStatus.OK, encoder.initEncode(SETTINGS, null));
+    assertEquals(VideoCodecStatus.OK, encoder.release());
+    eglBase.release();
+  }
+
   @Test
@@ -93,4 +112,91 @@ public class HardwareVideoEncoderTest {

     assertEquals(encoder.release(), VideoCodecStatus.OK);
   }
+
+  @Test
+  @SmallTest
+  public void testEncodeTextures() throws InterruptedException {
+    final EglBase14 eglOesBase = new EglBase14(null, EglBase.CONFIG_PIXEL_BUFFER);
+    HardwareVideoEncoderFactory factory = new HardwareVideoEncoderFactory(
+        eglOesBase.getEglBaseContext(), ENABLE_INTEL_VP8_ENCODER, ENABLE_H264_HIGH_PROFILE);
+    VideoCodecInfo[] supportedCodecs = factory.getSupportedCodecs();
+    if (supportedCodecs.length == 0) {
+      Log.w(TAG, "No hardware encoding support, skipping testEncodeTextures");
+      return;
+    }
+
+    eglOesBase.createDummyPbufferSurface();
+    eglOesBase.makeCurrent();
+    final int oesTextureId = GlUtil.generateTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES);
+
+    VideoEncoder encoder = factory.createEncoder(supportedCodecs[0]);
+
+    final long presentationTimestampUs = 20000;
+    final CountDownLatch encodeDone = new CountDownLatch(1);
+
+    VideoEncoder.Callback callback = new VideoEncoder.Callback() {
+      @Override
+      public void onEncodedFrame(EncodedImage image, VideoEncoder.CodecSpecificInfo info) {
+        assertTrue(image.buffer.capacity() > 0);
+        assertEquals(image.encodedWidth, SETTINGS.width);
+        assertEquals(image.encodedHeight, SETTINGS.height);
+        assertEquals(image.captureTimeMs, presentationTimestampUs / 1000);
+        assertEquals(image.frameType, EncodedImage.FrameType.VideoFrameKey);
+        assertEquals(image.rotation, 0);
+        assertTrue(image.completeFrame);
+
+        encodeDone.countDown();
+      }
+    };
+
+    assertEquals(encoder.initEncode(SETTINGS, callback), VideoCodecStatus.OK);
+
+    VideoFrame.TextureBuffer buffer = new VideoFrame.TextureBuffer() {
+      @Override
+      public VideoFrame.TextureBuffer.Type getType() {
+        return VideoFrame.TextureBuffer.Type.OES;
+      }
+
+      @Override
+      public int getTextureId() {
+        return oesTextureId;
+      }
+
+      @Override
+      public int getWidth() {
+        return SETTINGS.width;
+      }
+
+      @Override
+      public int getHeight() {
+        return SETTINGS.height;
+      }
+
+      @Override
+      public VideoFrame.I420Buffer toI420() {
+        return null;
+      }
+
+      @Override
+      public void retain() {}
+
+      @Override
+      public void release() {}
+    };
+    VideoFrame frame =
+        new VideoFrame(buffer, 0 /* rotation */, presentationTimestampUs * 1000, new Matrix());
+    VideoEncoder.EncodeInfo info = new VideoEncoder.EncodeInfo(
+        new EncodedImage.FrameType[] {EncodedImage.FrameType.VideoFrameKey});
+
+    assertEquals(encoder.encode(frame, info), VideoCodecStatus.OK);
+    GlUtil.checkNoGLES2Error("encodeTexture");
+
+    // It should be Ok to delete the texture after calling encodeTexture.
+    GLES20.glDeleteTextures(1, new int[] {oesTextureId}, 0);
+
+    ThreadUtils.awaitUninterruptibly(encodeDone);
+
+    assertEquals(encoder.release(), VideoCodecStatus.OK);
+    eglOesBase.release();
+  }
 }

HardwareVideoEncoder.java

@@ -11,10 +11,13 @@
 package org.webrtc;

 import android.annotation.TargetApi;
+import android.graphics.Matrix;
 import android.media.MediaCodec;
 import android.media.MediaCodecInfo;
 import android.media.MediaFormat;
+import android.opengl.GLES20;
 import android.os.Bundle;
+import android.view.Surface;
 import java.io.IOException;
 import java.nio.ByteBuffer;
 import java.util.Arrays;
@@ -73,6 +76,19 @@ class HardwareVideoEncoder implements VideoEncoder {
   // value to send exceptions thrown during release back to the encoder thread.
   private volatile Exception shutdownException = null;

+  // Surface objects for texture-mode encoding.
+
+  // EGL context shared with the application. Used to access texture inputs.
+  private EglBase14.Context textureContext;
+  // EGL base wrapping the shared texture context. Holds hooks to both the shared context and the
+  // input surface. Making this base current allows textures from the context to be drawn onto the
+  // surface.
+  private EglBase14 textureEglBase;
+  // Input surface for the codec. The encoder will draw input textures onto this surface.
+  private Surface textureInputSurface;
+  // Drawer used to draw input textures onto the codec's input surface.
+  private GlRectDrawer textureDrawer;
+
   private MediaCodec codec;
   private Callback callback;
@@ -97,15 +113,22 @@ class HardwareVideoEncoder implements VideoEncoder {
    * @throws IllegalArgumentException if colorFormat is unsupported
    */
   public HardwareVideoEncoder(String codecName, VideoCodecType codecType, int colorFormat,
-      int keyFrameIntervalSec, int forceKeyFrameIntervalMs, BitrateAdjuster bitrateAdjuster) {
+      int keyFrameIntervalSec, int forceKeyFrameIntervalMs, BitrateAdjuster bitrateAdjuster,
+      EglBase14.Context textureContext) {
     this.codecName = codecName;
     this.codecType = codecType;
     this.colorFormat = colorFormat;
-    this.inputColorFormat = ColorFormat.valueOf(colorFormat);
+    if (textureContext == null) {
+      this.inputColorFormat = ColorFormat.valueOf(colorFormat);
+    } else {
+      // ColorFormat copies bytes between buffers. It is not used in texture mode.
+      this.inputColorFormat = null;
+    }
     this.keyFrameIntervalSec = keyFrameIntervalSec;
     this.forcedKeyFrameMs = forceKeyFrameIntervalMs;
     this.bitrateAdjuster = bitrateAdjuster;
     this.outputBuilders = new LinkedBlockingDeque<>();
+    this.textureContext = textureContext;
   }

   @Override
@@ -144,6 +167,15 @@ class HardwareVideoEncoder implements VideoEncoder {
       format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, keyFrameIntervalSec);
       Logging.d(TAG, "Format: " + format);
       codec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
+
+      if (textureContext != null) {
+        // Texture mode.
+        textureEglBase = new EglBase14(textureContext, EglBase.CONFIG_RECORDABLE);
+        textureInputSurface = codec.createInputSurface();
+        textureEglBase.createSurface(textureInputSurface);
+        textureDrawer = new GlRectDrawer();
+      }
+
       codec.start();
     } catch (IllegalStateException e) {
       Logging.e(TAG, "initEncode failed", e);
@@ -161,6 +193,9 @@ class HardwareVideoEncoder implements VideoEncoder {
   @Override
   public VideoCodecStatus release() {
     try {
+      if (outputThread == null) {
+        return VideoCodecStatus.OK;
+      }
       // The outputThread actually stops and releases the codec once running is false.
       running = false;
       if (!ThreadUtils.joinUninterruptibly(outputThread, MEDIA_CODEC_RELEASE_TIMEOUT_MS)) {
@@ -176,6 +211,19 @@ class HardwareVideoEncoder implements VideoEncoder {
       codec = null;
       outputThread = null;
       outputBuilders.clear();
+
+      if (textureDrawer != null) {
+        textureDrawer.release();
+        textureDrawer = null;
+      }
+      if (textureEglBase != null) {
+        textureEglBase.release();
+        textureEglBase = null;
+      }
+      if (textureInputSurface != null) {
+        textureInputSurface.release();
+        textureInputSurface = null;
+      }
     }
     return VideoCodecStatus.OK;
   }
@@ -196,37 +244,12 @@ class HardwareVideoEncoder implements VideoEncoder {
       }
     }

-    // No timeout. Don't block for an input buffer, drop frames if the encoder falls behind.
-    int index;
-    try {
-      index = codec.dequeueInputBuffer(0 /* timeout */);
-    } catch (IllegalStateException e) {
-      Logging.e(TAG, "dequeueInputBuffer failed", e);
-      return VideoCodecStatus.FALLBACK_SOFTWARE;
-    }
-
-    if (index == -1) {
-      // Encoder is falling behind. No input buffers available. Drop the frame.
-      Logging.e(TAG, "Dropped frame, no input buffers available");
-      return VideoCodecStatus.OK; // See webrtc bug 2887.
-    }
     if (outputBuilders.size() > MAX_ENCODER_Q_SIZE) {
       // Too many frames in the encoder. Drop this frame.
       Logging.e(TAG, "Dropped frame, encoder queue full");
       return VideoCodecStatus.OK; // See webrtc bug 2887.
     }

-    // TODO(mellem): Add support for input surfaces and textures.
-    ByteBuffer buffer;
-    try {
-      buffer = codec.getInputBuffers()[index];
-    } catch (IllegalStateException e) {
-      Logging.e(TAG, "getInputBuffers failed", e);
-      return VideoCodecStatus.FALLBACK_SOFTWARE;
-    }
-    VideoFrame.I420Buffer i420 = videoFrame.getBuffer().toI420();
-    inputColorFormat.fillBufferFromI420(buffer, i420);
-
     boolean requestedKeyFrame = false;
     for (EncodedImage.FrameType frameType : encodeInfo.frameTypes) {
       if (frameType == EncodedImage.FrameType.VideoFrameKey) {
@@ -241,9 +264,10 @@ class HardwareVideoEncoder implements VideoEncoder {
       requestKeyFrame(presentationTimestampMs);
     }

+    VideoFrame.Buffer videoFrameBuffer = videoFrame.getBuffer();
     // Number of bytes in the video buffer. Y channel is sampled at one byte per pixel; U and V are
     // subsampled at one byte per four pixels.
-    int bufferSize = videoFrame.getBuffer().getHeight() * videoFrame.getBuffer().getWidth() * 3 / 2;
+    int bufferSize = videoFrameBuffer.getHeight() * videoFrameBuffer.getWidth() * 3 / 2;
     EncodedImage.Builder builder = EncodedImage.builder()
                                        .setCaptureTimeMs(presentationTimestampMs)
                                        .setCompleteFrame(true)
@@ -251,6 +275,80 @@ class HardwareVideoEncoder implements VideoEncoder {
                                        .setEncodedHeight(videoFrame.getHeight())
                                        .setRotation(videoFrame.getRotation());
     outputBuilders.offer(builder);
+
+    if (textureContext != null) {
+      if (!(videoFrameBuffer instanceof VideoFrame.TextureBuffer)) {
+        Logging.e(TAG, "Cannot encode non-texture buffer in texture mode");
+        return VideoCodecStatus.ERROR;
+      }
+      VideoFrame.TextureBuffer textureBuffer = (VideoFrame.TextureBuffer) videoFrameBuffer;
+      return encodeTextureBuffer(videoFrame, textureBuffer);
+    } else {
+      if (videoFrameBuffer instanceof VideoFrame.TextureBuffer) {
+        Logging.w(TAG, "Encoding texture buffer in byte mode; this may be inefficient");
+      }
+      return encodeByteBuffer(videoFrame, videoFrameBuffer, bufferSize, presentationTimestampUs);
+    }
+  }
+
+  private VideoCodecStatus encodeTextureBuffer(
+      VideoFrame videoFrame, VideoFrame.TextureBuffer textureBuffer) {
+    Matrix matrix = videoFrame.getTransformMatrix();
+    float[] transformationMatrix = RendererCommon.convertMatrixFromAndroidGraphicsMatrix(matrix);
+
+    try {
+      textureEglBase.makeCurrent();
+      // TODO(perkj): glClear() shouldn't be necessary since every pixel is covered anyway,
+      // but it's a workaround for bug webrtc:5147.
+      GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
+      switch (textureBuffer.getType()) {
+        case OES:
+          textureDrawer.drawOes(textureBuffer.getTextureId(), transformationMatrix, width, height,
+              0, 0, width, height);
+          break;
+        case RGB:
+          textureDrawer.drawRgb(textureBuffer.getTextureId(), transformationMatrix, width, height,
+              0, 0, width, height);
+          break;
+      }
+      textureEglBase.swapBuffers(videoFrame.getTimestampNs());
+    } catch (RuntimeException e) {
+      Logging.e(TAG, "encodeTexture failed", e);
+      // Keep the output builders in sync with buffers in the codec.
+      outputBuilders.pollLast();
+      return VideoCodecStatus.ERROR;
+    }
+    return VideoCodecStatus.OK;
+  }
+
+  private VideoCodecStatus encodeByteBuffer(VideoFrame videoFrame,
+      VideoFrame.Buffer videoFrameBuffer, int bufferSize, long presentationTimestampUs) {
+    // No timeout. Don't block for an input buffer, drop frames if the encoder falls behind.
+    int index;
+    try {
+      index = codec.dequeueInputBuffer(0 /* timeout */);
+    } catch (IllegalStateException e) {
+      Logging.e(TAG, "dequeueInputBuffer failed", e);
+      return VideoCodecStatus.FALLBACK_SOFTWARE;
+    }
+
+    if (index == -1) {
+      // Encoder is falling behind. No input buffers available. Drop the frame.
+      Logging.e(TAG, "Dropped frame, no input buffers available");
+      return VideoCodecStatus.OK; // See webrtc bug 2887.
+    }
+
+    ByteBuffer buffer;
+    try {
+      buffer = codec.getInputBuffers()[index];
+    } catch (IllegalStateException e) {
+      Logging.e(TAG, "getInputBuffers failed", e);
+      return VideoCodecStatus.ERROR;
+    }
+    VideoFrame.I420Buffer i420 = videoFrameBuffer.toI420();
+    inputColorFormat.fillBufferFromI420(buffer, i420);
+    i420.release();
+
     try {
       codec.queueInputBuffer(
           index, 0 /* offset */, bufferSize, presentationTimestampUs, 0 /* flags */);
@@ -259,7 +357,7 @@ class HardwareVideoEncoder implements VideoEncoder {
       // Keep the output builders in sync with buffers in the codec.
       outputBuilders.pollLast();
       // IllegalStateException thrown when the codec is in the wrong state.
-      return VideoCodecStatus.FALLBACK_SOFTWARE;
+      return VideoCodecStatus.ERROR;
     }
     return VideoCodecStatus.OK;
   }

MediaCodecUtils.java

@@ -48,6 +48,9 @@ class MediaCodecUtils {
       MediaCodecInfo.CodecCapabilities.COLOR_QCOM_FormatYUV420SemiPlanar,
       MediaCodecUtils.COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m};

+  // Color formats supported by texture mode encoding - in order of preference.
+  static final int[] TEXTURE_COLOR_FORMATS = {MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface};
+
   static Integer selectColorFormat(int[] supportedColorFormats, CodecCapabilities capabilities) {
     for (int supportedColorFormat : supportedColorFormats) {
       for (int codecColorFormat : capabilities.colorFormats) {