Revert "Improve unit testing for HardwareVideoEncoder and fix bugs."

This reverts commit 7a2bfd22e69f14e2af989b9e30ddd834f585caa9.

Reason for revert: Breaks external test.

Original change's description:
> Improve unit testing for HardwareVideoEncoder and fix bugs.
> 
> Improves the unit testing for HardwareVideoEncoder and fixes bugs in it.
> The main added feature is support for dynamically switching between
> texture and byte buffer modes.
> 
> Bug: webrtc:7760
> Change-Id: Iaffe6b7700047c7d0f9a7b89a6118f6ff932cd9b
> Reviewed-on: https://webrtc-review.googlesource.com/2682
> Commit-Queue: Sami Kalliomäki <sakal@webrtc.org>
> Reviewed-by: Magnus Jedvert <magjed@webrtc.org>
> Cr-Commit-Position: refs/heads/master@{#19963}

TBR=magjed@webrtc.org,sakal@webrtc.org

Change-Id: If1e283a8429c994ad061c7a8320d76633bd0d66b
No-Presubmit: true
No-Tree-Checks: true
No-Try: true
Bug: webrtc:7760
Reviewed-on: https://webrtc-review.googlesource.com/3640
Reviewed-by: Sami Kalliomäki <sakal@webrtc.org>
Commit-Queue: Sami Kalliomäki <sakal@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#19964}
commit daea5bf2de (parent 7a2bfd22e6)
Author: Sami Kalliomäki
Date: 2017-09-26 08:17:08 +00:00 (committed by Commit Bot)

4 changed files with 333 additions and 574 deletions
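
The headline feature being reverted is the per-frame switch between texture
(surface) and byte-buffer input. A minimal, self-contained model of that
dispatch, for illustration only; the real logic lives in
HardwareVideoEncoder.encode() in the hunks below, and the class and names here
are hypothetical:

/** Hypothetical model of the reverted per-frame input-mode switch. */
public final class ModeSwitchSketch {
  enum Mode { SURFACE, BYTE_BUFFER }

  private Mode currentMode = Mode.BYTE_BUFFER;

  /** Picks the mode for a frame, "restarting the codec" when it changes. */
  Mode encode(boolean frameIsTexture, boolean surfaceSupported) {
    Mode wanted = (frameIsTexture && surfaceSupported) ? Mode.SURFACE : Mode.BYTE_BUFFER;
    if (wanted != currentMode) {
      // The real encoder calls resetCodec() here: it releases the MediaCodec
      // and re-initializes it with or without an input Surface.
      currentMode = wanted;
    }
    return currentMode;
  }

  public static void main(String[] args) {
    ModeSwitchSketch s = new ModeSwitchSketch();
    System.out.println(s.encode(true, true));   // SURFACE
    System.out.println(s.encode(false, true));  // BYTE_BUFFER, after a restart
  }
}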

File: HardwareVideoEncoderFactory.java

@@ -72,13 +72,13 @@ public class HardwareVideoEncoderFactory implements VideoEncoderFactory {
String codecName = info.getName();
String mime = type.mimeType();
Integer surfaceColorFormat = MediaCodecUtils.selectColorFormat(
MediaCodecUtils.TEXTURE_COLOR_FORMATS, info.getCapabilitiesForType(mime));
Integer yuvColorFormat = MediaCodecUtils.selectColorFormat(
MediaCodecUtils.ENCODER_COLOR_FORMATS, info.getCapabilitiesForType(mime));
int colorFormat = MediaCodecUtils.selectColorFormat(sharedContext == null
? MediaCodecUtils.ENCODER_COLOR_FORMATS
: MediaCodecUtils.TEXTURE_COLOR_FORMATS,
info.getCapabilitiesForType(mime));
return new HardwareVideoEncoder(codecName, type, surfaceColorFormat, yuvColorFormat,
input.params, getKeyFrameIntervalSec(type), getForcedKeyFrameIntervalMs(type, codecName),
return new HardwareVideoEncoder(codecName, type, colorFormat, input.params,
getKeyFrameIntervalSec(type), getForcedKeyFrameIntervalMs(type, codecName),
createBitrateAdjuster(type, codecName), sharedContext);
}
@@ -128,8 +128,10 @@ public class HardwareVideoEncoderFactory implements VideoEncoderFactory {
return false;
}
// Check for a supported color format.
if (MediaCodecUtils.selectColorFormat(
MediaCodecUtils.ENCODER_COLOR_FORMATS, info.getCapabilitiesForType(type.mimeType()))
if (MediaCodecUtils.selectColorFormat(sharedContext == null
? MediaCodecUtils.ENCODER_COLOR_FORMATS
: MediaCodecUtils.TEXTURE_COLOR_FORMATS,
info.getCapabilitiesForType(type.mimeType()))
== null) {
return false;
}

File: AndroidManifest.xml

@@ -33,7 +33,7 @@
<!-- tools:ignore needed for chromium-junit4 tag. crbug.com/640116
TODO(sakal): Remove once the tag is no longer needed. -->
<instrumentation android:name="org.chromium.base.test.BaseChromiumAndroidJUnitRunner"
<instrumentation android:name="android.support.test.runner.AndroidJUnitRunner"
tools:ignore="MissingPrefix"
android:targetPackage="org.webrtc"
android:label="Tests for WebRTC Android SDK"

File: HardwareVideoEncoderTest.java

@@ -11,157 +11,147 @@
package org.webrtc;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import android.annotation.TargetApi;
import android.graphics.Matrix;
import android.opengl.GLES11Ext;
import android.opengl.GLES20;
import android.support.test.filters.SmallTest;
import android.util.Log;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;
import org.chromium.base.test.params.BaseJUnit4RunnerDelegate;
import org.chromium.base.test.params.ParameterAnnotations.ClassParameter;
import org.chromium.base.test.params.ParameterAnnotations.UseRunnerDelegate;
import org.chromium.base.test.params.ParameterSet;
import org.chromium.base.test.params.ParameterizedRunner;
import org.junit.After;
import org.junit.Before;
import org.junit.Ignore;
import java.util.concurrent.CountDownLatch;
import org.chromium.base.test.BaseJUnit4ClassRunner;
import org.junit.Test;
import org.junit.runner.RunWith;
@TargetApi(16)
@RunWith(ParameterizedRunner.class)
@UseRunnerDelegate(BaseJUnit4RunnerDelegate.class)
@RunWith(BaseJUnit4ClassRunner.class)
public class HardwareVideoEncoderTest {
@ClassParameter private static List<ParameterSet> CLASS_PARAMS = new ArrayList<>();
static {
CLASS_PARAMS.add(new ParameterSet()
.value(false /* useTextures */, false /* useEglContext */)
.name("I420WithoutEglContext"));
CLASS_PARAMS.add(new ParameterSet()
.value(true /* useTextures */, false /* useEglContext */)
.name("TextureWithoutEglContext"));
CLASS_PARAMS.add(new ParameterSet()
.value(true /* useTextures */, true /* useEglContext */)
.name("TextureWithEglContext"));
}
private final boolean useTextures;
private final boolean useEglContext;
public HardwareVideoEncoderTest(boolean useTextures, boolean useEglContext) {
this.useTextures = useTextures;
this.useEglContext = useEglContext;
}
final static String TAG = "HardwareVideoEncoderTest";
final static String TAG = "MediaCodecVideoEncoderTest";
private static final boolean ENABLE_INTEL_VP8_ENCODER = true;
private static final boolean ENABLE_H264_HIGH_PROFILE = true;
private static final VideoEncoder.Settings SETTINGS =
new VideoEncoder.Settings(1 /* core */, 640 /* width */, 480 /* height */, 300 /* kbps */,
30 /* fps */, true /* automaticResizeOn */);
private static final int ENCODE_TIMEOUT_MS = 1000;
private static final int NUM_TEST_FRAMES = 10;
private static final int NUM_ENCODE_TRIES = 100;
private static final int ENCODE_RETRY_SLEEP_MS = 1;
// # Mock classes
/**
* Mock encoder callback that allows easy verification of the general properties of the encoded
* frame such as width and height.
*/
private static class MockEncoderCallback implements VideoEncoder.Callback {
private BlockingQueue<EncodedImage> frameQueue = new LinkedBlockingQueue<>();
public void onEncodedFrame(EncodedImage frame, VideoEncoder.CodecSpecificInfo info) {
assertNotNull(frame);
assertNotNull(info);
frameQueue.offer(frame);
@Test
@SmallTest
public void testInitializeUsingYuvBuffer() {
HardwareVideoEncoderFactory factory =
new HardwareVideoEncoderFactory(ENABLE_INTEL_VP8_ENCODER, ENABLE_H264_HIGH_PROFILE);
VideoCodecInfo[] supportedCodecs = factory.getSupportedCodecs();
if (supportedCodecs.length == 0) {
Log.w(TAG, "No hardware encoding support, skipping testInitializeUsingYuvBuffer");
return;
}
VideoEncoder encoder = factory.createEncoder(supportedCodecs[0]);
assertEquals(VideoCodecStatus.OK, encoder.initEncode(SETTINGS, null));
assertEquals(VideoCodecStatus.OK, encoder.release());
}
public EncodedImage poll() {
try {
EncodedImage image = frameQueue.poll(ENCODE_TIMEOUT_MS, TimeUnit.MILLISECONDS);
assertNotNull("Timed out waiting for the frame to be encoded.", image);
return image;
} catch (InterruptedException e) {
throw new RuntimeException(e);
@Test
@SmallTest
public void testInitializeUsingTextures() {
EglBase14 eglBase = new EglBase14(null, EglBase.CONFIG_PLAIN);
HardwareVideoEncoderFactory factory = new HardwareVideoEncoderFactory(
eglBase.getEglBaseContext(), ENABLE_INTEL_VP8_ENCODER, ENABLE_H264_HIGH_PROFILE);
VideoCodecInfo[] supportedCodecs = factory.getSupportedCodecs();
if (supportedCodecs.length == 0) {
Log.w(TAG, "No hardware encoding support, skipping testInitializeUsingTextures");
return;
}
VideoEncoder encoder = factory.createEncoder(supportedCodecs[0]);
assertEquals(VideoCodecStatus.OK, encoder.initEncode(SETTINGS, null));
assertEquals(VideoCodecStatus.OK, encoder.release());
eglBase.release();
}
public void assertFrameEncoded(VideoFrame frame) {
final VideoFrame.Buffer buffer = frame.getBuffer();
final EncodedImage image = poll();
@Test
@SmallTest
public void testEncodeYuvBuffer() throws InterruptedException {
HardwareVideoEncoderFactory factory =
new HardwareVideoEncoderFactory(ENABLE_INTEL_VP8_ENCODER, ENABLE_H264_HIGH_PROFILE);
VideoCodecInfo[] supportedCodecs = factory.getSupportedCodecs();
if (supportedCodecs.length == 0) {
Log.w(TAG, "No hardware encoding support, skipping testEncodeYuvBuffer");
return;
}
VideoEncoder encoder = factory.createEncoder(supportedCodecs[0]);
final long presentationTimestampNs = 20000;
final CountDownLatch encodeDone = new CountDownLatch(1);
VideoEncoder.Callback callback = new VideoEncoder.Callback() {
@Override
public void onEncodedFrame(EncodedImage image, VideoEncoder.CodecSpecificInfo info) {
assertTrue(image.buffer.capacity() > 0);
assertEquals(image.encodedWidth, buffer.getWidth());
assertEquals(image.encodedHeight, buffer.getHeight());
assertEquals(image.captureTimeNs, frame.getTimestampNs());
assertEquals(image.rotation, frame.getRotation());
assertEquals(image.encodedWidth, SETTINGS.width);
assertEquals(image.encodedHeight, SETTINGS.height);
assertEquals(image.captureTimeNs, presentationTimestampNs);
assertEquals(image.frameType, EncodedImage.FrameType.VideoFrameKey);
assertEquals(image.rotation, 0);
assertTrue(image.completeFrame);
encodeDone.countDown();
}
};
assertEquals(encoder.initEncode(SETTINGS, callback), VideoCodecStatus.OK);
VideoFrame.I420Buffer buffer = I420BufferImpl.allocate(SETTINGS.width, SETTINGS.height);
VideoFrame frame = new VideoFrame(buffer, 0 /* rotation */, presentationTimestampNs);
VideoEncoder.EncodeInfo info = new VideoEncoder.EncodeInfo(
new EncodedImage.FrameType[] {EncodedImage.FrameType.VideoFrameKey});
assertEquals(encoder.encode(frame, info), VideoCodecStatus.OK);
ThreadUtils.awaitUninterruptibly(encodeDone);
assertEquals(encoder.release(), VideoCodecStatus.OK);
}
/** A common base class for the texture and I420 buffer that implements reference counting. */
private static abstract class MockBufferBase implements VideoFrame.Buffer {
protected final int width;
protected final int height;
private final Runnable releaseCallback;
private final Object refCountLock = new Object();
private int refCount = 1;
public MockBufferBase(int width, int height, Runnable releaseCallback) {
this.width = width;
this.height = height;
this.releaseCallback = releaseCallback;
@Test
@SmallTest
public void testEncodeTextures() throws InterruptedException {
final EglBase14 eglOesBase = new EglBase14(null, EglBase.CONFIG_PIXEL_BUFFER);
HardwareVideoEncoderFactory factory = new HardwareVideoEncoderFactory(
eglOesBase.getEglBaseContext(), ENABLE_INTEL_VP8_ENCODER, ENABLE_H264_HIGH_PROFILE);
VideoCodecInfo[] supportedCodecs = factory.getSupportedCodecs();
if (supportedCodecs.length == 0) {
Log.w(TAG, "No hardware encoding support, skipping testEncodeTextures");
return;
}
eglOesBase.createDummyPbufferSurface();
eglOesBase.makeCurrent();
final int oesTextureId = GlUtil.generateTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES);
VideoEncoder encoder = factory.createEncoder(supportedCodecs[0]);
final long presentationTimestampNs = 20000;
final CountDownLatch encodeDone = new CountDownLatch(1);
VideoEncoder.Callback callback = new VideoEncoder.Callback() {
@Override
public int getWidth() {
return width;
}
public void onEncodedFrame(EncodedImage image, VideoEncoder.CodecSpecificInfo info) {
assertTrue(image.buffer.capacity() > 0);
assertEquals(image.encodedWidth, SETTINGS.width);
assertEquals(image.encodedHeight, SETTINGS.height);
assertEquals(image.captureTimeNs, presentationTimestampNs);
assertEquals(image.frameType, EncodedImage.FrameType.VideoFrameKey);
assertEquals(image.rotation, 0);
assertTrue(image.completeFrame);
@Override
public int getHeight() {
return height;
encodeDone.countDown();
}
};
@Override
public void retain() {
synchronized (refCountLock) {
assertTrue("Buffer retained after being destroyed.", refCount > 0);
++refCount;
}
}
@Override
public void release() {
synchronized (refCountLock) {
assertTrue("Buffer released too many times.", --refCount >= 0);
if (refCount == 0) {
releaseCallback.run();
}
}
}
}
private static class MockTextureBuffer
extends MockBufferBase implements VideoFrame.TextureBuffer {
private final int textureId;
public MockTextureBuffer(int textureId, int width, int height, Runnable releaseCallback) {
super(width, height, releaseCallback);
this.textureId = textureId;
}
assertEquals(encoder.initEncode(SETTINGS, callback), VideoCodecStatus.OK);
VideoFrame.TextureBuffer buffer = new VideoFrame.TextureBuffer() {
@Override
public VideoFrame.TextureBuffer.Type getType() {
return VideoFrame.TextureBuffer.Type.OES;
@@ -169,7 +159,7 @@ public class HardwareVideoEncoderTest {
@Override
public int getTextureId() {
return textureId;
return oesTextureId;
}
@Override
@@ -178,250 +168,45 @@ public class HardwareVideoEncoderTest {
}
@Override
public VideoFrame.I420Buffer toI420() {
return I420BufferImpl.allocate(width, height);
public int getWidth() {
return SETTINGS.width;
}
@Override
public VideoFrame.Buffer cropAndScale(
int cropX, int cropY, int cropWidth, int cropHeight, int scaleWidth, int scaleHeight) {
retain();
return new MockTextureBuffer(textureId, scaleWidth, scaleHeight, this::release);
}
}
private static class MockI420Buffer extends MockBufferBase implements VideoFrame.I420Buffer {
private final I420BufferImpl realBuffer;
public MockI420Buffer(int width, int height, Runnable releaseCallback) {
super(width, height, releaseCallback);
// We never release this but it is not a problem in practice because the release is a no-op.
realBuffer = I420BufferImpl.allocate(width, height);
}
@Override
public ByteBuffer getDataY() {
return realBuffer.getDataY();
}
@Override
public ByteBuffer getDataU() {
return realBuffer.getDataU();
}
@Override
public ByteBuffer getDataV() {
return realBuffer.getDataV();
}
@Override
public int getStrideY() {
return realBuffer.getStrideY();
}
@Override
public int getStrideU() {
return realBuffer.getStrideU();
}
@Override
public int getStrideV() {
return realBuffer.getStrideV();
public int getHeight() {
return SETTINGS.height;
}
@Override
public VideoFrame.I420Buffer toI420() {
retain();
return this;
return null;
}
@Override
public void retain() {}
@Override
public void release() {}
@Override
public VideoFrame.Buffer cropAndScale(
int cropX, int cropY, int cropWidth, int cropHeight, int scaleWidth, int scaleHeight) {
return realBuffer.cropAndScale(cropX, cropY, cropWidth, cropHeight, scaleWidth, scaleHeight);
}
}
// # Test fields
private Object referencedFramesLock = new Object();
private int referencedFrames = 0;
private Runnable releaseFrameCallback = new Runnable() {
public void run() {
synchronized (referencedFramesLock) {
--referencedFrames;
}
return null;
}
};
private EglBase14 eglBase;
private long lastTimestampNs;
// # Helper methods
private VideoEncoderFactory createEncoderFactory(EglBase.Context eglContext) {
return new HardwareVideoEncoderFactory(
eglContext, ENABLE_INTEL_VP8_ENCODER, ENABLE_H264_HIGH_PROFILE);
}
private VideoEncoder createEncoder() {
VideoEncoderFactory factory =
createEncoderFactory(useTextures ? eglBase.getEglBaseContext() : null);
VideoCodecInfo[] supportedCodecs = factory.getSupportedCodecs();
return factory.createEncoder(supportedCodecs[0]);
}
private VideoFrame generateI420Frame(int width, int height) {
synchronized (referencedFramesLock) {
++referencedFrames;
}
lastTimestampNs += TimeUnit.SECONDS.toNanos(1) / SETTINGS.maxFramerate;
VideoFrame.Buffer buffer = new MockI420Buffer(width, height, releaseFrameCallback);
return new VideoFrame(buffer, 0 /* rotation */, lastTimestampNs);
}
private VideoFrame generateTextureFrame(int width, int height) {
synchronized (referencedFramesLock) {
++referencedFrames;
}
final int textureId = GlUtil.generateTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES);
lastTimestampNs += TimeUnit.SECONDS.toNanos(1) / SETTINGS.maxFramerate;
VideoFrame.Buffer buffer =
new MockTextureBuffer(textureId, width, height, releaseFrameCallback);
return new VideoFrame(buffer, 0 /* rotation */, lastTimestampNs);
}
private VideoFrame generateFrame(int width, int height) {
return useTextures ? generateTextureFrame(width, height) : generateI420Frame(width, height);
}
private void testEncodeFrame(
VideoEncoder encoder, VideoFrame frame, VideoEncoder.EncodeInfo info) {
int numTries = 0;
// It takes a while for the encoder to become ready so try until it accepts the frame.
while (true) {
++numTries;
final VideoCodecStatus returnValue = encoder.encode(frame, info);
switch (returnValue) {
case OK:
return; // Success
case NO_OUTPUT:
if (numTries < NUM_ENCODE_TRIES) {
try {
Thread.sleep(ENCODE_RETRY_SLEEP_MS); // Try again.
} catch (InterruptedException e) {
throw new RuntimeException(e);
}
break;
} else {
fail("encoder.encode keeps returning NO_OUTPUT");
}
default:
fail("encoder.encode returned: " + returnValue); // Error
}
}
}
// # Tests
@Before
public void setUp() {
eglBase = new EglBase14(null, EglBase.CONFIG_PLAIN);
eglBase.createDummyPbufferSurface();
eglBase.makeCurrent();
lastTimestampNs = System.nanoTime();
}
@After
public void tearDown() {
eglBase.release();
synchronized (referencedFramesLock) {
assertEquals("All frames were not released", 0, referencedFrames);
}
}
@Test
@SmallTest
public void testInitialize() {
VideoEncoder encoder = createEncoder();
assertEquals(VideoCodecStatus.OK, encoder.initEncode(SETTINGS, null));
assertEquals(VideoCodecStatus.OK, encoder.release());
}
@Test
@SmallTest
public void testEncode() {
VideoEncoder encoder = createEncoder();
MockEncoderCallback callback = new MockEncoderCallback();
assertEquals(VideoCodecStatus.OK, encoder.initEncode(SETTINGS, callback));
for (int i = 0; i < NUM_TEST_FRAMES; i++) {
Log.d(TAG, "Test frame: " + i);
VideoFrame frame = generateFrame(SETTINGS.width, SETTINGS.height);
VideoFrame frame = new VideoFrame(buffer, 0 /* rotation */, presentationTimestampNs);
VideoEncoder.EncodeInfo info = new VideoEncoder.EncodeInfo(
new EncodedImage.FrameType[] {EncodedImage.FrameType.VideoFrameDelta});
testEncodeFrame(encoder, frame, info);
new EncodedImage.FrameType[] {EncodedImage.FrameType.VideoFrameKey});
callback.assertFrameEncoded(frame);
frame.release();
}
assertEquals(encoder.encode(frame, info), VideoCodecStatus.OK);
GlUtil.checkNoGLES2Error("encodeTexture");
assertEquals(VideoCodecStatus.OK, encoder.release());
}
// It should be Ok to delete the texture after calling encodeTexture.
GLES20.glDeleteTextures(1, new int[] {oesTextureId}, 0);
@Test
@SmallTest
public void testEncodeAlternatingBuffers() {
VideoEncoder encoder = createEncoder();
MockEncoderCallback callback = new MockEncoderCallback();
assertEquals(VideoCodecStatus.OK, encoder.initEncode(SETTINGS, callback));
ThreadUtils.awaitUninterruptibly(encodeDone);
for (int i = 0; i < NUM_TEST_FRAMES; i++) {
Log.d(TAG, "Test frame: " + i);
VideoFrame frame;
VideoEncoder.EncodeInfo info = new VideoEncoder.EncodeInfo(
new EncodedImage.FrameType[] {EncodedImage.FrameType.VideoFrameDelta});
frame = generateTextureFrame(SETTINGS.width, SETTINGS.height);
testEncodeFrame(encoder, frame, info);
callback.assertFrameEncoded(frame);
frame.release();
frame = generateI420Frame(SETTINGS.width, SETTINGS.height);
testEncodeFrame(encoder, frame, info);
callback.assertFrameEncoded(frame);
frame.release();
}
assertEquals(VideoCodecStatus.OK, encoder.release());
}
@Test
@SmallTest
public void testEncodeDifferentSizes() {
VideoEncoder encoder = createEncoder();
MockEncoderCallback callback = new MockEncoderCallback();
assertEquals(VideoCodecStatus.OK, encoder.initEncode(SETTINGS, callback));
VideoFrame frame;
VideoEncoder.EncodeInfo info = new VideoEncoder.EncodeInfo(
new EncodedImage.FrameType[] {EncodedImage.FrameType.VideoFrameDelta});
frame = generateFrame(SETTINGS.width / 2, SETTINGS.height / 2);
testEncodeFrame(encoder, frame, info);
callback.assertFrameEncoded(frame);
frame.release();
frame = generateFrame(SETTINGS.width, SETTINGS.height);
testEncodeFrame(encoder, frame, info);
callback.assertFrameEncoded(frame);
frame.release();
frame = generateFrame(SETTINGS.width / 4, SETTINGS.height / 4);
testEncodeFrame(encoder, frame, info);
callback.assertFrameEncoded(frame);
frame.release();
assertEquals(VideoCodecStatus.OK, encoder.release());
assertEquals(encoder.release(), VideoCodecStatus.OK);
eglOesBase.release();
}
}
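
The restored tests above wait for the asynchronous encoder callback with a
CountDownLatch. A standalone sketch of that pattern, with a plain thread
standing in for the encoder (names hypothetical):

import java.util.concurrent.CountDownLatch;

/** Hypothetical, minimal version of the latch-based wait used in the tests above. */
public final class LatchPatternExample {
  public static void main(String[] args) throws InterruptedException {
    final CountDownLatch encodeDone = new CountDownLatch(1);
    // Stands in for VideoEncoder.Callback.onEncodedFrame.
    new Thread(() -> {
      // ... assertions on the encoded image would run here ...
      encodeDone.countDown();
    }).start();
    // The real tests call ThreadUtils.awaitUninterruptibly(encodeDone).
    encodeDone.await();
    System.out.println("frame encoded");
  }
}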

File: HardwareVideoEncoder.java

@@ -22,10 +22,8 @@ import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.Deque;
import java.util.Map;
import java.util.concurrent.BlockingDeque;
import java.util.concurrent.LinkedBlockingDeque;
import java.util.concurrent.TimeUnit;
import org.webrtc.ThreadUtils.ThreadChecker;
/** Android hardware video encoder. */
@TargetApi(19)
@@ -51,60 +49,29 @@ class HardwareVideoEncoder implements VideoEncoder {
private static final int MEDIA_CODEC_RELEASE_TIMEOUT_MS = 5000;
private static final int DEQUEUE_OUTPUT_BUFFER_TIMEOUT_US = 100000;
// --- Initialized on construction.
private final String codecName;
private final VideoCodecType codecType;
private final Integer surfaceColorFormat;
private final Integer yuvColorFormat;
private final YuvFormat yuvFormat;
private final int colorFormat;
private final Map<String, String> params;
private final int keyFrameIntervalSec; // Base interval for generating key frames.
private final ColorFormat inputColorFormat;
// Base interval for generating key frames.
private final int keyFrameIntervalSec;
// Interval at which to force a key frame. Used to reduce color distortions caused by some
// Qualcomm video encoders.
private final long forcedKeyFrameNs;
private final BitrateAdjuster bitrateAdjuster;
// EGL context shared with the application. Used to access texture inputs.
private final EglBase14.Context sharedContext;
// Drawer used to draw input textures onto the codec's input surface.
private final GlRectDrawer textureDrawer = new GlRectDrawer();
private final VideoFrameDrawer videoFrameDrawer = new VideoFrameDrawer();
// A queue of EncodedImage.Builders that correspond to frames in the codec. These builders are
// pre-populated with all the information that can't be sent through MediaCodec.
private final BlockingDeque<EncodedImage.Builder> outputBuilders = new LinkedBlockingDeque<>();
private final ThreadChecker encodeThreadChecker = new ThreadChecker();
private final ThreadChecker outputThreadChecker = new ThreadChecker();
// --- Set on initialize and immutable until release.
private Callback callback;
private boolean automaticResizeOn;
// --- Valid and immutable while an encoding session is running.
private MediaCodec codec;
// Thread that delivers encoded frames to the user callback.
private Thread outputThread;
// EGL base wrapping the shared texture context. Holds hooks to both the shared context and the
// input surface. Making this base current allows textures from the context to be drawn onto the
// surface.
private EglBase14 textureEglBase;
// Input surface for the codec. The encoder will draw input textures onto this surface.
private Surface textureInputSurface;
private int width;
private int height;
private boolean useSurfaceMode;
// --- Only accessed from the encoding thread.
// Presentation timestamp of the last requested (or forced) key frame.
private long lastKeyFrameNs;
// --- Only accessed on the output thread.
// Contents of the last observed config frame output by the MediaCodec. Used by H.264.
private ByteBuffer configBuffer = null;
private final BitrateAdjuster bitrateAdjuster;
private int adjustedBitrate;
// A queue of EncodedImage.Builders that correspond to frames in the codec. These builders are
// pre-populated with all the information that can't be sent through MediaCodec.
private final Deque<EncodedImage.Builder> outputBuilders;
// Thread that delivers encoded frames to the user callback.
private Thread outputThread;
// Whether the encoder is running. Volatile so that the output thread can watch this value and
// exit when the encoder stops.
private volatile boolean running = false;
@@ -112,14 +79,36 @@ class HardwareVideoEncoder implements VideoEncoder {
// value to send exceptions thrown during release back to the encoder thread.
private volatile Exception shutdownException = null;
// Surface objects for texture-mode encoding.
// EGL context shared with the application. Used to access texture inputs.
private EglBase14.Context textureContext;
// EGL base wrapping the shared texture context. Holds hooks to both the shared context and the
// input surface. Making this base current allows textures from the context to be drawn onto the
// surface.
private EglBase14 textureEglBase;
// Input surface for the codec. The encoder will draw input textures onto this surface.
private Surface textureInputSurface;
// Drawer used to draw input textures onto the codec's input surface.
private GlRectDrawer textureDrawer;
private MediaCodec codec;
private Callback callback;
private boolean automaticResizeOn;
private int width;
private int height;
// Contents of the last observed config frame output by the MediaCodec. Used by H.264.
private ByteBuffer configBuffer = null;
/**
* Creates a new HardwareVideoEncoder with the given codecName, codecType, colorFormat, key frame
* intervals, and bitrateAdjuster.
*
* @param codecName the hardware codec implementation to use
* @param codecType the type of the given video codec (eg. VP8, VP9, or H264)
* @param surfaceColorFormat color format for surface mode or null if not available
* @param yuvColorFormat color format for bytebuffer mode
* @param colorFormat color format used by the input buffer
* @param keyFrameIntervalSec interval in seconds between key frames; used to initialize the codec
* @param forceKeyFrameIntervalMs interval at which to force a key frame if one is not requested;
* used to reduce distortion caused by some codec implementations
@@ -127,45 +116,46 @@ class HardwareVideoEncoder implements VideoEncoder {
* desired bitrates
* @throws IllegalArgumentException if colorFormat is unsupported
*/
public HardwareVideoEncoder(String codecName, VideoCodecType codecType,
Integer surfaceColorFormat, Integer yuvColorFormat, Map<String, String> params,
int keyFrameIntervalSec, int forceKeyFrameIntervalMs, BitrateAdjuster bitrateAdjuster,
EglBase14.Context sharedContext) {
public HardwareVideoEncoder(String codecName, VideoCodecType codecType, int colorFormat,
Map<String, String> params, int keyFrameIntervalSec, int forceKeyFrameIntervalMs,
BitrateAdjuster bitrateAdjuster, EglBase14.Context textureContext) {
this.codecName = codecName;
this.codecType = codecType;
this.surfaceColorFormat = surfaceColorFormat;
this.yuvColorFormat = yuvColorFormat;
this.yuvFormat = YuvFormat.valueOf(yuvColorFormat);
this.colorFormat = colorFormat;
this.params = params;
if (textureContext == null) {
this.inputColorFormat = ColorFormat.valueOf(colorFormat);
} else {
// ColorFormat copies bytes between buffers. It is not used in texture mode.
this.inputColorFormat = null;
}
this.keyFrameIntervalSec = keyFrameIntervalSec;
this.forcedKeyFrameNs = TimeUnit.MILLISECONDS.toNanos(forceKeyFrameIntervalMs);
this.bitrateAdjuster = bitrateAdjuster;
this.sharedContext = sharedContext;
this.outputBuilders = new LinkedBlockingDeque<>();
this.textureContext = textureContext;
}
@Override
public VideoCodecStatus initEncode(Settings settings, Callback callback) {
encodeThreadChecker.checkIsOnValidThread();
this.callback = callback;
automaticResizeOn = settings.automaticResizeOn;
this.width = settings.width;
this.height = settings.height;
useSurfaceMode = canUseSurface();
if (settings.startBitrate != 0 && settings.maxFramerate != 0) {
bitrateAdjuster.setTargets(settings.startBitrate * 1000, settings.maxFramerate);
return initEncodeInternal(
settings.width, settings.height, settings.startBitrate, settings.maxFramerate, callback);
}
private VideoCodecStatus initEncodeInternal(
int width, int height, int bitrateKbps, int fps, Callback callback) {
Logging.d(
TAG, "initEncode: " + width + " x " + height + ". @ " + bitrateKbps + "kbps. Fps: " + fps);
this.width = width;
this.height = height;
if (bitrateKbps != 0 && fps != 0) {
bitrateAdjuster.setTargets(bitrateKbps * 1000, fps);
}
adjustedBitrate = bitrateAdjuster.getAdjustedBitrateBps();
Logging.d(TAG,
"initEncode: " + width + " x " + height + ". @ " + settings.startBitrate
+ "kbps. Fps: " + settings.maxFramerate + " Use surface mode: " + useSurfaceMode);
return initEncodeInternal();
}
private VideoCodecStatus initEncodeInternal() {
encodeThreadChecker.checkIsOnValidThread();
this.callback = callback;
lastKeyFrameNs = -1;
@@ -175,8 +165,6 @@ class HardwareVideoEncoder implements VideoEncoder {
Logging.e(TAG, "Cannot create media encoder " + codecName);
return VideoCodecStatus.ERROR;
}
final int colorFormat = useSurfaceMode ? surfaceColorFormat : yuvColorFormat;
try {
MediaFormat format = MediaFormat.createVideoFormat(codecType.mimeType(), width, height);
format.setInteger(MediaFormat.KEY_BIT_RATE, adjustedBitrate);
@@ -201,25 +189,24 @@
}
}
Logging.d(TAG, "Format: " + format);
codec.configure(
format, null /* surface */, null /* crypto */, MediaCodec.CONFIGURE_FLAG_ENCODE);
codec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
if (useSurfaceMode) {
textureEglBase = new EglBase14(sharedContext, EglBase.CONFIG_RECORDABLE);
if (textureContext != null) {
// Texture mode.
textureEglBase = new EglBase14(textureContext, EglBase.CONFIG_RECORDABLE);
textureInputSurface = codec.createInputSurface();
textureEglBase.createSurface(textureInputSurface);
textureEglBase.makeCurrent();
textureDrawer = new GlRectDrawer();
}
codec.start();
} catch (IllegalStateException e) {
Logging.e(TAG, "initEncodeInternal failed", e);
Logging.e(TAG, "initEncode failed", e);
release();
return VideoCodecStatus.ERROR;
}
running = true;
outputThreadChecker.detachThread();
outputThread = createOutputThread();
outputThread.start();
@@ -228,28 +215,30 @@
@Override
public VideoCodecStatus release() {
encodeThreadChecker.checkIsOnValidThread();
final VideoCodecStatus returnValue;
try {
if (outputThread == null) {
returnValue = VideoCodecStatus.OK;
} else {
return VideoCodecStatus.OK;
}
// The outputThread actually stops and releases the codec once running is false.
running = false;
if (!ThreadUtils.joinUninterruptibly(outputThread, MEDIA_CODEC_RELEASE_TIMEOUT_MS)) {
Logging.e(TAG, "Media encoder release timeout");
returnValue = VideoCodecStatus.TIMEOUT;
} else if (shutdownException != null) {
return VideoCodecStatus.TIMEOUT;
}
if (shutdownException != null) {
// Log the exception and turn it into an error.
Logging.e(TAG, "Media encoder release exception", shutdownException);
returnValue = VideoCodecStatus.ERROR;
} else {
returnValue = VideoCodecStatus.OK;
}
return VideoCodecStatus.ERROR;
}
} finally {
codec = null;
outputThread = null;
outputBuilders.clear();
if (textureDrawer != null) {
textureDrawer.release();
videoFrameDrawer.release();
textureDrawer = null;
}
if (textureEglBase != null) {
textureEglBase.release();
textureEglBase = null;
@@ -258,30 +247,21 @@ class HardwareVideoEncoder implements VideoEncoder {
textureInputSurface.release();
textureInputSurface = null;
}
outputBuilders.clear();
codec = null;
outputThread = null;
return returnValue;
}
return VideoCodecStatus.OK;
}
@Override
public VideoCodecStatus encode(VideoFrame videoFrame, EncodeInfo encodeInfo) {
encodeThreadChecker.checkIsOnValidThread();
if (codec == null) {
return VideoCodecStatus.UNINITIALIZED;
}
final VideoFrame.Buffer videoFrameBuffer = videoFrame.getBuffer();
final boolean isTextureBuffer = videoFrameBuffer instanceof VideoFrame.TextureBuffer;
// If input resolution changed, restart the codec with the new resolution.
final int frameWidth = videoFrame.getBuffer().getWidth();
final int frameHeight = videoFrame.getBuffer().getHeight();
final boolean shouldUseSurfaceMode = canUseSurface() && isTextureBuffer;
if (frameWidth != width || frameHeight != height || shouldUseSurfaceMode != useSurfaceMode) {
VideoCodecStatus status = resetCodec(frameWidth, frameHeight, shouldUseSurfaceMode);
int frameWidth = videoFrame.getBuffer().getWidth();
int frameHeight = videoFrame.getBuffer().getHeight();
if (frameWidth != width || frameHeight != height) {
VideoCodecStatus status = resetCodec(frameWidth, frameHeight);
if (status != VideoCodecStatus.OK) {
return status;
}
@@ -290,7 +270,7 @@ class HardwareVideoEncoder implements VideoEncoder {
if (outputBuilders.size() > MAX_ENCODER_Q_SIZE) {
// Too many frames in the encoder. Drop this frame.
Logging.e(TAG, "Dropped frame, encoder queue full");
return VideoCodecStatus.NO_OUTPUT; // See webrtc bug 2887.
return VideoCodecStatus.OK; // See webrtc bug 2887.
}
boolean requestedKeyFrame = false;
@@ -304,6 +284,7 @@ class HardwareVideoEncoder implements VideoEncoder {
requestKeyFrame(videoFrame.getTimestampNs());
}
VideoFrame.Buffer videoFrameBuffer = videoFrame.getBuffer();
// Number of bytes in the video buffer. Y channel is sampled at one byte per pixel; U and V are
// subsampled at one byte per four pixels.
int bufferSize = videoFrameBuffer.getHeight() * videoFrameBuffer.getWidth() * 3 / 2;
@@ -315,35 +296,46 @@
.setRotation(videoFrame.getRotation());
outputBuilders.offer(builder);
final VideoCodecStatus returnValue;
if (useSurfaceMode) {
returnValue = encodeTextureBuffer(videoFrame);
if (textureContext != null) {
if (!(videoFrameBuffer instanceof VideoFrame.TextureBuffer)) {
Logging.e(TAG, "Cannot encode non-texture buffer in texture mode");
return VideoCodecStatus.ERROR;
}
VideoFrame.TextureBuffer textureBuffer = (VideoFrame.TextureBuffer) videoFrameBuffer;
return encodeTextureBuffer(videoFrame, textureBuffer);
} else {
returnValue = encodeByteBuffer(videoFrame, videoFrameBuffer, bufferSize);
if (videoFrameBuffer instanceof VideoFrame.TextureBuffer) {
Logging.w(TAG, "Encoding texture buffer in byte mode; this may be inefficient");
}
return encodeByteBuffer(videoFrame, videoFrameBuffer, bufferSize);
}
}
// Check if the queue was successful.
if (returnValue != VideoCodecStatus.OK) {
// Keep the output builders in sync with buffers in the codec.
outputBuilders.pollLast();
}
private VideoCodecStatus encodeTextureBuffer(
VideoFrame videoFrame, VideoFrame.TextureBuffer textureBuffer) {
Matrix matrix = textureBuffer.getTransformMatrix();
float[] transformationMatrix = RendererCommon.convertMatrixFromAndroidGraphicsMatrix(matrix);
return returnValue;
}
private VideoCodecStatus encodeTextureBuffer(VideoFrame videoFrame) {
encodeThreadChecker.checkIsOnValidThread();
try {
textureEglBase.makeCurrent();
// TODO(perkj): glClear() shouldn't be necessary since every pixel is covered anyway,
// but it's a workaround for bug webrtc:5147.
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
// It is not necessary to release this frame because it doesn't own the buffer.
VideoFrame derotatedFrame =
new VideoFrame(videoFrame.getBuffer(), 0 /* rotation */, videoFrame.getTimestampNs());
videoFrameDrawer.drawFrame(derotatedFrame, textureDrawer, null /* additionalRenderMatrix */);
switch (textureBuffer.getType()) {
case OES:
textureDrawer.drawOes(textureBuffer.getTextureId(), transformationMatrix, width, height,
0, 0, width, height);
break;
case RGB:
textureDrawer.drawRgb(textureBuffer.getTextureId(), transformationMatrix, width, height,
0, 0, width, height);
break;
}
textureEglBase.swapBuffers(videoFrame.getTimestampNs());
} catch (RuntimeException e) {
Logging.e(TAG, "encodeTexture failed", e);
// Keep the output builders in sync with buffers in the codec.
outputBuilders.pollLast();
return VideoCodecStatus.ERROR;
}
return VideoCodecStatus.OK;
@@ -351,7 +343,6 @@ class HardwareVideoEncoder implements VideoEncoder {
private VideoCodecStatus encodeByteBuffer(
VideoFrame videoFrame, VideoFrame.Buffer videoFrameBuffer, int bufferSize) {
encodeThreadChecker.checkIsOnValidThread();
// Frame timestamp rounded to the nearest microsecond.
long presentationTimestampUs = (videoFrame.getTimestampNs() + 500) / 1000;
@@ -361,13 +352,13 @@
index = codec.dequeueInputBuffer(0 /* timeout */);
} catch (IllegalStateException e) {
Logging.e(TAG, "dequeueInputBuffer failed", e);
return VideoCodecStatus.ERROR;
return VideoCodecStatus.FALLBACK_SOFTWARE;
}
if (index == -1) {
// Encoder is falling behind. No input buffers available. Drop the frame.
Logging.d(TAG, "Dropped frame, no input buffers available");
return VideoCodecStatus.NO_OUTPUT; // See webrtc bug 2887.
Logging.e(TAG, "Dropped frame, no input buffers available");
return VideoCodecStatus.OK; // See webrtc bug 2887.
}
ByteBuffer buffer;
@@ -377,13 +368,17 @@
Logging.e(TAG, "getInputBuffers failed", e);
return VideoCodecStatus.ERROR;
}
yuvFormat.fillBuffer(buffer, videoFrameBuffer);
VideoFrame.I420Buffer i420 = videoFrameBuffer.toI420();
inputColorFormat.fillBufferFromI420(buffer, i420);
i420.release();
try {
codec.queueInputBuffer(
index, 0 /* offset */, bufferSize, presentationTimestampUs, 0 /* flags */);
} catch (IllegalStateException e) {
Logging.e(TAG, "queueInputBuffer failed", e);
// Keep the output builders in sync with buffers in the codec.
outputBuilders.pollLast();
// IllegalStateException thrown when the codec is in the wrong state.
return VideoCodecStatus.ERROR;
}
@@ -392,51 +387,43 @@
@Override
public VideoCodecStatus setChannelParameters(short packetLoss, long roundTripTimeMs) {
encodeThreadChecker.checkIsOnValidThread();
return VideoCodecStatus.OK; // No op.
}
@Override
public VideoCodecStatus setRateAllocation(BitrateAllocation bitrateAllocation, int framerate) {
encodeThreadChecker.checkIsOnValidThread();
if (framerate > MAX_VIDEO_FRAMERATE) {
framerate = MAX_VIDEO_FRAMERATE;
}
bitrateAdjuster.setTargets(bitrateAllocation.getSum(), framerate);
// No op.
return VideoCodecStatus.OK;
}
@Override
public VideoCodecStatus setRateAllocation(BitrateAllocation bitrateAllocation, int framerate) {
if (framerate > MAX_VIDEO_FRAMERATE) {
framerate = MAX_VIDEO_FRAMERATE;
}
bitrateAdjuster.setTargets(bitrateAllocation.getSum(), framerate);
return updateBitrate();
}
@Override
public ScalingSettings getScalingSettings() {
encodeThreadChecker.checkIsOnValidThread();
return new ScalingSettings(automaticResizeOn);
}
@Override
public String getImplementationName() {
encodeThreadChecker.checkIsOnValidThread();
return "HardwareVideoEncoder: " + codecName;
}
private VideoCodecStatus resetCodec(int newWidth, int newHeight, boolean newUseSurfaceMode) {
encodeThreadChecker.checkIsOnValidThread();
private VideoCodecStatus resetCodec(int newWidth, int newHeight) {
VideoCodecStatus status = release();
if (status != VideoCodecStatus.OK) {
return status;
}
width = newWidth;
height = newHeight;
useSurfaceMode = newUseSurfaceMode;
return initEncodeInternal();
// Zero bitrate and framerate indicate not to change the targets.
return initEncodeInternal(newWidth, newHeight, 0, 0, callback);
}
private boolean shouldForceKeyFrame(long presentationTimestampNs) {
encodeThreadChecker.checkIsOnValidThread();
return forcedKeyFrameNs > 0 && presentationTimestampNs > lastKeyFrameNs + forcedKeyFrameNs;
}
private void requestKeyFrame(long presentationTimestampNs) {
encodeThreadChecker.checkIsOnValidThread();
// Ideally MediaCodec would honor BUFFER_FLAG_SYNC_FRAME so we could
// indicate this in queueInputBuffer() below and guarantee _this_ frame
// be encoded as a key frame, but sadly that flag is ignored. Instead,
@@ -465,7 +452,6 @@ class HardwareVideoEncoder implements VideoEncoder {
}
private void deliverEncodedImage() {
outputThreadChecker.checkIsOnValidThread();
try {
MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
int index = codec.dequeueOutputBuffer(info, DEQUEUE_OUTPUT_BUFFER_TIMEOUT_US);
@@ -487,12 +473,8 @@
updateBitrate();
}
final boolean isKeyFrame = (info.flags & MediaCodec.BUFFER_FLAG_SYNC_FRAME) != 0;
if (isKeyFrame) {
Logging.d(TAG, "Sync frame generated");
}
final ByteBuffer frameBuffer;
ByteBuffer frameBuffer;
boolean isKeyFrame = (info.flags & MediaCodec.BUFFER_FLAG_SYNC_FRAME) != 0;
if (isKeyFrame && codecType == VideoCodecType.H264) {
Logging.d(TAG,
"Prepending config frame of size " + configBuffer.capacity()
@@ -507,10 +489,11 @@
frameBuffer.put(codecOutputBuffer);
frameBuffer.rewind();
final EncodedImage.FrameType frameType = isKeyFrame
? EncodedImage.FrameType.VideoFrameKey
: EncodedImage.FrameType.VideoFrameDelta;
EncodedImage.FrameType frameType = EncodedImage.FrameType.VideoFrameDelta;
if (isKeyFrame) {
Logging.d(TAG, "Sync frame generated");
frameType = EncodedImage.FrameType.VideoFrameKey;
}
EncodedImage.Builder builder = outputBuilders.poll();
builder.setBuffer(frameBuffer).setFrameType(frameType);
// TODO(mellem): Set codec-specific info.
@@ -523,7 +506,6 @@ class HardwareVideoEncoder implements VideoEncoder {
}
private void releaseCodecOnOutputThread() {
outputThreadChecker.checkIsOnValidThread();
Logging.d(TAG, "Releasing MediaCodec on output thread");
try {
codec.stop();
@@ -537,12 +519,10 @@
// Propagate exceptions caught during release back to the main thread.
shutdownException = e;
}
configBuffer = null;
Logging.d(TAG, "Release on output thread done");
}
private VideoCodecStatus updateBitrate() {
outputThreadChecker.checkIsOnValidThread();
adjustedBitrate = bitrateAdjuster.getAdjustedBitrateBps();
try {
Bundle params = new Bundle();
@@ -555,45 +535,37 @@
}
}
private boolean canUseSurface() {
return sharedContext != null && surfaceColorFormat != null;
}
/**
* Enumeration of supported YUV color formats used for MediaCodec's input.
* Enumeration of supported color formats used for MediaCodec's input.
*/
private static enum YuvFormat {
private static enum ColorFormat {
I420 {
@Override
void fillBuffer(ByteBuffer inputBuffer, VideoFrame.Buffer buffer) {
VideoFrame.I420Buffer i420 = buffer.toI420();
inputBuffer.put(i420.getDataY());
inputBuffer.put(i420.getDataU());
inputBuffer.put(i420.getDataV());
i420.release();
void fillBufferFromI420(ByteBuffer buffer, VideoFrame.I420Buffer i420) {
buffer.put(i420.getDataY());
buffer.put(i420.getDataU());
buffer.put(i420.getDataV());
}
},
NV12 {
@Override
void fillBuffer(ByteBuffer inputBuffer, VideoFrame.Buffer buffer) {
VideoFrame.I420Buffer i420 = buffer.toI420();
inputBuffer.put(i420.getDataY());
void fillBufferFromI420(ByteBuffer buffer, VideoFrame.I420Buffer i420) {
buffer.put(i420.getDataY());
// Interleave the bytes from the U and V portions, starting with U.
ByteBuffer u = i420.getDataU();
ByteBuffer v = i420.getDataV();
int i = 0;
while (u.hasRemaining() && v.hasRemaining()) {
inputBuffer.put(u.get());
inputBuffer.put(v.get());
buffer.put(u.get());
buffer.put(v.get());
}
i420.release();
}
};
abstract void fillBuffer(ByteBuffer inputBuffer, VideoFrame.Buffer buffer);
abstract void fillBufferFromI420(ByteBuffer buffer, VideoFrame.I420Buffer i420);
static YuvFormat valueOf(int colorFormat) {
static ColorFormat valueOf(int colorFormat) {
switch (colorFormat) {
case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar:
return I420;
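
The diff is truncated mid-hunk above. For reference, the input-buffer math it
relies on: encodeByteBuffer sizes its buffer as width * height * 3 / 2 (one
byte per Y pixel, plus one U and one V byte per four pixels), and
ColorFormat.NV12 interleaves the chroma planes after the Y plane. A
hypothetical standalone illustration:

import java.nio.ByteBuffer;
import java.util.Arrays;

/** Hypothetical illustration of I420 sizing and NV12 chroma interleaving. */
public final class YuvPackingExample {
  public static void main(String[] args) {
    int width = 4, height = 2;
    int bufferSize = width * height * 3 / 2; // 12 bytes for a 4x2 frame
    ByteBuffer nv12 = ByteBuffer.allocate(bufferSize);

    ByteBuffer y = ByteBuffer.wrap(new byte[] {1, 1, 1, 1, 1, 1, 1, 1});
    ByteBuffer u = ByteBuffer.wrap(new byte[] {2, 2});
    ByteBuffer v = ByteBuffer.wrap(new byte[] {3, 3});

    nv12.put(y);
    // NV12: full Y plane first, then interleaved UVUV... chroma bytes.
    while (u.hasRemaining() && v.hasRemaining()) {
      nv12.put(u.get());
      nv12.put(v.get());
    }
    System.out.println(Arrays.toString(nv12.array()));
    // Prints: [1, 1, 1, 1, 1, 1, 1, 1, 2, 3, 2, 3]
  }
}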