Reland "Improve unit testing for HardwareVideoEncoder and fix bugs."

This is a reland of 7a2bfd22e69f14e2af989b9e30ddd834f585caa9

Original change's description:
> Improve unit testing for HardwareVideoEncoder and fix bugs.
> 
> Improves the unit testing for HardwareVideoEncoder and fixes bugs in it.
> The main added feature is support for dynamically switching between
> texture and byte buffer modes.
> 
> Bug: webrtc:7760
> Change-Id: Iaffe6b7700047c7d0f9a7b89a6118f6ff932cd9b
> Reviewed-on: https://webrtc-review.googlesource.com/2682
> Commit-Queue: Sami Kalliomäki <sakal@webrtc.org>
> Reviewed-by: Magnus Jedvert <magjed@webrtc.org>
> Cr-Commit-Position: refs/heads/master@{#19963}

Bug: webrtc:7760
Change-Id: I605647da456525de8e535cc66cab9d0b3f14240b
Reviewed-on: https://webrtc-review.googlesource.com/3641
Reviewed-by: Magnus Jedvert <magjed@webrtc.org>
Commit-Queue: Sami Kalliomäki <sakal@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#20013}
Author: Sami Kalliomäki
Date: 2017-09-25 17:15:08 +02:00
Committed by: Commit Bot
Parent: 4580217b56
Commit: cff9ee650e

4 changed files with 563 additions and 322 deletions
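
The headline feature is per-frame switching between texture (surface) input and byte-buffer input. As a reading aid, here is a hedged sketch of that dispatch — not the actual WebRTC source; the real logic lives in encode() and resetCodec() in HardwareVideoEncoder.java below:

    // Hedged sketch, not the actual WebRTC source: the per-frame mode switch
    // this CL introduces (see encode()/resetCodec() in the diff below).
    final class ModeSwitchSketch {
      enum InputMode { SURFACE, BYTE_BUFFER }

      private InputMode mode = InputMode.BYTE_BUFFER;
      private int width;
      private int height;
      private final boolean haveEglContext; // Stand-in for sharedContext != null.

      ModeSwitchSketch(boolean haveEglContext) {
        this.haveEglContext = haveEglContext;
      }

      void encode(boolean isTextureBuffer, int frameWidth, int frameHeight) {
        // Texture frames use surface mode only when an EGL context is available.
        InputMode wanted =
            (isTextureBuffer && haveEglContext) ? InputMode.SURFACE : InputMode.BYTE_BUFFER;
        if (frameWidth != width || frameHeight != height || wanted != mode) {
          // Mirrors resetCodec(newWidth, newHeight, newUseSurfaceMode): tear the
          // codec down and reinitialize it in the new configuration.
          release();
          width = frameWidth;
          height = frameHeight;
          mode = wanted;
          initialize();
        }
        // ... feed the frame via the input surface or an input byte buffer ...
      }

      private void initialize() { /* configure and start MediaCodec */ }
      private void release() { /* stop and release MediaCodec */ }
    }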

File: HardwareVideoEncoderFactory.java

@@ -72,13 +72,13 @@ public class HardwareVideoEncoderFactory implements VideoEncoderFactory {
     String codecName = info.getName();
     String mime = type.mimeType();
-    int colorFormat = MediaCodecUtils.selectColorFormat(sharedContext == null
-            ? MediaCodecUtils.ENCODER_COLOR_FORMATS
-            : MediaCodecUtils.TEXTURE_COLOR_FORMATS,
-        info.getCapabilitiesForType(mime));
+    Integer surfaceColorFormat = MediaCodecUtils.selectColorFormat(
+        MediaCodecUtils.TEXTURE_COLOR_FORMATS, info.getCapabilitiesForType(mime));
+    Integer yuvColorFormat = MediaCodecUtils.selectColorFormat(
+        MediaCodecUtils.ENCODER_COLOR_FORMATS, info.getCapabilitiesForType(mime));
 
-    return new HardwareVideoEncoder(codecName, type, colorFormat, input.params,
-        getKeyFrameIntervalSec(type), getForcedKeyFrameIntervalMs(type, codecName),
+    return new HardwareVideoEncoder(codecName, type, surfaceColorFormat, yuvColorFormat,
+        input.params, getKeyFrameIntervalSec(type), getForcedKeyFrameIntervalMs(type, codecName),
         createBitrateAdjuster(type, codecName), sharedContext);
   }

@@ -128,10 +128,8 @@ public class HardwareVideoEncoderFactory implements VideoEncoderFactory {
       return false;
     }
     // Check for a supported color format.
-    if (MediaCodecUtils.selectColorFormat(sharedContext == null
-                ? MediaCodecUtils.ENCODER_COLOR_FORMATS
-                : MediaCodecUtils.TEXTURE_COLOR_FORMATS,
-            info.getCapabilitiesForType(type.mimeType()))
+    if (MediaCodecUtils.selectColorFormat(
+            MediaCodecUtils.ENCODER_COLOR_FORMATS, info.getCapabilitiesForType(type.mimeType()))
         == null) {
       return false;
     }
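
Both formats are now selected up front so the encoder can later switch modes without re-querying the codec. For reference, a plausible sketch of what a selectColorFormat-style helper does — an assumption for illustration; the actual MediaCodecUtils implementation may differ:

    import android.media.MediaCodecInfo;

    final class ColorFormatSelectionSketch {
      // Plausible sketch (assumption, not the real MediaCodecUtils): return the
      // first format from our ordered preference list that the codec advertises
      // in its capabilities, or null if none is supported.
      static Integer selectColorFormat(
          int[] preferredFormats, MediaCodecInfo.CodecCapabilities capabilities) {
        for (int preferred : preferredFormats) {
          for (int supported : capabilities.colorFormats) {
            if (supported == preferred) {
              return preferred;
            }
          }
        }
        return null;
      }
    }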

File: AndroidManifest.xml

@@ -33,7 +33,7 @@
 
     <!-- tools:ignore needed for chromium-junit4 tag. crbug.com/640116
          TODO(sakal): Remove once the tag is no longer needed. -->
-    <instrumentation android:name="android.support.test.runner.AndroidJUnitRunner"
+    <instrumentation android:name="org.chromium.base.test.BaseChromiumAndroidJUnitRunner"
         tools:ignore="MissingPrefix"
         android:targetPackage="org.webrtc"
         android:label="Tests for WebRTC Android SDK"

File: HardwareVideoEncoderTest.java

@@ -11,202 +11,417 @@
 package org.webrtc;
 
 import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
 import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
 
 import android.annotation.TargetApi;
 import android.graphics.Matrix;
 import android.opengl.GLES11Ext;
-import android.opengl.GLES20;
 import android.support.test.filters.SmallTest;
 import android.util.Log;
 import java.nio.ByteBuffer;
-import java.util.concurrent.CountDownLatch;
-import org.chromium.base.test.BaseJUnit4ClassRunner;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.concurrent.BlockingQueue;
+import java.util.concurrent.LinkedBlockingQueue;
+import java.util.concurrent.TimeUnit;
+import org.chromium.base.test.params.BaseJUnit4RunnerDelegate;
+import org.chromium.base.test.params.ParameterAnnotations.ClassParameter;
+import org.chromium.base.test.params.ParameterAnnotations.UseRunnerDelegate;
+import org.chromium.base.test.params.ParameterSet;
+import org.chromium.base.test.params.ParameterizedRunner;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Ignore;
 import org.junit.Test;
 import org.junit.runner.RunWith;
 
 @TargetApi(16)
-@RunWith(BaseJUnit4ClassRunner.class)
+@RunWith(ParameterizedRunner.class)
+@UseRunnerDelegate(BaseJUnit4RunnerDelegate.class)
 public class HardwareVideoEncoderTest {
-  final static String TAG = "MediaCodecVideoEncoderTest";
+  @ClassParameter private static List<ParameterSet> CLASS_PARAMS = new ArrayList<>();
+
+  static {
+    CLASS_PARAMS.add(new ParameterSet()
+                         .value(false /* useTextures */, false /* useEglContext */)
+                         .name("I420WithoutEglContext"));
+    CLASS_PARAMS.add(new ParameterSet()
+                         .value(true /* useTextures */, false /* useEglContext */)
+                         .name("TextureWithoutEglContext"));
+    CLASS_PARAMS.add(new ParameterSet()
+                         .value(true /* useTextures */, true /* useEglContext */)
+                         .name("TextureWithEglContext"));
+  }
+
+  private final boolean useTextures;
+  private final boolean useEglContext;
+
+  public HardwareVideoEncoderTest(boolean useTextures, boolean useEglContext) {
+    this.useTextures = useTextures;
+    this.useEglContext = useEglContext;
+  }
+
+  final static String TAG = "HardwareVideoEncoderTest";
 
   private static final boolean ENABLE_INTEL_VP8_ENCODER = true;
   private static final boolean ENABLE_H264_HIGH_PROFILE = true;
   private static final VideoEncoder.Settings SETTINGS =
       new VideoEncoder.Settings(1 /* core */, 640 /* width */, 480 /* height */, 300 /* kbps */,
           30 /* fps */, true /* automaticResizeOn */);
+  private static final int ENCODE_TIMEOUT_MS = 1000;
+  private static final int NUM_TEST_FRAMES = 10;
+  private static final int NUM_ENCODE_TRIES = 100;
+  private static final int ENCODE_RETRY_SLEEP_MS = 1;
 
-  @Test
-  @SmallTest
-  public void testInitializeUsingYuvBuffer() {
-    HardwareVideoEncoderFactory factory =
-        new HardwareVideoEncoderFactory(ENABLE_INTEL_VP8_ENCODER, ENABLE_H264_HIGH_PROFILE);
-    VideoCodecInfo[] supportedCodecs = factory.getSupportedCodecs();
-    if (supportedCodecs.length == 0) {
-      Log.w(TAG, "No hardware encoding support, skipping testInitializeUsingYuvBuffer");
-      return;
-    }
-    VideoEncoder encoder = factory.createEncoder(supportedCodecs[0]);
-    assertEquals(VideoCodecStatus.OK, encoder.initEncode(SETTINGS, null));
-    assertEquals(VideoCodecStatus.OK, encoder.release());
-  }
-
-  @Test
-  @SmallTest
-  public void testInitializeUsingTextures() {
-    EglBase14 eglBase = new EglBase14(null, EglBase.CONFIG_PLAIN);
-    HardwareVideoEncoderFactory factory = new HardwareVideoEncoderFactory(
-        eglBase.getEglBaseContext(), ENABLE_INTEL_VP8_ENCODER, ENABLE_H264_HIGH_PROFILE);
-    VideoCodecInfo[] supportedCodecs = factory.getSupportedCodecs();
-    if (supportedCodecs.length == 0) {
-      Log.w(TAG, "No hardware encoding support, skipping testInitializeUsingTextures");
-      return;
-    }
-    VideoEncoder encoder = factory.createEncoder(supportedCodecs[0]);
-    assertEquals(VideoCodecStatus.OK, encoder.initEncode(SETTINGS, null));
-    assertEquals(VideoCodecStatus.OK, encoder.release());
-    eglBase.release();
-  }
+  // # Mock classes
+  /**
+   * Mock encoder callback that allows easy verification of the general properties of the encoded
+   * frame such as width and height.
+   */
+  private static class MockEncoderCallback implements VideoEncoder.Callback {
+    private BlockingQueue<EncodedImage> frameQueue = new LinkedBlockingQueue<>();
+
+    public void onEncodedFrame(EncodedImage frame, VideoEncoder.CodecSpecificInfo info) {
+      assertNotNull(frame);
+      assertNotNull(info);
+      frameQueue.offer(frame);
+    }
+
+    public EncodedImage poll() {
+      try {
+        EncodedImage image = frameQueue.poll(ENCODE_TIMEOUT_MS, TimeUnit.MILLISECONDS);
+        assertNotNull("Timed out waiting for the frame to be encoded.", image);
+        return image;
+      } catch (InterruptedException e) {
+        throw new RuntimeException(e);
+      }
+    }
+
+    public void assertFrameEncoded(VideoFrame frame) {
+      final VideoFrame.Buffer buffer = frame.getBuffer();
+      final EncodedImage image = poll();
+      assertTrue(image.buffer.capacity() > 0);
+      assertEquals(image.encodedWidth, buffer.getWidth());
+      assertEquals(image.encodedHeight, buffer.getHeight());
+      assertEquals(image.captureTimeNs, frame.getTimestampNs());
+      assertEquals(image.rotation, frame.getRotation());
+    }
+  }
+
+  /** A common base class for the texture and I420 buffer that implements reference counting. */
+  private static abstract class MockBufferBase implements VideoFrame.Buffer {
+    protected final int width;
+    protected final int height;
+    private final Runnable releaseCallback;
+    private final Object refCountLock = new Object();
+    private int refCount = 1;
+
+    public MockBufferBase(int width, int height, Runnable releaseCallback) {
+      this.width = width;
+      this.height = height;
+      this.releaseCallback = releaseCallback;
+    }
+
+    @Override
+    public int getWidth() {
+      return width;
+    }
+
+    @Override
+    public int getHeight() {
+      return height;
+    }
+
+    @Override
+    public void retain() {
+      synchronized (refCountLock) {
+        assertTrue("Buffer retained after being destroyed.", refCount > 0);
+        ++refCount;
+      }
+    }
+
+    @Override
+    public void release() {
+      synchronized (refCountLock) {
+        assertTrue("Buffer released too many times.", --refCount >= 0);
+        if (refCount == 0) {
+          releaseCallback.run();
+        }
+      }
+    }
+  }
+
+  private static class MockTextureBuffer
+      extends MockBufferBase implements VideoFrame.TextureBuffer {
+    private final int textureId;
+
+    public MockTextureBuffer(int textureId, int width, int height, Runnable releaseCallback) {
+      super(width, height, releaseCallback);
+      this.textureId = textureId;
+    }
+
+    @Override
+    public VideoFrame.TextureBuffer.Type getType() {
+      return VideoFrame.TextureBuffer.Type.OES;
+    }
+
+    @Override
+    public int getTextureId() {
+      return textureId;
+    }
+
+    @Override
+    public Matrix getTransformMatrix() {
+      return new Matrix();
+    }
+
+    @Override
+    public VideoFrame.I420Buffer toI420() {
+      return I420BufferImpl.allocate(width, height);
+    }
+
+    @Override
+    public VideoFrame.Buffer cropAndScale(
+        int cropX, int cropY, int cropWidth, int cropHeight, int scaleWidth, int scaleHeight) {
+      retain();
+      return new MockTextureBuffer(textureId, scaleWidth, scaleHeight, this ::release);
+    }
+  }
+
+  private static class MockI420Buffer extends MockBufferBase implements VideoFrame.I420Buffer {
+    private final I420BufferImpl realBuffer;
+
+    public MockI420Buffer(int width, int height, Runnable releaseCallback) {
+      super(width, height, releaseCallback);
+      // We never release this but it is not a problem in practice because the release is a no-op.
+      realBuffer = I420BufferImpl.allocate(width, height);
+    }
+
+    @Override
+    public ByteBuffer getDataY() {
+      return realBuffer.getDataY();
+    }
+
+    @Override
+    public ByteBuffer getDataU() {
+      return realBuffer.getDataU();
+    }
+
+    @Override
+    public ByteBuffer getDataV() {
+      return realBuffer.getDataV();
+    }
+
+    @Override
+    public int getStrideY() {
+      return realBuffer.getStrideY();
+    }
+
+    @Override
+    public int getStrideU() {
+      return realBuffer.getStrideU();
+    }
+
+    @Override
+    public int getStrideV() {
+      return realBuffer.getStrideV();
+    }
+
+    @Override
+    public VideoFrame.I420Buffer toI420() {
+      retain();
+      return this;
+    }
+
+    @Override
+    public VideoFrame.Buffer cropAndScale(
+        int cropX, int cropY, int cropWidth, int cropHeight, int scaleWidth, int scaleHeight) {
+      return realBuffer.cropAndScale(cropX, cropY, cropWidth, cropHeight, scaleWidth, scaleHeight);
+    }
+  }
+
+  // # Test fields
+  private Object referencedFramesLock = new Object();
+  private int referencedFrames = 0;
+
+  private Runnable releaseFrameCallback = new Runnable() {
+    public void run() {
+      synchronized (referencedFramesLock) {
+        --referencedFrames;
+      }
+    }
+  };
+
+  private EglBase14 eglBase;
+  private long lastTimestampNs;
+
+  // # Helper methods
+  private VideoEncoderFactory createEncoderFactory(EglBase.Context eglContext) {
+    return new HardwareVideoEncoderFactory(
+        eglContext, ENABLE_INTEL_VP8_ENCODER, ENABLE_H264_HIGH_PROFILE);
+  }
+
+  private VideoEncoder createEncoder() {
+    VideoEncoderFactory factory =
+        createEncoderFactory(useTextures ? eglBase.getEglBaseContext() : null);
+    VideoCodecInfo[] supportedCodecs = factory.getSupportedCodecs();
+    return factory.createEncoder(supportedCodecs[0]);
+  }
+
+  private VideoFrame generateI420Frame(int width, int height) {
+    synchronized (referencedFramesLock) {
+      ++referencedFrames;
+    }
+    lastTimestampNs += TimeUnit.SECONDS.toNanos(1) / SETTINGS.maxFramerate;
+    VideoFrame.Buffer buffer = new MockI420Buffer(width, height, releaseFrameCallback);
+    return new VideoFrame(buffer, 0 /* rotation */, lastTimestampNs);
+  }
+
+  private VideoFrame generateTextureFrame(int width, int height) {
+    synchronized (referencedFramesLock) {
+      ++referencedFrames;
+    }
+    final int textureId = GlUtil.generateTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES);
+    lastTimestampNs += TimeUnit.SECONDS.toNanos(1) / SETTINGS.maxFramerate;
+    VideoFrame.Buffer buffer =
+        new MockTextureBuffer(textureId, width, height, releaseFrameCallback);
+    return new VideoFrame(buffer, 0 /* rotation */, lastTimestampNs);
+  }
+
+  private VideoFrame generateFrame(int width, int height) {
+    return useTextures ? generateTextureFrame(width, height) : generateI420Frame(width, height);
+  }
+
+  private void testEncodeFrame(
+      VideoEncoder encoder, VideoFrame frame, VideoEncoder.EncodeInfo info) {
+    int numTries = 0;
+
+    // It takes a while for the encoder to become ready so try until it accepts the frame.
+    while (true) {
+      ++numTries;
+
+      final VideoCodecStatus returnValue = encoder.encode(frame, info);
+      switch (returnValue) {
+        case OK:
+          return; // Success
+        case NO_OUTPUT:
+          if (numTries < NUM_ENCODE_TRIES) {
+            try {
+              Thread.sleep(ENCODE_RETRY_SLEEP_MS); // Try again.
+            } catch (InterruptedException e) {
+              throw new RuntimeException(e);
+            }
+            break;
+          } else {
+            fail("encoder.encode keeps returning NO_OUTPUT");
+          }
+        default:
+          fail("encoder.encode returned: " + returnValue); // Error
+      }
+    }
+  }
+
+  // # Tests
+  @Before
+  public void setUp() {
+    eglBase = new EglBase14(null, EglBase.CONFIG_PLAIN);
+    eglBase.createDummyPbufferSurface();
+    eglBase.makeCurrent();
+
+    lastTimestampNs = System.nanoTime();
+  }
+
+  @After
+  public void tearDown() {
+    eglBase.release();
+
+    synchronized (referencedFramesLock) {
+      assertEquals("All frames were not released", 0, referencedFrames);
+    }
+  }
 
   @Test
   @SmallTest
-  public void testEncodeYuvBuffer() throws InterruptedException {
-    HardwareVideoEncoderFactory factory =
-        new HardwareVideoEncoderFactory(ENABLE_INTEL_VP8_ENCODER, ENABLE_H264_HIGH_PROFILE);
-    VideoCodecInfo[] supportedCodecs = factory.getSupportedCodecs();
-    if (supportedCodecs.length == 0) {
-      Log.w(TAG, "No hardware encoding support, skipping testEncodeYuvBuffer");
-      return;
-    }
-    VideoEncoder encoder = factory.createEncoder(supportedCodecs[0]);
-
-    final long presentationTimestampNs = 20000;
-    final CountDownLatch encodeDone = new CountDownLatch(1);
-
-    VideoEncoder.Callback callback = new VideoEncoder.Callback() {
-      @Override
-      public void onEncodedFrame(EncodedImage image, VideoEncoder.CodecSpecificInfo info) {
-        assertTrue(image.buffer.capacity() > 0);
-        assertEquals(image.encodedWidth, SETTINGS.width);
-        assertEquals(image.encodedHeight, SETTINGS.height);
-        assertEquals(image.captureTimeNs, presentationTimestampNs);
-        assertEquals(image.frameType, EncodedImage.FrameType.VideoFrameKey);
-        assertEquals(image.rotation, 0);
-        assertTrue(image.completeFrame);
-        encodeDone.countDown();
-      }
-    };
-
-    assertEquals(encoder.initEncode(SETTINGS, callback), VideoCodecStatus.OK);
-
-    VideoFrame.I420Buffer buffer = I420BufferImpl.allocate(SETTINGS.width, SETTINGS.height);
-    VideoFrame frame = new VideoFrame(buffer, 0 /* rotation */, presentationTimestampNs);
-    VideoEncoder.EncodeInfo info = new VideoEncoder.EncodeInfo(
-        new EncodedImage.FrameType[] {EncodedImage.FrameType.VideoFrameKey});
-
-    assertEquals(encoder.encode(frame, info), VideoCodecStatus.OK);
-
-    ThreadUtils.awaitUninterruptibly(encodeDone);
-
-    assertEquals(encoder.release(), VideoCodecStatus.OK);
-  }
-
-  @Test
-  @SmallTest
-  public void testEncodeTextures() throws InterruptedException {
-    final EglBase14 eglOesBase = new EglBase14(null, EglBase.CONFIG_PIXEL_BUFFER);
-    HardwareVideoEncoderFactory factory = new HardwareVideoEncoderFactory(
-        eglOesBase.getEglBaseContext(), ENABLE_INTEL_VP8_ENCODER, ENABLE_H264_HIGH_PROFILE);
-    VideoCodecInfo[] supportedCodecs = factory.getSupportedCodecs();
-    if (supportedCodecs.length == 0) {
-      Log.w(TAG, "No hardware encoding support, skipping testEncodeTextures");
-      return;
-    }
-
-    eglOesBase.createDummyPbufferSurface();
-    eglOesBase.makeCurrent();
-    final int oesTextureId = GlUtil.generateTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES);
-
-    VideoEncoder encoder = factory.createEncoder(supportedCodecs[0]);
-
-    final long presentationTimestampNs = 20000;
-    final CountDownLatch encodeDone = new CountDownLatch(1);
-
-    VideoEncoder.Callback callback = new VideoEncoder.Callback() {
-      @Override
-      public void onEncodedFrame(EncodedImage image, VideoEncoder.CodecSpecificInfo info) {
-        assertTrue(image.buffer.capacity() > 0);
-        assertEquals(image.encodedWidth, SETTINGS.width);
-        assertEquals(image.encodedHeight, SETTINGS.height);
-        assertEquals(image.captureTimeNs, presentationTimestampNs);
-        assertEquals(image.frameType, EncodedImage.FrameType.VideoFrameKey);
-        assertEquals(image.rotation, 0);
-        assertTrue(image.completeFrame);
-        encodeDone.countDown();
-      }
-    };
-
-    assertEquals(encoder.initEncode(SETTINGS, callback), VideoCodecStatus.OK);
-
-    VideoFrame.TextureBuffer buffer = new VideoFrame.TextureBuffer() {
-      @Override
-      public VideoFrame.TextureBuffer.Type getType() {
-        return VideoFrame.TextureBuffer.Type.OES;
-      }
-
-      @Override
-      public int getTextureId() {
-        return oesTextureId;
-      }
-
-      @Override
-      public Matrix getTransformMatrix() {
-        return new Matrix();
-      }
-
-      @Override
-      public int getWidth() {
-        return SETTINGS.width;
-      }
-
-      @Override
-      public int getHeight() {
-        return SETTINGS.height;
-      }
-
-      @Override
-      public VideoFrame.I420Buffer toI420() {
-        return null;
-      }
-
-      @Override
-      public void retain() {}
-
-      @Override
-      public void release() {}
-
-      @Override
-      public VideoFrame.Buffer cropAndScale(
-          int cropX, int cropY, int cropWidth, int cropHeight, int scaleWidth, int scaleHeight) {
-        return null;
-      }
-    };
-    VideoFrame frame = new VideoFrame(buffer, 0 /* rotation */, presentationTimestampNs);
-    VideoEncoder.EncodeInfo info = new VideoEncoder.EncodeInfo(
-        new EncodedImage.FrameType[] {EncodedImage.FrameType.VideoFrameKey});
-
-    assertEquals(encoder.encode(frame, info), VideoCodecStatus.OK);
-    GlUtil.checkNoGLES2Error("encodeTexture");
-
-    // It should be Ok to delete the texture after calling encodeTexture.
-    GLES20.glDeleteTextures(1, new int[] {oesTextureId}, 0);
-
-    ThreadUtils.awaitUninterruptibly(encodeDone);
-
-    assertEquals(encoder.release(), VideoCodecStatus.OK);
-    eglOesBase.release();
-  }
+  public void testInitialize() {
+    VideoEncoder encoder = createEncoder();
+    assertEquals(VideoCodecStatus.OK, encoder.initEncode(SETTINGS, null));
+    assertEquals(VideoCodecStatus.OK, encoder.release());
+  }
+
+  @Test
+  @SmallTest
+  public void testEncode() {
+    VideoEncoder encoder = createEncoder();
+    MockEncoderCallback callback = new MockEncoderCallback();
+    assertEquals(VideoCodecStatus.OK, encoder.initEncode(SETTINGS, callback));
+
+    for (int i = 0; i < NUM_TEST_FRAMES; i++) {
+      Log.d(TAG, "Test frame: " + i);
+      VideoFrame frame = generateFrame(SETTINGS.width, SETTINGS.height);
+      VideoEncoder.EncodeInfo info = new VideoEncoder.EncodeInfo(
+          new EncodedImage.FrameType[] {EncodedImage.FrameType.VideoFrameDelta});
+      testEncodeFrame(encoder, frame, info);
+
+      callback.assertFrameEncoded(frame);
+      frame.release();
+    }
+
+    assertEquals(VideoCodecStatus.OK, encoder.release());
+  }
+
+  @Test
+  @SmallTest
+  public void testEncodeAltenatingBuffers() {
+    VideoEncoder encoder = createEncoder();
+    MockEncoderCallback callback = new MockEncoderCallback();
+    assertEquals(VideoCodecStatus.OK, encoder.initEncode(SETTINGS, callback));
+
+    for (int i = 0; i < NUM_TEST_FRAMES; i++) {
+      Log.d(TAG, "Test frame: " + i);
+      VideoFrame frame;
+      VideoEncoder.EncodeInfo info = new VideoEncoder.EncodeInfo(
+          new EncodedImage.FrameType[] {EncodedImage.FrameType.VideoFrameDelta});
+
+      frame = generateTextureFrame(SETTINGS.width, SETTINGS.height);
+      testEncodeFrame(encoder, frame, info);
+      callback.assertFrameEncoded(frame);
+      frame.release();
+
+      frame = generateI420Frame(SETTINGS.width, SETTINGS.height);
+      testEncodeFrame(encoder, frame, info);
+      callback.assertFrameEncoded(frame);
+      frame.release();
+    }
+
+    assertEquals(VideoCodecStatus.OK, encoder.release());
+  }
+
+  @Test
+  @SmallTest
+  public void testEncodeDifferentSizes() {
+    VideoEncoder encoder = createEncoder();
+    MockEncoderCallback callback = new MockEncoderCallback();
+    assertEquals(VideoCodecStatus.OK, encoder.initEncode(SETTINGS, callback));
+
+    VideoFrame frame;
+    VideoEncoder.EncodeInfo info = new VideoEncoder.EncodeInfo(
+        new EncodedImage.FrameType[] {EncodedImage.FrameType.VideoFrameDelta});
+
+    frame = generateFrame(SETTINGS.width / 2, SETTINGS.height / 2);
+    testEncodeFrame(encoder, frame, info);
+    callback.assertFrameEncoded(frame);
+    frame.release();
+
+    frame = generateFrame(SETTINGS.width, SETTINGS.height);
+    testEncodeFrame(encoder, frame, info);
+    callback.assertFrameEncoded(frame);
+    frame.release();
+
+    frame = generateFrame(SETTINGS.width / 4, SETTINGS.height / 4);
+    testEncodeFrame(encoder, frame, info);
+    callback.assertFrameEncoded(frame);
+    frame.release();
+
+    assertEquals(VideoCodecStatus.OK, encoder.release());
+  }
 }
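
Note how testEncodeFrame() above retries on VideoCodecStatus.NO_OUTPUT: the encoder half of this CL (next file) now returns NO_OUTPUT instead of OK when it drops a frame, so callers can tell a queued frame from a dropped one. A minimal sketch of that retry contract, using a hypothetical helper name:

    // Hypothetical helper (not part of the CL): retry encode() while it
    // reports NO_OUTPUT, i.e. while the frame was dropped rather than queued.
    static VideoCodecStatus encodeWithRetry(VideoEncoder encoder, VideoFrame frame,
        VideoEncoder.EncodeInfo info, int maxTries, long sleepMs) throws InterruptedException {
      for (int tries = 0; tries < maxTries; tries++) {
        VideoCodecStatus status = encoder.encode(frame, info);
        if (status != VideoCodecStatus.NO_OUTPUT) {
          return status; // OK, or a real error; only NO_OUTPUT is worth retrying.
        }
        Thread.sleep(sleepMs);
      }
      return VideoCodecStatus.NO_OUTPUT;
    }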

File: HardwareVideoEncoder.java

@@ -22,8 +22,10 @@ import java.io.IOException;
 import java.nio.ByteBuffer;
 import java.util.Deque;
 import java.util.Map;
+import java.util.concurrent.BlockingDeque;
 import java.util.concurrent.LinkedBlockingDeque;
 import java.util.concurrent.TimeUnit;
+import org.webrtc.ThreadUtils.ThreadChecker;
 
 /** Android hardware video encoder. */
 @TargetApi(19)
@@ -49,29 +51,60 @@ class HardwareVideoEncoder implements VideoEncoder {
   private static final int MEDIA_CODEC_RELEASE_TIMEOUT_MS = 5000;
   private static final int DEQUEUE_OUTPUT_BUFFER_TIMEOUT_US = 100000;
 
+  // --- Initialized on construction.
   private final String codecName;
   private final VideoCodecType codecType;
-  private final int colorFormat;
+  private final Integer surfaceColorFormat;
+  private final Integer yuvColorFormat;
+  private final YuvFormat yuvFormat;
   private final Map<String, String> params;
-  private final ColorFormat inputColorFormat;
-  // Base interval for generating key frames.
-  private final int keyFrameIntervalSec;
+  private final int keyFrameIntervalSec; // Base interval for generating key frames.
   // Interval at which to force a key frame. Used to reduce color distortions caused by some
   // Qualcomm video encoders.
   private final long forcedKeyFrameNs;
+  private final BitrateAdjuster bitrateAdjuster;
+  // EGL context shared with the application. Used to access texture inputs.
+  private final EglBase14.Context sharedContext;
+  // Drawer used to draw input textures onto the codec's input surface.
+  private final GlRectDrawer textureDrawer = new GlRectDrawer();
+  private final VideoFrameDrawer videoFrameDrawer = new VideoFrameDrawer();
+  // A queue of EncodedImage.Builders that correspond to frames in the codec. These builders are
+  // pre-populated with all the information that can't be sent through MediaCodec.
+  private final BlockingDeque<EncodedImage.Builder> outputBuilders = new LinkedBlockingDeque<>();
+
+  private final ThreadChecker encodeThreadChecker = new ThreadChecker();
+  private final ThreadChecker outputThreadChecker = new ThreadChecker();
+
+  // --- Set on initialize and immutable until release.
+  private Callback callback;
+  private boolean automaticResizeOn;
+
+  // --- Valid and immutable while an encoding session is running.
+  private MediaCodec codec;
+  // Thread that delivers encoded frames to the user callback.
+  private Thread outputThread;
+  // EGL base wrapping the shared texture context. Holds hooks to both the shared context and the
+  // input surface. Making this base current allows textures from the context to be drawn onto the
+  // surface.
+  private EglBase14 textureEglBase;
+  // Input surface for the codec. The encoder will draw input textures onto this surface.
+  private Surface textureInputSurface;
+
+  private int width;
+  private int height;
+  private boolean useSurfaceMode;
+
+  // --- Only accessed from the encoding thread.
   // Presentation timestamp of the last requested (or forced) key frame.
   private long lastKeyFrameNs;
 
-  private final BitrateAdjuster bitrateAdjuster;
+  // --- Only accessed on the output thread.
+  // Contents of the last observed config frame output by the MediaCodec. Used by H.264.
+  private ByteBuffer configBuffer = null;
   private int adjustedBitrate;
 
-  // A queue of EncodedImage.Builders that correspond to frames in the codec. These builders are
-  // pre-populated with all the information that can't be sent through MediaCodec.
-  private final Deque<EncodedImage.Builder> outputBuilders;
-
-  // Thread that delivers encoded frames to the user callback.
-  private Thread outputThread;
-
   // Whether the encoder is running. Volatile so that the output thread can watch this value and
   // exit when the encoder stops.
   private volatile boolean running = false;
@@ -79,36 +112,14 @@ class HardwareVideoEncoder implements VideoEncoder {
   // value to send exceptions thrown during release back to the encoder thread.
   private volatile Exception shutdownException = null;
 
-  // Surface objects for texture-mode encoding.
-  // EGL context shared with the application. Used to access texture inputs.
-  private EglBase14.Context textureContext;
-  // EGL base wrapping the shared texture context. Holds hooks to both the shared context and the
-  // input surface. Making this base current allows textures from the context to be drawn onto the
-  // surface.
-  private EglBase14 textureEglBase;
-  // Input surface for the codec. The encoder will draw input textures onto this surface.
-  private Surface textureInputSurface;
-  // Drawer used to draw input textures onto the codec's input surface.
-  private GlRectDrawer textureDrawer;
-
-  private MediaCodec codec;
-  private Callback callback;
-  private boolean automaticResizeOn;
-  private int width;
-  private int height;
-
-  // Contents of the last observed config frame output by the MediaCodec. Used by H.264.
-  private ByteBuffer configBuffer = null;
-
   /**
    * Creates a new HardwareVideoEncoder with the given codecName, codecType, colorFormat, key frame
    * intervals, and bitrateAdjuster.
    *
    * @param codecName the hardware codec implementation to use
    * @param codecType the type of the given video codec (eg. VP8, VP9, or H264)
-   * @param colorFormat color format used by the input buffer
+   * @param surfaceColorFormat color format for surface mode or null if not available
+   * @param yuvColorFormat color format for bytebuffer mode
    * @param keyFrameIntervalSec interval in seconds between key frames; used to initialize the codec
    * @param forceKeyFrameIntervalMs interval at which to force a key frame if one is not requested;
    *     used to reduce distortion caused by some codec implementations
@@ -116,46 +127,45 @@ class HardwareVideoEncoder implements VideoEncoder {
    *     desired bitrates
    * @throws IllegalArgumentException if colorFormat is unsupported
    */
-  public HardwareVideoEncoder(String codecName, VideoCodecType codecType, int colorFormat,
-      Map<String, String> params, int keyFrameIntervalSec, int forceKeyFrameIntervalMs,
-      BitrateAdjuster bitrateAdjuster, EglBase14.Context textureContext) {
+  public HardwareVideoEncoder(String codecName, VideoCodecType codecType,
+      Integer surfaceColorFormat, Integer yuvColorFormat, Map<String, String> params,
+      int keyFrameIntervalSec, int forceKeyFrameIntervalMs, BitrateAdjuster bitrateAdjuster,
+      EglBase14.Context sharedContext) {
     this.codecName = codecName;
     this.codecType = codecType;
-    this.colorFormat = colorFormat;
+    this.surfaceColorFormat = surfaceColorFormat;
+    this.yuvColorFormat = yuvColorFormat;
+    this.yuvFormat = YuvFormat.valueOf(yuvColorFormat);
     this.params = params;
-    if (textureContext == null) {
-      this.inputColorFormat = ColorFormat.valueOf(colorFormat);
-    } else {
-      // ColorFormat copies bytes between buffers. It is not used in texture mode.
-      this.inputColorFormat = null;
-    }
     this.keyFrameIntervalSec = keyFrameIntervalSec;
     this.forcedKeyFrameNs = TimeUnit.MILLISECONDS.toNanos(forceKeyFrameIntervalMs);
     this.bitrateAdjuster = bitrateAdjuster;
-    this.outputBuilders = new LinkedBlockingDeque<>();
-    this.textureContext = textureContext;
+    this.sharedContext = sharedContext;
   }
 
   @Override
   public VideoCodecStatus initEncode(Settings settings, Callback callback) {
+    encodeThreadChecker.checkIsOnValidThread();
+
+    this.callback = callback;
     automaticResizeOn = settings.automaticResizeOn;
+    this.width = settings.width;
+    this.height = settings.height;
+    useSurfaceMode = canUseSurface();
 
-    return initEncodeInternal(
-        settings.width, settings.height, settings.startBitrate, settings.maxFramerate, callback);
-  }
-
-  private VideoCodecStatus initEncodeInternal(
-      int width, int height, int bitrateKbps, int fps, Callback callback) {
-    Logging.d(
-        TAG, "initEncode: " + width + " x " + height + ". @ " + bitrateKbps + "kbps. Fps: " + fps);
-    this.width = width;
-    this.height = height;
-
-    if (bitrateKbps != 0 && fps != 0) {
-      bitrateAdjuster.setTargets(bitrateKbps * 1000, fps);
+    if (settings.startBitrate != 0 && settings.maxFramerate != 0) {
+      bitrateAdjuster.setTargets(settings.startBitrate * 1000, settings.maxFramerate);
     }
     adjustedBitrate = bitrateAdjuster.getAdjustedBitrateBps();
 
-    this.callback = callback;
+    Logging.d(TAG,
+        "initEncode: " + width + " x " + height + ". @ " + settings.startBitrate
+            + "kbps. Fps: " + settings.maxFramerate + " Use surface mode: " + useSurfaceMode);
+    return initEncodeInternal();
+  }
+
+  private VideoCodecStatus initEncodeInternal() {
+    encodeThreadChecker.checkIsOnValidThread();
 
     lastKeyFrameNs = -1;
@@ -165,6 +175,8 @@ class HardwareVideoEncoder implements VideoEncoder {
       Logging.e(TAG, "Cannot create media encoder " + codecName);
       return VideoCodecStatus.ERROR;
     }
+
+    final int colorFormat = useSurfaceMode ? surfaceColorFormat : yuvColorFormat;
     try {
       MediaFormat format = MediaFormat.createVideoFormat(codecType.mimeType(), width, height);
       format.setInteger(MediaFormat.KEY_BIT_RATE, adjustedBitrate);
@@ -189,24 +201,25 @@ class HardwareVideoEncoder implements VideoEncoder {
         }
       }
       Logging.d(TAG, "Format: " + format);
-      codec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
+      codec.configure(
+          format, null /* surface */, null /* crypto */, MediaCodec.CONFIGURE_FLAG_ENCODE);
 
-      if (textureContext != null) {
-        // Texture mode.
-        textureEglBase = new EglBase14(textureContext, EglBase.CONFIG_RECORDABLE);
+      if (useSurfaceMode) {
+        textureEglBase = new EglBase14(sharedContext, EglBase.CONFIG_RECORDABLE);
         textureInputSurface = codec.createInputSurface();
         textureEglBase.createSurface(textureInputSurface);
-        textureDrawer = new GlRectDrawer();
+        textureEglBase.makeCurrent();
       }
 
       codec.start();
     } catch (IllegalStateException e) {
-      Logging.e(TAG, "initEncode failed", e);
+      Logging.e(TAG, "initEncodeInternal failed", e);
       release();
       return VideoCodecStatus.ERROR;
     }
 
     running = true;
+    outputThreadChecker.detachThread();
     outputThread = createOutputThread();
     outputThread.start();
@@ -215,53 +228,60 @@ class HardwareVideoEncoder implements VideoEncoder {
 
   @Override
   public VideoCodecStatus release() {
-    try {
-      if (outputThread == null) {
-        return VideoCodecStatus.OK;
-      }
+    encodeThreadChecker.checkIsOnValidThread();
+
+    final VideoCodecStatus returnValue;
+    if (outputThread == null) {
+      returnValue = VideoCodecStatus.OK;
+    } else {
       // The outputThread actually stops and releases the codec once running is false.
       running = false;
       if (!ThreadUtils.joinUninterruptibly(outputThread, MEDIA_CODEC_RELEASE_TIMEOUT_MS)) {
         Logging.e(TAG, "Media encoder release timeout");
-        return VideoCodecStatus.TIMEOUT;
-      }
-      if (shutdownException != null) {
+        returnValue = VideoCodecStatus.TIMEOUT;
+      } else if (shutdownException != null) {
         // Log the exception and turn it into an error.
         Logging.e(TAG, "Media encoder release exception", shutdownException);
-        return VideoCodecStatus.ERROR;
-      }
-    } finally {
-      codec = null;
-      outputThread = null;
-      outputBuilders.clear();
-
-      if (textureDrawer != null) {
-        textureDrawer.release();
-        textureDrawer = null;
-      }
-      if (textureEglBase != null) {
-        textureEglBase.release();
-        textureEglBase = null;
-      }
-      if (textureInputSurface != null) {
-        textureInputSurface.release();
-        textureInputSurface = null;
+        returnValue = VideoCodecStatus.ERROR;
+      } else {
+        returnValue = VideoCodecStatus.OK;
       }
     }
-    return VideoCodecStatus.OK;
+
+    textureDrawer.release();
+    videoFrameDrawer.release();
+    if (textureEglBase != null) {
+      textureEglBase.release();
+      textureEglBase = null;
+    }
+    if (textureInputSurface != null) {
+      textureInputSurface.release();
+      textureInputSurface = null;
+    }
+
+    outputBuilders.clear();
+
+    codec = null;
+    outputThread = null;
+
+    return returnValue;
   }
 
   @Override
   public VideoCodecStatus encode(VideoFrame videoFrame, EncodeInfo encodeInfo) {
+    encodeThreadChecker.checkIsOnValidThread();
+
     if (codec == null) {
       return VideoCodecStatus.UNINITIALIZED;
     }
 
+    final VideoFrame.Buffer videoFrameBuffer = videoFrame.getBuffer();
+    final boolean isTextureBuffer = videoFrameBuffer instanceof VideoFrame.TextureBuffer;
+
     // If input resolution changed, restart the codec with the new resolution.
-    int frameWidth = videoFrame.getBuffer().getWidth();
-    int frameHeight = videoFrame.getBuffer().getHeight();
-    if (frameWidth != width || frameHeight != height) {
-      VideoCodecStatus status = resetCodec(frameWidth, frameHeight);
+    final int frameWidth = videoFrame.getBuffer().getWidth();
+    final int frameHeight = videoFrame.getBuffer().getHeight();
+    final boolean shouldUseSurfaceMode = canUseSurface() && isTextureBuffer;
+    if (frameWidth != width || frameHeight != height || shouldUseSurfaceMode != useSurfaceMode) {
+      VideoCodecStatus status = resetCodec(frameWidth, frameHeight, shouldUseSurfaceMode);
       if (status != VideoCodecStatus.OK) {
         return status;
       }
@@ -270,7 +290,7 @@ class HardwareVideoEncoder implements VideoEncoder {
     if (outputBuilders.size() > MAX_ENCODER_Q_SIZE) {
       // Too many frames in the encoder. Drop this frame.
       Logging.e(TAG, "Dropped frame, encoder queue full");
-      return VideoCodecStatus.OK; // See webrtc bug 2887.
+      return VideoCodecStatus.NO_OUTPUT; // See webrtc bug 2887.
     }
 
     boolean requestedKeyFrame = false;
@@ -284,7 +304,6 @@ class HardwareVideoEncoder implements VideoEncoder {
       requestKeyFrame(videoFrame.getTimestampNs());
     }
 
-    VideoFrame.Buffer videoFrameBuffer = videoFrame.getBuffer();
     // Number of bytes in the video buffer. Y channel is sampled at one byte per pixel; U and V are
     // subsampled at one byte per four pixels.
     int bufferSize = videoFrameBuffer.getHeight() * videoFrameBuffer.getWidth() * 3 / 2;
@@ -296,46 +315,35 @@ class HardwareVideoEncoder implements VideoEncoder {
             .setRotation(videoFrame.getRotation());
     outputBuilders.offer(builder);
 
-    if (textureContext != null) {
-      if (!(videoFrameBuffer instanceof VideoFrame.TextureBuffer)) {
-        Logging.e(TAG, "Cannot encode non-texture buffer in texture mode");
-        return VideoCodecStatus.ERROR;
-      }
-      VideoFrame.TextureBuffer textureBuffer = (VideoFrame.TextureBuffer) videoFrameBuffer;
-      return encodeTextureBuffer(videoFrame, textureBuffer);
+    final VideoCodecStatus returnValue;
+    if (useSurfaceMode) {
+      returnValue = encodeTextureBuffer(videoFrame);
     } else {
-      if (videoFrameBuffer instanceof VideoFrame.TextureBuffer) {
-        Logging.w(TAG, "Encoding texture buffer in byte mode; this may be inefficient");
-      }
-      return encodeByteBuffer(videoFrame, videoFrameBuffer, bufferSize);
+      returnValue = encodeByteBuffer(videoFrame, videoFrameBuffer, bufferSize);
     }
+
+    // Check if the queue was successful.
+    if (returnValue != VideoCodecStatus.OK) {
+      // Keep the output builders in sync with buffers in the codec.
+      outputBuilders.pollLast();
+    }
+
+    return returnValue;
   }
 
-  private VideoCodecStatus encodeTextureBuffer(
-      VideoFrame videoFrame, VideoFrame.TextureBuffer textureBuffer) {
-    Matrix matrix = textureBuffer.getTransformMatrix();
-    float[] transformationMatrix = RendererCommon.convertMatrixFromAndroidGraphicsMatrix(matrix);
-
+  private VideoCodecStatus encodeTextureBuffer(VideoFrame videoFrame) {
+    encodeThreadChecker.checkIsOnValidThread();
     try {
-      textureEglBase.makeCurrent();
       // TODO(perkj): glClear() shouldn't be necessary since every pixel is covered anyway,
       // but it's a workaround for bug webrtc:5147.
       GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
-      switch (textureBuffer.getType()) {
-        case OES:
-          textureDrawer.drawOes(textureBuffer.getTextureId(), transformationMatrix, width, height,
-              0, 0, width, height);
-          break;
-        case RGB:
-          textureDrawer.drawRgb(textureBuffer.getTextureId(), transformationMatrix, width, height,
-              0, 0, width, height);
-          break;
-      }
+      // It is not necessary to release this frame because it doesn't own the buffer.
+      VideoFrame derotatedFrame =
+          new VideoFrame(videoFrame.getBuffer(), 0 /* rotation */, videoFrame.getTimestampNs());
+      videoFrameDrawer.drawFrame(derotatedFrame, textureDrawer, null /* additionalRenderMatrix */);
       textureEglBase.swapBuffers(videoFrame.getTimestampNs());
     } catch (RuntimeException e) {
       Logging.e(TAG, "encodeTexture failed", e);
-      // Keep the output builders in sync with buffers in the codec.
-      outputBuilders.pollLast();
       return VideoCodecStatus.ERROR;
     }
     return VideoCodecStatus.OK;
@@ -343,6 +351,7 @@ class HardwareVideoEncoder implements VideoEncoder {
 
   private VideoCodecStatus encodeByteBuffer(
       VideoFrame videoFrame, VideoFrame.Buffer videoFrameBuffer, int bufferSize) {
+    encodeThreadChecker.checkIsOnValidThread();
     // Frame timestamp rounded to the nearest microsecond.
     long presentationTimestampUs = (videoFrame.getTimestampNs() + 500) / 1000;
@@ -352,13 +361,13 @@ class HardwareVideoEncoder implements VideoEncoder {
       index = codec.dequeueInputBuffer(0 /* timeout */);
     } catch (IllegalStateException e) {
       Logging.e(TAG, "dequeueInputBuffer failed", e);
-      return VideoCodecStatus.FALLBACK_SOFTWARE;
+      return VideoCodecStatus.ERROR;
     }
 
     if (index == -1) {
       // Encoder is falling behind. No input buffers available. Drop the frame.
-      Logging.e(TAG, "Dropped frame, no input buffers available");
-      return VideoCodecStatus.OK; // See webrtc bug 2887.
+      Logging.d(TAG, "Dropped frame, no input buffers available");
+      return VideoCodecStatus.NO_OUTPUT; // See webrtc bug 2887.
     }
 
     ByteBuffer buffer;
@@ -368,17 +377,13 @@ class HardwareVideoEncoder implements VideoEncoder {
       Logging.e(TAG, "getInputBuffers failed", e);
       return VideoCodecStatus.ERROR;
     }
-    VideoFrame.I420Buffer i420 = videoFrameBuffer.toI420();
-    inputColorFormat.fillBufferFromI420(buffer, i420);
-    i420.release();
+    yuvFormat.fillBuffer(buffer, videoFrameBuffer);
 
     try {
       codec.queueInputBuffer(
           index, 0 /* offset */, bufferSize, presentationTimestampUs, 0 /* flags */);
     } catch (IllegalStateException e) {
       Logging.e(TAG, "queueInputBuffer failed", e);
-      // Keep the output builders in sync with buffers in the codec.
-      outputBuilders.pollLast();
       // IllegalStateException thrown when the codec is in the wrong state.
       return VideoCodecStatus.ERROR;
     }
@@ -387,43 +392,51 @@ class HardwareVideoEncoder implements VideoEncoder {
 
   @Override
   public VideoCodecStatus setChannelParameters(short packetLoss, long roundTripTimeMs) {
-    // No op.
-    return VideoCodecStatus.OK;
+    encodeThreadChecker.checkIsOnValidThread();
+    return VideoCodecStatus.OK; // No op.
   }
 
   @Override
   public VideoCodecStatus setRateAllocation(BitrateAllocation bitrateAllocation, int framerate) {
+    encodeThreadChecker.checkIsOnValidThread();
     if (framerate > MAX_VIDEO_FRAMERATE) {
       framerate = MAX_VIDEO_FRAMERATE;
     }
     bitrateAdjuster.setTargets(bitrateAllocation.getSum(), framerate);
-    return updateBitrate();
+    return VideoCodecStatus.OK;
   }
 
   @Override
   public ScalingSettings getScalingSettings() {
+    encodeThreadChecker.checkIsOnValidThread();
     return new ScalingSettings(automaticResizeOn);
   }
 
   @Override
   public String getImplementationName() {
+    encodeThreadChecker.checkIsOnValidThread();
     return "HardwareVideoEncoder: " + codecName;
   }
 
-  private VideoCodecStatus resetCodec(int newWidth, int newHeight) {
+  private VideoCodecStatus resetCodec(int newWidth, int newHeight, boolean newUseSurfaceMode) {
+    encodeThreadChecker.checkIsOnValidThread();
     VideoCodecStatus status = release();
     if (status != VideoCodecStatus.OK) {
      return status;
     }
-    // Zero bitrate and framerate indicate not to change the targets.
-    return initEncodeInternal(newWidth, newHeight, 0, 0, callback);
+    width = newWidth;
+    height = newHeight;
+    useSurfaceMode = newUseSurfaceMode;
+    return initEncodeInternal();
   }
 
   private boolean shouldForceKeyFrame(long presentationTimestampNs) {
+    encodeThreadChecker.checkIsOnValidThread();
     return forcedKeyFrameNs > 0 && presentationTimestampNs > lastKeyFrameNs + forcedKeyFrameNs;
   }
 
   private void requestKeyFrame(long presentationTimestampNs) {
+    encodeThreadChecker.checkIsOnValidThread();
     // Ideally MediaCodec would honor BUFFER_FLAG_SYNC_FRAME so we could
     // indicate this in queueInputBuffer() below and guarantee _this_ frame
     // be encoded as a key frame, but sadly that flag is ignored. Instead,
@@ -452,6 +465,7 @@ class HardwareVideoEncoder implements VideoEncoder {
   }
 
   private void deliverEncodedImage() {
+    outputThreadChecker.checkIsOnValidThread();
     try {
       MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
       int index = codec.dequeueOutputBuffer(info, DEQUEUE_OUTPUT_BUFFER_TIMEOUT_US);
@@ -473,8 +487,12 @@ class HardwareVideoEncoder implements VideoEncoder {
         updateBitrate();
       }
 
-      ByteBuffer frameBuffer;
-      boolean isKeyFrame = (info.flags & MediaCodec.BUFFER_FLAG_SYNC_FRAME) != 0;
+      final boolean isKeyFrame = (info.flags & MediaCodec.BUFFER_FLAG_SYNC_FRAME) != 0;
+      if (isKeyFrame) {
+        Logging.d(TAG, "Sync frame generated");
+      }
+
+      final ByteBuffer frameBuffer;
       if (isKeyFrame && codecType == VideoCodecType.H264) {
         Logging.d(TAG,
             "Prepending config frame of size " + configBuffer.capacity()
@@ -489,11 +507,10 @@ class HardwareVideoEncoder implements VideoEncoder {
       frameBuffer.put(codecOutputBuffer);
       frameBuffer.rewind();
 
-      EncodedImage.FrameType frameType = EncodedImage.FrameType.VideoFrameDelta;
-      if (isKeyFrame) {
-        Logging.d(TAG, "Sync frame generated");
-        frameType = EncodedImage.FrameType.VideoFrameKey;
-      }
+      final EncodedImage.FrameType frameType = isKeyFrame
+          ? EncodedImage.FrameType.VideoFrameKey
+          : EncodedImage.FrameType.VideoFrameDelta;
 
       EncodedImage.Builder builder = outputBuilders.poll();
       builder.setBuffer(frameBuffer).setFrameType(frameType);
       // TODO(mellem): Set codec-specific info.
@@ -506,6 +523,7 @@ class HardwareVideoEncoder implements VideoEncoder {
   }
 
   private void releaseCodecOnOutputThread() {
+    outputThreadChecker.checkIsOnValidThread();
     Logging.d(TAG, "Releasing MediaCodec on output thread");
     try {
       codec.stop();
@@ -519,10 +537,12 @@ class HardwareVideoEncoder implements VideoEncoder {
       // Propagate exceptions caught during release back to the main thread.
       shutdownException = e;
     }
+    configBuffer = null;
     Logging.d(TAG, "Release on output thread done");
   }
 
   private VideoCodecStatus updateBitrate() {
+    outputThreadChecker.checkIsOnValidThread();
     adjustedBitrate = bitrateAdjuster.getAdjustedBitrateBps();
     try {
       Bundle params = new Bundle();
@@ -535,37 +555,45 @@ class HardwareVideoEncoder implements VideoEncoder {
     }
   }
 
+  private boolean canUseSurface() {
+    return sharedContext != null && surfaceColorFormat != null;
+  }
+
   /**
-   * Enumeration of supported color formats used for MediaCodec's input.
+   * Enumeration of supported YUV color formats used for MediaCodec's input.
    */
-  private static enum ColorFormat {
+  private static enum YuvFormat {
     I420 {
       @Override
-      void fillBufferFromI420(ByteBuffer buffer, VideoFrame.I420Buffer i420) {
-        buffer.put(i420.getDataY());
-        buffer.put(i420.getDataU());
-        buffer.put(i420.getDataV());
+      void fillBuffer(ByteBuffer inputBuffer, VideoFrame.Buffer buffer) {
+        VideoFrame.I420Buffer i420 = buffer.toI420();
+        inputBuffer.put(i420.getDataY());
+        inputBuffer.put(i420.getDataU());
+        inputBuffer.put(i420.getDataV());
+        i420.release();
       }
     },
     NV12 {
       @Override
-      void fillBufferFromI420(ByteBuffer buffer, VideoFrame.I420Buffer i420) {
-        buffer.put(i420.getDataY());
+      void fillBuffer(ByteBuffer inputBuffer, VideoFrame.Buffer buffer) {
+        VideoFrame.I420Buffer i420 = buffer.toI420();
+        inputBuffer.put(i420.getDataY());
 
         // Interleave the bytes from the U and V portions, starting with U.
         ByteBuffer u = i420.getDataU();
         ByteBuffer v = i420.getDataV();
         int i = 0;
         while (u.hasRemaining() && v.hasRemaining()) {
-          buffer.put(u.get());
-          buffer.put(v.get());
+          inputBuffer.put(u.get());
+          inputBuffer.put(v.get());
        }
+        i420.release();
      }
    };
 
-    abstract void fillBufferFromI420(ByteBuffer buffer, VideoFrame.I420Buffer i420);
+    abstract void fillBuffer(ByteBuffer inputBuffer, VideoFrame.Buffer buffer);
 
-    static ColorFormat valueOf(int colorFormat) {
+    static YuvFormat valueOf(int colorFormat) {
       switch (colorFormat) {
         case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar:
           return I420;
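
For reference, the two input layouts YuvFormat handles differ only in the chroma planes: I420 keeps U and V as separate planes, while NV12 interleaves them after the Y plane. A hypothetical standalone helper with the same interleaving logic as the NV12 case above:

    import java.nio.ByteBuffer;

    final class Nv12Sketch {
      // Hypothetical helper (illustration only): same U/V interleave as the
      // NV12 case above. Y plane first, then alternating U and V bytes.
      static void interleaveChroma(ByteBuffer y, ByteBuffer u, ByteBuffer v, ByteBuffer out) {
        out.put(y);
        while (u.hasRemaining() && v.hasRemaining()) {
          out.put(u.get());
          out.put(v.get());
        }
      }
    }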