Add unit tests for hardware video codecs.

Bug: webrtc:9594
Change-Id: I4529a5123997e0309bde1b931bb6d99bea8c0dfd
Reviewed-on: https://webrtc-review.googlesource.com/92399
Commit-Queue: Sami Kalliomäki <sakal@webrtc.org>
Reviewed-by: Magnus Jedvert <magjed@webrtc.org>
Reviewed-by: Patrik Höglund <phoglund@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#24223}
This commit is contained in:
Sami Kalliomäki
2018-08-08 11:29:23 +02:00
committed by Commit Bot
parent 39a44b2134
commit a381871dbf
13 changed files with 1326 additions and 24 deletions

View File

@ -572,8 +572,12 @@ if (rtc_include_tests) {
"examples/androidjunit/src/org/appspot/apprtc/BluetoothManagerTest.java",
"examples/androidjunit/src/org/appspot/apprtc/DirectRTCClientTest.java",
"examples/androidjunit/src/org/appspot/apprtc/TCPChannelClientTest.java",
"sdk/android/tests/src/org/webrtc/GlGenericDrawerTest.java",
"sdk/android/tests/src/org/webrtc/CameraEnumerationTest.java",
"sdk/android/tests/src/org/webrtc/CodecTestHelper.java",
"sdk/android/tests/src/org/webrtc/FakeMediaCodecWrapper.java",
"sdk/android/tests/src/org/webrtc/GlGenericDrawerTest.java",
"sdk/android/tests/src/org/webrtc/HardwareVideoEncoderTest.java",
"sdk/android/tests/src/org/webrtc/HardwareVideoDecoderTest.java",
"sdk/android/tests/src/org/webrtc/ScalingSettingsTest.java",
]
@ -581,6 +585,7 @@ if (rtc_include_tests) {
"examples:AppRTCMobile_javalib",
"sdk/android:libjingle_peerconnection_java",
"//base:base_java_test_support",
"//third_party/google-truth:google_truth_java",
]
}
}

View File

@ -391,6 +391,9 @@ if (is_android) {
"src/java/org/webrtc/FramerateBitrateAdjuster.java",
"src/java/org/webrtc/HardwareVideoDecoder.java",
"src/java/org/webrtc/HardwareVideoEncoder.java",
"src/java/org/webrtc/MediaCodecWrapper.java",
"src/java/org/webrtc/MediaCodecWrapperFactory.java",
"src/java/org/webrtc/MediaCodecWrapperFactoryImpl.java",
"src/java/org/webrtc/MediaCodecUtils.java",
"src/java/org/webrtc/NV12Buffer.java",
"src/java/org/webrtc/VideoCodecType.java",

View File

@ -55,7 +55,7 @@ public class HardwareVideoDecoderFactory implements VideoDecoderFactory {
}
CodecCapabilities capabilities = info.getCapabilitiesForType(type.mimeType());
return new HardwareVideoDecoder(info.getName(), type,
return new HardwareVideoDecoder(new MediaCodecWrapperFactoryImpl(), info.getName(), type,
MediaCodecUtils.selectColorFormat(MediaCodecUtils.DECODER_COLOR_FORMATS, capabilities),
sharedContext);
}

View File

@ -92,9 +92,10 @@ public class HardwareVideoEncoderFactory implements VideoEncoderFactory {
}
}
return new HardwareVideoEncoder(codecName, type, surfaceColorFormat, yuvColorFormat,
input.params, getKeyFrameIntervalSec(type), getForcedKeyFrameIntervalMs(type, codecName),
createBitrateAdjuster(type, codecName), sharedContext);
return new HardwareVideoEncoder(new MediaCodecWrapperFactoryImpl(), codecName, type,
surfaceColorFormat, yuvColorFormat, input.params, getKeyFrameIntervalSec(type),
getForcedKeyFrameIntervalMs(type, codecName), createBitrateAdjuster(type, codecName),
sharedContext);
}
@Override

View File

@ -52,6 +52,7 @@ class HardwareVideoDecoder implements VideoDecoder, VideoSink {
// MediaCodec.
private static final int DEQUEUE_OUTPUT_BUFFER_TIMEOUT_US = 100000;
private final MediaCodecWrapperFactory mediaCodecWrapperFactory;
private final String codecName;
private final VideoCodecType codecType;
@ -123,13 +124,14 @@ class HardwareVideoDecoder implements VideoDecoder, VideoSink {
@Nullable private Callback callback;
// Valid and immutable while the decoder is running.
@Nullable private MediaCodec codec = null;
@Nullable private MediaCodecWrapper codec = null;
HardwareVideoDecoder(
String codecName, VideoCodecType codecType, int colorFormat, EglBase.Context sharedContext) {
HardwareVideoDecoder(MediaCodecWrapperFactory mediaCodecWrapperFactory, String codecName,
VideoCodecType codecType, int colorFormat, EglBase.Context sharedContext) {
if (!isSupportedColorFormat(colorFormat)) {
throw new IllegalArgumentException("Unsupported color format: " + colorFormat);
}
this.mediaCodecWrapperFactory = mediaCodecWrapperFactory;
this.codecName = codecName;
this.codecType = codecType;
this.colorFormat = colorFormat;
@ -143,7 +145,7 @@ class HardwareVideoDecoder implements VideoDecoder, VideoSink {
this.callback = callback;
if (sharedContext != null) {
surfaceTextureHelper = SurfaceTextureHelper.create("decoder-texture-thread", sharedContext);
surfaceTextureHelper = createSurfaceTextureHelper();
surface = new Surface(surfaceTextureHelper.getSurfaceTexture());
surfaceTextureHelper.startListening(this);
}
@ -170,7 +172,7 @@ class HardwareVideoDecoder implements VideoDecoder, VideoSink {
keyFrameRequired = true;
try {
codec = MediaCodec.createByCodecName(codecName);
codec = mediaCodecWrapperFactory.createByCodecName(codecName);
} catch (IOException | IllegalArgumentException e) {
Logging.e(TAG, "Cannot create media decoder " + codecName);
return VideoCodecStatus.FALLBACK_SOFTWARE;
@ -304,7 +306,7 @@ class HardwareVideoDecoder implements VideoDecoder, VideoSink {
Logging.d(TAG, "release");
VideoCodecStatus status = releaseInternal();
if (surface != null) {
surface.release();
releaseSurface();
surface = null;
surfaceTextureHelper.stopListening();
surfaceTextureHelper.dispose();
@ -368,7 +370,8 @@ class HardwareVideoDecoder implements VideoDecoder, VideoSink {
};
}
private void deliverDecodedFrame() {
// Visible for testing.
protected void deliverDecodedFrame() {
outputThreadChecker.checkIsOnValidThread();
try {
MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
@ -527,16 +530,16 @@ class HardwareVideoDecoder implements VideoDecoder, VideoSink {
final int vPos = uPos + uvStride * sliceHeight / 2;
final int vEnd = vPos + uvStride * chromaHeight;
VideoFrame.I420Buffer frameBuffer = JavaI420Buffer.allocate(width, height);
VideoFrame.I420Buffer frameBuffer = allocateI420Buffer(width, height);
buffer.limit(yEnd);
buffer.position(yPos);
YuvHelper.copyPlane(
copyPlane(
buffer.slice(), stride, frameBuffer.getDataY(), frameBuffer.getStrideY(), width, height);
buffer.limit(uEnd);
buffer.position(uPos);
YuvHelper.copyPlane(buffer.slice(), uvStride, frameBuffer.getDataU(), frameBuffer.getStrideU(),
copyPlane(buffer.slice(), uvStride, frameBuffer.getDataU(), frameBuffer.getStrideU(),
chromaWidth, chromaHeight);
if (sliceHeight % 2 == 1) {
buffer.position(uPos + uvStride * (chromaHeight - 1)); // Seek to beginning of last full row.
@ -548,7 +551,7 @@ class HardwareVideoDecoder implements VideoDecoder, VideoSink {
buffer.limit(vEnd);
buffer.position(vPos);
YuvHelper.copyPlane(buffer.slice(), uvStride, frameBuffer.getDataV(), frameBuffer.getStrideV(),
copyPlane(buffer.slice(), uvStride, frameBuffer.getDataV(), frameBuffer.getStrideV(),
chromaWidth, chromaHeight);
if (sliceHeight % 2 == 1) {
buffer.position(vPos + uvStride * (chromaHeight - 1)); // Seek to beginning of last full row.
@ -646,4 +649,26 @@ class HardwareVideoDecoder implements VideoDecoder, VideoSink {
}
return false;
}
// Visible for testing.
protected SurfaceTextureHelper createSurfaceTextureHelper() {
return SurfaceTextureHelper.create("decoder-texture-thread", sharedContext);
}
// Visible for testing.
// TODO(sakal): Remove once Robolectric commit fa991a0 has been rolled to WebRTC.
protected void releaseSurface() {
surface.release();
}
// Visible for testing.
protected VideoFrame.I420Buffer allocateI420Buffer(int width, int height) {
return JavaI420Buffer.allocate(width, height);
}
// Visible for testing.
protected void copyPlane(
ByteBuffer src, int srcStride, ByteBuffer dst, int dstStride, int width, int height) {
YuvHelper.copyPlane(src, srcStride, dst, dstStride, width, height);
}
}

View File

@ -53,6 +53,7 @@ class HardwareVideoEncoder implements VideoEncoder {
private static final int DEQUEUE_OUTPUT_BUFFER_TIMEOUT_US = 100000;
// --- Initialized on construction.
private final MediaCodecWrapperFactory mediaCodecWrapperFactory;
private final String codecName;
private final VideoCodecType codecType;
private final Integer surfaceColorFormat;
@ -82,7 +83,7 @@ class HardwareVideoEncoder implements VideoEncoder {
private boolean automaticResizeOn;
// --- Valid and immutable while an encoding session is running.
@Nullable private MediaCodec codec;
@Nullable private MediaCodecWrapper codec;
// Thread that delivers encoded frames to the user callback.
@Nullable private Thread outputThread;
@ -128,10 +129,11 @@ class HardwareVideoEncoder implements VideoEncoder {
* desired bitrates
* @throws IllegalArgumentException if colorFormat is unsupported
*/
public HardwareVideoEncoder(String codecName, VideoCodecType codecType,
Integer surfaceColorFormat, Integer yuvColorFormat, Map<String, String> params,
int keyFrameIntervalSec, int forceKeyFrameIntervalMs, BitrateAdjuster bitrateAdjuster,
EglBase14.Context sharedContext) {
public HardwareVideoEncoder(MediaCodecWrapperFactory mediaCodecWrapperFactory, String codecName,
VideoCodecType codecType, Integer surfaceColorFormat, Integer yuvColorFormat,
Map<String, String> params, int keyFrameIntervalSec, int forceKeyFrameIntervalMs,
BitrateAdjuster bitrateAdjuster, EglBase14.Context sharedContext) {
this.mediaCodecWrapperFactory = mediaCodecWrapperFactory;
this.codecName = codecName;
this.codecType = codecType;
this.surfaceColorFormat = surfaceColorFormat;
@ -174,7 +176,7 @@ class HardwareVideoEncoder implements VideoEncoder {
lastKeyFrameNs = -1;
try {
codec = MediaCodec.createByCodecName(codecName);
codec = mediaCodecWrapperFactory.createByCodecName(codecName);
} catch (IOException | IllegalArgumentException e) {
Logging.e(TAG, "Cannot create media encoder " + codecName);
return VideoCodecStatus.FALLBACK_SOFTWARE;
@ -384,7 +386,7 @@ class HardwareVideoEncoder implements VideoEncoder {
Logging.e(TAG, "getInputBuffers failed", e);
return VideoCodecStatus.ERROR;
}
yuvFormat.fillBuffer(buffer, videoFrameBuffer);
fillInputBuffer(buffer, videoFrameBuffer);
try {
codec.queueInputBuffer(
@ -481,7 +483,8 @@ class HardwareVideoEncoder implements VideoEncoder {
};
}
private void deliverEncodedImage() {
// Visible for testing.
protected void deliverEncodedImage() {
outputThreadChecker.checkIsOnValidThread();
try {
MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
@ -576,6 +579,11 @@ class HardwareVideoEncoder implements VideoEncoder {
return sharedContext != null && surfaceColorFormat != null;
}
// Visible for testing.
protected void fillInputBuffer(ByteBuffer buffer, VideoFrame.Buffer videoFrameBuffer) {
yuvFormat.fillBuffer(buffer, videoFrameBuffer);
}
/**
* Enumeration of supported YUV color formats used for MediaCodec's input.
*/

View File

@ -0,0 +1,53 @@
/*
* Copyright 2018 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import android.media.MediaCodec;
import android.media.MediaCrypto;
import android.media.MediaFormat;
import android.os.Bundle;
import android.view.Surface;
import java.nio.ByteBuffer;
/**
 * Subset of methods defined in {@link android.media.MediaCodec} needed by
 * {@link HardwareVideoEncoder} and {@link HardwareVideoDecoder}. This interface
 * exists to allow mocking and using a fake implementation in tests.
 *
 * <p>Each method mirrors the MediaCodec method of the same name; see the
 * {@link android.media.MediaCodec} documentation for the full contract,
 * including the codec state machine that governs when each call is legal.
 */
interface MediaCodecWrapper {
  /** See {@link android.media.MediaCodec#configure}. */
  void configure(MediaFormat format, Surface surface, MediaCrypto crypto, int flags);

  /** See {@link android.media.MediaCodec#start}. */
  void start();

  /** See {@link android.media.MediaCodec#flush}. */
  void flush();

  /** See {@link android.media.MediaCodec#stop}. */
  void stop();

  /** See {@link android.media.MediaCodec#release}. */
  void release();

  /** See {@link android.media.MediaCodec#dequeueInputBuffer}. */
  int dequeueInputBuffer(long timeoutUs);

  /** See {@link android.media.MediaCodec#queueInputBuffer}. */
  void queueInputBuffer(int index, int offset, int size, long presentationTimeUs, int flags);

  /** See {@link android.media.MediaCodec#dequeueOutputBuffer}. */
  int dequeueOutputBuffer(MediaCodec.BufferInfo info, long timeoutUs);

  /** See {@link android.media.MediaCodec#releaseOutputBuffer}. */
  void releaseOutputBuffer(int index, boolean render);

  /** See {@link android.media.MediaCodec#getOutputFormat}. */
  MediaFormat getOutputFormat();

  /** See {@link android.media.MediaCodec#getInputBuffers}. */
  ByteBuffer[] getInputBuffers();

  /** See {@link android.media.MediaCodec#getOutputBuffers}. */
  ByteBuffer[] getOutputBuffers();

  /** See {@link android.media.MediaCodec#createInputSurface}. */
  Surface createInputSurface();

  /** See {@link android.media.MediaCodec#setParameters}. */
  void setParameters(Bundle params);
}

View File

@ -0,0 +1,22 @@
/*
* Copyright 2018 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import java.io.IOException;
/** Factory for creating {@link MediaCodecWrapper} instances; allows injecting fakes in tests. */
interface MediaCodecWrapperFactory {
  /**
   * Creates a new {@link MediaCodecWrapper} by codec name.
   *
   * <p>For additional information see {@link android.media.MediaCodec#createByCodecName}.
   *
   * @throws IOException if the underlying codec cannot be created.
   */
  MediaCodecWrapper createByCodecName(String name) throws IOException;
}

View File

@ -0,0 +1,113 @@
/*
* Copyright 2018 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import android.annotation.TargetApi;
import android.media.MediaCodec;
import android.media.MediaCodec.BufferInfo;
import android.media.MediaCrypto;
import android.media.MediaFormat;
import android.os.Bundle;
import android.view.Surface;
import java.io.IOException;
import java.nio.ByteBuffer;
/**
 * Production implementation of {@link MediaCodecWrapperFactory}. Every
 * {@link MediaCodecWrapper} it creates is a thin shim that forwards each call,
 * unmodified, to a real {@link android.media.MediaCodec} instance.
 */
class MediaCodecWrapperFactoryImpl implements MediaCodecWrapperFactory {
  /** One-to-one delegating adapter around a concrete {@link MediaCodec}. */
  private static class MediaCodecWrapperImpl implements MediaCodecWrapper {
    private final MediaCodec delegate;

    public MediaCodecWrapperImpl(MediaCodec delegate) {
      this.delegate = delegate;
    }

    @Override
    public void configure(MediaFormat format, Surface surface, MediaCrypto crypto, int flags) {
      delegate.configure(format, surface, crypto, flags);
    }

    @Override
    public void start() {
      delegate.start();
    }

    @Override
    public void flush() {
      delegate.flush();
    }

    @Override
    public void stop() {
      delegate.stop();
    }

    @Override
    public void release() {
      delegate.release();
    }

    @Override
    public int dequeueInputBuffer(long timeoutUs) {
      return delegate.dequeueInputBuffer(timeoutUs);
    }

    @Override
    public void queueInputBuffer(
        int index, int offset, int size, long presentationTimeUs, int flags) {
      delegate.queueInputBuffer(index, offset, size, presentationTimeUs, flags);
    }

    @Override
    public int dequeueOutputBuffer(BufferInfo info, long timeoutUs) {
      return delegate.dequeueOutputBuffer(info, timeoutUs);
    }

    @Override
    public void releaseOutputBuffer(int index, boolean render) {
      delegate.releaseOutputBuffer(index, render);
    }

    @Override
    public MediaFormat getOutputFormat() {
      return delegate.getOutputFormat();
    }

    @Override
    public ByteBuffer[] getInputBuffers() {
      return delegate.getInputBuffers();
    }

    @Override
    public ByteBuffer[] getOutputBuffers() {
      return delegate.getOutputBuffers();
    }

    // Input surfaces were introduced in API level 18.
    @Override
    @TargetApi(18)
    public Surface createInputSurface() {
      return delegate.createInputSurface();
    }

    // Runtime parameter updates were introduced in API level 19.
    @Override
    @TargetApi(19)
    public void setParameters(Bundle params) {
      delegate.setParameters(params);
    }
  }

  @Override
  public MediaCodecWrapper createByCodecName(String name) throws IOException {
    return new MediaCodecWrapperImpl(MediaCodec.createByCodecName(name));
  }
}

View File

@ -0,0 +1,64 @@
/*
* Copyright 2018 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import static com.google.common.truth.Truth.assertThat;
import static com.google.common.truth.Truth.assertWithMessage;
import java.nio.ByteBuffer;
import java.util.Random;
import org.webrtc.JavaI420Buffer;
import org.webrtc.VideoFrame;
/**
 * Helper methods for {@link HardwareVideoEncoderTest} and {@link HardwareVideoDecoderTest}.
 */
class CodecTestHelper {
  /**
   * Asserts that {@code actual} contains exactly the bytes of {@code expected}
   * starting at {@code offset}, and that {@code size} matches the expected length.
   */
  static void assertEqualContents(byte[] expected, ByteBuffer actual, int offset, int size) {
    assertThat(size).isEqualTo(expected.length);
    assertThat(actual.capacity()).isAtLeast(offset + size);
    for (int i = 0; i != expected.length; ++i) {
      assertWithMessage("At index: " + i).that(actual.get(offset + i)).isEqualTo(expected[i]);
    }
  }

  /** Returns {@code length} random bytes. */
  static byte[] generateRandomData(int length) {
    final byte[] data = new byte[length];
    new Random().nextBytes(data);
    return data;
  }

  /**
   * Wraps tightly-packed I420 data ({@code width * height * 3 / 2} bytes, Y then U then V)
   * in an {@link VideoFrame.I420Buffer} without copying the plane contents.
   */
  static VideoFrame.I420Buffer wrapI420(int width, int height, byte[] data) {
    final int ySize = width * height;
    final int uvSize = ySize / 4;

    final ByteBuffer packed = ByteBuffer.allocateDirect(data.length);
    packed.put(data);

    final ByteBuffer dataY = sliceRange(packed, /* start= */ 0, ySize);
    final ByteBuffer dataU = sliceRange(packed, ySize, uvSize);
    final ByteBuffer dataV = sliceRange(packed, ySize + uvSize, uvSize);

    return JavaI420Buffer.wrap(width, height, dataY, width, dataU, width / 2, dataV, width / 2,
        /* releaseCallback= */ null);
  }

  /** Returns a slice of {@code buffer} covering {@code [start, start + length)}. */
  private static ByteBuffer sliceRange(ByteBuffer buffer, int start, int length) {
    // Set the limit before the position: limit() clamps an out-of-range position,
    // so this is safe even when the buffer's position is past the new limit.
    buffer.limit(start + length);
    buffer.position(start);
    return buffer.slice();
  }
}

View File

@ -0,0 +1,314 @@
/*
* Copyright 2018 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import android.graphics.SurfaceTexture;
import android.media.MediaCodec;
import android.media.MediaCodecInfo.CodecCapabilities;
import android.media.MediaCrypto;
import android.media.MediaFormat;
import android.os.Bundle;
import android.view.Surface;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.List;
import javax.annotation.Nullable;
/**
 * Fake MediaCodec that implements the basic state machine.
 *
 * @note This class is only intended for single-threaded tests and is not thread-safe.
 */
public class FakeMediaCodecWrapper implements MediaCodecWrapper {
  // Fixed buffer-pool sizes for the fake; a real MediaCodec chooses its own counts.
  private static final int NUM_INPUT_BUFFERS = 10;
  private static final int NUM_OUTPUT_BUFFERS = 10;
  // Capacity of buffers that carry encoded (compressed) data.
  private static final int MAX_ENCODED_DATA_SIZE_BYTES = 1_000;

  /**
   * MediaCodec state as defined by:
   * https://developer.android.com/reference/android/media/MediaCodec.html
   */
  public enum State {
    STOPPED_CONFIGURED(Primary.STOPPED),
    STOPPED_UNINITIALIZED(Primary.STOPPED),
    STOPPED_ERROR(Primary.STOPPED),
    EXECUTING_FLUSHED(Primary.EXECUTING),
    EXECUTING_RUNNING(Primary.EXECUTING),
    EXECUTING_END_OF_STREAM(Primary.EXECUTING),
    RELEASED(Primary.RELEASED);

    /** Coarse grouping used by methods that accept any sub-state of a group. */
    public enum Primary { STOPPED, EXECUTING, RELEASED }

    private final Primary primary;

    State(Primary primary) {
      this.primary = primary;
    }

    public Primary getPrimary() {
      return primary;
    }
  }

  /** Represents an output buffer that will be returned by dequeueOutputBuffer. */
  public static class QueuedOutputBufferInfo {
    private int index;
    private int offset;
    private int size;
    private long presentationTimeUs;
    private int flags;

    private QueuedOutputBufferInfo(
        int index, int offset, int size, long presentationTimeUs, int flags) {
      this.index = index;
      this.offset = offset;
      this.size = size;
      this.presentationTimeUs = presentationTimeUs;
      this.flags = flags;
    }

    public static QueuedOutputBufferInfo create(
        int index, int offset, int size, long presentationTimeUs, int flags) {
      return new QueuedOutputBufferInfo(index, offset, size, presentationTimeUs, flags);
    }

    public int getIndex() {
      return index;
    }

    public int getOffset() {
      return offset;
    }

    public int getSize() {
      return size;
    }

    public long getPresentationTimeUs() {
      return presentationTimeUs;
    }

    public int getFlags() {
      return flags;
    }
  }

  // Current simulated state; starts uninitialized like a freshly created codec.
  private State state = State.STOPPED_UNINITIALIZED;
  // Format and flags captured from the last configure() call, for test inspection.
  private @Nullable MediaFormat configuredFormat;
  private int configuredFlags;
  // Format returned by getOutputFormat(); fixed at construction.
  private final MediaFormat outputFormat;
  private final ByteBuffer[] inputBuffers = new ByteBuffer[NUM_INPUT_BUFFERS];
  private final ByteBuffer[] outputBuffers = new ByteBuffer[NUM_OUTPUT_BUFFERS];
  // Reservation flags: true while the buffer at that index is handed out to the client.
  private final boolean[] inputBufferReserved = new boolean[NUM_INPUT_BUFFERS];
  private final boolean[] outputBufferReserved = new boolean[NUM_OUTPUT_BUFFERS];
  // FIFO of outputs that dequeueOutputBuffer will report, in insertion order.
  private final List<QueuedOutputBufferInfo> queuedOutputBuffers = new ArrayList<>();

  public FakeMediaCodecWrapper(MediaFormat outputFormat) {
    this.outputFormat = outputFormat;
  }

  /** Returns the current simulated state of MediaCodec. */
  public State getState() {
    return state;
  }

  /** Gets the last configured media format passed to configure. */
  public @Nullable MediaFormat getConfiguredFormat() {
    return configuredFormat;
  }

  /** Returns the last flags passed to configure. */
  public int getConfiguredFlags() {
    return configuredFlags;
  }

  /**
   * Adds a texture buffer that will be returned by dequeueOutputBuffer. Returns index of the
   * buffer.
   */
  public int addOutputTexture(long presentationTimestampUs, int flags) {
    int index = getFreeOutputBuffer();
    queuedOutputBuffers.add(QueuedOutputBufferInfo.create(
        index, /* offset= */ 0, /* size= */ 0, presentationTimestampUs, flags));
    return index;
  }

  /**
   * Adds a byte buffer that will be returned by dequeueOutputBuffer. Returns index of the
   * buffer.
   */
  public int addOutputData(byte[] data, long presentationTimestampUs, int flags) {
    int index = getFreeOutputBuffer();
    ByteBuffer outputBuffer = outputBuffers[index];
    // Copy the payload to the start of the reserved buffer and rewind so the
    // consumer reads from position 0, matching the queued offset of 0.
    outputBuffer.clear();
    outputBuffer.put(data);
    outputBuffer.rewind();
    queuedOutputBuffers.add(QueuedOutputBufferInfo.create(
        index, /* offset= */ 0, data.length, presentationTimestampUs, flags));
    return index;
  }

  /**
   * Returns the first output buffer that is not reserved and reserves it. It will stay reserved
   * until released with releaseOutputBuffer.
   */
  private int getFreeOutputBuffer() {
    for (int i = 0; i < NUM_OUTPUT_BUFFERS; i++) {
      if (!outputBufferReserved[i]) {
        outputBufferReserved[i] = true;
        return i;
      }
    }
    throw new RuntimeException("All output buffers reserved!");
  }

  @Override
  public void configure(MediaFormat format, Surface surface, MediaCrypto crypto, int flags) {
    if (state != State.STOPPED_UNINITIALIZED) {
      throw new IllegalStateException("Expected state STOPPED_UNINITIALIZED but was " + state);
    }
    state = State.STOPPED_CONFIGURED;
    configuredFormat = format;
    configuredFlags = flags;
    // Size the buffer pools: raw I420/NV12 frames need width*height*3/2 bytes,
    // while surface-backed input/output exposes no client byte buffers (size 0).
    final int width = configuredFormat.getInteger(MediaFormat.KEY_WIDTH);
    final int height = configuredFormat.getInteger(MediaFormat.KEY_HEIGHT);
    final int yuvSize = width * height * 3 / 2;
    final int inputBufferSize;
    final int outputBufferSize;
    if ((flags & MediaCodec.CONFIGURE_FLAG_ENCODE) != 0) {
      // Encoder: raw frames in (unless fed through an input surface), encoded data out.
      final int colorFormat = configuredFormat.getInteger(MediaFormat.KEY_COLOR_FORMAT);
      inputBufferSize = colorFormat == CodecCapabilities.COLOR_FormatSurface ? 0 : yuvSize;
      outputBufferSize = MAX_ENCODED_DATA_SIZE_BYTES;
    } else {
      // Decoder: encoded data in, raw frames out (unless rendering to a surface).
      inputBufferSize = MAX_ENCODED_DATA_SIZE_BYTES;
      outputBufferSize = surface != null ? 0 : yuvSize;
    }
    for (int i = 0; i < inputBuffers.length; i++) {
      inputBuffers[i] = ByteBuffer.allocateDirect(inputBufferSize);
    }
    for (int i = 0; i < outputBuffers.length; i++) {
      outputBuffers[i] = ByteBuffer.allocateDirect(outputBufferSize);
    }
  }

  @Override
  public void start() {
    if (state != State.STOPPED_CONFIGURED) {
      throw new IllegalStateException("Expected state STOPPED_CONFIGURED but was " + state);
    }
    state = State.EXECUTING_RUNNING;
  }

  @Override
  public void flush() {
    if (state.getPrimary() != State.Primary.EXECUTING) {
      throw new IllegalStateException("Expected state EXECUTING but was " + state);
    }
    state = State.EXECUTING_FLUSHED;
  }

  @Override
  public void stop() {
    if (state.getPrimary() != State.Primary.EXECUTING) {
      throw new IllegalStateException("Expected state EXECUTING but was " + state);
    }
    state = State.STOPPED_UNINITIALIZED;
  }

  @Override
  public void release() {
    // Like the real codec, release() is legal from any state.
    state = State.RELEASED;
  }

  @Override
  public int dequeueInputBuffer(long timeoutUs) {
    if (state != State.EXECUTING_FLUSHED && state != State.EXECUTING_RUNNING) {
      throw new IllegalStateException(
          "Expected state EXECUTING_FLUSHED or EXECUTING_RUNNING but was " + state);
    }
    // Dequeuing is what moves a flushed codec back into the running sub-state.
    state = State.EXECUTING_RUNNING;
    // NOTE(review): input reservations are never cleared (queueInputBuffer does not
    // free them), so only NUM_INPUT_BUFFERS dequeues can ever succeed — presumably
    // sufficient for the current tests; confirm before decoding longer sequences.
    for (int i = 0; i < NUM_INPUT_BUFFERS; i++) {
      if (!inputBufferReserved[i]) {
        inputBufferReserved[i] = true;
        return i;
      }
    }
    return MediaCodec.INFO_TRY_AGAIN_LATER;
  }

  @Override
  public void queueInputBuffer(
      int index, int offset, int size, long presentationTimeUs, int flags) {
    if (state.getPrimary() != State.Primary.EXECUTING) {
      throw new IllegalStateException("Expected state EXECUTING but was " + state);
    }
    if (flags != 0) {
      throw new UnsupportedOperationException(
          "Flags are not implemented in FakeMediaCodecWrapper.");
    }
  }

  @Override
  public int dequeueOutputBuffer(MediaCodec.BufferInfo info, long timeoutUs) {
    if (state.getPrimary() != State.Primary.EXECUTING) {
      throw new IllegalStateException("Expected state EXECUTING but was " + state);
    }
    if (queuedOutputBuffers.isEmpty()) {
      return MediaCodec.INFO_TRY_AGAIN_LATER;
    }
    // Pop the oldest queued output and report its metadata through `info`.
    QueuedOutputBufferInfo outputBufferInfo = queuedOutputBuffers.remove(/* index= */ 0);
    info.set(outputBufferInfo.getOffset(), outputBufferInfo.getSize(),
        outputBufferInfo.getPresentationTimeUs(), outputBufferInfo.getFlags());
    return outputBufferInfo.getIndex();
  }

  @Override
  public void releaseOutputBuffer(int index, boolean render) {
    if (state.getPrimary() != State.Primary.EXECUTING) {
      throw new IllegalStateException("Expected state EXECUTING but was " + state);
    }
    if (!outputBufferReserved[index]) {
      throw new RuntimeException("Released output buffer was not in use.");
    }
    outputBufferReserved[index] = false;
  }

  @Override
  public ByteBuffer[] getInputBuffers() {
    return inputBuffers;
  }

  @Override
  public ByteBuffer[] getOutputBuffers() {
    return outputBuffers;
  }

  @Override
  public MediaFormat getOutputFormat() {
    return outputFormat;
  }

  @Override
  public Surface createInputSurface() {
    // Returns a throwaway surface backed by a dummy texture; tests only need a non-null value.
    return new Surface(new SurfaceTexture(/* texName= */ 0));
  }

  @Override
  public void setParameters(Bundle params) {}
}

View File

@ -0,0 +1,423 @@
/*
* Copyright 2018 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import static com.google.common.truth.Truth.assertThat;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyInt;
import static org.mockito.Matchers.anyLong;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.doThrow;
import static org.mockito.Mockito.inOrder;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.verify;
import static org.robolectric.Shadows.shadowOf;
import android.graphics.Matrix;
import android.media.MediaCodec.BufferInfo;
import android.media.MediaCodecInfo.CodecCapabilities;
import android.media.MediaFormat;
import android.os.Handler;
import java.nio.ByteBuffer;
import java.util.concurrent.Callable;
import java.util.concurrent.TimeUnit;
import org.chromium.testing.local.LocalRobolectricTestRunner;
import org.junit.Before;
import org.junit.Ignore;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.ArgumentCaptor;
import org.mockito.InOrder;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;
import org.robolectric.annotation.Config;
import org.robolectric.shadows.ShadowSystemClock;
import org.webrtc.EglBase;
import org.webrtc.EncodedImage;
import org.webrtc.EncodedImage.FrameType;
import org.webrtc.FakeMediaCodecWrapper.State;
import org.webrtc.SurfaceTextureHelper;
import org.webrtc.TextureBufferImpl;
import org.webrtc.VideoCodecStatus;
import org.webrtc.VideoDecoder;
import org.webrtc.VideoDecoder.DecodeInfo;
import org.webrtc.VideoFrame;
import org.webrtc.VideoFrame.I420Buffer;
import org.webrtc.VideoFrame.TextureBuffer.Type;
import org.webrtc.VideoSink;
import org.webrtc.YuvConverter;
@RunWith(LocalRobolectricTestRunner.class)
@Config(manifest = Config.NONE)
public class HardwareVideoDecoderTest {
private static final VideoDecoder.Settings TEST_DECODER_SETTINGS =
new VideoDecoder.Settings(/* numberOfCores= */ 1, /* width= */ 640, /* height= */ 480);
private static final int COLOR_FORMAT = CodecCapabilities.COLOR_FormatYUV420Planar;
private static final long POLL_DELAY_MS = 10;
private static final long DELIVER_DECODED_IMAGE_DELAY_MS = 10;
private static final byte[] ENCODED_TEST_DATA = new byte[] {1, 2, 3, 4, 5, 6, 7, 8, 9, 10};
/**
 * HardwareVideoDecoder test double. Overrides the "visible for testing" hooks so the
 * decoder runs against the fake MediaCodec wrapper and mock SurfaceTextureHelper, and
 * gates the output thread so tests can step it one deliverDecodedFrame() pass at a time.
 */
private class TestDecoder extends HardwareVideoDecoder {
  private final Object deliverDecodedFrameLock = new Object();
  // True when no deliverDecodedFrame() pass is pending; cleared by waitDeliverDecodedFrame().
  private boolean deliverDecodedFrameDone = true;

  public TestDecoder(MediaCodecWrapperFactory mediaCodecFactory, String codecName,
      VideoCodecType codecType, int colorFormat, EglBase.Context sharedContext) {
    super(mediaCodecFactory, codecName, codecType, colorFormat, sharedContext);
  }

  /**
   * Requests one deliverDecodedFrame() pass on the decoder's output thread and blocks
   * (on the test thread) until that pass has completed.
   */
  public void waitDeliverDecodedFrame() throws InterruptedException {
    synchronized (deliverDecodedFrameLock) {
      // Clearing the flag is the request; wake the output thread so it notices.
      deliverDecodedFrameDone = false;
      deliverDecodedFrameLock.notifyAll();
      while (!deliverDecodedFrameDone) {
        deliverDecodedFrameLock.wait();
      }
    }
  }

  @SuppressWarnings("WaitNotInLoop") // This method is called inside a loop.
  @Override
  protected void deliverDecodedFrame() {
    synchronized (deliverDecodedFrameLock) {
      if (deliverDecodedFrameDone) {
        // No pass requested yet: park briefly, then re-check below.
        try {
          deliverDecodedFrameLock.wait(DELIVER_DECODED_IMAGE_DELAY_MS);
        } catch (InterruptedException e) {
          // Satisfy the interruption contract by restoring the interrupt flag.
          Thread.currentThread().interrupt();
          return;
        }
      }
      if (deliverDecodedFrameDone) {
        // Timed out without a request; yield back to the output-thread loop.
        return;
      }
      // A pass was requested: run the real delivery, then signal completion.
      super.deliverDecodedFrame();
      deliverDecodedFrameDone = true;
      deliverDecodedFrameLock.notifyAll();
    }
  }

  @Override
  protected SurfaceTextureHelper createSurfaceTextureHelper() {
    // Avoid creating a real GL-backed helper in unit tests.
    return mockSurfaceTextureHelper;
  }

  @Override
  protected void releaseSurface() {}

  @Override
  protected VideoFrame.I420Buffer allocateI420Buffer(int width, int height) {
    // Plain direct-ByteBuffer I420 layout (Y, U, V planes packed back to back),
    // replacing the production JavaI420Buffer.allocate() which needs native code.
    int chromaHeight = (height + 1) / 2;
    int strideUV = (width + 1) / 2;
    int yPos = 0;
    int uPos = yPos + width * height;
    int vPos = uPos + strideUV * chromaHeight;
    ByteBuffer buffer = ByteBuffer.allocateDirect(width * height + 2 * strideUV * chromaHeight);
    buffer.position(yPos);
    buffer.limit(uPos);
    ByteBuffer dataY = buffer.slice();
    buffer.position(uPos);
    buffer.limit(vPos);
    ByteBuffer dataU = buffer.slice();
    buffer.position(vPos);
    buffer.limit(vPos + strideUV * chromaHeight);
    ByteBuffer dataV = buffer.slice();
    return JavaI420Buffer.wrap(width, height, dataY, width, dataU, strideUV, dataV, strideUV,
        /* releaseCallback= */ null);
  }

  @Override
  protected void copyPlane(
      ByteBuffer src, int srcStride, ByteBuffer dst, int dstStride, int width, int height) {
    // Byte-by-byte Java copy, replacing the production YuvHelper native copy.
    for (int y = 0; y < height; y++) {
      for (int x = 0; x < width; x++) {
        dst.put(y * dstStride + x, src.get(y * srcStride + x));
      }
    }
  }
}
/** Builder for TestDecoder instances. Defaults: VP8, surface (texture) output enabled. */
private class TestDecoderBuilder {
  private VideoCodecType codecType = VideoCodecType.VP8;
  private boolean useSurface = true;

  public TestDecoderBuilder setCodecType(VideoCodecType codecType) {
    this.codecType = codecType;
    return this;
  }

  public TestDecoderBuilder setUseSurface(boolean useSurface) {
    this.useSurface = useSurface;
    return this;
  }

  public TestDecoder build() {
    // The factory lambda ignores the codec name and always hands back the shared
    // fake wrapper; a null shared context makes the decoder use byte-buffer output.
    return new TestDecoder((String name)
        -> fakeMediaCodecWrapper,
        /* codecName= */ "org.webrtc.testdecoder", codecType, COLOR_FORMAT,
        useSurface ? mockEglBaseContext : null);
  }
}
/** Returns a complete key frame wrapping ENCODED_TEST_DATA, for feeding into decode(). */
private EncodedImage createTestEncodedImage() {
  return EncodedImage.builder()
      .setBuffer(ByteBuffer.wrap(ENCODED_TEST_DATA))
      .setFrameType(FrameType.VideoFrameKey)
      .setCompleteFrame(true)
      .createEncodedImage();
}
// Mocks injected into the decoder under test; initialized in setUp().
@Mock private EglBase.Context mockEglBaseContext;
@Mock private SurfaceTextureHelper mockSurfaceTextureHelper;
@Mock private VideoDecoder.Callback mockDecoderCallback;
// Spied fake standing in for the Android MediaCodec; records all interactions.
private FakeMediaCodecWrapper fakeMediaCodecWrapper;

@Before
public void setUp() {
  MockitoAnnotations.initMocks(this);

  MediaFormat outputFormat = new MediaFormat();
  // TODO(sakal): Add more details to output format as needed.
  fakeMediaCodecWrapper = spy(new FakeMediaCodecWrapper(outputFormat));
}
// initDecode() must configure and start the codec with the requested format.
@Test
public void testInit() {
  // Set-up.
  HardwareVideoDecoder decoder = new TestDecoderBuilder().setCodecType(VideoCodecType.VP8).build();

  // Test.
  VideoCodecStatus status = decoder.initDecode(TEST_DECODER_SETTINGS, mockDecoderCallback);

  // Verify.
  assertThat(status).isEqualTo(VideoCodecStatus.OK);
  assertThat(fakeMediaCodecWrapper.getState()).isEqualTo(State.EXECUTING_RUNNING);

  MediaFormat configuredFormat = fakeMediaCodecWrapper.getConfiguredFormat();
  assertThat(configuredFormat).isNotNull();
  assertThat(configuredFormat.getString(MediaFormat.KEY_MIME))
      .isEqualTo(VideoCodecType.VP8.mimeType());
  assertThat(configuredFormat.getInteger(MediaFormat.KEY_WIDTH))
      .isEqualTo(TEST_DECODER_SETTINGS.width);
  assertThat(configuredFormat.getInteger(MediaFormat.KEY_HEIGHT))
      .isEqualTo(TEST_DECODER_SETTINGS.height);
}
// release() must succeed and leave the underlying codec released.
@Test
public void testRelease() {
  // Set-up: an initialized decoder.
  HardwareVideoDecoder decoder = new TestDecoderBuilder().build();
  decoder.initDecode(TEST_DECODER_SETTINGS, mockDecoderCallback);

  // Test + verify.
  assertThat(decoder.release()).isEqualTo(VideoCodecStatus.OK);
  assertThat(fakeMediaCodecWrapper.getState()).isEqualTo(State.RELEASED);
}
// Releasing twice must be idempotent: both calls succeed, codec ends up released.
@Test
public void testReleaseMultipleTimes() {
  // Set-up: an initialized decoder.
  HardwareVideoDecoder decoder = new TestDecoderBuilder().build();
  decoder.initDecode(TEST_DECODER_SETTINGS, mockDecoderCallback);

  // Test + verify.
  assertThat(decoder.release()).isEqualTo(VideoCodecStatus.OK);
  assertThat(decoder.release()).isEqualTo(VideoCodecStatus.OK);
  assertThat(fakeMediaCodecWrapper.getState()).isEqualTo(State.RELEASED);
}
// decode() must queue the encoded payload onto one of the codec's input buffers.
@Test
public void testDecodeQueuesData() {
  // Set-up.
  HardwareVideoDecoder decoder = new TestDecoderBuilder().build();
  decoder.initDecode(TEST_DECODER_SETTINGS, mockDecoderCallback);

  // Test: feed one encoded key frame.
  DecodeInfo decodeInfo = new DecodeInfo(/* isMissingFrames= */ false, /* renderTimeMs= */ 0);
  assertThat(decoder.decode(createTestEncodedImage(), decodeInfo)).isEqualTo(VideoCodecStatus.OK);

  // Verify: exactly the test payload landed in the queued input buffer.
  ArgumentCaptor<Integer> index = ArgumentCaptor.forClass(Integer.class);
  ArgumentCaptor<Integer> offset = ArgumentCaptor.forClass(Integer.class);
  ArgumentCaptor<Integer> size = ArgumentCaptor.forClass(Integer.class);
  verify(fakeMediaCodecWrapper)
      .queueInputBuffer(index.capture(), offset.capture(), size.capture(),
          /* presentationTimeUs= */ anyLong(),
          /* flags= */ eq(0));

  ByteBuffer queuedBuffer = fakeMediaCodecWrapper.getInputBuffers()[index.getValue()];
  CodecTestHelper.assertEqualContents(
      ENCODED_TEST_DATA, queuedBuffer, offset.getValue(), size.getValue());
}
// In byte-buffer mode, output produced by the codec must reach the callback as
// an I420 frame whose planes match the codec's output bytes.
@Test
public void testDeliversOutputByteBuffers() throws InterruptedException {
  final byte[] testOutputData = CodecTestHelper.generateRandomData(
      TEST_DECODER_SETTINGS.width * TEST_DECODER_SETTINGS.height * 3 / 2);
  final I420Buffer expectedBuffer = CodecTestHelper.wrapI420(
      TEST_DECODER_SETTINGS.width, TEST_DECODER_SETTINGS.height, testOutputData);

  // Set-up: no output surface, so frames are delivered as byte buffers.
  TestDecoder decoder = new TestDecoderBuilder().setUseSurface(/* useSurface = */ false).build();
  decoder.initDecode(TEST_DECODER_SETTINGS, mockDecoderCallback);
  decoder.decode(createTestEncodedImage(),
      new DecodeInfo(/* isMissingFrames= */ false, /* renderTimeMs= */ 0));
  fakeMediaCodecWrapper.addOutputData(
      testOutputData, /* presentationTimestampUs= */ 0, /* flags= */ 0);

  // Test.
  decoder.waitDeliverDecodedFrame();

  // Verify.
  ArgumentCaptor<VideoFrame> frameCaptor = ArgumentCaptor.forClass(VideoFrame.class);
  verify(mockDecoderCallback)
      .onDecodedFrame(frameCaptor.capture(),
          /* decodeTimeMs= */ any(Integer.class),
          /* qp= */ any());

  VideoFrame deliveredFrame = frameCaptor.getValue();
  assertThat(deliveredFrame).isNotNull();
  assertThat(deliveredFrame.getRotation()).isEqualTo(0);
  assertThat(deliveredFrame.getRotatedWidth()).isEqualTo(TEST_DECODER_SETTINGS.width);
  assertThat(deliveredFrame.getRotatedHeight()).isEqualTo(TEST_DECODER_SETTINGS.height);

  I420Buffer deliveredBuffer = deliveredFrame.getBuffer().toI420();
  assertThat(deliveredBuffer.getDataY()).isEqualTo(expectedBuffer.getDataY());
  assertThat(deliveredBuffer.getDataU()).isEqualTo(expectedBuffer.getDataU());
  assertThat(deliveredBuffer.getDataV()).isEqualTo(expectedBuffer.getDataV());
}
// In surface mode, a decoded texture must be released for rendering.
@Test
public void testRendersOutputTexture() throws InterruptedException {
  // Set-up: surface-mode decoder with one decoded texture pending.
  TestDecoder decoder = new TestDecoderBuilder().build();
  decoder.initDecode(TEST_DECODER_SETTINGS, mockDecoderCallback);
  decoder.decode(createTestEncodedImage(),
      new DecodeInfo(/* isMissingFrames= */ false, /* renderTimeMs= */ 0));
  final int textureBufferIndex =
      fakeMediaCodecWrapper.addOutputTexture(/* presentationTimestampUs= */ 0, /* flags= */ 0);

  // Test.
  decoder.waitDeliverDecodedFrame();

  // Verify: the output buffer was handed back to the codec with render=true.
  verify(fakeMediaCodecWrapper).releaseOutputBuffer(textureBufferIndex, /* render= */ true);
}
// When the downstream SurfaceTexture stalls (never consumes frames), the
// decoder must render the first frame and then drop frames once its output
// queue limit is reached, instead of queueing unboundedly.
@Test
@Ignore("TODO(webrtc:9128): Fix")
public void testSurfaceTextureStall_FramesDropped() throws InterruptedException {
  final int numFrames = 10;
  // Maximum number of frames the decoder can keep queued on the output side.
  final int maxQueuedBuffers = 3;
  // Set-up.
  TestDecoder decoder = new TestDecoderBuilder().build();
  decoder.initDecode(TEST_DECODER_SETTINGS, mockDecoderCallback);
  // Test: decode numFrames frames while the SurfaceTexture never signals that a
  // frame was consumed. (Loop bound was a hard-coded 10; use numFrames so the
  // loop and the verification below cannot drift apart.)
  int[] bufferIndices = new int[numFrames];
  for (int i = 0; i < numFrames; i++) {
    decoder.decode(createTestEncodedImage(),
        new DecodeInfo(/* isMissingFrames= */ false, /* renderTimeMs= */ 0));
    bufferIndices[i] =
        fakeMediaCodecWrapper.addOutputTexture(/* presentationTimestampUs= */ 0, /* flags= */ 0);
    decoder.waitDeliverDecodedFrame();
  }
  // Verify: the first frame is rendered, subsequent frames beyond the queue
  // capacity are released without rendering (dropped), in order.
  InOrder releaseOrder = inOrder(fakeMediaCodecWrapper);
  releaseOrder.verify(fakeMediaCodecWrapper)
      .releaseOutputBuffer(bufferIndices[0], /* render= */ true);
  for (int i = 1; i < numFrames - maxQueuedBuffers; i++) {
    releaseOrder.verify(fakeMediaCodecWrapper)
        .releaseOutputBuffer(bufferIndices[i], /* render= */ false);
  }
}
// Verifies the full surface-mode round trip: after the SurfaceTexture produces
// a texture frame, the decoder forwards it to the callback and the texture is
// released once all references are dropped.
@Test
public void testDeliversRenderedBuffers() throws InterruptedException {
  // Set-up.
  TestDecoder decoder = new TestDecoderBuilder().build();
  decoder.initDecode(TEST_DECODER_SETTINGS, mockDecoderCallback);
  decoder.decode(createTestEncodedImage(),
      new DecodeInfo(/* isMissingFrames= */ false, /* renderTimeMs= */ 0));
  fakeMediaCodecWrapper.addOutputTexture(/* presentationTimestampUs= */ 0, /* flags= */ 0);
  // Render the output buffer.
  decoder.waitDeliverDecodedFrame();
  // Capture the listener the decoder registered on the SurfaceTextureHelper so
  // the test can hand it the "rendered" texture frame itself.
  ArgumentCaptor<VideoSink> videoSinkCaptor = ArgumentCaptor.forClass(VideoSink.class);
  verify(mockSurfaceTextureHelper).startListening(videoSinkCaptor.capture());
  // Test: deliver a texture frame as if the SurfaceTexture had produced it.
  Runnable releaseCallback = mock(Runnable.class);
  VideoFrame.TextureBuffer outputTextureBuffer =
      new TextureBufferImpl(TEST_DECODER_SETTINGS.width, TEST_DECODER_SETTINGS.height, Type.OES,
          /* id= */ 0,
          /* transformMatrix= */ new Matrix(),
          /* toI420Handler= */ new Handler(), new YuvConverter(), releaseCallback);
  VideoFrame outputVideoFrame =
      new VideoFrame(outputTextureBuffer, /* rotation= */ 0, /* timestampNs= */ 0);
  videoSinkCaptor.getValue().onFrame(outputVideoFrame);
  outputVideoFrame.release();
  // Verify: the callback received exactly that texture buffer...
  ArgumentCaptor<VideoFrame> videoFrameCaptor = ArgumentCaptor.forClass(VideoFrame.class);
  verify(mockDecoderCallback)
      .onDecodedFrame(videoFrameCaptor.capture(),
          /* decodeTimeMs= */ any(Integer.class),
          /* qp= */ any());
  VideoFrame videoFrame = videoFrameCaptor.getValue();
  assertThat(videoFrame).isNotNull();
  assertThat(videoFrame.getBuffer()).isEqualTo(outputTextureBuffer);
  // ...and the texture's release callback ran after the last reference dropped.
  verify(releaseCallback).run();
}
// A throwing MediaCodec.configure() must map to a software-fallback request.
@Test
public void testConfigureExceptionTriggerSWFallback() {
  // Set-up: make configure() blow up like a flaky hardware codec would.
  doThrow(new IllegalStateException("Fake error"))
      .when(fakeMediaCodecWrapper)
      .configure(any(), any(), any(), anyInt());
  HardwareVideoDecoder decoder = new TestDecoderBuilder().build();

  // Test + verify: initDecode() must not throw, but request SW fallback.
  VideoCodecStatus status = decoder.initDecode(TEST_DECODER_SETTINGS, mockDecoderCallback);
  assertThat(status).isEqualTo(VideoCodecStatus.FALLBACK_SOFTWARE);
}
// A throwing MediaCodec.start() must map to a software-fallback request.
@Test
public void testStartExceptionTriggerSWFallback() {
  // Set-up: make start() blow up like a flaky hardware codec would.
  doThrow(new IllegalStateException("Fake error")).when(fakeMediaCodecWrapper).start();
  HardwareVideoDecoder decoder = new TestDecoderBuilder().build();

  // Test + verify: initDecode() must not throw, but request SW fallback.
  VideoCodecStatus status = decoder.initDecode(TEST_DECODER_SETTINGS, mockDecoderCallback);
  assertThat(status).isEqualTo(VideoCodecStatus.FALLBACK_SOFTWARE);
}
}

View File

@ -0,0 +1,271 @@
/*
* Copyright 2018 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import static com.google.common.truth.Truth.assertThat;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyInt;
import static org.mockito.Matchers.anyLong;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.verify;
import static org.robolectric.Shadows.shadowOf;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;
import java.nio.ByteBuffer;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.Callable;
import java.util.concurrent.TimeUnit;
import org.chromium.testing.local.LocalRobolectricTestRunner;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.ArgumentCaptor;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;
import org.robolectric.annotation.Config;
import org.robolectric.shadows.ShadowSystemClock;
import org.webrtc.EglBase.Context;
import org.webrtc.EncodedImage;
import org.webrtc.EncodedImage.FrameType;
import org.webrtc.FakeMediaCodecWrapper.State;
import org.webrtc.VideoCodecStatus;
import org.webrtc.VideoEncoder;
import org.webrtc.VideoEncoder.CodecSpecificInfo;
import org.webrtc.VideoEncoder.EncodeInfo;
import org.webrtc.VideoEncoder.Settings;
import org.webrtc.VideoFrame;
import org.webrtc.VideoFrame.Buffer;
import org.webrtc.VideoFrame.I420Buffer;
// Robolectric unit tests for HardwareVideoEncoder, exercised against a spied
// FakeMediaCodecWrapper instead of a real Android MediaCodec.
@RunWith(LocalRobolectricTestRunner.class)
@Config(manifest = Config.NONE)
public class HardwareVideoEncoderTest {
  // Canonical encoder settings shared by every test.
  private static final VideoEncoder.Settings TEST_ENCODER_SETTINGS = new Settings(
      /* numberOfCores= */ 1,
      /* width= */ 640,
      /* height= */ 480,
      /* startBitrate= */ 10000,
      /* maxFramerate= */ 30,
      /* automaticResizeOn= */ true);
  // NOTE(review): POLL_DELAY_MS is not referenced anywhere in this class —
  // confirm whether it is reserved for future tests or can be removed.
  private static final long POLL_DELAY_MS = 10;
  // Upper bound (ms) the output-delivery loop waits for a pending request
  // before yielding its turn; see TestEncoder.deliverEncodedImage().
  private static final long DELIVER_ENCODED_IMAGE_DELAY_MS = 10;

  /**
   * HardwareVideoEncoder subclass that makes the asynchronous output-delivery
   * loop controllable from the test thread via waitDeliverEncodedImage().
   */
  private static class TestEncoder extends HardwareVideoEncoder {
    private final Object deliverEncodedImageLock = new Object();
    // True while the test thread has no pending delivery request; guarded by
    // deliverEncodedImageLock.
    private boolean deliverEncodedImageDone = true;

    TestEncoder(MediaCodecWrapperFactory mediaCodecWrapperFactory, String codecName,
        VideoCodecType codecType, Integer surfaceColorFormat, Integer yuvColorFormat,
        Map<String, String> params, int keyFrameIntervalSec, int forceKeyFrameIntervalMs,
        BitrateAdjuster bitrateAdjuster, EglBase14.Context sharedContext) {
      super(mediaCodecWrapperFactory, codecName, codecType, surfaceColorFormat, yuvColorFormat,
          params, keyFrameIntervalSec, forceKeyFrameIntervalMs, bitrateAdjuster, sharedContext);
    }

    /** Blocks the test thread until exactly one encoded image has been delivered. */
    public void waitDeliverEncodedImage() throws InterruptedException {
      synchronized (deliverEncodedImageLock) {
        // Signal a pending request, then wait for the delivery loop to serve it.
        deliverEncodedImageDone = false;
        deliverEncodedImageLock.notifyAll();
        while (!deliverEncodedImageDone) {
          deliverEncodedImageLock.wait();
        }
      }
    }

    @SuppressWarnings("WaitNotInLoop") // This method is called inside a loop.
    @Override
    protected void deliverEncodedImage() {
      synchronized (deliverEncodedImageLock) {
        if (deliverEncodedImageDone) {
          // No pending request yet; wait briefly for one before giving up this turn.
          try {
            deliverEncodedImageLock.wait(DELIVER_ENCODED_IMAGE_DELAY_MS);
          } catch (InterruptedException e) {
            // Restore the interrupt status and bail out.
            Thread.currentThread().interrupt();
            return;
          }
        }
        if (deliverEncodedImageDone) {
          return;
        }
        // Serve the request: deliver one image, then wake the waiting test thread.
        super.deliverEncodedImage();
        deliverEncodedImageDone = true;
        deliverEncodedImageLock.notifyAll();
      }
    }

    /** Packs the frame's I420 planes into the codec input buffer back-to-back. */
    @Override
    protected void fillInputBuffer(ByteBuffer buffer, Buffer videoFrameBuffer) {
      I420Buffer i420Buffer = videoFrameBuffer.toI420();
      buffer.put(i420Buffer.getDataY());
      buffer.put(i420Buffer.getDataU());
      buffer.put(i420Buffer.getDataV());
      buffer.flip();
      i420Buffer.release();
    }
  }

  /** Builds TestEncoder instances wired to the fake MediaCodec wrapper. */
  private class TestEncoderBuilder {
    private VideoCodecType codecType = VideoCodecType.VP8;

    public TestEncoderBuilder setCodecType(VideoCodecType codecType) {
      this.codecType = codecType;
      return this;
    }

    public TestEncoder build() {
      return new TestEncoder((String name)
          -> fakeMediaCodecWrapper,
          "org.webrtc.testencoder", codecType,
          /* surfaceColorFormat= */ null,
          /* yuvColorFormat= */ MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar,
          /* params= */ new HashMap<>(),
          /* keyFrameIntervalSec= */ 0,
          /* forceKeyFrameIntervalMs= */ 0,
          /* bitrateAdjuster= */ new BaseBitrateAdjuster(),
          /* sharedContext= */ null);
    }
  }

  @Mock VideoEncoder.Callback mockEncoderCallback;
  // Spied fake standing in for the Android MediaCodec; records all interactions.
  private FakeMediaCodecWrapper fakeMediaCodecWrapper;

  @Before
  public void setUp() {
    MockitoAnnotations.initMocks(this);

    MediaFormat outputFormat = new MediaFormat();
    // TODO(sakal): Add more details to output format as needed.
    fakeMediaCodecWrapper = spy(new FakeMediaCodecWrapper(outputFormat));
  }

  // initEncode() must configure and start the codec in encoder mode with the
  // requested format.
  @Test
  public void testInit() {
    // Set-up.
    HardwareVideoEncoder encoder = new TestEncoderBuilder().setCodecType(VideoCodecType.VP8).build();

    // Test.
    assertThat(encoder.initEncode(TEST_ENCODER_SETTINGS, mockEncoderCallback))
        .isEqualTo(VideoCodecStatus.OK);

    // Verify.
    assertThat(fakeMediaCodecWrapper.getState()).isEqualTo(State.EXECUTING_RUNNING);

    MediaFormat mediaFormat = fakeMediaCodecWrapper.getConfiguredFormat();
    assertThat(mediaFormat).isNotNull();
    assertThat(mediaFormat.getInteger(MediaFormat.KEY_WIDTH))
        .isEqualTo(TEST_ENCODER_SETTINGS.width);
    assertThat(mediaFormat.getInteger(MediaFormat.KEY_HEIGHT))
        .isEqualTo(TEST_ENCODER_SETTINGS.height);
    assertThat(mediaFormat.getString(MediaFormat.KEY_MIME))
        .isEqualTo(VideoCodecType.VP8.mimeType());
    assertThat(fakeMediaCodecWrapper.getConfiguredFlags())
        .isEqualTo(MediaCodec.CONFIGURE_FLAG_ENCODE);
  }

  // encode() must copy the raw I420 frame into one of the codec's input buffers.
  @Test
  public void testEncodeByteBuffer() {
    // Set-up.
    HardwareVideoEncoder encoder = new TestEncoderBuilder().build();
    encoder.initEncode(TEST_ENCODER_SETTINGS, mockEncoderCallback);

    // Test.
    byte[] i420 = CodecTestHelper.generateRandomData(
        TEST_ENCODER_SETTINGS.width * TEST_ENCODER_SETTINGS.height * 3 / 2);
    final VideoFrame.I420Buffer testBuffer =
        CodecTestHelper.wrapI420(TEST_ENCODER_SETTINGS.width, TEST_ENCODER_SETTINGS.height, i420);
    final VideoFrame testFrame =
        new VideoFrame(testBuffer, /* rotation= */ 0, /* timestampNs= */ 0);
    assertThat(encoder.encode(testFrame, new EncodeInfo(new FrameType[] {FrameType.VideoFrameKey})))
        .isEqualTo(VideoCodecStatus.OK);

    // Verify: the queued input buffer contains exactly the frame's I420 bytes.
    ArgumentCaptor<Integer> indexCaptor = ArgumentCaptor.forClass(Integer.class);
    ArgumentCaptor<Integer> offsetCaptor = ArgumentCaptor.forClass(Integer.class);
    ArgumentCaptor<Integer> sizeCaptor = ArgumentCaptor.forClass(Integer.class);
    verify(fakeMediaCodecWrapper)
        .queueInputBuffer(indexCaptor.capture(), offsetCaptor.capture(), sizeCaptor.capture(),
            anyLong(), anyInt());
    ByteBuffer buffer = fakeMediaCodecWrapper.getInputBuffers()[indexCaptor.getValue()];
    CodecTestHelper.assertEqualContents(
        i420, buffer, offsetCaptor.getValue(), sizeCaptor.getValue());
  }

  // Encoded output from the codec must reach the callback with the original
  // frame's metadata (size, rotation, capture time, frame type) attached.
  @Test
  public void testDeliversOutputData() throws InterruptedException {
    final int outputDataLength = 100;
    // Set-up: encode one key frame with a recognizable capture timestamp.
    TestEncoder encoder = new TestEncoderBuilder().build();
    encoder.initEncode(TEST_ENCODER_SETTINGS, mockEncoderCallback);
    byte[] i420 = CodecTestHelper.generateRandomData(
        TEST_ENCODER_SETTINGS.width * TEST_ENCODER_SETTINGS.height * 3 / 2);
    final VideoFrame.I420Buffer testBuffer =
        CodecTestHelper.wrapI420(TEST_ENCODER_SETTINGS.width, TEST_ENCODER_SETTINGS.height, i420);
    final VideoFrame testFrame =
        new VideoFrame(testBuffer, /* rotation= */ 0, /* timestampNs= */ 42);
    encoder.encode(testFrame, new EncodeInfo(new FrameType[] {FrameType.VideoFrameKey}));

    // Test: fake codec produces a sync-frame output buffer.
    byte[] outputData = CodecTestHelper.generateRandomData(outputDataLength);
    fakeMediaCodecWrapper.addOutputData(outputData,
        /* presentationTimestampUs= */ 0,
        /* flags= */ MediaCodec.BUFFER_FLAG_SYNC_FRAME);

    encoder.waitDeliverEncodedImage();

    // Verify.
    ArgumentCaptor<EncodedImage> videoFrameCaptor = ArgumentCaptor.forClass(EncodedImage.class);
    verify(mockEncoderCallback)
        .onEncodedFrame(videoFrameCaptor.capture(), any(CodecSpecificInfo.class));

    EncodedImage videoFrame = videoFrameCaptor.getValue();
    assertThat(videoFrame).isNotNull();
    assertThat(videoFrame.encodedWidth).isEqualTo(TEST_ENCODER_SETTINGS.width);
    assertThat(videoFrame.encodedHeight).isEqualTo(TEST_ENCODER_SETTINGS.height);
    assertThat(videoFrame.rotation).isEqualTo(0);
    // Capture time round-trips from the input frame's timestampNs.
    assertThat(videoFrame.captureTimeNs).isEqualTo(42);
    assertThat(videoFrame.completeFrame).isTrue();
    // BUFFER_FLAG_SYNC_FRAME maps to a key frame.
    assertThat(videoFrame.frameType).isEqualTo(FrameType.VideoFrameKey);
    CodecTestHelper.assertEqualContents(
        outputData, videoFrame.buffer, /* offset= */ 0, videoFrame.buffer.capacity());
  }

  // release() must succeed and leave the underlying codec released.
  @Test
  public void testRelease() {
    // Set-up.
    HardwareVideoEncoder encoder = new TestEncoderBuilder().build();
    encoder.initEncode(TEST_ENCODER_SETTINGS, mockEncoderCallback);

    // Test.
    assertThat(encoder.release()).isEqualTo(VideoCodecStatus.OK);

    // Verify.
    assertThat(fakeMediaCodecWrapper.getState()).isEqualTo(State.RELEASED);
  }

  // Releasing twice must be idempotent: both calls succeed.
  @Test
  public void testReleaseMultipleTimes() {
    // Set-up.
    HardwareVideoEncoder encoder = new TestEncoderBuilder().build();
    encoder.initEncode(TEST_ENCODER_SETTINGS, mockEncoderCallback);

    // Test.
    assertThat(encoder.release()).isEqualTo(VideoCodecStatus.OK);
    assertThat(encoder.release()).isEqualTo(VideoCodecStatus.OK);

    // Verify.
    assertThat(fakeMediaCodecWrapper.getState()).isEqualTo(State.RELEASED);
  }
}