Moving src/webrtc into src/.

In order to eliminate the WebRTC Subtree mirror in Chromium, 
WebRTC is moving the content of the src/webrtc directory up
to the src/ directory.

NOPRESUBMIT=true
NOTREECHECKS=true
NOTRY=true
TBR=tommi@webrtc.org

Bug: chromium:611808
Change-Id: Iac59c5b51b950f174119565bac87955a7994bc38
Reviewed-on: https://webrtc-review.googlesource.com/1560
Commit-Queue: Mirko Bonadei <mbonadei@webrtc.org>
Reviewed-by: Henrik Kjellander <kjellander@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#19845}
Author: Mirko Bonadei
Date: 2017-09-15 06:15:48 +02:00
Committed by: Commit Bot
Parent: 6674846b4a
Commit: bb547203bf
4576 changed files with 1092 additions and 1196 deletions


@@ -0,0 +1,41 @@
<!--
* Copyright 2017 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
-->
<manifest
xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:tools="http://schemas.android.com/tools"
package="org.webrtc">
<uses-feature android:name="android.hardware.camera" />
<uses-feature android:name="android.hardware.camera.autofocus" />
<uses-feature android:glEsVersion="0x00020000" android:required="true" />
<uses-sdk android:minSdkVersion="16" android:targetSdkVersion="21" />
<uses-permission android:name="android.permission.CAMERA" />
<uses-permission android:name="android.permission.MODIFY_AUDIO_SETTINGS" />
<uses-permission android:name="android.permission.RECORD_AUDIO" />
<uses-permission android:name="android.permission.INTERNET" />
<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE"/>
<uses-permission android:name="android.permission.ACCESS_NETWORK_STATE" />
<uses-permission android:name="android.permission.CHANGE_NETWORK_STATE" />
<uses-permission android:name="android.permission.RUN_INSTRUMENTATION" />
<application>
<uses-library android:name="android.test.runner" />
</application>
<!-- tools:ignore needed for chromium-junit4 tag. crbug.com/640116
TODO(sakal): Remove once the tag is no longer needed. -->
<instrumentation android:name="android.support.test.runner.AndroidJUnitRunner"
tools:ignore="MissingPrefix"
android:targetPackage="org.webrtc"
android:label="Tests for WebRTC Android SDK"
chromium-junit4="true"/>
</manifest>


@@ -0,0 +1,18 @@
# This file is used to override default values used by the Ant build system.
#
# This file must be checked into Version Control Systems, as it is
# integral to the build system of your project.
# This file is only used by the Ant script.
# You can use this to override default values such as
# 'source.dir' for the location of your java source folder and
# 'out.dir' for the location of your output folder.
# You can also use it to define how the release builds are signed by declaring
# the following properties:
# 'key.store' for the location of your keystore and
# 'key.alias' for the name of the key to use.
# The password will be asked during the build when you use the 'release' target.
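# For illustration only (hypothetical keystore path and alias, not part of this
# project): a release-signing override could look like:
#   key.store=/path/to/release.keystore
#   key.alias=webrtc_release_key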
source.dir=../java/testcommon/src;src


@@ -0,0 +1,92 @@
<?xml version="1.0" encoding="UTF-8"?>
<project name="libjingle_peerconnection_android_unittest" default="help">
<!-- The local.properties file is created and updated by the 'android' tool.
It contains the path to the SDK. It should *NOT* be checked into
Version Control Systems. -->
<property file="local.properties" />
<!-- The ant.properties file can be created by you. It is only edited by the
'android' tool to add properties to it.
This is the place to change some Ant specific build properties.
Here are some properties you may want to change/update:
source.dir
The name of the source directory. Default is 'src'.
out.dir
The name of the output directory. Default is 'bin'.
For other overridable properties, look at the beginning of the rules
files in the SDK, at tools/ant/build.xml
Properties related to the SDK location or the project target should
be updated using the 'android' tool with the 'update' action.
This file is an integral part of the build system for your
application and should be checked into Version Control Systems.
-->
<property file="ant.properties" />
<!-- if sdk.dir was not set from one of the property files, then
get it from the ANDROID_SDK_ROOT env var.
This must be done before we load project.properties since
the proguard config can use sdk.dir -->
<property environment="env" />
<condition property="sdk.dir" value="${env.ANDROID_SDK_ROOT}">
<isset property="env.ANDROID_SDK_ROOT" />
</condition>
<!-- The project.properties file is created and updated by the 'android'
tool, as well as ADT.
This contains project specific properties such as project target, and library
dependencies. Lower level build properties are stored in ant.properties
(or in .classpath for Eclipse projects).
This file is an integral part of the build system for your
application and should be checked into Version Control Systems. -->
<loadproperties srcFile="project.properties" />
<!-- quick check on sdk.dir -->
<fail
message="sdk.dir is missing. Make sure to generate local.properties using 'android update project' or to inject it through the ANDROID_HOME environment variable."
unless="sdk.dir"
/>
<!--
Import per project custom build rules if present at the root of the project.
This is the place to put custom intermediary targets such as:
-pre-build
-pre-compile
-post-compile (This is typically used for code obfuscation.
Compiled code location: ${out.classes.absolute.dir}
If this is not done in place, override ${out.dex.input.absolute.dir})
-post-package
-post-build
-pre-clean
-->
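<!-- For illustration only (hypothetical target, not part of this project):
     a minimal custom_rules.xml hooking -post-compile could look like:
       <project name="custom_rules">
         <target name="-post-compile">
           <echo message="compilation finished" />
         </target>
       </project>
-->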
<import file="custom_rules.xml" optional="true" />
<!-- Import the actual build file.
To customize existing targets, there are two options:
- Customize only one target:
- copy/paste the target into this file, *before* the
<import> task.
- customize it to your needs.
- Customize the whole content of build.xml
- copy/paste the content of the rules files (minus the top node)
into this file, replacing the <import> task.
- customize to your needs.
***********************
****** IMPORTANT ******
***********************
In all cases you must update the value of version-tag below to read 'custom' instead of an integer,
in order to avoid having your file be overridden by tools such as "android update project"
-->
<!-- version-tag: 1 -->
<import file="${sdk.dir}/tools/ant/build.xml" />
</project>


@@ -0,0 +1,16 @@
# This file is automatically generated by Android Tools.
# Do not modify this file -- YOUR CHANGES WILL BE ERASED!
#
# This file must be checked into Version Control Systems.
#
# To customize properties used by the Ant build system edit
# "ant.properties", and override values to adapt the script to your
# project structure.
#
# To enable ProGuard to shrink and obfuscate your code, uncomment this (available properties: sdk.dir, user.home):
#proguard.config=${sdk.dir}/tools/proguard/proguard-android.txt:proguard-project.txt
# Project target.
target=android-22
java.compilerargs=-Xlint:all -Werror


@@ -0,0 +1,204 @@
/*
* Copyright 2016 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import android.content.Context;
import android.support.test.InstrumentationRegistry;
import android.support.test.filters.LargeTest;
import android.support.test.filters.MediumTest;
import android.support.test.filters.SmallTest;
import java.io.IOException;
import org.chromium.base.test.BaseJUnit4ClassRunner;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
@RunWith(BaseJUnit4ClassRunner.class)
public class Camera1CapturerUsingByteBufferTest {
static final String TAG = "Camera1CapturerUsingByteBufferTest";
private class TestObjectFactory extends CameraVideoCapturerTestFixtures.TestObjectFactory {
@Override
public boolean isCapturingToTexture() {
return false;
}
@Override
public CameraEnumerator getCameraEnumerator() {
return new Camera1Enumerator(false);
}
@Override
public Context getAppContext() {
return InstrumentationRegistry.getTargetContext();
}
@SuppressWarnings("deprecation")
@Override
public Object rawOpenCamera(String cameraName) {
return android.hardware.Camera.open(Camera1Enumerator.getCameraIndex(cameraName));
}
@SuppressWarnings("deprecation")
@Override
public void rawCloseCamera(Object camera) {
((android.hardware.Camera) camera).release();
}
}
private CameraVideoCapturerTestFixtures fixtures;
@Before
public void setUp() {
// Enable VideoFrame capture.
PeerConnectionFactory.initializeFieldTrials(PeerConnectionFactory.VIDEO_FRAME_EMIT_TRIAL + "/"
+ PeerConnectionFactory.TRIAL_ENABLED + "/");
fixtures = new CameraVideoCapturerTestFixtures(new TestObjectFactory());
}
@After
public void tearDown() {
fixtures.dispose();
}
@Test
@SmallTest
public void testCreateAndDispose() throws InterruptedException {
fixtures.createCapturerAndDispose();
}
@Test
@SmallTest
public void testCreateNonExistingCamera() throws InterruptedException {
fixtures.createNonExistingCamera();
}
// This tests that the camera can be started and that frames are forwarded
// to a Java video renderer using a "default" capturer.
// It tests both the Java and the C++ layers.
@Test
@MediumTest
public void testCreateCapturerAndRender() throws InterruptedException {
fixtures.createCapturerAndRender();
}
// This tests that the camera can be started and that frames are forwarded
// to a Java video renderer using the front facing video capturer.
// It tests both the Java and the C++ layers.
@Test
@MediumTest
public void testStartFrontFacingVideoCapturer() throws InterruptedException {
fixtures.createFrontFacingCapturerAndRender();
}
// This tests that the camera can be started and that frames are forwarded
// to a Java video renderer using the back facing video capturer.
// It tests both the Java and the C++ layers.
@Test
@MediumTest
public void testStartBackFacingVideoCapturer() throws InterruptedException {
fixtures.createBackFacingCapturerAndRender();
}
// This tests that the default camera can be started and that the camera can
// later be switched to another camera.
// It tests both the Java and the C++ layers.
@Test
@MediumTest
public void testSwitchVideoCapturer() throws InterruptedException {
fixtures.switchCamera();
}
@Test
@MediumTest
public void testCameraEvents() throws InterruptedException {
fixtures.cameraEventsInvoked();
}
@Test
@MediumTest
public void testUpdateMediaRecorder() throws InterruptedException, IOException {
fixtures.updateMediaRecorder(false /* useSurfaceCapture */);
}
// Tests what happens when attempting to call e.g. switchCamera() after the camera has been stopped.
@Test
@MediumTest
public void testCameraCallsAfterStop() throws InterruptedException {
fixtures.cameraCallsAfterStop();
}
// This tests that the VideoSource that the CameraVideoCapturer is connected to can
// be stopped and restarted. It tests both the Java and the C++ layers.
@Test
@LargeTest
public void testStopRestartVideoSource() throws InterruptedException {
fixtures.stopRestartVideoSource();
}
// This tests that the camera can be started at different resolutions.
// It does not test or use the C++ layer.
@Test
@LargeTest
public void testStartStopWithDifferentResolutions() throws InterruptedException {
fixtures.startStopWithDifferentResolutions();
}
// This tests what happens if buffers are returned after the capturer has
// been stopped and restarted. It does not test or use the C++ layer.
@Test
@LargeTest
public void testReturnBufferLate() throws InterruptedException {
fixtures.returnBufferLate();
}
// This tests that we can capture frames, keep them in a local renderer, stop capturing,
// and then return the frames. The difference from testReturnBufferLate() is that this also
// exercises the JNI and C++ AndroidVideoCapturer parts.
@Test
@MediumTest
public void testReturnBufferLateEndToEnd() throws InterruptedException {
fixtures.returnBufferLateEndToEnd();
}
// This tests that frames forwarded to a renderer are scaled if adaptOutputFormat is
// called. It exercises both the Java and C++ parts of the stack.
@Test
@MediumTest
public void testScaleCameraOutput() throws InterruptedException {
fixtures.scaleCameraOutput();
}
// This tests that an error is reported if the camera is already open
// when CameraVideoCapturer is started.
@Test
@LargeTest
public void testStartWhileCameraIsAlreadyOpen() throws InterruptedException {
fixtures.startWhileCameraIsAlreadyOpen();
}
// This tests that CameraVideoCapturer can be started even if the camera is already open,
// as long as the camera is closed while CameraVideoCapturer is retrying to start.
@Test
@LargeTest
public void testStartWhileCameraIsAlreadyOpenAndCloseCamera() throws InterruptedException {
fixtures.startWhileCameraIsAlreadyOpenAndCloseCamera();
}
// This tests that CameraVideoCapturer.stop can be called while CameraVideoCapturer is
// retrying to start.
@Test
@MediumTest
public void testStartWhileCameraIsAlreadyOpenAndStop() throws InterruptedException {
fixtures.startWhileCameraIsAlreadyOpenAndStop();
}
}


@@ -0,0 +1,207 @@
/*
* Copyright 2016 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import android.content.Context;
import android.support.test.InstrumentationRegistry;
import android.support.test.filters.LargeTest;
import android.support.test.filters.MediumTest;
import android.support.test.filters.SmallTest;
import java.io.IOException;
import org.chromium.base.test.BaseJUnit4ClassRunner;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
@RunWith(BaseJUnit4ClassRunner.class)
public class Camera1CapturerUsingTextureTest {
static final String TAG = "Camera1CapturerUsingTextureTest";
private class TestObjectFactory extends CameraVideoCapturerTestFixtures.TestObjectFactory {
@Override
public CameraEnumerator getCameraEnumerator() {
return new Camera1Enumerator();
}
@Override
public Context getAppContext() {
return InstrumentationRegistry.getTargetContext();
}
@SuppressWarnings("deprecation")
@Override
public Object rawOpenCamera(String cameraName) {
return android.hardware.Camera.open(Camera1Enumerator.getCameraIndex(cameraName));
}
@SuppressWarnings("deprecation")
@Override
public void rawCloseCamera(Object camera) {
((android.hardware.Camera) camera).release();
}
}
private CameraVideoCapturerTestFixtures fixtures;
@Before
public void setUp() {
// Enable VideoFrame capture.
PeerConnectionFactory.initializeFieldTrials(PeerConnectionFactory.VIDEO_FRAME_EMIT_TRIAL + "/"
+ PeerConnectionFactory.TRIAL_ENABLED + "/");
fixtures = new CameraVideoCapturerTestFixtures(new TestObjectFactory());
}
@After
public void tearDown() {
fixtures.dispose();
}
@Test
@SmallTest
public void testCreateAndDispose() throws InterruptedException {
fixtures.createCapturerAndDispose();
}
@Test
@SmallTest
public void testCreateNonExistingCamera() throws InterruptedException {
fixtures.createNonExistingCamera();
}
// This tests that the camera can be started and that frames are forwarded
// to a Java video renderer using a "default" capturer.
// It tests both the Java and the C++ layers.
@Test
@MediumTest
public void testCreateCapturerAndRender() throws InterruptedException {
fixtures.createCapturerAndRender();
}
// This tests that the camera can be started and that frames are forwarded
// to a Java video renderer using the front facing video capturer.
// It tests both the Java and the C++ layers.
@Test
@MediumTest
public void testStartFrontFacingVideoCapturer() throws InterruptedException {
fixtures.createFrontFacingCapturerAndRender();
}
// This tests that the camera can be started and that frames are forwarded
// to a Java video renderer using the back facing video capturer.
// It tests both the Java and the C++ layers.
@Test
@MediumTest
public void testStartBackFacingVideoCapturer() throws InterruptedException {
fixtures.createBackFacingCapturerAndRender();
}
// This tests that the default camera can be started and that the camera can
// later be switched to another camera.
// It tests both the Java and the C++ layers.
@Test
@MediumTest
public void testSwitchVideoCapturer() throws InterruptedException {
fixtures.switchCamera();
}
@Test
@MediumTest
public void testCameraEvents() throws InterruptedException {
fixtures.cameraEventsInvoked();
}
@Test
@MediumTest
public void testUpdateMediaRecorder() throws InterruptedException, IOException {
fixtures.updateMediaRecorder(false /* useSurfaceCapture */);
}
// Tests what happens when attempting to call e.g. switchCamera() after the camera has been stopped.
@Test
@MediumTest
public void testCameraCallsAfterStop() throws InterruptedException {
fixtures.cameraCallsAfterStop();
}
// This tests that the VideoSource that the CameraVideoCapturer is connected to can
// be stopped and restarted. It tests both the Java and the C++ layers.
@Test
@LargeTest
public void testStopRestartVideoSource() throws InterruptedException {
fixtures.stopRestartVideoSource();
}
// This tests that the camera can be started at different resolutions.
// It does not test or use the C++ layer.
@Test
@LargeTest
public void testStartStopWithDifferentResolutions() throws InterruptedException {
fixtures.startStopWithDifferentResolutions();
}
// This tests what happens if buffers are returned after the capturer has
// been stopped and restarted. It does not test or use the C++ layer.
@Test
@LargeTest
public void testReturnBufferLate() throws InterruptedException {
fixtures.returnBufferLate();
}
// This tests that we can capture frames, keep them in a local renderer, stop capturing,
// and then return the frames. The difference from testReturnBufferLate() is that this also
// exercises the JNI and C++ AndroidVideoCapturer parts.
@Test
@MediumTest
public void testReturnBufferLateEndToEnd() throws InterruptedException {
fixtures.returnBufferLateEndToEnd();
}
// This tests that CameraEventsHandler.onCameraFreezed is triggered if video buffers are not
// returned to the capturer.
@Test
@LargeTest
public void testCameraFreezedEventOnBufferStarvation() throws InterruptedException {
fixtures.cameraFreezedEventOnBufferStarvation();
}
// This tests that frames forwarded to a renderer are scaled if adaptOutputFormat is
// called. It exercises both the Java and C++ parts of the stack.
@Test
@MediumTest
public void testScaleCameraOutput() throws InterruptedException {
fixtures.scaleCameraOutput();
}
// This tests that an error is reported if the camera is already open
// when CameraVideoCapturer is started.
@Test
@LargeTest
public void testStartWhileCameraIsAlreadyOpen() throws InterruptedException {
fixtures.startWhileCameraIsAlreadyOpen();
}
// This tests that CameraVideoCapturer can be started even if the camera is already open,
// as long as the camera is closed while CameraVideoCapturer is retrying to start.
@Test
@LargeTest
public void testStartWhileCameraIsAlreadyOpenAndCloseCamera() throws InterruptedException {
fixtures.startWhileCameraIsAlreadyOpenAndCloseCamera();
}
// This tests that CameraVideoCapturer.stop can be called while CameraVideoCapturer is
// retrying to start.
@Test
@MediumTest
public void testStartWhileCameraIsAlreadyOpenAndStop() throws InterruptedException {
fixtures.startWhileCameraIsAlreadyOpenAndStop();
}
}


@@ -0,0 +1,336 @@
/*
* Copyright 2016 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.fail;
import android.annotation.TargetApi;
import android.content.Context;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CameraManager;
import android.os.Handler;
import android.os.Looper;
import android.support.test.InstrumentationRegistry;
import android.support.test.filters.LargeTest;
import android.support.test.filters.MediumTest;
import android.support.test.filters.SmallTest;
import java.io.IOException;
import java.util.concurrent.CountDownLatch;
import org.chromium.base.test.BaseJUnit4ClassRunner;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
@TargetApi(21)
@RunWith(BaseJUnit4ClassRunner.class)
public class Camera2CapturerTest {
static final String TAG = "Camera2CapturerTest";
/**
* Simple camera2 implementation that only knows how to open the camera and close it.
*/
private class SimpleCamera2 {
final CameraManager cameraManager;
final LooperThread looperThread;
final CountDownLatch openDoneSignal;
final Object cameraDeviceLock;
CameraDevice cameraDevice; // Guarded by cameraDeviceLock
boolean openSucceeded; // Guarded by cameraDeviceLock
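// Background thread that runs a Looper so CameraDevice.StateCallback events can be
// delivered on a dedicated handler thread instead of the test thread.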
private class LooperThread extends Thread {
final CountDownLatch startedSignal = new CountDownLatch(1);
private Handler handler;
@Override
public void run() {
Looper.prepare();
handler = new Handler();
startedSignal.countDown();
Looper.loop();
}
public void waitToStart() {
ThreadUtils.awaitUninterruptibly(startedSignal);
}
public void requestStop() {
handler.getLooper().quit();
}
public Handler getHandler() {
return handler;
}
}
private class CameraStateCallback extends CameraDevice.StateCallback {
@Override
public void onClosed(CameraDevice cameraDevice) {
Logging.d(TAG, "Simple camera2 closed.");
synchronized (cameraDeviceLock) {
SimpleCamera2.this.cameraDevice = null;
}
}
@Override
public void onDisconnected(CameraDevice cameraDevice) {
Logging.d(TAG, "Simple camera2 disconnected.");
synchronized (cameraDeviceLock) {
SimpleCamera2.this.cameraDevice = null;
}
}
@Override
public void onError(CameraDevice cameraDevice, int errorCode) {
Logging.w(TAG, "Simple camera2 error: " + errorCode);
synchronized (cameraDeviceLock) {
SimpleCamera2.this.cameraDevice = cameraDevice;
openSucceeded = false;
}
openDoneSignal.countDown();
}
@Override
public void onOpened(CameraDevice cameraDevice) {
Logging.d(TAG, "Simple camera2 opened.");
synchronized (cameraDeviceLock) {
SimpleCamera2.this.cameraDevice = cameraDevice;
openSucceeded = true;
}
openDoneSignal.countDown();
}
}
SimpleCamera2(Context context, String deviceName) {
cameraManager = (CameraManager) context.getSystemService(Context.CAMERA_SERVICE);
looperThread = new LooperThread();
looperThread.start();
looperThread.waitToStart();
cameraDeviceLock = new Object();
openDoneSignal = new CountDownLatch(1);
cameraDevice = null;
Logging.d(TAG, "Opening simple camera2.");
try {
cameraManager.openCamera(deviceName, new CameraStateCallback(), looperThread.getHandler());
} catch (CameraAccessException e) {
fail("Simple camera2 CameraAccessException: " + e.getMessage());
}
Logging.d(TAG, "Waiting for simple camera2 to open.");
ThreadUtils.awaitUninterruptibly(openDoneSignal);
synchronized (cameraDeviceLock) {
if (!openSucceeded) {
fail("Opening simple camera2 failed.");
}
}
}
public void close() {
Logging.d(TAG, "Closing simple camera2.");
synchronized (cameraDeviceLock) {
if (cameraDevice != null) {
cameraDevice.close();
}
}
looperThread.requestStop();
ThreadUtils.joinUninterruptibly(looperThread);
}
}
private class TestObjectFactory extends CameraVideoCapturerTestFixtures.TestObjectFactory {
@Override
public CameraEnumerator getCameraEnumerator() {
return new Camera2Enumerator(getAppContext());
}
@Override
public Context getAppContext() {
return InstrumentationRegistry.getTargetContext();
}
@SuppressWarnings("deprecation")
@Override
public Object rawOpenCamera(String cameraName) {
return new SimpleCamera2(getAppContext(), cameraName);
}
@SuppressWarnings("deprecation")
@Override
public void rawCloseCamera(Object camera) {
((SimpleCamera2) camera).close();
}
}
private CameraVideoCapturerTestFixtures fixtures;
@Before
public void setUp() {
// Enable VideoFrame capture.
PeerConnectionFactory.initializeFieldTrials(PeerConnectionFactory.VIDEO_FRAME_EMIT_TRIAL + "/"
+ PeerConnectionFactory.TRIAL_ENABLED + "/");
fixtures = new CameraVideoCapturerTestFixtures(new TestObjectFactory());
}
@After
public void tearDown() {
fixtures.dispose();
}
@Test
@SmallTest
public void testCreateAndDispose() throws InterruptedException {
fixtures.createCapturerAndDispose();
}
@Test
@SmallTest
public void testCreateNonExistingCamera() throws InterruptedException {
fixtures.createNonExistingCamera();
}
// This tests that the camera can be started and that frames are forwarded
// to a Java video renderer using a "default" capturer.
// It tests both the Java and the C++ layers.
@Test
@MediumTest
public void testCreateCapturerAndRender() throws InterruptedException {
fixtures.createCapturerAndRender();
}
// This tests that the camera can be started and that frames are forwarded
// to a Java video renderer using the front facing video capturer.
// It tests both the Java and the C++ layers.
@Test
@MediumTest
public void testStartFrontFacingVideoCapturer() throws InterruptedException {
fixtures.createFrontFacingCapturerAndRender();
}
// This tests that the camera can be started and that frames are forwarded
// to a Java video renderer using the back facing video capturer.
// It tests both the Java and the C++ layers.
@Test
@MediumTest
public void testStartBackFacingVideoCapturer() throws InterruptedException {
fixtures.createBackFacingCapturerAndRender();
}
// This tests that the default camera can be started and that the camera can
// later be switched to another camera.
// It tests both the Java and the C++ layers.
@Test
@MediumTest
public void testSwitchVideoCapturer() throws InterruptedException {
fixtures.switchCamera();
}
@Test
@MediumTest
public void testCameraEvents() throws InterruptedException {
fixtures.cameraEventsInvoked();
}
@Test
@MediumTest
public void testUpdateMediaRecorder() throws InterruptedException, IOException {
fixtures.updateMediaRecorder(true /* useSurfaceCapture */);
}
// Tests what happens when attempting to call e.g. switchCamera() after the camera has been stopped.
@Test
@MediumTest
public void testCameraCallsAfterStop() throws InterruptedException {
fixtures.cameraCallsAfterStop();
}
// This tests that the VideoSource that the CameraVideoCapturer is connected to can
// be stopped and restarted. It tests both the Java and the C++ layers.
@Test
@LargeTest
public void testStopRestartVideoSource() throws InterruptedException {
fixtures.stopRestartVideoSource();
}
// This tests that the camera can be started at different resolutions.
// It does not test or use the C++ layer.
@Test
@LargeTest
public void testStartStopWithDifferentResolutions() throws InterruptedException {
fixtures.startStopWithDifferentResolutions();
}
// This tests what happens if buffers are returned after the capturer has
// been stopped and restarted. It does not test or use the C++ layer.
@Test
@LargeTest
public void testReturnBufferLate() throws InterruptedException {
fixtures.returnBufferLate();
}
// This tests that we can capture frames, keep them in a local renderer, stop capturing,
// and then return the frames. The difference from testReturnBufferLate() is that this also
// exercises the JNI and C++ AndroidVideoCapturer parts.
@Test
@MediumTest
public void testReturnBufferLateEndToEnd() throws InterruptedException {
fixtures.returnBufferLateEndToEnd();
}
// This tests that CameraEventsHandler.onCameraFreezed is triggered if video buffers are not
// returned to the capturer.
@Test
@LargeTest
public void testCameraFreezedEventOnBufferStarvation() throws InterruptedException {
fixtures.cameraFreezedEventOnBufferStarvation();
}
// This tests that frames forwarded to a renderer are scaled if adaptOutputFormat is
// called. It exercises both the Java and C++ parts of the stack.
@Test
@MediumTest
public void testScaleCameraOutput() throws InterruptedException {
fixtures.scaleCameraOutput();
}
// This tests that an error is reported if the camera is already open
// when CameraVideoCapturer is started.
@Test
@LargeTest
public void testStartWhileCameraIsAlreadyOpen() throws InterruptedException {
fixtures.startWhileCameraIsAlreadyOpen();
}
// This tests that CameraVideoCapturer can be started even if the camera is already open,
// as long as the camera is closed while CameraVideoCapturer is retrying to start.
@Test
@LargeTest
public void testStartWhileCameraIsAlreadyOpenAndCloseCamera() throws InterruptedException {
fixtures.startWhileCameraIsAlreadyOpenAndCloseCamera();
}
// This tests that CameraVideoCapturer.stop can be called while CameraVideoCapturer is
// retrying to start.
@Test
@MediumTest
public void testStartWhileCameraIsAlreadyOpenAndStop() throws InterruptedException {
fixtures.startWhileCameraIsAlreadyOpenAndStop();
}
}


@@ -0,0 +1,844 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import android.annotation.TargetApi;
import android.content.Context;
import android.media.CamcorderProfile;
import android.media.MediaRecorder;
import android.os.Environment;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.CountDownLatch;
import org.chromium.base.test.BaseJUnit4ClassRunner;
import org.junit.runner.RunWith;
import org.webrtc.CameraEnumerationAndroid.CaptureFormat;
import org.webrtc.VideoRenderer.I420Frame;
class CameraVideoCapturerTestFixtures {
static final String TAG = "CameraVideoCapturerTestFixtures";
// Default values used for starting capture.
static final int DEFAULT_WIDTH = 640;
static final int DEFAULT_HEIGHT = 480;
static final int DEFAULT_FPS = 15;
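// Renderer callbacks that count rendered frames, record the latest rotated frame size, and let
// tests block until the next frame has been rendered.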
static private class RendererCallbacks implements VideoRenderer.Callbacks {
private int framesRendered = 0;
private final Object frameLock = new Object();
private int width = 0;
private int height = 0;
@Override
public void renderFrame(I420Frame frame) {
synchronized (frameLock) {
++framesRendered;
width = frame.rotatedWidth();
height = frame.rotatedHeight();
frameLock.notify();
}
VideoRenderer.renderFrameDone(frame);
}
public int frameWidth() {
synchronized (frameLock) {
return width;
}
}
public int frameHeight() {
synchronized (frameLock) {
return height;
}
}
public int waitForNextFrameToRender() throws InterruptedException {
Logging.d(TAG, "Waiting for the next frame to render");
synchronized (frameLock) {
frameLock.wait();
return framesRendered;
}
}
}
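// Renderer that simply collects incoming frames without returning them, so tests can exercise
// returning frames late (after capturing has stopped).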
static private class FakeAsyncRenderer implements VideoRenderer.Callbacks {
private final List<I420Frame> pendingFrames = new ArrayList<I420Frame>();
@Override
public void renderFrame(I420Frame frame) {
synchronized (pendingFrames) {
pendingFrames.add(frame);
pendingFrames.notifyAll();
}
}
// Wait until at least one frame has been received before returning the pending frames.
public List<I420Frame> waitForPendingFrames() throws InterruptedException {
Logging.d(TAG, "Waiting for pending frames");
synchronized (pendingFrames) {
while (pendingFrames.isEmpty()) {
pendingFrames.wait();
}
return new ArrayList<I420Frame>(pendingFrames);
}
}
}
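// CapturerObserver that retains the most recent captured VideoFrame, records capture timestamps,
// and lets tests wait for the capturer to start and for new frames to arrive.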
static private class FakeCapturerObserver implements CameraVideoCapturer.CapturerObserver {
private int framesCaptured = 0;
private VideoFrame videoFrame;
final private Object frameLock = new Object();
final private Object capturerStartLock = new Object();
private boolean capturerStartResult = false;
final private List<Long> timestamps = new ArrayList<Long>();
@Override
public void onCapturerStarted(boolean success) {
Logging.d(TAG, "onCapturerStarted: " + success);
synchronized (capturerStartLock) {
capturerStartResult = success;
capturerStartLock.notifyAll();
}
}
@Override
public void onCapturerStopped() {
Logging.d(TAG, "onCapturerStopped");
}
@Override
public void onByteBufferFrameCaptured(
byte[] frame, int width, int height, int rotation, long timeStamp) {
throw new RuntimeException("onByteBufferFrameCaptured called");
}
@Override
public void onTextureFrameCaptured(int width, int height, int oesTextureId,
float[] transformMatrix, int rotation, long timeStamp) {
throw new RuntimeException("onTextureFrameCaptured called");
}
@Override
public void onFrameCaptured(VideoFrame frame) {
synchronized (frameLock) {
++framesCaptured;
if (videoFrame != null) {
videoFrame.release();
}
videoFrame = frame;
videoFrame.retain();
timestamps.add(videoFrame.getTimestampNs());
frameLock.notify();
}
}
public boolean waitForCapturerToStart() throws InterruptedException {
Logging.d(TAG, "Waiting for the capturer to start");
synchronized (capturerStartLock) {
capturerStartLock.wait();
return capturerStartResult;
}
}
public int waitForNextCapturedFrame() throws InterruptedException {
Logging.d(TAG, "Waiting for the next captured frame");
synchronized (frameLock) {
frameLock.wait();
return framesCaptured;
}
}
int frameWidth() {
synchronized (frameLock) {
return videoFrame.getBuffer().getWidth();
}
}
int frameHeight() {
synchronized (frameLock) {
return videoFrame.getBuffer().getHeight();
}
}
void releaseFrame() {
synchronized (frameLock) {
if (videoFrame != null) {
videoFrame.release();
videoFrame = null;
}
}
}
List<Long> getCopyAndResetListOftimeStamps() {
synchronized (frameLock) {
ArrayList<Long> list = new ArrayList<Long>(timestamps);
timestamps.clear();
return list;
}
}
}
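// CameraEventsHandler that records which callbacks were invoked and lets tests wait for the
// camera-freeze and camera-closed events.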
static class CameraEvents implements CameraVideoCapturer.CameraEventsHandler {
public boolean onCameraOpeningCalled;
public boolean onFirstFrameAvailableCalled;
public final Object onCameraFreezedLock = new Object();
private String onCameraFreezedDescription;
public final Object cameraClosedLock = new Object();
private boolean cameraClosed = true;
@Override
public void onCameraError(String errorDescription) {
Logging.w(TAG, "Camera error: " + errorDescription);
cameraClosed = true;
}
@Override
public void onCameraDisconnected() {}
@Override
public void onCameraFreezed(String errorDescription) {
synchronized (onCameraFreezedLock) {
onCameraFreezedDescription = errorDescription;
onCameraFreezedLock.notifyAll();
}
}
@Override
public void onCameraOpening(String cameraName) {
onCameraOpeningCalled = true;
synchronized (cameraClosedLock) {
cameraClosed = false;
}
}
@Override
public void onFirstFrameAvailable() {
onFirstFrameAvailableCalled = true;
}
@Override
public void onCameraClosed() {
synchronized (cameraClosedLock) {
cameraClosed = true;
cameraClosedLock.notifyAll();
}
}
public String waitForCameraFreezed() throws InterruptedException {
Logging.d(TAG, "Waiting for the camera to freeze");
synchronized (onCameraFreezedLock) {
onCameraFreezedLock.wait();
return onCameraFreezedDescription;
}
}
public void waitForCameraClosed() throws InterruptedException {
synchronized (cameraClosedLock) {
while (!cameraClosed) {
Logging.d(TAG, "Waiting for the camera to close.");
cameraClosedLock.wait();
}
}
}
}
/**
* Class to collect all classes related to single capturer instance.
*/
static private class CapturerInstance {
public CameraVideoCapturer capturer;
public CameraEvents cameraEvents;
public SurfaceTextureHelper surfaceTextureHelper;
public FakeCapturerObserver observer;
public List<CaptureFormat> supportedFormats;
public CaptureFormat format;
}
/**
* Class used for collecting a VideoSource, a VideoTrack and a renderer. The class
* is used for testing local rendering from a capturer.
*/
static private class VideoTrackWithRenderer {
public VideoSource source;
public VideoTrack track;
public RendererCallbacks rendererCallbacks;
public FakeAsyncRenderer fakeAsyncRenderer;
}
public abstract static class TestObjectFactory {
final CameraEnumerator cameraEnumerator;
TestObjectFactory() {
cameraEnumerator = getCameraEnumerator();
}
public CameraVideoCapturer createCapturer(
String name, CameraVideoCapturer.CameraEventsHandler eventsHandler) {
return cameraEnumerator.createCapturer(name, eventsHandler);
}
public String getNameOfFrontFacingDevice() {
for (String deviceName : cameraEnumerator.getDeviceNames()) {
if (cameraEnumerator.isFrontFacing(deviceName)) {
return deviceName;
}
}
return null;
}
public String getNameOfBackFacingDevice() {
for (String deviceName : cameraEnumerator.getDeviceNames()) {
if (cameraEnumerator.isBackFacing(deviceName)) {
return deviceName;
}
}
return null;
}
public boolean haveTwoCameras() {
return cameraEnumerator.getDeviceNames().length >= 2;
}
public boolean isCapturingToTexture() {
// In the future, we plan to only support capturing to texture, so default to true
return true;
}
abstract public CameraEnumerator getCameraEnumerator();
abstract public Context getAppContext();
// CameraVideoCapturer API is too slow for some of our tests where we need to open a competing
// camera. These methods are used instead.
abstract public Object rawOpenCamera(String cameraName);
abstract public void rawCloseCamera(Object camera);
}
private PeerConnectionFactory peerConnectionFactory;
private TestObjectFactory testObjectFactory;
CameraVideoCapturerTestFixtures(TestObjectFactory testObjectFactory) {
PeerConnectionFactory.initializeAndroidGlobals(testObjectFactory.getAppContext(), true);
this.peerConnectionFactory = new PeerConnectionFactory(null /* options */);
this.testObjectFactory = testObjectFactory;
}
public void dispose() {
this.peerConnectionFactory.dispose();
}
// Internal helper methods
private CapturerInstance createCapturer(String name, boolean initialize) {
CapturerInstance instance = new CapturerInstance();
instance.cameraEvents = new CameraEvents();
instance.capturer = testObjectFactory.createCapturer(name, instance.cameraEvents);
instance.surfaceTextureHelper = SurfaceTextureHelper.create(
"SurfaceTextureHelper test" /* threadName */, null /* sharedContext */);
instance.observer = new FakeCapturerObserver();
if (initialize) {
instance.capturer.initialize(
instance.surfaceTextureHelper, testObjectFactory.getAppContext(), instance.observer);
}
instance.supportedFormats = testObjectFactory.cameraEnumerator.getSupportedFormats(name);
return instance;
}
private CapturerInstance createCapturer(boolean initialize) {
String name = testObjectFactory.cameraEnumerator.getDeviceNames()[0];
return createCapturer(name, initialize);
}
private void startCapture(CapturerInstance instance) {
startCapture(instance, 0);
}
private void startCapture(CapturerInstance instance, int formatIndex) {
final CameraEnumerationAndroid.CaptureFormat format =
instance.supportedFormats.get(formatIndex);
instance.capturer.startCapture(format.width, format.height, format.framerate.max);
instance.format = format;
}
private void disposeCapturer(CapturerInstance instance) throws InterruptedException {
instance.capturer.stopCapture();
instance.cameraEvents.waitForCameraClosed();
instance.capturer.dispose();
instance.observer.releaseFrame();
instance.surfaceTextureHelper.dispose();
}
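// Wraps the capturer in a PeerConnectionFactory VideoSource/VideoTrack, attaches the given
// renderer callbacks, and starts capture at the default resolution and frame rate.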
private VideoTrackWithRenderer createVideoTrackWithRenderer(
CameraVideoCapturer capturer, VideoRenderer.Callbacks rendererCallbacks) {
VideoTrackWithRenderer videoTrackWithRenderer = new VideoTrackWithRenderer();
videoTrackWithRenderer.source = peerConnectionFactory.createVideoSource(capturer);
capturer.startCapture(DEFAULT_WIDTH, DEFAULT_HEIGHT, DEFAULT_FPS);
videoTrackWithRenderer.track =
peerConnectionFactory.createVideoTrack("dummy", videoTrackWithRenderer.source);
videoTrackWithRenderer.track.addRenderer(new VideoRenderer(rendererCallbacks));
return videoTrackWithRenderer;
}
private VideoTrackWithRenderer createVideoTrackWithRenderer(CameraVideoCapturer capturer) {
RendererCallbacks rendererCallbacks = new RendererCallbacks();
VideoTrackWithRenderer videoTrackWithRenderer =
createVideoTrackWithRenderer(capturer, rendererCallbacks);
videoTrackWithRenderer.rendererCallbacks = rendererCallbacks;
return videoTrackWithRenderer;
}
private VideoTrackWithRenderer createVideoTrackWithFakeAsyncRenderer(
CameraVideoCapturer capturer) {
FakeAsyncRenderer fakeAsyncRenderer = new FakeAsyncRenderer();
VideoTrackWithRenderer videoTrackWithRenderer =
createVideoTrackWithRenderer(capturer, fakeAsyncRenderer);
videoTrackWithRenderer.fakeAsyncRenderer = fakeAsyncRenderer;
return videoTrackWithRenderer;
}
private void disposeVideoTrackWithRenderer(VideoTrackWithRenderer videoTrackWithRenderer) {
videoTrackWithRenderer.track.dispose();
videoTrackWithRenderer.source.dispose();
}
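// Posts an empty task to the capturer's SurfaceTextureHelper handler and waits for it to run,
// which guarantees that all previously posted work on that thread has completed.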
private void waitUntilIdle(CapturerInstance capturerInstance) throws InterruptedException {
final CountDownLatch barrier = new CountDownLatch(1);
capturerInstance.surfaceTextureHelper.getHandler().post(new Runnable() {
@Override
public void run() {
barrier.countDown();
}
});
barrier.await();
}
private void createCapturerAndRender(String name) throws InterruptedException {
if (name == null) {
Logging.w(TAG, "Skipping video capturer test because device name is null.");
return;
}
final CapturerInstance capturerInstance = createCapturer(name, false /* initialize */);
final VideoTrackWithRenderer videoTrackWithRenderer =
createVideoTrackWithRenderer(capturerInstance.capturer);
assertTrue(videoTrackWithRenderer.rendererCallbacks.waitForNextFrameToRender() > 0);
disposeCapturer(capturerInstance);
disposeVideoTrackWithRenderer(videoTrackWithRenderer);
}
// Test methods
public void createCapturerAndDispose() throws InterruptedException {
disposeCapturer(createCapturer(true /* initialize */));
}
public void createNonExistingCamera() throws InterruptedException {
try {
disposeCapturer(createCapturer("non-existing camera", false /* initialize */));
} catch (IllegalArgumentException e) {
return;
}
fail("Expected illegal argument exception when creating non-existing camera.");
}
public void createCapturerAndRender() throws InterruptedException {
String name = testObjectFactory.cameraEnumerator.getDeviceNames()[0];
createCapturerAndRender(name);
}
public void createFrontFacingCapturerAndRender() throws InterruptedException {
createCapturerAndRender(testObjectFactory.getNameOfFrontFacingDevice());
}
public void createBackFacingCapturerAndRender() throws InterruptedException {
createCapturerAndRender(testObjectFactory.getNameOfBackFacingDevice());
}
public void switchCamera() throws InterruptedException {
if (!testObjectFactory.haveTwoCameras()) {
Logging.w(
TAG, "Skipping test switch video capturer because the device doesn't have two cameras.");
return;
}
final CapturerInstance capturerInstance = createCapturer(false /* initialize */);
final VideoTrackWithRenderer videoTrackWithRenderer =
createVideoTrackWithRenderer(capturerInstance.capturer);
// Wait for the camera to start so we can switch it
assertTrue(videoTrackWithRenderer.rendererCallbacks.waitForNextFrameToRender() > 0);
// Single-element array so the anonymous CameraSwitchHandler below can write the result;
// local variables captured by anonymous classes must be (effectively) final.
final boolean[] cameraSwitchSuccessful = new boolean[1];
final CountDownLatch barrier = new CountDownLatch(1);
capturerInstance.capturer.switchCamera(new CameraVideoCapturer.CameraSwitchHandler() {
@Override
public void onCameraSwitchDone(boolean isFrontCamera) {
cameraSwitchSuccessful[0] = true;
barrier.countDown();
}
@Override
public void onCameraSwitchError(String errorDescription) {
cameraSwitchSuccessful[0] = false;
barrier.countDown();
}
});
// Wait until the camera has been switched.
barrier.await();
// Check result.
assertTrue(cameraSwitchSuccessful[0]);
// Ensure that frames are received.
assertTrue(videoTrackWithRenderer.rendererCallbacks.waitForNextFrameToRender() > 0);
disposeCapturer(capturerInstance);
disposeVideoTrackWithRenderer(videoTrackWithRenderer);
}
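// Configures the given MediaRecorder with a 480p H.264/MPEG-4 profile and the given output file.
// The video source is either the camera or a surface, depending on useSurfaceCapture.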
@TargetApi(21)
private static void prepareMediaRecorderForTests(
MediaRecorder mediaRecorder, File outputFile, boolean useSurfaceCapture) throws IOException {
mediaRecorder.setVideoSource(
useSurfaceCapture ? MediaRecorder.VideoSource.SURFACE : MediaRecorder.VideoSource.CAMERA);
CamcorderProfile profile = CamcorderProfile.get(CamcorderProfile.QUALITY_480P);
profile.videoCodec = MediaRecorder.VideoEncoder.H264;
profile.videoBitRate = 2500000;
profile.videoFrameWidth = 640;
profile.videoFrameHeight = 480;
mediaRecorder.setOutputFormat(MediaRecorder.OutputFormat.MPEG_4);
mediaRecorder.setVideoFrameRate(profile.videoFrameRate);
mediaRecorder.setVideoSize(profile.videoFrameWidth, profile.videoFrameHeight);
mediaRecorder.setVideoEncodingBitRate(profile.videoBitRate);
mediaRecorder.setVideoEncoder(profile.videoCodec);
mediaRecorder.setOutputFile(outputFile.getPath());
mediaRecorder.prepare();
}
@TargetApi(21)
public void updateMediaRecorder(boolean useSurfaceCapture)
throws InterruptedException, IOException {
final CapturerInstance capturerInstance = createCapturer(false /* initialize */);
final VideoTrackWithRenderer videoTrackWithRenderer =
createVideoTrackWithRenderer(capturerInstance.capturer);
// Wait for the camera to start so we can add and remove MediaRecorder.
assertTrue(videoTrackWithRenderer.rendererCallbacks.waitForNextFrameToRender() > 0);
final String videoOutPath = Environment.getExternalStorageDirectory().getPath()
+ "/chromium_tests_root/testmediarecorder.mp4";
File outputFile = new File(videoOutPath);
// Create MediaRecorder object
MediaRecorder mediaRecorder = new MediaRecorder();
if (useSurfaceCapture) {
// When using surface capture, the media recorder has to be prepared before adding it to the
// camera.
prepareMediaRecorderForTests(mediaRecorder, outputFile, useSurfaceCapture);
}
// Add MediaRecorder to camera pipeline.
final boolean[] addMediaRecorderSuccessful = new boolean[1];
final CountDownLatch addBarrier = new CountDownLatch(1);
CameraVideoCapturer.MediaRecorderHandler addMediaRecorderHandler =
new CameraVideoCapturer.MediaRecorderHandler() {
@Override
public void onMediaRecorderSuccess() {
addMediaRecorderSuccessful[0] = true;
addBarrier.countDown();
}
@Override
public void onMediaRecorderError(String errorDescription) {
Logging.e(TAG, errorDescription);
addMediaRecorderSuccessful[0] = false;
addBarrier.countDown();
}
};
capturerInstance.capturer.addMediaRecorderToCamera(mediaRecorder, addMediaRecorderHandler);
// Wait until the MediaRecorder has been added.
addBarrier.await();
// Check result.
assertTrue(addMediaRecorderSuccessful[0]);
// Start MediaRecorder and wait for a few frames to capture.
if (!useSurfaceCapture) {
// When using camera capture, the media recorder has to be prepared after adding it to the
// camera.
prepareMediaRecorderForTests(mediaRecorder, outputFile, useSurfaceCapture);
}
mediaRecorder.start();
for (int i = 0; i < 5; i++) {
assertTrue(videoTrackWithRenderer.rendererCallbacks.waitForNextFrameToRender() > 0);
}
mediaRecorder.stop();
// Remove MediaRecorder from camera pipeline.
final boolean[] removeMediaRecorderSuccessful = new boolean[1];
final CountDownLatch removeBarrier = new CountDownLatch(1);
CameraVideoCapturer.MediaRecorderHandler removeMediaRecorderHandler =
new CameraVideoCapturer.MediaRecorderHandler() {
@Override
public void onMediaRecorderSuccess() {
removeMediaRecorderSuccessful[0] = true;
removeBarrier.countDown();
}
@Override
public void onMediaRecorderError(String errorDescription) {
removeMediaRecorderSuccessful[0] = false;
removeBarrier.countDown();
}
};
capturerInstance.capturer.removeMediaRecorderFromCamera(removeMediaRecorderHandler);
// Wait until the MediaRecorder has been removed.
removeBarrier.await();
// Check result.
assertTrue(removeMediaRecorderSuccessful[0]);
// Ensure that frames are received after removing MediaRecorder.
assertTrue(videoTrackWithRenderer.rendererCallbacks.waitForNextFrameToRender() > 0);
// Check that recorded file contains some data.
assertTrue(outputFile.length() > 0);
disposeCapturer(capturerInstance);
disposeVideoTrackWithRenderer(videoTrackWithRenderer);
}
public void cameraEventsInvoked() throws InterruptedException {
final CapturerInstance capturerInstance = createCapturer(true /* initialize */);
startCapture(capturerInstance);
// Make sure camera is started and first frame is received and then stop it.
assertTrue(capturerInstance.observer.waitForCapturerToStart());
capturerInstance.observer.waitForNextCapturedFrame();
disposeCapturer(capturerInstance);
assertTrue(capturerInstance.cameraEvents.onCameraOpeningCalled);
assertTrue(capturerInstance.cameraEvents.onFirstFrameAvailableCalled);
}
public void cameraCallsAfterStop() throws InterruptedException {
final CapturerInstance capturerInstance = createCapturer(true /* initialize */);
startCapture(capturerInstance);
// Make sure camera is started and then stop it.
assertTrue(capturerInstance.observer.waitForCapturerToStart());
capturerInstance.capturer.stopCapture();
capturerInstance.observer.releaseFrame();
// We can't change |capturer| at this point, but we should not crash.
capturerInstance.capturer.switchCamera(null /* switchEventsHandler */);
capturerInstance.capturer.changeCaptureFormat(DEFAULT_WIDTH, DEFAULT_HEIGHT, DEFAULT_FPS);
disposeCapturer(capturerInstance);
}
public void stopRestartVideoSource() throws InterruptedException {
final CapturerInstance capturerInstance = createCapturer(false /* initialize */);
final VideoTrackWithRenderer videoTrackWithRenderer =
createVideoTrackWithRenderer(capturerInstance.capturer);
assertTrue(videoTrackWithRenderer.rendererCallbacks.waitForNextFrameToRender() > 0);
assertEquals(MediaSource.State.LIVE, videoTrackWithRenderer.source.state());
capturerInstance.capturer.stopCapture();
assertEquals(MediaSource.State.ENDED, videoTrackWithRenderer.source.state());
startCapture(capturerInstance);
assertTrue(videoTrackWithRenderer.rendererCallbacks.waitForNextFrameToRender() > 0);
assertEquals(MediaSource.State.LIVE, videoTrackWithRenderer.source.state());
disposeCapturer(capturerInstance);
disposeVideoTrackWithRenderer(videoTrackWithRenderer);
}
public void startStopWithDifferentResolutions() throws InterruptedException {
final CapturerInstance capturerInstance = createCapturer(true /* initialize */);
for (int i = 0; i < 3; ++i) {
startCapture(capturerInstance, i);
assertTrue(capturerInstance.observer.waitForCapturerToStart());
capturerInstance.observer.waitForNextCapturedFrame();
// Check the frame size. The actual width and height depend on how the capturer is mounted.
final boolean identicalResolution =
(capturerInstance.observer.frameWidth() == capturerInstance.format.width
&& capturerInstance.observer.frameHeight() == capturerInstance.format.height);
final boolean flippedResolution =
(capturerInstance.observer.frameWidth() == capturerInstance.format.height
&& capturerInstance.observer.frameHeight() == capturerInstance.format.width);
if (!identicalResolution && !flippedResolution) {
fail("Wrong resolution, got: " + capturerInstance.observer.frameWidth() + "x"
+ capturerInstance.observer.frameHeight() + " expected: "
+ capturerInstance.format.width + "x" + capturerInstance.format.height + " or "
+ capturerInstance.format.height + "x" + capturerInstance.format.width);
}
capturerInstance.capturer.stopCapture();
capturerInstance.observer.releaseFrame();
}
disposeCapturer(capturerInstance);
}
public void returnBufferLate() throws InterruptedException {
final CapturerInstance capturerInstance = createCapturer(true /* initialize */);
startCapture(capturerInstance);
assertTrue(capturerInstance.observer.waitForCapturerToStart());
capturerInstance.observer.waitForNextCapturedFrame();
capturerInstance.capturer.stopCapture();
List<Long> listOftimestamps = capturerInstance.observer.getCopyAndResetListOftimeStamps();
assertTrue(listOftimestamps.size() >= 1);
startCapture(capturerInstance, 1);
capturerInstance.observer.waitForCapturerToStart();
capturerInstance.observer.releaseFrame();
capturerInstance.observer.waitForNextCapturedFrame();
capturerInstance.capturer.stopCapture();
listOftimestamps = capturerInstance.observer.getCopyAndResetListOftimeStamps();
assertTrue(listOftimestamps.size() >= 1);
disposeCapturer(capturerInstance);
}
public void returnBufferLateEndToEnd() throws InterruptedException {
final CapturerInstance capturerInstance = createCapturer(false /* initialize */);
final VideoTrackWithRenderer videoTrackWithRenderer =
createVideoTrackWithFakeAsyncRenderer(capturerInstance.capturer);
// Wait for at least one frame that has not been returned.
assertFalse(videoTrackWithRenderer.fakeAsyncRenderer.waitForPendingFrames().isEmpty());
capturerInstance.capturer.stopCapture();
// Dispose everything.
disposeCapturer(capturerInstance);
disposeVideoTrackWithRenderer(videoTrackWithRenderer);
// Return the frame(s), on a different thread out of spite.
final List<I420Frame> pendingFrames =
videoTrackWithRenderer.fakeAsyncRenderer.waitForPendingFrames();
final Thread returnThread = new Thread(new Runnable() {
@Override
public void run() {
for (I420Frame frame : pendingFrames) {
VideoRenderer.renderFrameDone(frame);
}
}
});
returnThread.start();
returnThread.join();
}
public void cameraFreezedEventOnBufferStarvation() throws InterruptedException {
final CapturerInstance capturerInstance = createCapturer(true /* initialize */);
startCapture(capturerInstance);
// Make sure camera is started.
assertTrue(capturerInstance.observer.waitForCapturerToStart());
// Since we don't return the buffer, we should get a starvation message if we are
// capturing to a texture.
assertEquals("Camera failure. Client must return video buffers.",
capturerInstance.cameraEvents.waitForCameraFreezed());
capturerInstance.capturer.stopCapture();
disposeCapturer(capturerInstance);
}
public void scaleCameraOutput() throws InterruptedException {
final CapturerInstance capturerInstance = createCapturer(false /* initialize */);
final VideoTrackWithRenderer videoTrackWithRenderer =
createVideoTrackWithRenderer(capturerInstance.capturer);
assertTrue(videoTrackWithRenderer.rendererCallbacks.waitForNextFrameToRender() > 0);
final int startWidth = videoTrackWithRenderer.rendererCallbacks.frameWidth();
final int startHeight = videoTrackWithRenderer.rendererCallbacks.frameHeight();
final int frameRate = 30;
final int scaledWidth = startWidth / 2;
final int scaledHeight = startHeight / 2;
// Request the captured frames to be scaled.
videoTrackWithRenderer.source.adaptOutputFormat(scaledWidth, scaledHeight, frameRate);
boolean gotExpectedResolution = false;
int numberOfInspectedFrames = 0;
do {
videoTrackWithRenderer.rendererCallbacks.waitForNextFrameToRender();
++numberOfInspectedFrames;
gotExpectedResolution = (videoTrackWithRenderer.rendererCallbacks.frameWidth() == scaledWidth
&& videoTrackWithRenderer.rendererCallbacks.frameHeight() == scaledHeight);
} while (!gotExpectedResolution && numberOfInspectedFrames < 30);
disposeCapturer(capturerInstance);
disposeVideoTrackWithRenderer(videoTrackWithRenderer);
assertTrue(gotExpectedResolution);
}
public void startWhileCameraIsAlreadyOpen() throws InterruptedException {
final String cameraName = testObjectFactory.getNameOfBackFacingDevice();
// At this point the camera is not actually open yet.
final CapturerInstance capturerInstance = createCapturer(cameraName, true /* initialize */);
final Object competingCamera = testObjectFactory.rawOpenCamera(cameraName);
startCapture(capturerInstance);
if (android.os.Build.VERSION.SDK_INT > android.os.Build.VERSION_CODES.LOLLIPOP_MR1) {
// The first opened camera client will be evicted.
assertTrue(capturerInstance.observer.waitForCapturerToStart());
} else {
assertFalse(capturerInstance.observer.waitForCapturerToStart());
}
testObjectFactory.rawCloseCamera(competingCamera);
disposeCapturer(capturerInstance);
}
public void startWhileCameraIsAlreadyOpenAndCloseCamera() throws InterruptedException {
final String cameraName = testObjectFactory.getNameOfBackFacingDevice();
// At this point the camera is not actually open yet.
final CapturerInstance capturerInstance = createCapturer(cameraName, false /* initialize */);
Logging.d(TAG, "startWhileCameraIsAlreadyOpenAndCloseCamera: Opening competing camera.");
final Object competingCamera = testObjectFactory.rawOpenCamera(cameraName);
Logging.d(TAG, "startWhileCameraIsAlreadyOpenAndCloseCamera: Opening camera.");
final VideoTrackWithRenderer videoTrackWithRenderer =
createVideoTrackWithRenderer(capturerInstance.capturer);
waitUntilIdle(capturerInstance);
Logging.d(TAG, "startWhileCameraIsAlreadyOpenAndCloseCamera: Closing competing camera.");
testObjectFactory.rawCloseCamera(competingCamera);
// Make sure camera is started and first frame is received and then stop it.
Logging.d(TAG, "startWhileCameraIsAlreadyOpenAndCloseCamera: Waiting for capture to start.");
videoTrackWithRenderer.rendererCallbacks.waitForNextFrameToRender();
Logging.d(TAG, "startWhileCameraIsAlreadyOpenAndCloseCamera: Stopping capture.");
disposeCapturer(capturerInstance);
}
public void startWhileCameraIsAlreadyOpenAndStop() throws InterruptedException {
final String cameraName = testObjectFactory.getNameOfBackFacingDevice();
// At this point camera is not actually opened.
final CapturerInstance capturerInstance = createCapturer(cameraName, true /* initialize */);
final Object competingCamera = testObjectFactory.rawOpenCamera(cameraName);
startCapture(capturerInstance);
disposeCapturer(capturerInstance);
testObjectFactory.rawCloseCamera(competingCamera);
}
}

View File

@ -0,0 +1,338 @@
/*
* Copyright 2016 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import android.graphics.Bitmap;
import android.graphics.SurfaceTexture;
import android.opengl.GLES11Ext;
import android.opengl.GLES20;
import android.support.test.InstrumentationRegistry;
import android.support.test.filters.SmallTest;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import org.chromium.base.test.BaseJUnit4ClassRunner;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
// EmptyActivity is needed for the surface.
@RunWith(BaseJUnit4ClassRunner.class)
public class EglRendererTest {
final static String TAG = "EglRendererTest";
final static int RENDER_WAIT_MS = 1000;
final static int SURFACE_WAIT_MS = 1000;
final static int TEST_FRAME_WIDTH = 4;
final static int TEST_FRAME_HEIGHT = 4;
final static int REMOVE_FRAME_LISTENER_RACY_NUM_TESTS = 10;
// Some arbitrary frames.
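// Each entry is a 4x4 I420 frame: a 16-byte Y plane followed by 2x2 (4-byte) U and V planes.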
final static ByteBuffer[][] TEST_FRAMES = {
{
ByteBuffer.wrap(new byte[] {
11, -12, 13, -14, -15, 16, -17, 18, 19, -110, 111, -112, -113, 114, -115, 116}),
ByteBuffer.wrap(new byte[] {117, 118, 119, 120}),
ByteBuffer.wrap(new byte[] {121, 122, 123, 124}),
},
{
ByteBuffer.wrap(new byte[] {-11, -12, -13, -14, -15, -16, -17, -18, -19, -110, -111, -112,
-113, -114, -115, -116}),
ByteBuffer.wrap(new byte[] {-121, -122, -123, -124}),
ByteBuffer.wrap(new byte[] {-117, -118, -119, -120}),
},
{
ByteBuffer.wrap(new byte[] {-11, -12, -13, -14, -15, -16, -17, -18, -19, -110, -111, -112,
-113, -114, -115, -116}),
ByteBuffer.wrap(new byte[] {117, 118, 119, 120}),
ByteBuffer.wrap(new byte[] {121, 122, 123, 124}),
},
};
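// Stores the Bitmap delivered to onFrame(). onFrame() is invoked from the renderer's thread, so
// access to the stored bitmap is synchronized with the test thread waiting in waitForBitmap().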
private class TestFrameListener implements EglRenderer.FrameListener {
final private ArrayList<Bitmap> bitmaps = new ArrayList<Bitmap>();
boolean bitmapReceived;
Bitmap storedBitmap;
@Override
public synchronized void onFrame(Bitmap bitmap) {
if (bitmapReceived) {
fail("Unexpected bitmap was received.");
}
bitmapReceived = true;
storedBitmap = bitmap;
notify();
}
public synchronized boolean waitForBitmap(int timeoutMs) throws InterruptedException {
if (!bitmapReceived) {
wait(timeoutMs);
}
return bitmapReceived;
}
public synchronized Bitmap resetAndGetBitmap() {
bitmapReceived = false;
return storedBitmap;
}
}
final TestFrameListener testFrameListener = new TestFrameListener();
EglRenderer eglRenderer;
CountDownLatch surfaceReadyLatch = new CountDownLatch(1);
int oesTextureId;
SurfaceTexture surfaceTexture;
@Before
public void setUp() throws Exception {
PeerConnectionFactory.initializeAndroidGlobals(
InstrumentationRegistry.getTargetContext(), true /* videoHwAcceleration */);
eglRenderer = new EglRenderer("TestRenderer: ");
eglRenderer.init(null /* sharedContext */, EglBase.CONFIG_RGBA, new GlRectDrawer());
oesTextureId = GlUtil.generateTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES);
surfaceTexture = new SurfaceTexture(oesTextureId);
surfaceTexture.setDefaultBufferSize(1 /* width */, 1 /* height */);
eglRenderer.createEglSurface(surfaceTexture);
}
@After
public void tearDown() {
surfaceTexture.release();
GLES20.glDeleteTextures(1 /* n */, new int[] {oesTextureId}, 0 /* offset */);
eglRenderer.release();
}
/** Checks the bitmap is not null and the correct size. */
private static void checkBitmap(Bitmap bitmap, float scale) {
assertNotNull(bitmap);
assertEquals((int) (TEST_FRAME_WIDTH * scale), bitmap.getWidth());
assertEquals((int) (TEST_FRAME_HEIGHT * scale), bitmap.getHeight());
}
/**
* Does linear sampling on U/V plane of test data.
*
* @param data Plane data to be sampled from.
* @param planeWidth Width of the plane data. This is also assumed to be the stride.
* @param planeHeight Height of the plane data.
* @param x X-coordinate in range [0, 1].
* @param y Y-coordinate in range [0, 1].
*/
private static float linearSample(
ByteBuffer plane, int planeWidth, int planeHeight, float x, float y) {
final int stride = planeWidth;
final float coordX = x * planeWidth;
final float coordY = y * planeHeight;
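// Offsets of 0.5 sample at texel centers; the indices and weights below implement bilinear
// interpolation between the four nearest samples.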
int lowIndexX = (int) Math.floor(coordX - 0.5f);
int lowIndexY = (int) Math.floor(coordY - 0.5f);
int highIndexX = lowIndexX + 1;
int highIndexY = lowIndexY + 1;
final float highWeightX = coordX - lowIndexX - 0.5f;
final float highWeightY = coordY - lowIndexY - 0.5f;
final float lowWeightX = 1f - highWeightX;
final float lowWeightY = 1f - highWeightY;
// Clamp on the edges.
lowIndexX = Math.max(0, lowIndexX);
lowIndexY = Math.max(0, lowIndexY);
highIndexX = Math.min(planeWidth - 1, highIndexX);
highIndexY = Math.min(planeHeight - 1, highIndexY);
float lowYValue = (plane.get(lowIndexY * stride + lowIndexX) & 0xFF) * lowWeightX
+ (plane.get(lowIndexY * stride + highIndexX) & 0xFF) * highWeightX;
float highYValue = (plane.get(highIndexY * stride + lowIndexX) & 0xFF) * lowWeightX
+ (plane.get(highIndexY * stride + highIndexX) & 0xFF) * highWeightX;
return (lowWeightY * lowYValue + highWeightY * highYValue) / 255f;
}
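/** Clamps {@code c} to [0, 1] and converts it to an unsigned byte value in [0, 255]. */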
private static byte saturatedFloatToByte(float c) {
return (byte) Math.round(255f * Math.max(0f, Math.min(1f, c)));
}
/**
* Converts test data YUV frame to expected RGBA frame. Tries to match the behavior of OpenGL
* YUV drawer shader. Does linear sampling on the U- and V-planes.
*
* @param yuvFrame Array of size 3 containing Y-, U-, V-planes for image of size
* (TEST_FRAME_WIDTH, TEST_FRAME_HEIGHT). U- and V-planes should be half the size
* of the Y-plane.
*/
private static byte[] convertYUVFrameToRGBA(ByteBuffer[] yuvFrame) {
final byte[] argbFrame = new byte[TEST_FRAME_WIDTH * TEST_FRAME_HEIGHT * 4];
final int argbStride = TEST_FRAME_WIDTH * 4;
final int yStride = TEST_FRAME_WIDTH;
final int vStride = TEST_FRAME_WIDTH / 2;
for (int y = 0; y < TEST_FRAME_HEIGHT; y++) {
for (int x = 0; x < TEST_FRAME_WIDTH; x++) {
final int x2 = x / 2;
final int y2 = y / 2;
final float yC = (yuvFrame[0].get(y * yStride + x) & 0xFF) / 255f;
final float uC = linearSample(yuvFrame[1], TEST_FRAME_WIDTH / 2, TEST_FRAME_HEIGHT / 2,
(x + 0.5f) / TEST_FRAME_WIDTH, (y + 0.5f) / TEST_FRAME_HEIGHT)
- 0.5f;
final float vC = linearSample(yuvFrame[2], TEST_FRAME_WIDTH / 2, TEST_FRAME_HEIGHT / 2,
(x + 0.5f) / TEST_FRAME_WIDTH, (y + 0.5f) / TEST_FRAME_HEIGHT)
- 0.5f;
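// Same YUV -> RGB conversion constants as the GlRectDrawer fragment shader (see GlRectDrawerTest).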
final float rC = yC + 1.403f * vC;
final float gC = yC - 0.344f * uC - 0.714f * vC;
final float bC = yC + 1.77f * uC;
argbFrame[y * argbStride + x * 4 + 0] = saturatedFloatToByte(rC);
argbFrame[y * argbStride + x * 4 + 1] = saturatedFloatToByte(gC);
argbFrame[y * argbStride + x * 4 + 2] = saturatedFloatToByte(bC);
argbFrame[y * argbStride + x * 4 + 3] = (byte) 255;
}
}
return argbFrame;
}
/** Checks that the bitmap content matches the test frame with the given index. */
private static void checkBitmapContent(Bitmap bitmap, int frame) {
checkBitmap(bitmap, 1f);
byte[] expectedRGBA = convertYUVFrameToRGBA(TEST_FRAMES[frame]);
ByteBuffer bitmapBuffer = ByteBuffer.allocateDirect(bitmap.getByteCount());
bitmap.copyPixelsToBuffer(bitmapBuffer);
for (int i = 0; i < expectedRGBA.length; i++) {
int expected = expectedRGBA[i] & 0xFF;
int value = bitmapBuffer.get(i) & 0xFF;
// Due to unknown conversion differences, check that the value matches within +-1.
if (Math.abs(value - expected) > 1) {
Logging.d(TAG, "Expected bitmap content: " + Arrays.toString(expectedRGBA));
Logging.d(TAG, "Bitmap content: " + Arrays.toString(bitmapBuffer.array()));
fail("Frame doesn't match original frame on byte " + i + ". Expected: " + expected
+ " Result: " + value);
}
}
}
/** Tells eglRenderer to render test frame with given index. */
private void feedFrame(int i) {
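// I420Frame arguments: width, height, rotationDegree, yuvStrides, yuvPlanes, nativeFramePointer
// (unused here).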
eglRenderer.renderFrame(new VideoRenderer.I420Frame(TEST_FRAME_WIDTH, TEST_FRAME_HEIGHT, 0,
new int[] {TEST_FRAME_WIDTH, TEST_FRAME_WIDTH / 2, TEST_FRAME_WIDTH / 2}, TEST_FRAMES[i],
0));
}
@Test
@SmallTest
public void testAddFrameListener() throws Exception {
eglRenderer.addFrameListener(testFrameListener, 0f /* scaleFactor */);
feedFrame(0);
assertTrue(testFrameListener.waitForBitmap(RENDER_WAIT_MS));
assertNull(testFrameListener.resetAndGetBitmap());
eglRenderer.addFrameListener(testFrameListener, 0f /* scaleFactor */);
feedFrame(1);
assertTrue(testFrameListener.waitForBitmap(RENDER_WAIT_MS));
assertNull(testFrameListener.resetAndGetBitmap());
feedFrame(2);
// Check we get no more bitmaps than two.
assertFalse(testFrameListener.waitForBitmap(RENDER_WAIT_MS));
}
@Test
@SmallTest
public void testAddFrameListenerBitmap() throws Exception {
eglRenderer.addFrameListener(testFrameListener, 1f /* scaleFactor */);
feedFrame(0);
assertTrue(testFrameListener.waitForBitmap(RENDER_WAIT_MS));
checkBitmapContent(testFrameListener.resetAndGetBitmap(), 0);
eglRenderer.addFrameListener(testFrameListener, 1f /* scaleFactor */);
feedFrame(1);
assertTrue(testFrameListener.waitForBitmap(RENDER_WAIT_MS));
checkBitmapContent(testFrameListener.resetAndGetBitmap(), 1);
}
@Test
@SmallTest
public void testAddFrameListenerBitmapScale() throws Exception {
for (int i = 0; i < 3; ++i) {
float scale = i * 0.5f + 0.5f;
eglRenderer.addFrameListener(testFrameListener, scale);
feedFrame(i);
assertTrue(testFrameListener.waitForBitmap(RENDER_WAIT_MS));
checkBitmap(testFrameListener.resetAndGetBitmap(), scale);
}
}
/**
* Checks that the frame listener will not be called with a frame that was delivered before the
* frame listener was added.
*/
@Test
@SmallTest
public void testFrameListenerNotCalledWithOldFrames() throws Exception {
feedFrame(0);
eglRenderer.addFrameListener(testFrameListener, 0f);
// Check the old frame does not trigger frame listener.
assertFalse(testFrameListener.waitForBitmap(RENDER_WAIT_MS));
}
/** Checks that the frame listener will not be called after it is removed. */
@Test
@SmallTest
public void testRemoveFrameListenerNotRacy() throws Exception {
for (int i = 0; i < REMOVE_FRAME_LISTENER_RACY_NUM_TESTS; i++) {
feedFrame(0);
eglRenderer.addFrameListener(testFrameListener, 0f);
eglRenderer.removeFrameListener(testFrameListener);
feedFrame(1);
}
// Check the frame listener hasn't triggered.
assertFalse(testFrameListener.waitForBitmap(RENDER_WAIT_MS));
}
@Test
@SmallTest
public void testFrameListenersFpsReduction() throws Exception {
// Test that normal frame listeners receive frames while the renderer is paused.
eglRenderer.pauseVideo();
eglRenderer.addFrameListener(testFrameListener, 1f /* scaleFactor */);
feedFrame(0);
assertTrue(testFrameListener.waitForBitmap(RENDER_WAIT_MS));
checkBitmapContent(testFrameListener.resetAndGetBitmap(), 0);
// Test that frame listeners with FPS reduction applied receive frames while the renderer is not
// paused.
eglRenderer.disableFpsReduction();
eglRenderer.addFrameListener(
testFrameListener, 1f /* scaleFactor */, null, true /* applyFpsReduction */);
feedFrame(1);
assertTrue(testFrameListener.waitForBitmap(RENDER_WAIT_MS));
checkBitmapContent(testFrameListener.resetAndGetBitmap(), 1);
// Test that frame listeners with FPS reduction applied will not receive frames while the
// renderer is paused.
eglRenderer.pauseVideo();
eglRenderer.addFrameListener(
testFrameListener, 1f /* scaleFactor */, null, true /* applyFpsReduction */);
feedFrame(1);
assertFalse(testFrameListener.waitForBitmap(RENDER_WAIT_MS));
}
}

View File

@ -0,0 +1,120 @@
/*
* Copyright 2016 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import android.os.Environment;
import android.support.test.filters.LargeTest;
import android.support.test.filters.MediumTest;
import android.support.test.filters.SmallTest;
import java.io.IOException;
import java.lang.Thread;
import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.Arrays;
import org.chromium.base.test.BaseJUnit4ClassRunner;
import org.junit.Test;
import org.junit.runner.RunWith;
@RunWith(BaseJUnit4ClassRunner.class)
public class FileVideoCapturerTest {
private static class Frame {
public byte[] data;
public int width;
public int height;
}
public class MockCapturerObserver implements VideoCapturer.CapturerObserver {
private final ArrayList<Frame> frameDatas = new ArrayList<Frame>();
@Override
public void onCapturerStarted(boolean success) {
assertTrue(success);
}
@Override
public void onCapturerStopped() {
// Empty on purpose.
}
@Override
public synchronized void onByteBufferFrameCaptured(
byte[] data, int width, int height, int rotation, long timeStamp) {
Frame frame = new Frame();
frame.data = data;
frame.width = width;
frame.height = height;
assertTrue(data.length != 0);
frameDatas.add(frame);
notify();
}
@Override
public void onTextureFrameCaptured(int width, int height, int oesTextureId,
float[] transformMatrix, int rotation, long timestamp) {
// Empty on purpose.
}
@Override
public void onFrameCaptured(VideoFrame frame) {
// Empty on purpose.
}
public synchronized ArrayList<Frame> getMinimumFramesBlocking(int minFrames)
throws InterruptedException {
while (frameDatas.size() < minFrames) {
wait();
}
return new ArrayList<Frame>(frameDatas);
}
}
@Test
@SmallTest
public void testVideoCaptureFromFile() throws InterruptedException, IOException {
final int FRAME_WIDTH = 4;
final int FRAME_HEIGHT = 4;
final FileVideoCapturer fileVideoCapturer =
new FileVideoCapturer(Environment.getExternalStorageDirectory().getPath()
+ "/chromium_tests_root/webrtc/sdk/android/instrumentationtests/src/org/webrtc/"
+ "capturetestvideo.y4m");
final MockCapturerObserver capturerObserver = new MockCapturerObserver();
fileVideoCapturer.initialize(null, null, capturerObserver);
fileVideoCapturer.startCapture(FRAME_WIDTH, FRAME_HEIGHT, 33);
final String[] expectedFrames = {
"THIS IS JUST SOME TEXT x", "THE SECOND FRAME qwerty.", "HERE IS THE THRID FRAME!"};
final ArrayList<Frame> frameDatas;
frameDatas = capturerObserver.getMinimumFramesBlocking(expectedFrames.length);
assertEquals(expectedFrames.length, frameDatas.size());
fileVideoCapturer.stopCapture();
fileVideoCapturer.dispose();
for (int i = 0; i < expectedFrames.length; ++i) {
Frame frame = frameDatas.get(i);
assertEquals(FRAME_WIDTH, frame.width);
assertEquals(FRAME_HEIGHT, frame.height);
assertEquals(FRAME_WIDTH * FRAME_HEIGHT * 3 / 2, frame.data.length);
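// Each expected frame string is 24 ASCII bytes, i.e. one full 4x4 I420 frame; convert it to
// NV21 so it can be compared with the capturer output.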
byte[] expectedNV21Bytes = new byte[frame.data.length];
FileVideoCapturer.nativeI420ToNV21(expectedFrames[i].getBytes(Charset.forName("US-ASCII")),
FRAME_WIDTH, FRAME_HEIGHT, expectedNV21Bytes);
assertTrue(Arrays.equals(expectedNV21Bytes, frame.data));
}
}
}

View File

@ -0,0 +1,305 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import android.graphics.SurfaceTexture;
import android.opengl.GLES20;
import android.support.test.filters.MediumTest;
import android.support.test.filters.SmallTest;
import java.nio.ByteBuffer;
import java.util.Random;
import org.chromium.base.test.BaseJUnit4ClassRunner;
import org.junit.Test;
import org.junit.runner.RunWith;
@RunWith(BaseJUnit4ClassRunner.class)
public class GlRectDrawerTest {
// Resolution of the test image.
private static final int WIDTH = 16;
private static final int HEIGHT = 16;
// Seed for random pixel creation.
private static final int SEED = 42;
// When comparing pixels, allow some slack for float arithmetic and integer rounding.
private static final float MAX_DIFF = 1.5f;
private static float normalizedByte(byte b) {
return (b & 0xFF) / 255.0f;
}
private static float saturatedConvert(float c) {
return 255.0f * Math.max(0, Math.min(c, 1));
}
// Assert RGB ByteBuffers are pixel perfect identical.
private static void assertByteBufferEquals(
int width, int height, ByteBuffer actual, ByteBuffer expected) {
actual.rewind();
expected.rewind();
assertEquals(actual.remaining(), width * height * 3);
assertEquals(expected.remaining(), width * height * 3);
for (int y = 0; y < height; ++y) {
for (int x = 0; x < width; ++x) {
final int actualR = actual.get() & 0xFF;
final int actualG = actual.get() & 0xFF;
final int actualB = actual.get() & 0xFF;
final int expectedR = expected.get() & 0xFF;
final int expectedG = expected.get() & 0xFF;
final int expectedB = expected.get() & 0xFF;
if (actualR != expectedR || actualG != expectedG || actualB != expectedB) {
fail("ByteBuffers of size " + width + "x" + height + " not equal at position "
+ "(" + x + ", " + y + "). Expected color (R,G,B): "
+ "(" + expectedR + ", " + expectedG + ", " + expectedB + ")"
+ " but was: "
+ "(" + actualR + ", " + actualG + ", " + actualB + ").");
}
}
}
}
// Convert RGBA ByteBuffer to RGB ByteBuffer.
private static ByteBuffer stripAlphaChannel(ByteBuffer rgbaBuffer) {
rgbaBuffer.rewind();
assertEquals(rgbaBuffer.remaining() % 4, 0);
final int numberOfPixels = rgbaBuffer.remaining() / 4;
final ByteBuffer rgbBuffer = ByteBuffer.allocateDirect(numberOfPixels * 3);
while (rgbaBuffer.hasRemaining()) {
// Copy RGB.
for (int channel = 0; channel < 3; ++channel) {
rgbBuffer.put(rgbaBuffer.get());
}
// Drop alpha.
rgbaBuffer.get();
}
return rgbBuffer;
}
@Test
@SmallTest
public void testRgbRendering() {
// Create EGL base with a pixel buffer as display output.
final EglBase eglBase = EglBase.create(null, EglBase.CONFIG_PIXEL_BUFFER);
eglBase.createPbufferSurface(WIDTH, HEIGHT);
eglBase.makeCurrent();
// Create RGB byte buffer plane with random content.
final ByteBuffer rgbPlane = ByteBuffer.allocateDirect(WIDTH * HEIGHT * 3);
final Random random = new Random(SEED);
random.nextBytes(rgbPlane.array());
// Upload the RGB byte buffer data as a texture.
final int rgbTexture = GlUtil.generateTexture(GLES20.GL_TEXTURE_2D);
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, rgbTexture);
GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGB, WIDTH, HEIGHT, 0, GLES20.GL_RGB,
GLES20.GL_UNSIGNED_BYTE, rgbPlane);
GlUtil.checkNoGLES2Error("glTexImage2D");
// Draw the RGB frame onto the pixel buffer.
final GlRectDrawer drawer = new GlRectDrawer();
drawer.drawRgb(rgbTexture, RendererCommon.identityMatrix(), WIDTH, HEIGHT, 0 /* viewportX */,
0 /* viewportY */, WIDTH, HEIGHT);
// Download the pixels in the pixel buffer as RGBA. Not all platforms support RGB, e.g. Nexus 9.
final ByteBuffer rgbaData = ByteBuffer.allocateDirect(WIDTH * HEIGHT * 4);
GLES20.glReadPixels(0, 0, WIDTH, HEIGHT, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, rgbaData);
GlUtil.checkNoGLES2Error("glReadPixels");
// Assert rendered image is pixel perfect to source RGB.
assertByteBufferEquals(WIDTH, HEIGHT, stripAlphaChannel(rgbaData), rgbPlane);
drawer.release();
GLES20.glDeleteTextures(1, new int[] {rgbTexture}, 0);
eglBase.release();
}
@Test
@SmallTest
public void testYuvRendering() {
// Create EGL base with a pixel buffer as display output.
EglBase eglBase = EglBase.create(null, EglBase.CONFIG_PIXEL_BUFFER);
eglBase.createPbufferSurface(WIDTH, HEIGHT);
eglBase.makeCurrent();
// Create YUV byte buffer planes with random content.
final ByteBuffer[] yuvPlanes = new ByteBuffer[3];
final Random random = new Random(SEED);
for (int i = 0; i < 3; ++i) {
yuvPlanes[i] = ByteBuffer.allocateDirect(WIDTH * HEIGHT);
random.nextBytes(yuvPlanes[i].array());
}
// Generate 3 texture ids for Y/U/V.
final int yuvTextures[] = new int[3];
for (int i = 0; i < 3; i++) {
yuvTextures[i] = GlUtil.generateTexture(GLES20.GL_TEXTURE_2D);
}
// Upload the YUV byte buffer data as textures.
for (int i = 0; i < 3; ++i) {
GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + i);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, yuvTextures[i]);
GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE, WIDTH, HEIGHT, 0,
GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE, yuvPlanes[i]);
GlUtil.checkNoGLES2Error("glTexImage2D");
}
// Draw the YUV frame onto the pixel buffer.
final GlRectDrawer drawer = new GlRectDrawer();
drawer.drawYuv(yuvTextures, RendererCommon.identityMatrix(), WIDTH, HEIGHT, 0 /* viewportX */,
0 /* viewportY */, WIDTH, HEIGHT);
// Download the pixels in the pixel buffer as RGBA. Not all platforms support RGB, e.g. Nexus 9.
final ByteBuffer data = ByteBuffer.allocateDirect(WIDTH * HEIGHT * 4);
GLES20.glReadPixels(0, 0, WIDTH, HEIGHT, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, data);
GlUtil.checkNoGLES2Error("glReadPixels");
// Compare the YUV data with the RGBA result.
for (int y = 0; y < HEIGHT; ++y) {
for (int x = 0; x < WIDTH; ++x) {
// YUV color space. Y in [0, 1], UV in [-0.5, 0.5]. The constants are taken from the YUV
// fragment shader code in GlRectDrawer.
final float y_luma = normalizedByte(yuvPlanes[0].get());
final float u_chroma = normalizedByte(yuvPlanes[1].get()) - 0.5f;
final float v_chroma = normalizedByte(yuvPlanes[2].get()) - 0.5f;
// Expected color in unrounded RGB [0.0f, 255.0f].
final float expectedRed = saturatedConvert(y_luma + 1.403f * v_chroma);
final float expectedGreen =
saturatedConvert(y_luma - 0.344f * u_chroma - 0.714f * v_chroma);
final float expectedBlue = saturatedConvert(y_luma + 1.77f * u_chroma);
// Actual color in RGB8888.
final int actualRed = data.get() & 0xFF;
final int actualGreen = data.get() & 0xFF;
final int actualBlue = data.get() & 0xFF;
final int actualAlpha = data.get() & 0xFF;
// Assert rendered image is close to pixel perfect from source YUV.
assertTrue(Math.abs(actualRed - expectedRed) < MAX_DIFF);
assertTrue(Math.abs(actualGreen - expectedGreen) < MAX_DIFF);
assertTrue(Math.abs(actualBlue - expectedBlue) < MAX_DIFF);
assertEquals(actualAlpha, 255);
}
}
drawer.release();
GLES20.glDeleteTextures(3, yuvTextures, 0);
eglBase.release();
}
/**
* The purpose here is to test GlRectDrawer.oesDraw(). Unfortunately, there is no easy way to
* create an OES texture, which is needed for input to oesDraw(). Most of the test is concerned
* with creating OES textures in the following way:
* - Create SurfaceTexture with help from SurfaceTextureHelper.
* - Create an EglBase with the SurfaceTexture as EGLSurface.
* - Upload RGB texture with known content.
* - Draw the RGB texture onto the EglBase with the SurfaceTexture as target.
* - Wait for an OES texture to be produced.
* The actual oesDraw() test is this:
* - Create an EglBase with a pixel buffer as target.
* - Render the OES texture onto the pixel buffer.
* - Read back the pixel buffer and compare it with the known RGB data.
*/
@Test
@MediumTest
public void testOesRendering() throws InterruptedException {
/**
* Stub class to convert RGB ByteBuffers to OES textures by drawing onto a SurfaceTexture.
*/
class StubOesTextureProducer {
private final EglBase eglBase;
private final GlRectDrawer drawer;
private final int rgbTexture;
public StubOesTextureProducer(
EglBase.Context sharedContext, SurfaceTexture surfaceTexture, int width, int height) {
eglBase = EglBase.create(sharedContext, EglBase.CONFIG_PLAIN);
surfaceTexture.setDefaultBufferSize(width, height);
eglBase.createSurface(surfaceTexture);
assertEquals(eglBase.surfaceWidth(), width);
assertEquals(eglBase.surfaceHeight(), height);
drawer = new GlRectDrawer();
eglBase.makeCurrent();
rgbTexture = GlUtil.generateTexture(GLES20.GL_TEXTURE_2D);
}
public void draw(ByteBuffer rgbPlane) {
eglBase.makeCurrent();
// Upload RGB data to texture.
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, rgbTexture);
GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGB, WIDTH, HEIGHT, 0, GLES20.GL_RGB,
GLES20.GL_UNSIGNED_BYTE, rgbPlane);
// Draw the RGB data onto the SurfaceTexture.
drawer.drawRgb(rgbTexture, RendererCommon.identityMatrix(), WIDTH, HEIGHT,
0 /* viewportX */, 0 /* viewportY */, WIDTH, HEIGHT);
eglBase.swapBuffers();
}
public void release() {
eglBase.makeCurrent();
drawer.release();
GLES20.glDeleteTextures(1, new int[] {rgbTexture}, 0);
eglBase.release();
}
}
// Create EGL base with a pixel buffer as display output.
final EglBase eglBase = EglBase.create(null, EglBase.CONFIG_PIXEL_BUFFER);
eglBase.createPbufferSurface(WIDTH, HEIGHT);
// Create resources for generating OES textures.
final SurfaceTextureHelper surfaceTextureHelper = SurfaceTextureHelper.create(
"SurfaceTextureHelper test" /* threadName */, eglBase.getEglBaseContext());
final StubOesTextureProducer oesProducer = new StubOesTextureProducer(
eglBase.getEglBaseContext(), surfaceTextureHelper.getSurfaceTexture(), WIDTH, HEIGHT);
final SurfaceTextureHelperTest.MockTextureListener listener =
new SurfaceTextureHelperTest.MockTextureListener();
surfaceTextureHelper.startListening(listener);
// Create RGB byte buffer plane with random content.
final ByteBuffer rgbPlane = ByteBuffer.allocateDirect(WIDTH * HEIGHT * 3);
final Random random = new Random(SEED);
random.nextBytes(rgbPlane.array());
// Draw the frame and block until an OES texture is delivered.
oesProducer.draw(rgbPlane);
listener.waitForNewFrame();
// Real test starts here.
// Draw the OES texture on the pixel buffer.
eglBase.makeCurrent();
final GlRectDrawer drawer = new GlRectDrawer();
drawer.drawOes(listener.oesTextureId, listener.transformMatrix, WIDTH, HEIGHT,
0 /* viewportX */, 0 /* viewportY */, WIDTH, HEIGHT);
// Download the pixels in the pixel buffer as RGBA. Not all platforms support RGB, e.g. Nexus 9.
final ByteBuffer rgbaData = ByteBuffer.allocateDirect(WIDTH * HEIGHT * 4);
GLES20.glReadPixels(0, 0, WIDTH, HEIGHT, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, rgbaData);
GlUtil.checkNoGLES2Error("glReadPixels");
// Assert rendered image is pixel perfect to source RGB.
assertByteBufferEquals(WIDTH, HEIGHT, stripAlphaChannel(rgbaData), rgbPlane);
drawer.release();
surfaceTextureHelper.returnTextureFrame();
oesProducer.release();
surfaceTextureHelper.dispose();
eglBase.release();
}
}

View File

@ -0,0 +1,233 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import android.annotation.TargetApi;
import android.graphics.Matrix;
import android.support.test.filters.MediumTest;
import android.util.Log;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.atomic.AtomicReference;
import org.chromium.base.test.BaseJUnit4ClassRunner;
import org.junit.Test;
import org.junit.runner.RunWith;
/** Unit tests for {@link HardwareVideoDecoder}. */
@TargetApi(16)
@RunWith(BaseJUnit4ClassRunner.class)
public final class HardwareVideoDecoderTest {
private static final String TAG = "HardwareVideoDecoderTest";
private static final boolean ENABLE_INTEL_VP8_ENCODER = true;
private static final boolean ENABLE_H264_HIGH_PROFILE = true;
private static final VideoDecoder.Settings SETTINGS =
new VideoDecoder.Settings(1 /* core */, 640 /* width */, 480 /* height */);
@Test
@MediumTest
public void testInitialize() {
HardwareVideoEncoderFactory encoderFactory =
new HardwareVideoEncoderFactory(ENABLE_INTEL_VP8_ENCODER, ENABLE_H264_HIGH_PROFILE);
VideoCodecInfo[] supportedCodecs = encoderFactory.getSupportedCodecs();
if (supportedCodecs.length == 0) {
Log.i(TAG, "No hardware encoding support, skipping testInitialize");
return;
}
HardwareVideoDecoderFactory decoderFactory = new HardwareVideoDecoderFactory(null);
VideoDecoder decoder = decoderFactory.createDecoder(supportedCodecs[0].name);
assertEquals(decoder.initDecode(SETTINGS, null), VideoCodecStatus.OK);
assertEquals(decoder.release(), VideoCodecStatus.OK);
}
@Test
@MediumTest
public void testInitializeUsingTextures() {
HardwareVideoEncoderFactory encoderFactory =
new HardwareVideoEncoderFactory(ENABLE_INTEL_VP8_ENCODER, ENABLE_H264_HIGH_PROFILE);
VideoCodecInfo[] supportedCodecs = encoderFactory.getSupportedCodecs();
if (supportedCodecs.length == 0) {
Log.i(TAG, "No hardware encoding support, skipping testInitialize");
return;
}
EglBase14 eglBase = new EglBase14(null, EglBase.CONFIG_PLAIN);
HardwareVideoDecoderFactory decoderFactory =
new HardwareVideoDecoderFactory(eglBase.getEglBaseContext());
VideoDecoder decoder = decoderFactory.createDecoder(supportedCodecs[0].name);
assertEquals(decoder.initDecode(SETTINGS, null), VideoCodecStatus.OK);
assertEquals(decoder.release(), VideoCodecStatus.OK);
eglBase.release();
}
@Test
@MediumTest
public void testDecode() throws InterruptedException {
HardwareVideoEncoderFactory encoderFactory =
new HardwareVideoEncoderFactory(ENABLE_INTEL_VP8_ENCODER, ENABLE_H264_HIGH_PROFILE);
VideoCodecInfo[] supportedCodecs = encoderFactory.getSupportedCodecs();
if (supportedCodecs.length == 0) {
Log.i(TAG, "No hardware encoding support, skipping testEncodeYuvBuffer");
return;
}
// Set up the decoder.
HardwareVideoDecoderFactory decoderFactory = new HardwareVideoDecoderFactory(null);
VideoDecoder decoder = decoderFactory.createDecoder(supportedCodecs[0].name);
final long presentationTimestampUs = 20000;
final int rotation = 270;
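// VideoFrame timestamps are in nanoseconds, so presentationTimestampUs is multiplied by 1000 below.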
final CountDownLatch decodeDone = new CountDownLatch(1);
final AtomicReference<VideoFrame> decoded = new AtomicReference<>();
VideoDecoder.Callback decodeCallback = new VideoDecoder.Callback() {
@Override
public void onDecodedFrame(VideoFrame frame, Integer decodeTimeMs, Integer qp) {
frame.retain();
decoded.set(frame);
decodeDone.countDown();
}
};
assertEquals(decoder.initDecode(SETTINGS, decodeCallback), VideoCodecStatus.OK);
// Set up an encoder to produce a valid encoded frame.
VideoEncoder encoder = encoderFactory.createEncoder(supportedCodecs[0]);
final CountDownLatch encodeDone = new CountDownLatch(1);
final AtomicReference<EncodedImage> encoded = new AtomicReference<>();
VideoEncoder.Callback encodeCallback = new VideoEncoder.Callback() {
@Override
public void onEncodedFrame(EncodedImage image, VideoEncoder.CodecSpecificInfo info) {
encoded.set(image);
encodeDone.countDown();
}
};
assertEquals(encoder.initEncode(new VideoEncoder.Settings(1, SETTINGS.width, SETTINGS.height,
300, 30, true /* automaticResizeOn */),
encodeCallback),
VideoCodecStatus.OK);
// First, encode a frame.
VideoFrame.I420Buffer buffer = I420BufferImpl.allocate(SETTINGS.width, SETTINGS.height);
VideoFrame frame = new VideoFrame(buffer, rotation, presentationTimestampUs * 1000);
VideoEncoder.EncodeInfo info = new VideoEncoder.EncodeInfo(
new EncodedImage.FrameType[] {EncodedImage.FrameType.VideoFrameKey});
assertEquals(encoder.encode(frame, info), VideoCodecStatus.OK);
ThreadUtils.awaitUninterruptibly(encodeDone);
// Now decode the frame.
assertEquals(
decoder.decode(encoded.get(), new VideoDecoder.DecodeInfo(false, 0)), VideoCodecStatus.OK);
ThreadUtils.awaitUninterruptibly(decodeDone);
frame = decoded.get();
assertEquals(frame.getRotation(), rotation);
assertEquals(frame.getTimestampNs(), presentationTimestampUs * 1000);
assertEquals(frame.getBuffer().getWidth(), SETTINGS.width);
assertEquals(frame.getBuffer().getHeight(), SETTINGS.height);
frame.release();
assertEquals(decoder.release(), VideoCodecStatus.OK);
assertEquals(encoder.release(), VideoCodecStatus.OK);
}
@Test
@MediumTest
public void testDecodeUsingTextures() throws InterruptedException {
HardwareVideoEncoderFactory encoderFactory =
new HardwareVideoEncoderFactory(ENABLE_INTEL_VP8_ENCODER, ENABLE_H264_HIGH_PROFILE);
VideoCodecInfo[] supportedCodecs = encoderFactory.getSupportedCodecs();
if (supportedCodecs.length == 0) {
Log.i(TAG, "No hardware encoding support, skipping testEncodeYuvBuffer");
return;
}
// Set up the decoder.
EglBase14 eglBase = new EglBase14(null, EglBase.CONFIG_PLAIN);
HardwareVideoDecoderFactory decoderFactory =
new HardwareVideoDecoderFactory(eglBase.getEglBaseContext());
VideoDecoder decoder = decoderFactory.createDecoder(supportedCodecs[0].name);
final long presentationTimestampUs = 20000;
final int rotation = 270;
final CountDownLatch decodeDone = new CountDownLatch(1);
final AtomicReference<VideoFrame> decoded = new AtomicReference<>();
VideoDecoder.Callback decodeCallback = new VideoDecoder.Callback() {
@Override
public void onDecodedFrame(VideoFrame frame, Integer decodeTimeMs, Integer qp) {
frame.retain();
decoded.set(frame);
decodeDone.countDown();
}
};
assertEquals(decoder.initDecode(SETTINGS, decodeCallback), VideoCodecStatus.OK);
// Set up an encoder to produce a valid encoded frame.
VideoEncoder encoder = encoderFactory.createEncoder(supportedCodecs[0]);
final CountDownLatch encodeDone = new CountDownLatch(1);
final AtomicReference<EncodedImage> encoded = new AtomicReference<>();
VideoEncoder.Callback encodeCallback = new VideoEncoder.Callback() {
@Override
public void onEncodedFrame(EncodedImage image, VideoEncoder.CodecSpecificInfo info) {
encoded.set(image);
encodeDone.countDown();
}
};
assertEquals(encoder.initEncode(new VideoEncoder.Settings(1, SETTINGS.width, SETTINGS.height,
300, 30, true /* automaticResizeOn */),
encodeCallback),
VideoCodecStatus.OK);
// First, encode a frame.
VideoFrame.I420Buffer buffer = I420BufferImpl.allocate(SETTINGS.width, SETTINGS.height);
VideoFrame frame = new VideoFrame(buffer, rotation, presentationTimestampUs * 1000);
VideoEncoder.EncodeInfo info = new VideoEncoder.EncodeInfo(
new EncodedImage.FrameType[] {EncodedImage.FrameType.VideoFrameKey});
assertEquals(encoder.encode(frame, info), VideoCodecStatus.OK);
ThreadUtils.awaitUninterruptibly(encodeDone);
// Now decode the frame.
assertEquals(
decoder.decode(encoded.get(), new VideoDecoder.DecodeInfo(false, 0)), VideoCodecStatus.OK);
ThreadUtils.awaitUninterruptibly(decodeDone);
frame = decoded.get();
assertEquals(frame.getRotation(), rotation);
assertEquals(frame.getTimestampNs(), presentationTimestampUs * 1000);
assertTrue(frame.getBuffer() instanceof VideoFrame.TextureBuffer);
VideoFrame.TextureBuffer textureBuffer = (VideoFrame.TextureBuffer) frame.getBuffer();
// TODO(mellem): Compare the matrix to whatever we expect to get back?
assertNotNull(textureBuffer.getTransformMatrix());
assertEquals(textureBuffer.getWidth(), SETTINGS.width);
assertEquals(textureBuffer.getHeight(), SETTINGS.height);
assertEquals(textureBuffer.getType(), VideoFrame.TextureBuffer.Type.OES);
assertEquals(decoder.release(), VideoCodecStatus.OK);
assertEquals(encoder.release(), VideoCodecStatus.OK);
frame.release();
eglBase.release();
}
}

View File

@ -0,0 +1,212 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import android.annotation.TargetApi;
import android.graphics.Matrix;
import android.opengl.GLES11Ext;
import android.opengl.GLES20;
import android.support.test.filters.SmallTest;
import android.util.Log;
import java.nio.ByteBuffer;
import java.util.concurrent.CountDownLatch;
import org.chromium.base.test.BaseJUnit4ClassRunner;
import org.junit.Test;
import org.junit.runner.RunWith;
@TargetApi(16)
@RunWith(BaseJUnit4ClassRunner.class)
public class HardwareVideoEncoderTest {
final static String TAG = "MediaCodecVideoEncoderTest";
private static final boolean ENABLE_INTEL_VP8_ENCODER = true;
private static final boolean ENABLE_H264_HIGH_PROFILE = true;
private static final VideoEncoder.Settings SETTINGS =
new VideoEncoder.Settings(1 /* core */, 640 /* width */, 480 /* height */, 300 /* kbps */,
30 /* fps */, true /* automaticResizeOn */);
@Test
@SmallTest
public void testInitializeUsingYuvBuffer() {
HardwareVideoEncoderFactory factory =
new HardwareVideoEncoderFactory(ENABLE_INTEL_VP8_ENCODER, ENABLE_H264_HIGH_PROFILE);
VideoCodecInfo[] supportedCodecs = factory.getSupportedCodecs();
if (supportedCodecs.length == 0) {
Log.w(TAG, "No hardware encoding support, skipping testInitializeUsingYuvBuffer");
return;
}
VideoEncoder encoder = factory.createEncoder(supportedCodecs[0]);
assertEquals(VideoCodecStatus.OK, encoder.initEncode(SETTINGS, null));
assertEquals(VideoCodecStatus.OK, encoder.release());
}
@Test
@SmallTest
public void testInitializeUsingTextures() {
EglBase14 eglBase = new EglBase14(null, EglBase.CONFIG_PLAIN);
HardwareVideoEncoderFactory factory = new HardwareVideoEncoderFactory(
eglBase.getEglBaseContext(), ENABLE_INTEL_VP8_ENCODER, ENABLE_H264_HIGH_PROFILE);
VideoCodecInfo[] supportedCodecs = factory.getSupportedCodecs();
if (supportedCodecs.length == 0) {
Log.w(TAG, "No hardware encoding support, skipping testInitializeUsingTextures");
return;
}
VideoEncoder encoder = factory.createEncoder(supportedCodecs[0]);
assertEquals(VideoCodecStatus.OK, encoder.initEncode(SETTINGS, null));
assertEquals(VideoCodecStatus.OK, encoder.release());
eglBase.release();
}
@Test
@SmallTest
public void testEncodeYuvBuffer() throws InterruptedException {
HardwareVideoEncoderFactory factory =
new HardwareVideoEncoderFactory(ENABLE_INTEL_VP8_ENCODER, ENABLE_H264_HIGH_PROFILE);
VideoCodecInfo[] supportedCodecs = factory.getSupportedCodecs();
if (supportedCodecs.length == 0) {
Log.w(TAG, "No hardware encoding support, skipping testEncodeYuvBuffer");
return;
}
VideoEncoder encoder = factory.createEncoder(supportedCodecs[0]);
final long presentationTimestampNs = 20000;
final CountDownLatch encodeDone = new CountDownLatch(1);
VideoEncoder.Callback callback = new VideoEncoder.Callback() {
@Override
public void onEncodedFrame(EncodedImage image, VideoEncoder.CodecSpecificInfo info) {
assertTrue(image.buffer.capacity() > 0);
assertEquals(image.encodedWidth, SETTINGS.width);
assertEquals(image.encodedHeight, SETTINGS.height);
assertEquals(image.captureTimeNs, presentationTimestampNs);
assertEquals(image.frameType, EncodedImage.FrameType.VideoFrameKey);
assertEquals(image.rotation, 0);
assertTrue(image.completeFrame);
encodeDone.countDown();
}
};
assertEquals(encoder.initEncode(SETTINGS, callback), VideoCodecStatus.OK);
VideoFrame.I420Buffer buffer = I420BufferImpl.allocate(SETTINGS.width, SETTINGS.height);
VideoFrame frame = new VideoFrame(buffer, 0 /* rotation */, presentationTimestampNs);
VideoEncoder.EncodeInfo info = new VideoEncoder.EncodeInfo(
new EncodedImage.FrameType[] {EncodedImage.FrameType.VideoFrameKey});
assertEquals(encoder.encode(frame, info), VideoCodecStatus.OK);
ThreadUtils.awaitUninterruptibly(encodeDone);
assertEquals(encoder.release(), VideoCodecStatus.OK);
}
@Test
@SmallTest
public void testEncodeTextures() throws InterruptedException {
final EglBase14 eglOesBase = new EglBase14(null, EglBase.CONFIG_PIXEL_BUFFER);
HardwareVideoEncoderFactory factory = new HardwareVideoEncoderFactory(
eglOesBase.getEglBaseContext(), ENABLE_INTEL_VP8_ENCODER, ENABLE_H264_HIGH_PROFILE);
VideoCodecInfo[] supportedCodecs = factory.getSupportedCodecs();
if (supportedCodecs.length == 0) {
Log.w(TAG, "No hardware encoding support, skipping testEncodeTextures");
return;
}
eglOesBase.createDummyPbufferSurface();
eglOesBase.makeCurrent();
final int oesTextureId = GlUtil.generateTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES);
VideoEncoder encoder = factory.createEncoder(supportedCodecs[0]);
final long presentationTimestampNs = 20000;
final CountDownLatch encodeDone = new CountDownLatch(1);
VideoEncoder.Callback callback = new VideoEncoder.Callback() {
@Override
public void onEncodedFrame(EncodedImage image, VideoEncoder.CodecSpecificInfo info) {
assertTrue(image.buffer.capacity() > 0);
assertEquals(image.encodedWidth, SETTINGS.width);
assertEquals(image.encodedHeight, SETTINGS.height);
assertEquals(image.captureTimeNs, presentationTimestampNs);
assertEquals(image.frameType, EncodedImage.FrameType.VideoFrameKey);
assertEquals(image.rotation, 0);
assertTrue(image.completeFrame);
encodeDone.countDown();
}
};
assertEquals(encoder.initEncode(SETTINGS, callback), VideoCodecStatus.OK);
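// Minimal TextureBuffer stub: texture encoding in this test only needs the OES texture id, the
// frame size and a transform matrix; toI420() and cropAndScale() are not expected to be called.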
VideoFrame.TextureBuffer buffer = new VideoFrame.TextureBuffer() {
@Override
public VideoFrame.TextureBuffer.Type getType() {
return VideoFrame.TextureBuffer.Type.OES;
}
@Override
public int getTextureId() {
return oesTextureId;
}
@Override
public Matrix getTransformMatrix() {
return new Matrix();
}
@Override
public int getWidth() {
return SETTINGS.width;
}
@Override
public int getHeight() {
return SETTINGS.height;
}
@Override
public VideoFrame.I420Buffer toI420() {
return null;
}
@Override
public void retain() {}
@Override
public void release() {}
@Override
public VideoFrame.Buffer cropAndScale(
int cropX, int cropY, int cropWidth, int cropHeight, int scaleWidth, int scaleHeight) {
return null;
}
};
VideoFrame frame = new VideoFrame(buffer, 0 /* rotation */, presentationTimestampNs);
VideoEncoder.EncodeInfo info = new VideoEncoder.EncodeInfo(
new EncodedImage.FrameType[] {EncodedImage.FrameType.VideoFrameKey});
assertEquals(encoder.encode(frame, info), VideoCodecStatus.OK);
GlUtil.checkNoGLES2Error("encodeTexture");
// It should be Ok to delete the texture after calling encodeTexture.
GLES20.glDeleteTextures(1, new int[] {oesTextureId}, 0);
ThreadUtils.awaitUninterruptibly(encodeDone);
assertEquals(encoder.release(), VideoCodecStatus.OK);
eglOesBase.release();
}
}

View File

@ -0,0 +1,170 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import android.annotation.TargetApi;
import android.opengl.GLES11Ext;
import android.opengl.GLES20;
import android.os.Build;
import android.support.test.filters.SmallTest;
import android.util.Log;
import java.nio.ByteBuffer;
import org.chromium.base.test.BaseJUnit4ClassRunner;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.webrtc.MediaCodecVideoEncoder.OutputBufferInfo;
@TargetApi(Build.VERSION_CODES.ICE_CREAM_SANDWICH_MR1)
@RunWith(BaseJUnit4ClassRunner.class)
public class MediaCodecVideoEncoderTest {
final static String TAG = "MediaCodecVideoEncoderTest";
final static int profile = MediaCodecVideoEncoder.H264Profile.CONSTRAINED_BASELINE.getValue();
@Test
@SmallTest
public void testInitializeUsingByteBuffer() {
if (!MediaCodecVideoEncoder.isVp8HwSupported()) {
Log.i(TAG, "Hardware does not support VP8 encoding, skipping testInitReleaseUsingByteBuffer");
return;
}
MediaCodecVideoEncoder encoder = new MediaCodecVideoEncoder();
assertTrue(encoder.initEncode(
MediaCodecVideoEncoder.VideoCodecType.VIDEO_CODEC_VP8, profile, 640, 480, 300, 30, null));
encoder.release();
}
@Test
@SmallTest
public void testInitializeUsingTextures() {
if (!MediaCodecVideoEncoder.isVp8HwSupportedUsingTextures()) {
Log.i(TAG, "Hardware does not support VP8 encoding, skipping testInitializeUsingTextures");
return;
}
EglBase14 eglBase = new EglBase14(null, EglBase.CONFIG_PLAIN);
MediaCodecVideoEncoder encoder = new MediaCodecVideoEncoder();
assertTrue(encoder.initEncode(MediaCodecVideoEncoder.VideoCodecType.VIDEO_CODEC_VP8, profile,
640, 480, 300, 30, eglBase.getEglBaseContext()));
encoder.release();
eglBase.release();
}
@Test
@SmallTest
public void testInitializeUsingByteBufferReInitializeUsingTextures() {
if (!MediaCodecVideoEncoder.isVp8HwSupportedUsingTextures()) {
Log.i(TAG, "Hardware does not support VP8 encoding, skipping "
+ "testInitializeUsingByteBufferReInitializeUsingTextures");
return;
}
MediaCodecVideoEncoder encoder = new MediaCodecVideoEncoder();
assertTrue(encoder.initEncode(
MediaCodecVideoEncoder.VideoCodecType.VIDEO_CODEC_VP8, profile, 640, 480, 300, 30, null));
encoder.release();
EglBase14 eglBase = new EglBase14(null, EglBase.CONFIG_PLAIN);
assertTrue(encoder.initEncode(MediaCodecVideoEncoder.VideoCodecType.VIDEO_CODEC_VP8, profile,
640, 480, 300, 30, eglBase.getEglBaseContext()));
encoder.release();
eglBase.release();
}
@Test
@SmallTest
public void testEncoderUsingByteBuffer() throws InterruptedException {
if (!MediaCodecVideoEncoder.isVp8HwSupported()) {
Log.i(TAG, "Hardware does not support VP8 encoding, skipping testEncoderUsingByteBuffer");
return;
}
final int width = 640;
final int height = 480;
final int min_size = width * height * 3 / 2;
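// Minimum input buffer size for one I420 frame: width * height * 3 / 2 bytes.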
final long presentationTimestampUs = 2;
MediaCodecVideoEncoder encoder = new MediaCodecVideoEncoder();
assertTrue(encoder.initEncode(MediaCodecVideoEncoder.VideoCodecType.VIDEO_CODEC_VP8, profile,
width, height, 300, 30, null));
ByteBuffer[] inputBuffers = encoder.getInputBuffers();
assertNotNull(inputBuffers);
assertTrue(min_size <= inputBuffers[0].capacity());
int bufferIndex;
do {
Thread.sleep(10);
bufferIndex = encoder.dequeueInputBuffer();
} while (bufferIndex == -1); // |-1| is returned when there is no buffer available yet.
assertTrue(bufferIndex >= 0);
assertTrue(bufferIndex < inputBuffers.length);
assertTrue(encoder.encodeBuffer(true, bufferIndex, min_size, presentationTimestampUs));
OutputBufferInfo info;
do {
info = encoder.dequeueOutputBuffer();
Thread.sleep(10);
} while (info == null);
assertTrue(info.index >= 0);
assertEquals(presentationTimestampUs, info.presentationTimestampUs);
assertTrue(info.buffer.capacity() > 0);
encoder.releaseOutputBuffer(info.index);
encoder.release();
}
@Test
@SmallTest
public void testEncoderUsingTextures() throws InterruptedException {
if (!MediaCodecVideoEncoder.isVp8HwSupportedUsingTextures()) {
Log.i(TAG, "Hardware does not support VP8 encoding, skipping testEncoderUsingTextures");
return;
}
final int width = 640;
final int height = 480;
final long presentationTs = 2;
final EglBase14 eglOesBase = new EglBase14(null, EglBase.CONFIG_PIXEL_BUFFER);
eglOesBase.createDummyPbufferSurface();
eglOesBase.makeCurrent();
int oesTextureId = GlUtil.generateTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES);
// TODO(perkj): This test is weak since we don't fill the texture with valid data with correct
// width and height and verify the encoded data. Fill the OES texture and figure out a way to
// verify that the output makes sense.
MediaCodecVideoEncoder encoder = new MediaCodecVideoEncoder();
assertTrue(encoder.initEncode(MediaCodecVideoEncoder.VideoCodecType.VIDEO_CODEC_VP8, profile,
width, height, 300, 30, eglOesBase.getEglBaseContext()));
assertTrue(
encoder.encodeTexture(true, oesTextureId, RendererCommon.identityMatrix(), presentationTs));
GlUtil.checkNoGLES2Error("encodeTexture");
// It should be Ok to delete the texture after calling encodeTexture.
GLES20.glDeleteTextures(1, new int[] {oesTextureId}, 0);
OutputBufferInfo info = encoder.dequeueOutputBuffer();
while (info == null) {
info = encoder.dequeueOutputBuffer();
Thread.sleep(20);
}
assertTrue(info.index != -1);
assertTrue(info.buffer.capacity() > 0);
assertEquals(presentationTs, info.presentationTimestampUs);
encoder.releaseOutputBuffer(info.index);
encoder.release();
eglOesBase.release();
}
}

View File

@ -0,0 +1,291 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.webrtc.NetworkMonitorAutoDetect.ConnectionType;
import static org.webrtc.NetworkMonitorAutoDetect.ConnectivityManagerDelegate;
import static org.webrtc.NetworkMonitorAutoDetect.INVALID_NET_ID;
import static org.webrtc.NetworkMonitorAutoDetect.NetworkInformation;
import static org.webrtc.NetworkMonitorAutoDetect.NetworkState;
import android.annotation.SuppressLint;
import android.content.Context;
import android.content.Intent;
import android.net.ConnectivityManager;
import android.net.Network;
import android.net.wifi.WifiManager;
import android.os.Build;
import android.os.Handler;
import android.os.Looper;
import android.support.test.InstrumentationRegistry;
import android.support.test.annotation.UiThreadTest;
import android.support.test.filters.MediumTest;
import android.support.test.filters.SmallTest;
import android.support.test.rule.UiThreadTestRule;
import org.chromium.base.test.BaseJUnit4ClassRunner;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.runner.RunWith;
/**
* Tests for org.webrtc.NetworkMonitor.
*
* TODO(deadbeef): These tests don't cover the interaction between
* NetworkManager.java and androidnetworkmonitor_jni.cc, which is how this
* class is used in practice in WebRTC.
*/
@SuppressLint("NewApi")
@RunWith(BaseJUnit4ClassRunner.class)
public class NetworkMonitorTest {
@Rule public UiThreadTestRule uiThreadTestRule = new UiThreadTestRule();
/**
* Listens for alerts fired by the NetworkMonitor when network status changes.
*/
private static class NetworkMonitorTestObserver implements NetworkMonitor.NetworkObserver {
private boolean receivedNotification = false;
@Override
public void onConnectionTypeChanged(ConnectionType connectionType) {
receivedNotification = true;
}
public boolean hasReceivedNotification() {
return receivedNotification;
}
public void resetHasReceivedNotification() {
receivedNotification = false;
}
}
/**
* Mocks out calls to the ConnectivityManager.
*/
private static class MockConnectivityManagerDelegate extends ConnectivityManagerDelegate {
private boolean activeNetworkExists;
private int networkType;
private int networkSubtype;
@Override
public NetworkState getNetworkState() {
return new NetworkState(activeNetworkExists, networkType, networkSubtype);
}
// Dummy implementations to avoid NullPointerExceptions in default implementations:
@Override
public long getDefaultNetId() {
return INVALID_NET_ID;
}
@Override
public Network[] getAllNetworks() {
return new Network[0];
}
@Override
public NetworkState getNetworkState(Network network) {
return new NetworkState(false, -1, -1);
}
public void setActiveNetworkExists(boolean networkExists) {
activeNetworkExists = networkExists;
}
public void setNetworkType(int networkType) {
this.networkType = networkType;
}
public void setNetworkSubtype(int networkSubtype) {
this.networkSubtype = networkSubtype;
}
}
/**
* Mocks out calls to the WifiManager.
*/
private static class MockWifiManagerDelegate
extends NetworkMonitorAutoDetect.WifiManagerDelegate {
private String wifiSSID;
@Override
public String getWifiSSID() {
return wifiSSID;
}
public void setWifiSSID(String wifiSSID) {
this.wifiSSID = wifiSSID;
}
}
// A dummy NetworkMonitorAutoDetect.Observer.
private static class TestNetworkMonitorAutoDetectObserver
implements NetworkMonitorAutoDetect.Observer {
@Override
public void onConnectionTypeChanged(ConnectionType newConnectionType) {}
@Override
public void onNetworkConnect(NetworkInformation networkInfo) {}
@Override
public void onNetworkDisconnect(long networkHandle) {}
}
private static final Object lock = new Object();
private static Handler uiThreadHandler = null;
private NetworkMonitorAutoDetect receiver;
private MockConnectivityManagerDelegate connectivityDelegate;
private MockWifiManagerDelegate wifiDelegate;
private static Handler getUiThreadHandler() {
synchronized (lock) {
if (uiThreadHandler == null) {
uiThreadHandler = new Handler(Looper.getMainLooper());
}
return uiThreadHandler;
}
}
/**
* Helper method to create a network monitor and delegates for testing.
*/
private void createTestMonitor() {
Context context = InstrumentationRegistry.getTargetContext();
NetworkMonitor.resetInstanceForTests();
NetworkMonitor.createAutoDetectorForTest();
receiver = NetworkMonitor.getAutoDetectorForTest();
assertNotNull(receiver);
connectivityDelegate = new MockConnectivityManagerDelegate();
connectivityDelegate.setActiveNetworkExists(true);
receiver.setConnectivityManagerDelegateForTests(connectivityDelegate);
wifiDelegate = new MockWifiManagerDelegate();
receiver.setWifiManagerDelegateForTests(wifiDelegate);
wifiDelegate.setWifiSSID("foo");
}
private NetworkMonitorAutoDetect.ConnectionType getCurrentConnectionType() {
final NetworkMonitorAutoDetect.NetworkState networkState = receiver.getCurrentNetworkState();
return receiver.getConnectionType(networkState);
}
@Before
public void setUp() {
ContextUtils.initialize(InstrumentationRegistry.getTargetContext());
createTestMonitor();
}
/**
* Tests that the receiver registers for connectivity intents during construction.
*/
@Test
@UiThreadTest
@SmallTest
public void testNetworkMonitorRegistersInConstructor() throws InterruptedException {
Context context = InstrumentationRegistry.getTargetContext();
NetworkMonitorAutoDetect.Observer observer = new TestNetworkMonitorAutoDetectObserver();
NetworkMonitorAutoDetect receiver = new NetworkMonitorAutoDetect(observer, context);
assertTrue(receiver.isReceiverRegisteredForTesting());
}
/**
* Tests that when there is an intent indicating a change in network connectivity, it sends a
* notification to Java observers.
*/
@Test
@UiThreadTest
@MediumTest
public void testNetworkMonitorJavaObservers() throws InterruptedException {
// Initialize the NetworkMonitor with a connection.
Intent connectivityIntent = new Intent(ConnectivityManager.CONNECTIVITY_ACTION);
receiver.onReceive(InstrumentationRegistry.getTargetContext(), connectivityIntent);
// We shouldn't be re-notified if the connection hasn't actually changed.
NetworkMonitorTestObserver observer = new NetworkMonitorTestObserver();
NetworkMonitor.addNetworkObserver(observer);
receiver.onReceive(InstrumentationRegistry.getTargetContext(), connectivityIntent);
assertFalse(observer.hasReceivedNotification());
// We shouldn't be notified if we're connected to non-Wifi and the Wifi SSID changes.
wifiDelegate.setWifiSSID("bar");
receiver.onReceive(InstrumentationRegistry.getTargetContext(), connectivityIntent);
assertFalse(observer.hasReceivedNotification());
// We should be notified when we change to Wifi.
connectivityDelegate.setNetworkType(ConnectivityManager.TYPE_WIFI);
receiver.onReceive(InstrumentationRegistry.getTargetContext(), connectivityIntent);
assertTrue(observer.hasReceivedNotification());
observer.resetHasReceivedNotification();
// We should be notified when the Wifi SSID changes.
wifiDelegate.setWifiSSID("foo");
receiver.onReceive(InstrumentationRegistry.getTargetContext(), connectivityIntent);
assertTrue(observer.hasReceivedNotification());
observer.resetHasReceivedNotification();
// We shouldn't be re-notified if the Wifi SSID hasn't actually changed.
receiver.onReceive(InstrumentationRegistry.getTargetContext(), connectivityIntent);
assertFalse(observer.hasReceivedNotification());
// Mimic that connectivity has been lost and ensure that the observer gets the notification.
connectivityDelegate.setActiveNetworkExists(false);
Intent noConnectivityIntent = new Intent(ConnectivityManager.CONNECTIVITY_ACTION);
receiver.onReceive(InstrumentationRegistry.getTargetContext(), noConnectivityIntent);
assertTrue(observer.hasReceivedNotification());
}
/**
* Tests that ConnectivityManagerDelegate doesn't crash. This test cannot rely on having any
* active network connections so it cannot usefully check results, but it can at least check
* that the functions don't crash.
*/
@Test
@UiThreadTest
@SmallTest
public void testConnectivityManagerDelegateDoesNotCrash() {
ConnectivityManagerDelegate delegate =
new ConnectivityManagerDelegate(InstrumentationRegistry.getTargetContext());
delegate.getNetworkState();
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
Network[] networks = delegate.getAllNetworks();
if (networks.length >= 1) {
delegate.getNetworkState(networks[0]);
delegate.hasInternetCapability(networks[0]);
}
delegate.getDefaultNetId();
}
}
/**
* Tests that NetworkMonitorAutoDetect queryable APIs don't crash. This test cannot rely
* on having any active network connections so it cannot usefully check results, but it can at
* least check that the functions don't crash.
*/
@Test
@UiThreadTest
@SmallTest
public void testQueryableAPIsDoNotCrash() {
NetworkMonitorAutoDetect.Observer observer = new TestNetworkMonitorAutoDetectObserver();
NetworkMonitorAutoDetect ncn =
new NetworkMonitorAutoDetect(observer, InstrumentationRegistry.getTargetContext());
ncn.getDefaultNetId();
}
}

File diff suppressed because it is too large

View File

@ -0,0 +1,200 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
import static org.webrtc.RendererCommon.ScalingType.*;
import static org.webrtc.RendererCommon.getDisplaySize;
import static org.webrtc.RendererCommon.getLayoutMatrix;
import static org.webrtc.RendererCommon.rotateTextureMatrix;
import android.graphics.Point;
import android.support.test.filters.SmallTest;
import org.chromium.base.test.BaseJUnit4ClassRunner;
import org.junit.Test;
import org.junit.runner.RunWith;
@RunWith(BaseJUnit4ClassRunner.class)
public class RendererCommonTest {
@Test
@SmallTest
public void testDisplaySizeNoFrame() {
assertEquals(new Point(0, 0), getDisplaySize(SCALE_ASPECT_FIT, 0.0f, 0, 0));
assertEquals(new Point(0, 0), getDisplaySize(SCALE_ASPECT_FILL, 0.0f, 0, 0));
assertEquals(new Point(0, 0), getDisplaySize(SCALE_ASPECT_BALANCED, 0.0f, 0, 0));
}
@Test
@SmallTest
public void testDisplaySizeDegenerateAspectRatio() {
assertEquals(new Point(1280, 720), getDisplaySize(SCALE_ASPECT_FIT, 0.0f, 1280, 720));
assertEquals(new Point(1280, 720), getDisplaySize(SCALE_ASPECT_FILL, 0.0f, 1280, 720));
assertEquals(new Point(1280, 720), getDisplaySize(SCALE_ASPECT_BALANCED, 0.0f, 1280, 720));
}
@Test
@SmallTest
public void testZeroDisplaySize() {
assertEquals(new Point(0, 0), getDisplaySize(SCALE_ASPECT_FIT, 16.0f / 9, 0, 0));
assertEquals(new Point(0, 0), getDisplaySize(SCALE_ASPECT_FILL, 16.0f / 9, 0, 0));
assertEquals(new Point(0, 0), getDisplaySize(SCALE_ASPECT_BALANCED, 16.0f / 9, 0, 0));
}
@Test
@SmallTest
public void testDisplaySizePerfectFit() {
assertEquals(new Point(1280, 720), getDisplaySize(SCALE_ASPECT_FIT, 16.0f / 9, 1280, 720));
assertEquals(new Point(1280, 720), getDisplaySize(SCALE_ASPECT_FILL, 16.0f / 9, 1280, 720));
assertEquals(new Point(1280, 720), getDisplaySize(SCALE_ASPECT_BALANCED, 16.0f / 9, 1280, 720));
assertEquals(new Point(720, 1280), getDisplaySize(SCALE_ASPECT_FIT, 9.0f / 16, 720, 1280));
assertEquals(new Point(720, 1280), getDisplaySize(SCALE_ASPECT_FILL, 9.0f / 16, 720, 1280));
assertEquals(new Point(720, 1280), getDisplaySize(SCALE_ASPECT_BALANCED, 9.0f / 16, 720, 1280));
}
@Test
@SmallTest
public void testLandscapeVideoInPortraitDisplay() {
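// Illustrative note (not in the original test): for a 16:9 frame in a 720x1280 portrait view,
// SCALE_ASPECT_FIT keeps the whole frame visible at 720x405 (405 = 720 * 9 / 16),
// SCALE_ASPECT_FILL covers the full 720x1280 view by cropping, and SCALE_ASPECT_BALANCED
// compromises between the two at 720x720.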
assertEquals(new Point(720, 405), getDisplaySize(SCALE_ASPECT_FIT, 16.0f / 9, 720, 1280));
assertEquals(new Point(720, 1280), getDisplaySize(SCALE_ASPECT_FILL, 16.0f / 9, 720, 1280));
assertEquals(new Point(720, 720), getDisplaySize(SCALE_ASPECT_BALANCED, 16.0f / 9, 720, 1280));
}
@Test
@SmallTest
public void testPortraitVideoInLandscapeDisplay() {
assertEquals(new Point(405, 720), getDisplaySize(SCALE_ASPECT_FIT, 9.0f / 16, 1280, 720));
assertEquals(new Point(1280, 720), getDisplaySize(SCALE_ASPECT_FILL, 9.0f / 16, 1280, 720));
assertEquals(new Point(720, 720), getDisplaySize(SCALE_ASPECT_BALANCED, 9.0f / 16, 1280, 720));
}
@Test
@SmallTest
public void testFourToThreeVideoInSixteenToNineDisplay() {
assertEquals(new Point(960, 720), getDisplaySize(SCALE_ASPECT_FIT, 4.0f / 3, 1280, 720));
assertEquals(new Point(1280, 720), getDisplaySize(SCALE_ASPECT_FILL, 4.0f / 3, 1280, 720));
assertEquals(new Point(1280, 720), getDisplaySize(SCALE_ASPECT_BALANCED, 4.0f / 3, 1280, 720));
}
// Only keep 2 rounded decimals to make float comparison robust.
private static double[] round(float[] array) {
assertEquals(16, array.length);
final double[] doubleArray = new double[16];
for (int i = 0; i < 16; ++i) {
doubleArray[i] = Math.round(100 * array[i]) / 100.0;
}
return doubleArray;
}
// Brief summary about matrix transformations:
// A coordinate p = [u, v, 0, 1] is transformed by matrix m like this p' = [u', v', 0, 1] = m * p.
// OpenGL uses column-major order, so:
// u' = u * m[0] + v * m[4] + m[12].
// v' = u * m[1] + v * m[5] + m[13].
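// Worked example (added for clarity, not part of the original comment): the mirror matrix in
// testLayoutMatrixMirror() below has m[0] = -1, m[4] = 0 and m[12] = 1, so
// u' = u * (-1) + v * 0 + 1 = 1 - u, while m[1] = 0, m[5] = 1 and m[13] = 0 leave v' = v,
// i.e. the image is flipped horizontally.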
@Test
@SmallTest
public void testLayoutMatrixDefault() {
final float layoutMatrix[] = getLayoutMatrix(false, 1.0f, 1.0f);
// Assert:
// u' = u.
// v' = v.
// clang-format off
assertArrayEquals(new double[] {
1, 0, 0, 0,
0, 1, 0, 0,
0, 0, 1, 0,
0, 0, 0, 1}, round(layoutMatrix), 0.0);
// clang-format on
}
@Test
@SmallTest
public void testLayoutMatrixMirror() {
final float layoutMatrix[] = getLayoutMatrix(true, 1.0f, 1.0f);
// Assert:
// u' = 1 - u.
// v' = v.
// clang-format off
assertArrayEquals(new double[] {
-1, 0, 0, 0,
0, 1, 0, 0,
0, 0, 1, 0,
1, 0, 0, 1}, round(layoutMatrix), 0.0);
// clang-format on
}
@Test
@SmallTest
public void testLayoutMatrixScale() {
// Video has aspect ratio 2, but layout is square. This will cause only the center part of the
// video to be visible, i.e. the u coordinate will go from 0.25 to 0.75 instead of from 0 to 1.
final float layoutMatrix[] = getLayoutMatrix(false, 2.0f, 1.0f);
// Assert:
// u' = 0.25 + 0.5 u.
// v' = v.
// clang-format off
assertArrayEquals(new double[] {
0.5, 0, 0, 0,
0, 1, 0, 0,
0, 0, 1, 0,
0.25, 0, 0, 1}, round(layoutMatrix), 0.0);
// clang-format on
}
@Test
@SmallTest
public void testRotateTextureMatrixDefault() {
// Test that rotation with 0 degrees returns an identical matrix.
// clang-format off
final float[] matrix = new float[] {
1, 2, 3, 4,
5, 6, 7, 8,
9, 0, 1, 2,
3, 4, 5, 6
};
// clang-format on
final float rotatedMatrix[] = rotateTextureMatrix(matrix, 0);
assertArrayEquals(round(matrix), round(rotatedMatrix), 0.0);
}
@Test
@SmallTest
public void testRotateTextureMatrix90Deg() {
final float samplingMatrix[] = rotateTextureMatrix(RendererCommon.identityMatrix(), 90);
// Assert:
// u' = 1 - v.
// v' = u.
// clang-format off
assertArrayEquals(new double[] {
0, 1, 0, 0,
-1, 0, 0, 0,
0, 0, 1, 0,
1, 0, 0, 1}, round(samplingMatrix), 0.0);
// clang-format on
}
@Test
@SmallTest
public void testRotateTextureMatrix180Deg() {
final float samplingMatrix[] = rotateTextureMatrix(RendererCommon.identityMatrix(), 180);
// Assert:
// u' = 1 - u.
// v' = 1 - v.
// clang-format off
assertArrayEquals(new double[] {
-1, 0, 0, 0,
0, -1, 0, 0,
0, 0, 1, 0,
1, 1, 0, 1}, round(samplingMatrix), 0.0);
// clang-format on
}
}

View File

@ -0,0 +1,506 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.fail;
import android.graphics.SurfaceTexture;
import android.opengl.GLES20;
import android.os.SystemClock;
import android.support.test.filters.MediumTest;
import android.support.test.filters.SmallTest;
import java.nio.ByteBuffer;
import java.util.concurrent.CountDownLatch;
import org.chromium.base.test.BaseJUnit4ClassRunner;
import org.junit.Test;
import org.junit.runner.RunWith;
@RunWith(BaseJUnit4ClassRunner.class)
public class SurfaceTextureHelperTest {
/**
* Mock texture listener with blocking wait functionality.
*/
public static final class MockTextureListener
implements SurfaceTextureHelper.OnTextureFrameAvailableListener {
public int oesTextureId;
public float[] transformMatrix;
private boolean hasNewFrame = false;
// Thread where frames are expected to be received on.
private final Thread expectedThread;
MockTextureListener() {
this.expectedThread = null;
}
MockTextureListener(Thread expectedThread) {
this.expectedThread = expectedThread;
}
@Override
public synchronized void onTextureFrameAvailable(
int oesTextureId, float[] transformMatrix, long timestampNs) {
if (expectedThread != null && Thread.currentThread() != expectedThread) {
throw new IllegalStateException("onTextureFrameAvailable called on wrong thread.");
}
this.oesTextureId = oesTextureId;
this.transformMatrix = transformMatrix;
hasNewFrame = true;
notifyAll();
}
/**
* Wait indefinitely for a new frame.
*/
public synchronized void waitForNewFrame() throws InterruptedException {
while (!hasNewFrame) {
wait();
}
hasNewFrame = false;
}
/**
* Wait for a new frame, or until the specified timeout elapses. Returns true if a new frame was
* received before the timeout.
*/
public synchronized boolean waitForNewFrame(final long timeoutMs) throws InterruptedException {
final long startTimeMs = SystemClock.elapsedRealtime();
long timeRemainingMs = timeoutMs;
while (!hasNewFrame && timeRemainingMs > 0) {
wait(timeRemainingMs);
final long elapsedTimeMs = SystemClock.elapsedRealtime() - startTimeMs;
timeRemainingMs = timeoutMs - elapsedTimeMs;
}
final boolean didReceiveFrame = hasNewFrame;
hasNewFrame = false;
return didReceiveFrame;
}
}
/** Assert that two integers are close, with difference at most
* {@code threshold}. */
public static void assertClose(int threshold, int expected, int actual) {
if (Math.abs(expected - actual) <= threshold)
return;
fail("Not close enough, threshold " + threshold + ". Expected: " + expected + " Actual: "
+ actual);
}
/**
* Test normal use by receiving three uniform texture frames. Texture frames are returned as early
* as possible. The texture pixel values are inspected by drawing the texture frame to a pixel
* buffer and reading it back with glReadPixels().
*/
@Test
@MediumTest
public void testThreeConstantColorFrames() throws InterruptedException {
final int width = 16;
final int height = 16;
// Create EGL base with a pixel buffer as display output.
final EglBase eglBase = EglBase.create(null, EglBase.CONFIG_PIXEL_BUFFER);
eglBase.createPbufferSurface(width, height);
final GlRectDrawer drawer = new GlRectDrawer();
// Create SurfaceTextureHelper and listener.
final SurfaceTextureHelper surfaceTextureHelper = SurfaceTextureHelper.create(
"SurfaceTextureHelper test" /* threadName */, eglBase.getEglBaseContext());
final MockTextureListener listener = new MockTextureListener();
surfaceTextureHelper.startListening(listener);
surfaceTextureHelper.getSurfaceTexture().setDefaultBufferSize(width, height);
// Create resources for stubbing an OES texture producer. |eglOesBase| has the SurfaceTexture in
// |surfaceTextureHelper| as the target EGLSurface.
final EglBase eglOesBase = EglBase.create(eglBase.getEglBaseContext(), EglBase.CONFIG_PLAIN);
eglOesBase.createSurface(surfaceTextureHelper.getSurfaceTexture());
assertEquals(eglOesBase.surfaceWidth(), width);
assertEquals(eglOesBase.surfaceHeight(), height);
final int red[] = new int[] {79, 144, 185};
final int green[] = new int[] {66, 210, 162};
final int blue[] = new int[] {161, 117, 158};
// Draw three frames.
for (int i = 0; i < 3; ++i) {
// Draw a constant color frame onto the SurfaceTexture.
eglOesBase.makeCurrent();
GLES20.glClearColor(red[i] / 255.0f, green[i] / 255.0f, blue[i] / 255.0f, 1.0f);
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
// swapBuffers() will ultimately trigger onTextureFrameAvailable().
eglOesBase.swapBuffers();
// Wait for an OES texture to arrive and draw it onto the pixel buffer.
listener.waitForNewFrame();
eglBase.makeCurrent();
drawer.drawOes(
listener.oesTextureId, listener.transformMatrix, width, height, 0, 0, width, height);
surfaceTextureHelper.returnTextureFrame();
// Download the pixels in the pixel buffer as RGBA. Not all platforms support RGB, e.g.
// Nexus 9.
final ByteBuffer rgbaData = ByteBuffer.allocateDirect(width * height * 4);
GLES20.glReadPixels(0, 0, width, height, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, rgbaData);
GlUtil.checkNoGLES2Error("glReadPixels");
// Assert rendered image is expected constant color.
while (rgbaData.hasRemaining()) {
assertEquals(rgbaData.get() & 0xFF, red[i]);
assertEquals(rgbaData.get() & 0xFF, green[i]);
assertEquals(rgbaData.get() & 0xFF, blue[i]);
assertEquals(rgbaData.get() & 0xFF, 255);
}
}
drawer.release();
surfaceTextureHelper.dispose();
eglBase.release();
}
/**
* Test disposing the SurfaceTextureHelper while holding a pending texture frame. The pending
* texture frame should still be valid, and this is tested by drawing the texture frame to a pixel
* buffer and reading it back with glReadPixels().
*/
@Test
@MediumTest
public void testLateReturnFrame() throws InterruptedException {
final int width = 16;
final int height = 16;
// Create EGL base with a pixel buffer as display output.
final EglBase eglBase = EglBase.create(null, EglBase.CONFIG_PIXEL_BUFFER);
eglBase.createPbufferSurface(width, height);
// Create SurfaceTextureHelper and listener.
final SurfaceTextureHelper surfaceTextureHelper = SurfaceTextureHelper.create(
"SurfaceTextureHelper test" /* threadName */, eglBase.getEglBaseContext());
final MockTextureListener listener = new MockTextureListener();
surfaceTextureHelper.startListening(listener);
surfaceTextureHelper.getSurfaceTexture().setDefaultBufferSize(width, height);
// Create resources for stubbing an OES texture producer. |eglOesBase| has the SurfaceTexture in
// |surfaceTextureHelper| as the target EGLSurface.
final EglBase eglOesBase = EglBase.create(eglBase.getEglBaseContext(), EglBase.CONFIG_PLAIN);
eglOesBase.createSurface(surfaceTextureHelper.getSurfaceTexture());
assertEquals(eglOesBase.surfaceWidth(), width);
assertEquals(eglOesBase.surfaceHeight(), height);
final int red = 79;
final int green = 66;
final int blue = 161;
// Draw a constant color frame onto the SurfaceTexture.
eglOesBase.makeCurrent();
GLES20.glClearColor(red / 255.0f, green / 255.0f, blue / 255.0f, 1.0f);
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
// swapBuffers() will ultimately trigger onTextureFrameAvailable().
eglOesBase.swapBuffers();
eglOesBase.release();
// Wait for OES texture frame.
listener.waitForNewFrame();
// Disconnect while holding the frame.
surfaceTextureHelper.dispose();
// Draw the pending texture frame onto the pixel buffer.
eglBase.makeCurrent();
final GlRectDrawer drawer = new GlRectDrawer();
drawer.drawOes(
listener.oesTextureId, listener.transformMatrix, width, height, 0, 0, width, height);
drawer.release();
// Download the pixels in the pixel buffer as RGBA. Not all platforms support RGB, e.g. Nexus 9.
final ByteBuffer rgbaData = ByteBuffer.allocateDirect(width * height * 4);
GLES20.glReadPixels(0, 0, width, height, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, rgbaData);
GlUtil.checkNoGLES2Error("glReadPixels");
eglBase.release();
// Assert rendered image is expected constant color.
while (rgbaData.hasRemaining()) {
assertEquals(rgbaData.get() & 0xFF, red);
assertEquals(rgbaData.get() & 0xFF, green);
assertEquals(rgbaData.get() & 0xFF, blue);
assertEquals(rgbaData.get() & 0xFF, 255);
}
// Late frame return after everything has been disposed and released.
surfaceTextureHelper.returnTextureFrame();
}
/**
* Test disposing the SurfaceTextureHelper, but keep trying to produce more texture frames. No
* frames should be delivered to the listener.
*/
@Test
@MediumTest
public void testDispose() throws InterruptedException {
// Create SurfaceTextureHelper and listener.
final SurfaceTextureHelper surfaceTextureHelper =
SurfaceTextureHelper.create("SurfaceTextureHelper test" /* threadName */, null);
final MockTextureListener listener = new MockTextureListener();
surfaceTextureHelper.startListening(listener);
// Create EglBase with the SurfaceTexture as target EGLSurface.
final EglBase eglBase = EglBase.create(null, EglBase.CONFIG_PLAIN);
eglBase.createSurface(surfaceTextureHelper.getSurfaceTexture());
eglBase.makeCurrent();
// Assert no frame has been received yet.
assertFalse(listener.waitForNewFrame(1));
// Draw and wait for one frame.
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
// swapBuffers() will ultimately trigger onTextureFrameAvailable().
eglBase.swapBuffers();
listener.waitForNewFrame();
surfaceTextureHelper.returnTextureFrame();
// Dispose - we should not receive any textures after this.
surfaceTextureHelper.dispose();
// Draw one frame.
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
eglBase.swapBuffers();
// swapBuffers() should not trigger onTextureFrameAvailable() because dispose() has been called.
// Assert that no OES texture was delivered.
assertFalse(listener.waitForNewFrame(500));
eglBase.release();
}
/**
* Test disposing the SurfaceTextureHelper immediately after it has been set up to use a
* shared context. No frames should be delivered to the listener.
*/
@Test
@SmallTest
public void testDisposeImmediately() {
final SurfaceTextureHelper surfaceTextureHelper =
SurfaceTextureHelper.create("SurfaceTextureHelper test" /* threadName */, null);
surfaceTextureHelper.dispose();
}
/**
* Call stopListening(), but keep trying to produce more texture frames. No frames should be
* delivered to the listener.
*/
@Test
@MediumTest
public void testStopListening() throws InterruptedException {
// Create SurfaceTextureHelper and listener.
final SurfaceTextureHelper surfaceTextureHelper =
SurfaceTextureHelper.create("SurfaceTextureHelper test" /* threadName */, null);
final MockTextureListener listener = new MockTextureListener();
surfaceTextureHelper.startListening(listener);
// Create EglBase with the SurfaceTexture as target EGLSurface.
final EglBase eglBase = EglBase.create(null, EglBase.CONFIG_PLAIN);
eglBase.createSurface(surfaceTextureHelper.getSurfaceTexture());
eglBase.makeCurrent();
// Assert no frame has been received yet.
assertFalse(listener.waitForNewFrame(1));
// Draw and wait for one frame.
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
// swapBuffers() will ultimately trigger onTextureFrameAvailable().
eglBase.swapBuffers();
listener.waitForNewFrame();
surfaceTextureHelper.returnTextureFrame();
// Stop listening - we should not receive any textures after this.
surfaceTextureHelper.stopListening();
// Draw one frame.
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
eglBase.swapBuffers();
// swapBuffers() should not trigger onTextureFrameAvailable() because stopListening() has been
// called.
// Assert that no OES texture was delivered.
assertFalse(listener.waitForNewFrame(500));
surfaceTextureHelper.dispose();
eglBase.release();
}
/**
* Test stopListening() immediately after the SurfaceTextureHelper has been set up.
*/
@Test
@SmallTest
public void testStopListeningImmediately() throws InterruptedException {
final SurfaceTextureHelper surfaceTextureHelper =
SurfaceTextureHelper.create("SurfaceTextureHelper test" /* threadName */, null);
final MockTextureListener listener = new MockTextureListener();
surfaceTextureHelper.startListening(listener);
surfaceTextureHelper.stopListening();
surfaceTextureHelper.dispose();
}
/**
* Test stopListening() immediately after the SurfaceTextureHelper has been set up on the handler
* thread.
*/
@Test
@SmallTest
public void testStopListeningImmediatelyOnHandlerThread() throws InterruptedException {
final SurfaceTextureHelper surfaceTextureHelper =
SurfaceTextureHelper.create("SurfaceTextureHelper test" /* threadName */, null);
final MockTextureListener listener = new MockTextureListener();
final CountDownLatch stopListeningBarrier = new CountDownLatch(1);
final CountDownLatch stopListeningBarrierDone = new CountDownLatch(1);
// Start by posting to the handler thread to keep it occupied.
surfaceTextureHelper.getHandler().post(new Runnable() {
@Override
public void run() {
ThreadUtils.awaitUninterruptibly(stopListeningBarrier);
surfaceTextureHelper.stopListening();
stopListeningBarrierDone.countDown();
}
});
// startListening() is asynchronous and will post to the occupied handler thread.
surfaceTextureHelper.startListening(listener);
// Wait for stopListening() to be called on the handler thread.
stopListeningBarrier.countDown();
stopListeningBarrierDone.await();
// Wait until handler thread is idle to try to catch late startListening() call.
final CountDownLatch barrier = new CountDownLatch(1);
surfaceTextureHelper.getHandler().post(new Runnable() {
@Override
public void run() {
barrier.countDown();
}
});
ThreadUtils.awaitUninterruptibly(barrier);
// Previous startListening() call should never have taken place and it should be ok to call it
// again.
surfaceTextureHelper.startListening(listener);
surfaceTextureHelper.dispose();
}
/**
* Test calling startListening() with a new listener after stopListening() has been called.
*/
@Test
@MediumTest
public void testRestartListeningWithNewListener() throws InterruptedException {
// Create SurfaceTextureHelper and listener.
final SurfaceTextureHelper surfaceTextureHelper =
SurfaceTextureHelper.create("SurfaceTextureHelper test" /* threadName */, null);
final MockTextureListener listener1 = new MockTextureListener();
surfaceTextureHelper.startListening(listener1);
// Create EglBase with the SurfaceTexture as target EGLSurface.
final EglBase eglBase = EglBase.create(null, EglBase.CONFIG_PLAIN);
eglBase.createSurface(surfaceTextureHelper.getSurfaceTexture());
eglBase.makeCurrent();
// Assert no frame has been received yet.
assertFalse(listener1.waitForNewFrame(1));
// Draw and wait for one frame.
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
// swapBuffers() will ultimately trigger onTextureFrameAvailable().
eglBase.swapBuffers();
listener1.waitForNewFrame();
surfaceTextureHelper.returnTextureFrame();
// Stop listening - |listener1| should not receive any textures after this.
surfaceTextureHelper.stopListening();
// Connect different listener.
final MockTextureListener listener2 = new MockTextureListener();
surfaceTextureHelper.startListening(listener2);
// Assert no frame has been received yet.
assertFalse(listener2.waitForNewFrame(1));
// Draw one frame.
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
eglBase.swapBuffers();
// Check that |listener2| received the frame, and not |listener1|.
listener2.waitForNewFrame();
assertFalse(listener1.waitForNewFrame(1));
surfaceTextureHelper.returnTextureFrame();
surfaceTextureHelper.dispose();
eglBase.release();
}
@Test
@MediumTest
public void testTexturetoYUV() throws InterruptedException {
final int width = 16;
final int height = 16;
final EglBase eglBase = EglBase.create(null, EglBase.CONFIG_PLAIN);
// Create SurfaceTextureHelper and listener.
final SurfaceTextureHelper surfaceTextureHelper = SurfaceTextureHelper.create(
"SurfaceTextureHelper test" /* threadName */, eglBase.getEglBaseContext());
final MockTextureListener listener = new MockTextureListener();
surfaceTextureHelper.startListening(listener);
surfaceTextureHelper.getSurfaceTexture().setDefaultBufferSize(width, height);
// Create resources for stubbing an OES texture producer. |eglBase| has the SurfaceTexture in
// |surfaceTextureHelper| as the target EGLSurface.
eglBase.createSurface(surfaceTextureHelper.getSurfaceTexture());
assertEquals(eglBase.surfaceWidth(), width);
assertEquals(eglBase.surfaceHeight(), height);
final int red[] = new int[] {79, 144, 185};
final int green[] = new int[] {66, 210, 162};
final int blue[] = new int[] {161, 117, 158};
final int ref_y[] = new int[] {81, 180, 168};
final int ref_u[] = new int[] {173, 93, 122};
final int ref_v[] = new int[] {127, 103, 140};
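// Note (added for clarity, not in the original): these reference values appear to follow the
// full-range BT.601 RGB-to-YUV conversion. For the first frame, for example,
// Y = 0.299 * 79 + 0.587 * 66 + 0.114 * 161 = 80.7, which rounds to 81, and
// U = 128 - 0.169 * 79 - 0.331 * 66 + 0.5 * 161 = 173.3, which rounds to 173.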
// Draw three frames.
for (int i = 0; i < 3; ++i) {
// Draw a constant color frame onto the SurfaceTexture.
eglBase.makeCurrent();
GLES20.glClearColor(red[i] / 255.0f, green[i] / 255.0f, blue[i] / 255.0f, 1.0f);
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
// swapBuffers() will ultimately trigger onTextureFrameAvailable().
eglBase.swapBuffers();
// Wait for an OES texture to arrive.
listener.waitForNewFrame();
// Memory layout: Lines are 16 bytes. First 16 lines are
// the Y data. These are followed by 8 lines with 8 bytes of U
// data on the left and 8 bytes of V data on the right.
//
// Offset
// 0 YYYYYYYY YYYYYYYY
// 16 YYYYYYYY YYYYYYYY
// ...
// 240 YYYYYYYY YYYYYYYY
// 256 UUUUUUUU VVVVVVVV
// 272 UUUUUUUU VVVVVVVV
// ...
// 368 UUUUUUUU VVVVVVVV
// 384 buffer end
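// In total: 16 * 16 Y bytes plus 8 * 16 interleaved U/V bytes = 384 bytes, which is exactly
// the width * height * 3 / 2 allocated below.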
ByteBuffer buffer = ByteBuffer.allocateDirect(width * height * 3 / 2);
surfaceTextureHelper.textureToYUV(
buffer, width, height, width, listener.oesTextureId, listener.transformMatrix);
surfaceTextureHelper.returnTextureFrame();
// Allow off-by-one differences due to different rounding.
while (buffer.position() < width * height) {
assertClose(1, buffer.get() & 0xff, ref_y[i]);
}
while (buffer.hasRemaining()) {
if (buffer.position() % width < width / 2)
assertClose(1, buffer.get() & 0xff, ref_u[i]);
else
assertClose(1, buffer.get() & 0xff, ref_v[i]);
}
}
surfaceTextureHelper.dispose();
eglBase.release();
}
}

View File

@ -0,0 +1,235 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.fail;
import android.annotation.SuppressLint;
import android.graphics.Point;
import android.support.test.InstrumentationRegistry;
import android.support.test.annotation.UiThreadTest;
import android.support.test.filters.MediumTest;
import android.support.test.rule.UiThreadTestRule;
import android.view.View.MeasureSpec;
import java.nio.ByteBuffer;
import java.util.Arrays;
import java.util.List;
import org.chromium.base.test.BaseJUnit4ClassRunner;
import org.junit.Rule;
import org.junit.Test;
import org.junit.runner.RunWith;
@RunWith(BaseJUnit4ClassRunner.class)
public class SurfaceViewRendererOnMeasureTest {
@Rule public UiThreadTestRule uiThreadTestRule = new UiThreadTestRule();
/**
* List with all possible scaling types.
*/
private static final List<RendererCommon.ScalingType> scalingTypes = Arrays.asList(
RendererCommon.ScalingType.SCALE_ASPECT_FIT, RendererCommon.ScalingType.SCALE_ASPECT_FILL,
RendererCommon.ScalingType.SCALE_ASPECT_BALANCED);
/**
* List with MeasureSpec modes.
*/
private static final List<Integer> measureSpecModes =
Arrays.asList(MeasureSpec.EXACTLY, MeasureSpec.AT_MOST);
/**
* Returns a dummy YUV frame.
*/
static VideoRenderer.I420Frame createFrame(int width, int height, int rotationDegree) {
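// I420 layout: a full-resolution Y plane followed by U and V planes at half resolution in
// each dimension; the (width + 1) / 2 rounding keeps odd frame sizes valid.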
final int[] yuvStrides = new int[] {width, (width + 1) / 2, (width + 1) / 2};
final int[] yuvHeights = new int[] {height, (height + 1) / 2, (height + 1) / 2};
final ByteBuffer[] yuvPlanes = new ByteBuffer[3];
for (int i = 0; i < 3; ++i) {
yuvPlanes[i] = ByteBuffer.allocateDirect(yuvStrides[i] * yuvHeights[i]);
}
return new VideoRenderer.I420Frame(width, height, rotationDegree, yuvStrides, yuvPlanes, 0);
}
/**
* Assert onMeasure() with given parameters will result in expected measured size.
*/
@SuppressLint("WrongCall")
private static void assertMeasuredSize(SurfaceViewRenderer surfaceViewRenderer,
RendererCommon.ScalingType scalingType, String frameDimensions, int expectedWidth,
int expectedHeight, int widthSpec, int heightSpec) {
surfaceViewRenderer.setScalingType(scalingType);
surfaceViewRenderer.onMeasure(widthSpec, heightSpec);
final int measuredWidth = surfaceViewRenderer.getMeasuredWidth();
final int measuredHeight = surfaceViewRenderer.getMeasuredHeight();
if (measuredWidth != expectedWidth || measuredHeight != expectedHeight) {
fail("onMeasure(" + MeasureSpec.toString(widthSpec) + ", " + MeasureSpec.toString(heightSpec)
+ ")"
+ " with scaling type " + scalingType + " and frame: " + frameDimensions
+ " expected measured size " + expectedWidth + "x" + expectedHeight + ", but was "
+ measuredWidth + "x" + measuredHeight);
}
}
/**
* Test how SurfaceViewRenderer.onMeasure() behaves when no frame has been delivered.
*/
@Test
@UiThreadTest
@MediumTest
public void testNoFrame() {
final SurfaceViewRenderer surfaceViewRenderer =
new SurfaceViewRenderer(InstrumentationRegistry.getContext());
final String frameDimensions = "null";
// Test behaviour before SurfaceViewRenderer.init() is called.
for (RendererCommon.ScalingType scalingType : scalingTypes) {
for (int measureSpecMode : measureSpecModes) {
final int zeroMeasureSize = MeasureSpec.makeMeasureSpec(0, measureSpecMode);
assertMeasuredSize(surfaceViewRenderer, scalingType, frameDimensions, 0, 0, zeroMeasureSize,
zeroMeasureSize);
assertMeasuredSize(surfaceViewRenderer, scalingType, frameDimensions, 1280, 720,
MeasureSpec.makeMeasureSpec(1280, measureSpecMode),
MeasureSpec.makeMeasureSpec(720, measureSpecMode));
}
}
// Test behaviour after SurfaceViewRenderer.init() is called, but still no frame.
surfaceViewRenderer.init((EglBase.Context) null, null);
for (RendererCommon.ScalingType scalingType : scalingTypes) {
for (int measureSpecMode : measureSpecModes) {
final int zeroMeasureSize = MeasureSpec.makeMeasureSpec(0, measureSpecMode);
assertMeasuredSize(surfaceViewRenderer, scalingType, frameDimensions, 0, 0, zeroMeasureSize,
zeroMeasureSize);
assertMeasuredSize(surfaceViewRenderer, scalingType, frameDimensions, 1280, 720,
MeasureSpec.makeMeasureSpec(1280, measureSpecMode),
MeasureSpec.makeMeasureSpec(720, measureSpecMode));
}
}
surfaceViewRenderer.release();
}
/**
* Test how SurfaceViewRenderer.onMeasure() behaves with a 1280x720 frame.
*/
@Test
@UiThreadTest
@MediumTest
public void testFrame1280x720() throws InterruptedException {
final SurfaceViewRenderer surfaceViewRenderer =
new SurfaceViewRenderer(InstrumentationRegistry.getContext());
/**
* Mock renderer events with blocking wait functionality for frame size changes.
*/
class MockRendererEvents implements RendererCommon.RendererEvents {
private int frameWidth;
private int frameHeight;
private int rotation;
public synchronized void waitForFrameSize(int frameWidth, int frameHeight, int rotation)
throws InterruptedException {
while (this.frameWidth != frameWidth || this.frameHeight != frameHeight
|| this.rotation != rotation) {
wait();
}
}
public void onFirstFrameRendered() {}
public synchronized void onFrameResolutionChanged(
int frameWidth, int frameHeight, int rotation) {
this.frameWidth = frameWidth;
this.frameHeight = frameHeight;
this.rotation = rotation;
notifyAll();
}
}
final MockRendererEvents rendererEvents = new MockRendererEvents();
surfaceViewRenderer.init((EglBase.Context) null, rendererEvents);
// Test different rotation degrees, but the same rotated size.
for (int rotationDegree : new int[] {0, 90, 180, 270}) {
final int rotatedWidth = 1280;
final int rotatedHeight = 720;
final int unrotatedWidth = (rotationDegree % 180 == 0 ? rotatedWidth : rotatedHeight);
final int unrotatedHeight = (rotationDegree % 180 == 0 ? rotatedHeight : rotatedWidth);
final VideoRenderer.I420Frame frame =
createFrame(unrotatedWidth, unrotatedHeight, rotationDegree);
assertEquals(rotatedWidth, frame.rotatedWidth());
assertEquals(rotatedHeight, frame.rotatedHeight());
final String frameDimensions =
unrotatedWidth + "x" + unrotatedHeight + " with rotation " + rotationDegree;
surfaceViewRenderer.renderFrame(frame);
rendererEvents.waitForFrameSize(unrotatedWidth, unrotatedHeight, rotationDegree);
// Test forcing to zero size.
for (RendererCommon.ScalingType scalingType : scalingTypes) {
for (int measureSpecMode : measureSpecModes) {
final int zeroMeasureSize = MeasureSpec.makeMeasureSpec(0, measureSpecMode);
assertMeasuredSize(surfaceViewRenderer, scalingType, frameDimensions, 0, 0,
zeroMeasureSize, zeroMeasureSize);
}
}
// Test perfect fit.
for (RendererCommon.ScalingType scalingType : scalingTypes) {
for (int measureSpecMode : measureSpecModes) {
assertMeasuredSize(surfaceViewRenderer, scalingType, frameDimensions, rotatedWidth,
rotatedHeight, MeasureSpec.makeMeasureSpec(rotatedWidth, measureSpecMode),
MeasureSpec.makeMeasureSpec(rotatedHeight, measureSpecMode));
}
}
// Force spec size with different aspect ratio than frame aspect ratio.
for (RendererCommon.ScalingType scalingType : scalingTypes) {
assertMeasuredSize(surfaceViewRenderer, scalingType, frameDimensions, 720, 1280,
MeasureSpec.makeMeasureSpec(720, MeasureSpec.EXACTLY),
MeasureSpec.makeMeasureSpec(1280, MeasureSpec.EXACTLY));
}
final float videoAspectRatio = (float) rotatedWidth / rotatedHeight;
{
// Relax both width and height constraints.
final int widthSpec = MeasureSpec.makeMeasureSpec(720, MeasureSpec.AT_MOST);
final int heightSpec = MeasureSpec.makeMeasureSpec(1280, MeasureSpec.AT_MOST);
for (RendererCommon.ScalingType scalingType : scalingTypes) {
final Point expectedSize =
RendererCommon.getDisplaySize(scalingType, videoAspectRatio, 720, 1280);
assertMeasuredSize(surfaceViewRenderer, scalingType, frameDimensions, expectedSize.x,
expectedSize.y, widthSpec, heightSpec);
}
}
{
// Force width to 720, but relax height constraint. This will give the same result as
// above, because width is already the limiting factor and will be maxed out.
final int widthSpec = MeasureSpec.makeMeasureSpec(720, MeasureSpec.EXACTLY);
final int heightSpec = MeasureSpec.makeMeasureSpec(1280, MeasureSpec.AT_MOST);
for (RendererCommon.ScalingType scalingType : scalingTypes) {
final Point expectedSize =
RendererCommon.getDisplaySize(scalingType, videoAspectRatio, 720, 1280);
assertMeasuredSize(surfaceViewRenderer, scalingType, frameDimensions, expectedSize.x,
expectedSize.y, widthSpec, heightSpec);
}
}
{
// Force height, but relax width constraint. This will force a bad layout size.
final int widthSpec = MeasureSpec.makeMeasureSpec(720, MeasureSpec.AT_MOST);
final int heightSpec = MeasureSpec.makeMeasureSpec(1280, MeasureSpec.EXACTLY);
for (RendererCommon.ScalingType scalingType : scalingTypes) {
assertMeasuredSize(
surfaceViewRenderer, scalingType, frameDimensions, 720, 1280, widthSpec, heightSpec);
}
}
}
surfaceViewRenderer.release();
}
}

View File

@ -0,0 +1,85 @@
/*
* Copyright 2016 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import static org.junit.Assert.assertEquals;
import android.os.Environment;
import android.support.test.filters.SmallTest;
import java.io.File;
import java.io.IOException;
import java.io.RandomAccessFile;
import java.lang.Thread;
import java.nio.ByteBuffer;
import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.Random;
import org.chromium.base.test.BaseJUnit4ClassRunner;
import org.junit.Test;
import org.junit.runner.RunWith;
@RunWith(BaseJUnit4ClassRunner.class)
public class VideoFileRendererTest {
@Test
@SmallTest
public void testYuvRenderingToFile() throws InterruptedException, IOException {
EglBase eglBase = EglBase.create();
final String videoOutPath = Environment.getExternalStorageDirectory().getPath()
+ "/chromium_tests_root/testvideoout.y4m";
int frameWidth = 4;
int frameHeight = 4;
VideoFileRenderer videoFileRenderer =
new VideoFileRenderer(videoOutPath, frameWidth, frameHeight, eglBase.getEglBaseContext());
String[] frames = {
"THIS IS JUST SOME TEXT x", "THE SECOND FRAME qwerty.", "HERE IS THE THRID FRAME!"};
for (String frameStr : frames) {
int[] planeSizes = {
frameWidth * frameHeight, frameWidth * frameHeight / 4, frameWidth * frameHeight / 4};
byte[] frameBytes = frameStr.getBytes(Charset.forName("US-ASCII"));
ByteBuffer[] yuvPlanes = new ByteBuffer[3];
int pos = 0;
for (int i = 0; i < 3; i++) {
yuvPlanes[i] = ByteBuffer.allocateDirect(planeSizes[i]);
yuvPlanes[i].put(frameBytes, pos, planeSizes[i]);
pos += planeSizes[i];
}
int[] yuvStrides = {frameWidth, frameWidth / 2, frameWidth / 2};
VideoRenderer.I420Frame frame =
new VideoRenderer.I420Frame(frameWidth, frameHeight, 0, yuvStrides, yuvPlanes, 0);
videoFileRenderer.renderFrame(frame);
}
videoFileRenderer.release();
RandomAccessFile writtenFile = new RandomAccessFile(videoOutPath, "r");
try {
int length = (int) writtenFile.length();
byte[] data = new byte[length];
writtenFile.readFully(data);
String fileContent = new String(data, Charset.forName("US-ASCII"));
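// The expected Y4M header declares a 4x4, 4:2:0, progressive stream at 30 fps with square
// pixels. Each frame body is width * height * 3 / 2 = 24 bytes, exactly the length of the
// 24-character test strings written above, so the raw text reappears between FRAME markers.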
String expected = "YUV4MPEG2 C420 W4 H4 Ip F30:1 A1:1\n"
+ "FRAME\n"
+ "THIS IS JUST SOME TEXT xFRAME\n"
+ "THE SECOND FRAME qwerty.FRAME\n"
+ "HERE IS THE THRID FRAME!";
assertEquals(expected, fileContent);
} finally {
writtenFile.close();
}
new File(videoOutPath).delete();
}
}

View File

@ -0,0 +1,34 @@
/*
* Copyright 2016 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc;
import android.support.test.InstrumentationRegistry;
import android.support.test.filters.SmallTest;
import android.support.test.runner.AndroidJUnit4;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.webrtc.PeerConnectionFactory;
// This test is intended to run on ARM and catch LoadLibrary errors when we load the WebRTC
// JNI. It can't realistically set up calls, since ARM emulators are too slow, but instantiating
// a peer connection isn't timing-sensitive, so we can at least do that.
@RunWith(AndroidJUnit4.class)
public class WebRtcJniBootTest {
@Test
@SmallTest
public void testJniLoadsWithoutError() throws InterruptedException {
PeerConnectionFactory.initializeAndroidGlobals(InstrumentationRegistry.getTargetContext(),
false /* videoCodecHwAcceleration */);
PeerConnectionFactory.Options options = new PeerConnectionFactory.Options();
new PeerConnectionFactory(options);
}
}

View File

@ -0,0 +1,5 @@
YUV4MPEG2 C420 W4 H4 Ip F30:1 A1:1
FRAME
THIS IS JUST SOME TEXT xFRAME
THE SECOND FRAME qwerty.FRAME
HERE IS THE THRID FRAME!