Moving src/webrtc into src/.

In order to eliminate the WebRTC Subtree mirror in Chromium, 
WebRTC is moving the content of the src/webrtc directory up
to the src/ directory.

NOPRESUBMIT=true
NOTREECHECKS=true
NOTRY=true
TBR=tommi@webrtc.org

Bug: chromium:611808
Change-Id: Iac59c5b51b950f174119565bac87955a7994bc38
Reviewed-on: https://webrtc-review.googlesource.com/1560
Commit-Queue: Mirko Bonadei <mbonadei@webrtc.org>
Reviewed-by: Henrik Kjellander <kjellander@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#19845}
This commit is contained in:
Mirko Bonadei
2017-09-15 06:15:48 +02:00
committed by Commit Bot
parent 6674846b4a
commit bb547203bf
4576 changed files with 1092 additions and 1196 deletions

View File

@ -0,0 +1,30 @@
<!--
* Copyright 2017 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
-->
<manifest
    xmlns:android="http://schemas.android.com/apk/res/android"
    xmlns:tools="http://schemas.android.com/tools"
    package="org.appspot.apprtc.test">
  <!-- Allows this APK to be launched via the instrumentation mechanism. -->
  <uses-permission android:name="android.permission.RUN_INSTRUMENTATION" />
  <uses-sdk android:minSdkVersion="13" android:targetSdkVersion="21" />
  <application>
    <!-- Pulls in the legacy android.test runner classes at run time. -->
    <uses-library android:name="android.test.runner" />
  </application>
  <!-- tools:ignore needed for chromium-junit4 tag. crbug.com/640116
       TODO(sakal): Remove once the tag is no longer needed. -->
  <!-- Targets the AppRTCMobile application package under test. -->
  <instrumentation android:name="android.support.test.runner.AndroidJUnitRunner"
      tools:ignore="MissingPrefix"
      android:targetPackage="org.appspot.apprtc"
      android:label="Tests for AppRTCMobile"
      chromium-junit4="true"/>
</manifest>

View File

@ -0,0 +1,3 @@
sakal@webrtc.org
per-file *.py=kjellander@webrtc.org

View File

@ -0,0 +1,14 @@
This directory contains an example unit test for Android AppRTCMobile.
Example of building & using the app:
- Build Android AppRTCMobile and AppRTCMobile unit test:
cd <path/to/webrtc>/src
ninja -C out/Debug AppRTCMobileTest
- Install AppRTCMobile and AppRTCMobileTest:
adb install -r out/Debug/apks/AppRTCMobile.apk
adb install -r out/Debug/apks/AppRTCMobileTest.apk
- Run unit tests:
adb shell am instrument -w org.appspot.apprtc.test/android.test.InstrumentationTestRunner

View File

@ -0,0 +1,18 @@
# This file is used to override default values used by the Ant build system.
#
# This file must be checked into Version Control Systems, as it is
# integral to the build system of your project.
# This file is only used by the Ant script.
# You can use this to override default values such as
# 'source.dir' for the location of your java source folder and
# 'out.dir' for the location of your output folder.
# You can also use it to define how the release builds are signed by declaring
# the following properties:
# 'key.store' for the location of your keystore and
# 'key.alias' for the name of the key to use.
# The password will be asked during the build when you use the 'release' target.
tested.project.dir=../android

View File

@ -0,0 +1,92 @@
<?xml version="1.0" encoding="UTF-8"?>
<!-- Ant build file for AppRTCMobileTest. It only wires project properties
     together and delegates all real work to the SDK's standard ant rules. -->
<project name="AppRTCMobileTest" default="help">

  <!-- The local.properties file is created and updated by the 'android' tool.
       It contains the path to the SDK. It should *NOT* be checked into
       Version Control Systems. -->
  <property file="local.properties" />

  <!-- The ant.properties file can be created by you. It is only edited by the
       'android' tool to add properties to it.
       This is the place to change some Ant specific build properties.
       Here are some properties you may want to change/update:

       source.dir
           The name of the source directory. Default is 'src'.
       out.dir
           The name of the output directory. Default is 'bin'.

       For other overridable properties, look at the beginning of the rules
       files in the SDK, at tools/ant/build.xml

       Properties related to the SDK location or the project target should
       be updated using the 'android' tool with the 'update' action.

       This file is an integral part of the build system for your
       application and should be checked into Version Control Systems.
  -->
  <property file="ant.properties" />

  <!-- If sdk.dir was not set from one of the property files, then
       get it from the ANDROID_SDK_ROOT env var.
       This must be done before we load project.properties since
       the proguard config can use sdk.dir -->
  <property environment="env" />
  <condition property="sdk.dir" value="${env.ANDROID_SDK_ROOT}">
    <isset property="env.ANDROID_SDK_ROOT" />
  </condition>

  <!-- The project.properties file is created and updated by the 'android'
       tool, as well as ADT.
       This contains project specific properties such as project target, and library
       dependencies. Lower level build properties are stored in ant.properties
       (or in .classpath for Eclipse projects).
       This file is an integral part of the build system for your
       application and should be checked into Version Control Systems. -->
  <loadproperties srcFile="project.properties" />

  <!-- Quick check on sdk.dir. The message names ANDROID_SDK_ROOT because that
       is the environment variable actually read by the <condition> above. -->
  <fail
      message="sdk.dir is missing. Make sure to generate local.properties using 'android update project' or to inject it through the ANDROID_SDK_ROOT environment variable."
      unless="sdk.dir"
  />

  <!--
      Import per project custom build rules if present at the root of the project.
      This is the place to put custom intermediary targets such as:
      -pre-build
      -pre-compile
      -post-compile (This is typically used for code obfuscation.
          Compiled code location: ${out.classes.absolute.dir}
          If this is not done in place, override ${out.dex.input.absolute.dir})
      -post-package
      -post-build
      -pre-clean
  -->
  <import file="custom_rules.xml" optional="true" />

  <!-- Import the actual build file.

       To customize existing targets, there are two options:
       - Customize only one target:
           - copy/paste the target into this file, *before* the
             <import> task.
           - customize it to your needs.
       - Customize the whole content of build.xml
           - copy/paste the content of the rules files (minus the top node)
             into this file, replacing the <import> task.
           - customize to your needs.

       ***********************
       ****** IMPORTANT ******
       ***********************
       In all cases you must update the value of version-tag below to read 'custom' instead of an integer,
       in order to avoid having your file be overridden by tools such as "android update project"
  -->
  <!-- version-tag: 1 -->
  <import file="${sdk.dir}/tools/ant/build.xml" />
</project>

View File

@ -0,0 +1,76 @@
#!/usr/bin/env python
# Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license
# that can be found in the LICENSE file in the root of the source
# tree. An additional intellectual property rights grant can be found
# in the file PATENTS. All contributing project authors may
# be found in the AUTHORS file in the root of the source tree.
"""
This scripts tests creating an Android Studio project using the
generate_gradle.py script and making a debug build using it.
It expect to be given the webrtc output build directory as the first argument
all other arguments are optional.
"""
import argparse
import logging
import os
import shutil
import subprocess
import sys
import tempfile
# Directory containing this script.
SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
# WebRTC src/ checkout root, three levels up from this script.
SRC_DIR = os.path.normpath(os.path.join(SCRIPT_DIR, os.pardir, os.pardir,
                                        os.pardir))
# Chromium helper that emits an Android Studio gradle project from GN targets.
GENERATE_GRADLE_SCRIPT = os.path.join(SRC_DIR,
                                      'build/android/gradle/generate_gradle.py')
# Gradle wrapper checked in next to this script.
GRADLEW_BIN = os.path.join(SCRIPT_DIR, 'third_party/gradle/gradlew')
def _RunCommand(argv, cwd=SRC_DIR, **check_call_kwargs):
  """Log and run a command, raising CalledProcessError if it fails.

  Args:
    argv: Command line as a list of arguments.
    cwd: Working directory for the command (defaults to the src/ root).
    **check_call_kwargs: Extra keyword arguments for subprocess.check_call.
  """
  logging.info('Running %r', argv)
  subprocess.check_call(argv, cwd=cwd, **check_call_kwargs)
def _ParseArgs():
  """Parse and return the command-line arguments for this script."""
  parser = argparse.ArgumentParser(
      description='Test generating Android gradle project.')
  parser.add_argument('build_dir_android',
                      help='The path to the build directory for Android.')
  parser.add_argument('--project_dir',
                      help='A temporary directory to put the output.')
  return parser.parse_args()
def main():
  """Generate an Android Studio gradle project and run a debug build in it."""
  logging.basicConfig(level=logging.INFO)
  args = _ParseArgs()

  # Fall back to a fresh temporary directory when none was supplied.
  project_dir = args.project_dir or tempfile.mkdtemp()
  project_dir = os.path.abspath(project_dir)
  output_dir = os.path.abspath(args.build_dir_android)

  try:
    _RunCommand([GENERATE_GRADLE_SCRIPT, '--output-directory', output_dir,
                 '--target', '//webrtc/examples:AppRTCMobile',
                 '--project-dir', project_dir,
                 '--use-gradle-process-resources', '--split-projects',
                 '--canary'])
    _RunCommand([GRADLEW_BIN, 'assembleDebug'], project_dir)
  finally:
    # Do not delete temporary directory if user specified it manually.
    if not args.project_dir:
      shutil.rmtree(project_dir, True)


if __name__ == '__main__':
  sys.exit(main())

View File

@ -0,0 +1,16 @@
# This file is automatically generated by Android Tools.
# Do not modify this file -- YOUR CHANGES WILL BE ERASED!
#
# This file must be checked into Version Control Systems.
#
# To customize properties used by the Ant build system edit
# "ant.properties", and override values to adapt the script to your
# project structure.
#
# To enable ProGuard to shrink and obfuscate your code, uncomment this (available properties: sdk.dir, user.home):
#proguard.config=${sdk.dir}/tools/proguard/proguard-android.txt:proguard-project.txt
# Project target.
target=android-22
java.compilerargs=-Xlint:all -Werror

View File

@ -0,0 +1,88 @@
/*
* Copyright 2016 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.appspot.apprtc.test;
import static android.support.test.espresso.action.ViewActions.click;
import static android.support.test.espresso.matcher.ViewMatchers.withId;
import static android.support.test.espresso.Espresso.onView;
import android.content.Context;
import android.content.Intent;
import android.net.Uri;
import android.os.Environment;
import android.support.test.espresso.IdlingPolicies;
import android.support.test.filters.LargeTest;
import android.support.test.rule.ActivityTestRule;
import android.support.test.runner.AndroidJUnit4;
import android.support.test.InstrumentationRegistry;
import java.util.UUID;
import java.util.concurrent.TimeUnit;
import org.appspot.apprtc.CallActivity;
import org.appspot.apprtc.ConnectActivity;
import org.appspot.apprtc.R;
import org.junit.Rule;
import org.junit.Test;
import org.junit.runner.RunWith;
/**
 * Used to start a loopback call with video input from file and video output also to file.
 * The test case is a building block in other testing for video quality.
 */
@RunWith(AndroidJUnit4.class)
@LargeTest
public class CallActivityStubbedInputOutputTest {
  private static final String TAG = "CallActivityStubbedInputOutputTest";

  /**
   * Launches CallActivity configured for a local loopback call that reads camera
   * input from a pre-recorded .y4m file and saves the received remote video to
   * output.y4m on external storage.
   */
  @Rule
  public ActivityTestRule<CallActivity> rule = new ActivityTestRule<CallActivity>(
      CallActivity.class) {
    @Override
    protected Intent getActivityIntent() {
      Intent intent = new Intent("android.intent.action.VIEW", Uri.parse("http://localhost:9999"));
      intent.putExtra(CallActivity.EXTRA_USE_VALUES_FROM_INTENT, true);
      intent.putExtra(CallActivity.EXTRA_LOOPBACK, true);
      intent.putExtra(CallActivity.EXTRA_AUDIOCODEC, "OPUS");
      intent.putExtra(CallActivity.EXTRA_VIDEOCODEC, "VP8");
      intent.putExtra(CallActivity.EXTRA_CAPTURETOTEXTURE_ENABLED, false);
      intent.putExtra(CallActivity.EXTRA_CAMERA2, false);
      // Random room id so repeated runs do not collide on the signaling server.
      intent.putExtra(CallActivity.EXTRA_ROOMID, UUID.randomUUID().toString().substring(0, 8));
      // TODO false for wstls to disable https, should be option later or if URL is http
      intent.putExtra(CallActivity.EXTRA_URLPARAMETERS,
          "debug=loopback&ts=&wshpp=localhost:8089&wstls=false");
      intent.putExtra(CallActivity.EXTRA_VIDEO_FILE_AS_CAMERA,
          Environment.getExternalStorageDirectory().getAbsolutePath()
              + "/chromium_tests_root/resources/reference_video_640x360_30fps.y4m");
      intent.putExtra(CallActivity.EXTRA_SAVE_REMOTE_VIDEO_TO_FILE,
          Environment.getExternalStorageDirectory().getAbsolutePath() + "/output.y4m");
      intent.putExtra(CallActivity.EXTRA_SAVE_REMOTE_VIDEO_TO_FILE_WIDTH, 640);
      intent.putExtra(CallActivity.EXTRA_SAVE_REMOTE_VIDEO_TO_FILE_HEIGHT, 360);
      return intent;
    }
  };

  /** Records a few seconds of the loopback call, then hangs up. */
  @Test
  public void testLoopback() throws InterruptedException {
    // The time to write down the data during closing of the program can take a while.
    IdlingPolicies.setMasterPolicyTimeout(240000, TimeUnit.MILLISECONDS);
    // During the time we sleep it will record video.
    Thread.sleep(8000);
    // Click on hang-up button.
    onView(withId(R.id.button_call_disconnect)).perform(click());
  }
}

View File

@ -0,0 +1,683 @@
/*
* Copyright 2014 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.appspot.apprtc.test;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import android.os.Build;
import android.support.test.InstrumentationRegistry;
import android.support.test.filters.SmallTest;
import android.util.Log;
import java.util.LinkedList;
import java.util.List;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import org.appspot.apprtc.AppRTCClient.SignalingParameters;
import org.appspot.apprtc.PeerConnectionClient;
import org.appspot.apprtc.PeerConnectionClient.PeerConnectionEvents;
import org.appspot.apprtc.PeerConnectionClient.PeerConnectionParameters;
import org.chromium.base.test.BaseJUnit4ClassRunner;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.webrtc.Camera1Enumerator;
import org.webrtc.Camera2Enumerator;
import org.webrtc.CameraEnumerator;
import org.webrtc.IceCandidate;
import org.webrtc.MediaCodecVideoEncoder;
import org.webrtc.PeerConnection;
import org.webrtc.PeerConnectionFactory;
import org.webrtc.SessionDescription;
import org.webrtc.StatsReport;
import org.webrtc.VideoCapturer;
import org.webrtc.VideoFrame;
import org.webrtc.VideoRenderer;
import org.webrtc.VideoSink;
@RunWith(BaseJUnit4ClassRunner.class)
public class PeerConnectionClientTest implements PeerConnectionEvents {
  private static final String TAG = "RTCClientTest";
  // Timeouts in milliseconds and retry counts for the individual test phases.
  private static final int ICE_CONNECTION_WAIT_TIMEOUT = 10000;
  private static final int WAIT_TIMEOUT = 7000;
  private static final int CAMERA_SWITCH_ATTEMPTS = 3;
  private static final int VIDEO_RESTART_ATTEMPTS = 3;
  private static final int CAPTURE_FORMAT_CHANGE_ATTEMPTS = 3;
  private static final int VIDEO_RESTART_TIMEOUT = 500;
  private static final int EXPECTED_VIDEO_FRAMES = 10;
  // Codec names as understood by PeerConnectionParameters.
  private static final String VIDEO_CODEC_VP8 = "VP8";
  private static final String VIDEO_CODEC_VP9 = "VP9";
  private static final String VIDEO_CODEC_H264 = "H264";
  private static final int AUDIO_RUN_TIMEOUT = 1000;
  private static final String LOCAL_RENDERER_NAME = "Local renderer";
  private static final String REMOTE_RENDERER_NAME = "Remote renderer";
  // Capture format constants (fps and pixel resolutions).
  private static final int MAX_VIDEO_FPS = 30;
  private static final int WIDTH_VGA = 640;
  private static final int HEIGHT_VGA = 480;
  private static final int WIDTH_QVGA = 320;
  private static final int HEIGHT_QVGA = 240;

  // The peer connection client is assumed to be thread safe in itself; the
  // reference is written by the test thread and read by worker threads.
  private volatile PeerConnectionClient pcClient;
  private volatile boolean loopback;

  // These are protected by their respective event objects.
  private ExecutorService signalingExecutor;
  private boolean isClosed;
  private boolean isIceConnected;
  private SessionDescription localSdp;
  private List<IceCandidate> iceCandidates = new LinkedList<IceCandidate>();
  // Monitor objects used to signal state changes from callback threads to the
  // test thread; each guards the flag(s) of the same name above.
  private final Object localSdpEvent = new Object();
  private final Object iceCandidateEvent = new Object();
  private final Object iceConnectedEvent = new Object();
  private final Object closeEvent = new Object();
  // Mock renderer implementation. Counts rendered frames via a latch so tests
  // can wait for a given number of frames to arrive.
  private static class MockRenderer implements VideoRenderer.Callbacks {
    // These are protected by 'this' since we get called from worker threads.
    private String rendererName;
    private boolean renderFrameCalled = false;
    // Thread-safe in itself.
    private CountDownLatch doneRendering;

    public MockRenderer(int expectedFrames, String rendererName) {
      this.rendererName = rendererName;
      reset(expectedFrames);
    }

    // Resets render to wait for new amount of video frames.
    public synchronized void reset(int expectedFrames) {
      renderFrameCalled = false;
      doneRendering = new CountDownLatch(expectedFrames);
    }

    @Override
    public synchronized void renderFrame(VideoRenderer.I420Frame frame) {
      // Log only the first frame after each reset() to avoid log spam.
      if (!renderFrameCalled) {
        if (rendererName != null) {
          Log.d(TAG, rendererName + " render frame: " + frame.rotatedWidth() + " x "
              + frame.rotatedHeight());
        } else {
          Log.d(TAG, "Render frame: " + frame.rotatedWidth() + " x " + frame.rotatedHeight());
        }
      }
      renderFrameCalled = true;
      // Frames must be returned to WebRTC after rendering.
      VideoRenderer.renderFrameDone(frame);
      doneRendering.countDown();
    }

    // This method shouldn't hold any locks or touch member variables since it
    // blocks.
    public boolean waitForFramesRendered(int timeoutMs) throws InterruptedException {
      doneRendering.await(timeoutMs, TimeUnit.MILLISECONDS);
      return (doneRendering.getCount() <= 0);
    }
  }
  // Mock VideoSink implementation. Same latch-based frame counting as
  // MockRenderer, but for the newer VideoSink/VideoFrame API (no explicit
  // frame release needed).
  private static class MockSink implements VideoSink {
    // These are protected by 'this' since we get called from worker threads.
    private String rendererName;
    private boolean renderFrameCalled = false;
    // Thread-safe in itself.
    private CountDownLatch doneRendering;

    public MockSink(int expectedFrames, String rendererName) {
      this.rendererName = rendererName;
      reset(expectedFrames);
    }

    // Resets render to wait for new amount of video frames.
    public synchronized void reset(int expectedFrames) {
      renderFrameCalled = false;
      doneRendering = new CountDownLatch(expectedFrames);
    }

    @Override
    public synchronized void onFrame(VideoFrame frame) {
      // Log only the first frame after each reset() to avoid log spam.
      if (!renderFrameCalled) {
        if (rendererName != null) {
          Log.d(TAG,
              rendererName + " render frame: " + frame.getRotatedWidth() + " x "
                  + frame.getRotatedHeight());
        } else {
          Log.d(TAG, "Render frame: " + frame.getRotatedWidth() + " x " + frame.getRotatedHeight());
        }
      }
      renderFrameCalled = true;
      doneRendering.countDown();
    }

    // This method shouldn't hold any locks or touch member variables since it
    // blocks.
    public boolean waitForFramesRendered(int timeoutMs) throws InterruptedException {
      doneRendering.await(timeoutMs, TimeUnit.MILLISECONDS);
      return (doneRendering.getCount() <= 0);
    }
  }
  // Peer connection events implementation. These run on PeerConnectionClient
  // worker threads; each one records state and notifies the matching monitor
  // object so the waitFor* helpers on the test thread can proceed.

  @Override
  public void onLocalDescription(SessionDescription sdp) {
    Log.d(TAG, "LocalSDP type: " + sdp.type);
    synchronized (localSdpEvent) {
      localSdp = sdp;
      localSdpEvent.notifyAll();
    }
  }

  @Override
  public void onIceCandidate(final IceCandidate candidate) {
    synchronized (iceCandidateEvent) {
      Log.d(TAG, "IceCandidate #" + iceCandidates.size() + " : " + candidate.toString());
      if (loopback) {
        // Loopback local ICE candidate in a separate thread to avoid adding
        // remote ICE candidate in a local ICE candidate callback.
        signalingExecutor.execute(new Runnable() {
          @Override
          public void run() {
            pcClient.addRemoteIceCandidate(candidate);
          }
        });
      }
      iceCandidates.add(candidate);
      iceCandidateEvent.notifyAll();
    }
  }

  @Override
  public void onIceCandidatesRemoved(final IceCandidate[] candidates) {
    // TODO(honghaiz): Add this for tests.
  }

  @Override
  public void onIceConnected() {
    Log.d(TAG, "ICE Connected");
    synchronized (iceConnectedEvent) {
      isIceConnected = true;
      iceConnectedEvent.notifyAll();
    }
  }

  @Override
  public void onIceDisconnected() {
    Log.d(TAG, "ICE Disconnected");
    synchronized (iceConnectedEvent) {
      isIceConnected = false;
      iceConnectedEvent.notifyAll();
    }
  }

  @Override
  public void onPeerConnectionClosed() {
    Log.d(TAG, "PeerConnection closed");
    synchronized (closeEvent) {
      isClosed = true;
      closeEvent.notifyAll();
    }
  }

  @Override
  public void onPeerConnectionError(String description) {
    // Any asynchronous client error fails the running test immediately.
    fail("PC Error: " + description);
  }

  @Override
  public void onPeerConnectionStatsReady(StatsReport[] reports) {}
  // Helper wait functions. Each blocks the test thread until the corresponding
  // callback has fired or the timeout elapses, and returns whether the event
  // happened.
  // NOTE(review): each helper issues a single non-looped Object.wait(), so a
  // spurious wakeup would shorten the effective timeout — confirm acceptable
  // for these tests.

  private boolean waitForLocalSDP(int timeoutMs) throws InterruptedException {
    synchronized (localSdpEvent) {
      if (localSdp == null) {
        localSdpEvent.wait(timeoutMs);
      }
      return (localSdp != null);
    }
  }

  private boolean waitForIceCandidates(int timeoutMs) throws InterruptedException {
    synchronized (iceCandidateEvent) {
      if (iceCandidates.size() == 0) {
        iceCandidateEvent.wait(timeoutMs);
      }
      return (iceCandidates.size() > 0);
    }
  }

  private boolean waitForIceConnected(int timeoutMs) throws InterruptedException {
    synchronized (iceConnectedEvent) {
      if (!isIceConnected) {
        iceConnectedEvent.wait(timeoutMs);
      }
      if (!isIceConnected) {
        Log.e(TAG, "ICE connection failure");
      }
      return isIceConnected;
    }
  }

  private boolean waitForPeerConnectionClosed(int timeoutMs) throws InterruptedException {
    synchronized (closeEvent) {
      if (!isClosed) {
        closeEvent.wait(timeoutMs);
      }
      return isClosed;
    }
  }
  // Creates a PeerConnectionClient with this test registered as the events
  // listener, kicks off factory/peer-connection creation and an SDP offer,
  // then returns the client. Renderers may be null for audio-only calls.
  PeerConnectionClient createPeerConnectionClient(MockSink localRenderer,
      MockRenderer remoteRenderer, PeerConnectionParameters peerConnectionParameters,
      VideoCapturer videoCapturer) {
    // No STUN/TURN servers needed: tests run in local loopback.
    List<PeerConnection.IceServer> iceServers = new LinkedList<PeerConnection.IceServer>();
    SignalingParameters signalingParameters =
        new SignalingParameters(iceServers, true, // iceServers, initiator.
            null, null, null, // clientId, wssUrl, wssPostUrl.
            null, null); // offerSdp, iceCandidates.
    PeerConnectionClient client = new PeerConnectionClient();
    PeerConnectionFactory.Options options = new PeerConnectionFactory.Options();
    options.networkIgnoreMask = 0;
    options.disableNetworkMonitor = true;
    client.setPeerConnectionFactoryOptions(options);
    client.createPeerConnectionFactory(
        InstrumentationRegistry.getTargetContext(), peerConnectionParameters, this);
    client.createPeerConnection(localRenderer, remoteRenderer, videoCapturer, signalingParameters);
    client.createOffer();
    return client;
  }
  // Builds loopback call parameters for an audio-only (OPUS) call.
  private PeerConnectionParameters createParametersForAudioCall() {
    PeerConnectionParameters peerConnectionParameters = new PeerConnectionParameters(
        false, /* videoCallEnabled */
        true, /* loopback */
        false, /* tracing */
        // Video codec parameters.
        0, /* videoWidth */
        0, /* videoHeight */
        0, /* videoFps */
        0, /* videoStartBitrate */
        "", /* videoCodec */
        true, /* videoCodecHwAcceleration */
        false, /* videoFlexfecEnabled */
        // Audio codec parameters.
        0, /* audioStartBitrate */
        "OPUS", /* audioCodec */
        false, /* noAudioProcessing */
        false, /* aecDump */
        false /* useOpenSLES */, false /* disableBuiltInAEC */, false /* disableBuiltInAGC */,
        false /* disableBuiltInNS */, false /* enableLevelControl */, false /* disableWebRtcAGC */);
    return peerConnectionParameters;
  }
  // Creates a capturer for the first enumerated camera. Camera2 is used only
  // when capturing to texture is requested and the device supports it;
  // otherwise the Camera1 API is used.
  private VideoCapturer createCameraCapturer(boolean captureToTexture) {
    final boolean useCamera2 = captureToTexture
        && Camera2Enumerator.isSupported(InstrumentationRegistry.getTargetContext());
    CameraEnumerator enumerator;
    if (useCamera2) {
      enumerator = new Camera2Enumerator(InstrumentationRegistry.getTargetContext());
    } else {
      enumerator = new Camera1Enumerator(captureToTexture);
    }
    String deviceName = enumerator.getDeviceNames()[0];
    return enumerator.createCapturer(deviceName, null);
  }
  // Builds loopback call parameters for a video call with the given codec
  // name (VP8/VP9/H264) and OPUS audio.
  private PeerConnectionParameters createParametersForVideoCall(String videoCodec) {
    PeerConnectionParameters peerConnectionParameters = new PeerConnectionParameters(
        true, /* videoCallEnabled */
        true, /* loopback */
        false, /* tracing */
        // Video codec parameters.
        0, /* videoWidth */
        0, /* videoHeight */
        0, /* videoFps */
        0, /* videoStartBitrate */
        videoCodec, /* videoCodec */
        true, /* videoCodecHwAcceleration */
        false, /* videoFlexfecEnabled */
        // Audio codec parameters.
        0, /* audioStartBitrate */
        "OPUS", /* audioCodec */
        false, /* noAudioProcessing */
        false, /* aecDump */
        false /* useOpenSLES */, false /* disableBuiltInAEC */, false /* disableBuiltInAGC */,
        false /* disableBuiltInNS */, false /* enableLevelControl */, false /* disableWebRtcAGC */);
    return peerConnectionParameters;
  }
  @Before
  public void setUp() {
    // Executor used by onIceCandidate() to loop candidates back to the client
    // outside the candidate callback.
    signalingExecutor = Executors.newSingleThreadExecutor();
  }

  @After
  public void tearDown() {
    signalingExecutor.shutdown();
  }
  // Verifies that merely creating a local offer makes local camera frames
  // flow to the local renderer, without any remote peer.
  @Test
  @SmallTest
  public void testSetLocalOfferMakesVideoFlowLocally() throws InterruptedException {
    Log.d(TAG, "testSetLocalOfferMakesVideoFlowLocally");
    MockSink localRenderer = new MockSink(EXPECTED_VIDEO_FRAMES, LOCAL_RENDERER_NAME);
    pcClient = createPeerConnectionClient(localRenderer, new MockRenderer(0, null),
        createParametersForVideoCall(VIDEO_CODEC_VP8),
        createCameraCapturer(false /* captureToTexture */));

    // Wait for local SDP and ice candidates set events.
    assertTrue("Local SDP was not set.", waitForLocalSDP(WAIT_TIMEOUT));
    assertTrue("ICE candidates were not generated.", waitForIceCandidates(WAIT_TIMEOUT));

    // Check that local video frames were rendered.
    assertTrue(
        "Local video frames were not rendered.", localRenderer.waitForFramesRendered(WAIT_TIMEOUT));
    pcClient.close();
    assertTrue(
        "PeerConnection close event was not received.", waitForPeerConnectionClosed(WAIT_TIMEOUT));
    Log.d(TAG, "testSetLocalOfferMakesVideoFlowLocally Done.");
  }
  // Shared loopback-call driver: creates a client, feeds its own offer back
  // as the answer, waits for ICE to connect and for media to flow, then
  // closes. Used by all testLoopback* cases below.
  private void doLoopbackTest(PeerConnectionParameters parameters, VideoCapturer videoCapturer,
      boolean decodeToTexture) throws InterruptedException {
    loopback = true;
    MockSink localRenderer = null;
    MockRenderer remoteRenderer = null;
    if (parameters.videoCallEnabled) {
      Log.d(TAG, "testLoopback for video " + parameters.videoCodec);
      localRenderer = new MockSink(EXPECTED_VIDEO_FRAMES, LOCAL_RENDERER_NAME);
      remoteRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, REMOTE_RENDERER_NAME);
    } else {
      Log.d(TAG, "testLoopback for audio.");
    }
    pcClient = createPeerConnectionClient(localRenderer, remoteRenderer, parameters, videoCapturer);

    // Wait for local SDP, rename it to answer and set as remote SDP.
    assertTrue("Local SDP was not set.", waitForLocalSDP(WAIT_TIMEOUT));
    SessionDescription remoteSdp = new SessionDescription(
        SessionDescription.Type.fromCanonicalForm("answer"), localSdp.description);
    pcClient.setRemoteDescription(remoteSdp);

    // Wait for ICE connection.
    assertTrue("ICE connection failure.", waitForIceConnected(ICE_CONNECTION_WAIT_TIMEOUT));

    if (parameters.videoCallEnabled) {
      // Check that local and remote video frames were rendered.
      assertTrue("Local video frames were not rendered.",
          localRenderer.waitForFramesRendered(WAIT_TIMEOUT));
      assertTrue("Remote video frames were not rendered.",
          remoteRenderer.waitForFramesRendered(WAIT_TIMEOUT));
    } else {
      // For audio just sleep for 1 sec.
      // TODO(glaznev): check how we can detect that remote audio was rendered.
      Thread.sleep(AUDIO_RUN_TIMEOUT);
    }

    pcClient.close();
    assertTrue(waitForPeerConnectionClosed(WAIT_TIMEOUT));
    Log.d(TAG, "testLoopback done.");
  }
  // Loopback calls per codec, decoding to byte buffers (not textures).

  @Test
  @SmallTest
  public void testLoopbackAudio() throws InterruptedException {
    doLoopbackTest(createParametersForAudioCall(), null, false /* decodeToTexture */);
  }

  @Test
  @SmallTest
  public void testLoopbackVp8() throws InterruptedException {
    doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_VP8),
        createCameraCapturer(false /* captureToTexture */), false /* decodeToTexture */);
  }

  @Test
  @SmallTest
  public void testLoopbackVp9() throws InterruptedException {
    doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_VP9),
        createCameraCapturer(false /* captureToTexture */), false /* decodeToTexture */);
  }

  @Test
  @SmallTest
  public void testLoopbackH264() throws InterruptedException {
    doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_H264),
        createCameraCapturer(false /* captureToTexture */), false /* decodeToTexture */);
  }
  // Loopback calls per codec, decoding to textures. Skipped (early return)
  // below SDK 19 where texture decoding is unavailable.

  @Test
  @SmallTest
  public void testLoopbackVp8DecodeToTexture() throws InterruptedException {
    if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) {
      Log.i(TAG, "Decode to textures is not supported, requires SDK version 19.");
      return;
    }
    doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_VP8),
        createCameraCapturer(false /* captureToTexture */), true /* decodeToTexture */);
  }

  @Test
  @SmallTest
  public void testLoopbackVp9DecodeToTexture() throws InterruptedException {
    if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) {
      Log.i(TAG, "Decode to textures is not supported, requires SDK version 19.");
      return;
    }
    doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_VP9),
        createCameraCapturer(false /* captureToTexture */), true /* decodeToTexture */);
  }

  @Test
  @SmallTest
  public void testLoopbackH264DecodeToTexture() throws InterruptedException {
    if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) {
      Log.i(TAG, "Decode to textures is not supported, requires SDK version 19.");
      return;
    }
    doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_H264),
        createCameraCapturer(false /* captureToTexture */), true /* decodeToTexture */);
  }
  // Loopback calls capturing to textures. Skipped (early return) below SDK 19
  // or when the hardware encoder cannot encode from a texture.

  @Test
  @SmallTest
  public void testLoopbackVp8CaptureToTexture() throws InterruptedException {
    if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) {
      Log.i(TAG, "Encode to textures is not supported. Requires SDK version 19");
      return;
    }
    // TODO(perkj): If we can always capture to textures, there is no need to check if the
    // hardware encoder supports to encode from a texture.
    if (!MediaCodecVideoEncoder.isVp8HwSupportedUsingTextures()) {
      Log.i(TAG, "VP8 encode to textures is not supported.");
      return;
    }
    doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_VP8),
        createCameraCapturer(true /* captureToTexture */), true /* decodeToTexture */);
  }

  @Test
  @SmallTest
  public void testLoopbackH264CaptureToTexture() throws InterruptedException {
    if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) {
      Log.i(TAG, "Encode to textures is not supported. Requires KITKAT");
      return;
    }
    // TODO(perkj): If we can always capture to textures, there is no need to check if the
    // hardware encoder supports to encode from a texture.
    if (!MediaCodecVideoEncoder.isH264HwSupportedUsingTextures()) {
      Log.i(TAG, "H264 encode to textures is not supported.");
      return;
    }
    doLoopbackTest(createParametersForVideoCall(VIDEO_CODEC_H264),
        createCameraCapturer(true /* captureToTexture */), true /* decodeToTexture */);
  }
  // Checks if default front camera can be switched to back camera and then
  // again to front camera. After each switch, both renderers are reset and
  // must receive frames again for the switch to count as successful.
  @Test
  @SmallTest
  public void testCameraSwitch() throws InterruptedException {
    Log.d(TAG, "testCameraSwitch");
    loopback = true;

    MockSink localRenderer = new MockSink(EXPECTED_VIDEO_FRAMES, LOCAL_RENDERER_NAME);
    MockRenderer remoteRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, REMOTE_RENDERER_NAME);

    pcClient = createPeerConnectionClient(localRenderer, remoteRenderer,
        createParametersForVideoCall(VIDEO_CODEC_VP8),
        createCameraCapturer(false /* captureToTexture */));

    // Wait for local SDP, rename it to answer and set as remote SDP.
    assertTrue("Local SDP was not set.", waitForLocalSDP(WAIT_TIMEOUT));
    SessionDescription remoteSdp = new SessionDescription(
        SessionDescription.Type.fromCanonicalForm("answer"), localSdp.description);
    pcClient.setRemoteDescription(remoteSdp);

    // Wait for ICE connection.
    assertTrue("ICE connection failure.", waitForIceConnected(ICE_CONNECTION_WAIT_TIMEOUT));

    // Check that local and remote video frames were rendered.
    assertTrue("Local video frames were not rendered before camera switch.",
        localRenderer.waitForFramesRendered(WAIT_TIMEOUT));
    assertTrue("Remote video frames were not rendered before camera switch.",
        remoteRenderer.waitForFramesRendered(WAIT_TIMEOUT));

    for (int i = 0; i < CAMERA_SWITCH_ATTEMPTS; i++) {
      // Try to switch camera
      pcClient.switchCamera();

      // Reset video renders and check that local and remote video frames
      // were rendered after camera switch.
      localRenderer.reset(EXPECTED_VIDEO_FRAMES);
      remoteRenderer.reset(EXPECTED_VIDEO_FRAMES);
      assertTrue("Local video frames were not rendered after camera switch.",
          localRenderer.waitForFramesRendered(WAIT_TIMEOUT));
      assertTrue("Remote video frames were not rendered after camera switch.",
          remoteRenderer.waitForFramesRendered(WAIT_TIMEOUT));
    }
    pcClient.close();
    assertTrue(waitForPeerConnectionClosed(WAIT_TIMEOUT));
    Log.d(TAG, "testCameraSwitch done.");
  }
// Verifies that the video source survives being stopped and restarted, which
// simulates the app going to the background and returning to the foreground.
@Test
@SmallTest
public void testVideoSourceRestart() throws InterruptedException {
  Log.d(TAG, "testVideoSourceRestart");
  loopback = true;

  MockSink localVideoSink = new MockSink(EXPECTED_VIDEO_FRAMES, LOCAL_RENDERER_NAME);
  MockRenderer remoteVideoRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, REMOTE_RENDERER_NAME);
  pcClient = createPeerConnectionClient(localVideoSink, remoteVideoRenderer,
      createParametersForVideoCall(VIDEO_CODEC_VP8),
      createCameraCapturer(false /* captureToTexture */));

  // Loop the call back onto itself: wait for the locally generated SDP,
  // relabel it as an answer and apply it as the remote description.
  assertTrue("Local SDP was not set.", waitForLocalSDP(WAIT_TIMEOUT));
  pcClient.setRemoteDescription(new SessionDescription(
      SessionDescription.Type.fromCanonicalForm("answer"), localSdp.description));

  // The call must reach the ICE connected state before frames can flow.
  assertTrue("ICE connection failure.", waitForIceConnected(ICE_CONNECTION_WAIT_TIMEOUT));

  // Both directions must deliver frames before the first restart.
  assertTrue("Local video frames were not rendered before video restart.",
      localVideoSink.waitForFramesRendered(WAIT_TIMEOUT));
  assertTrue("Remote video frames were not rendered before video restart.",
      remoteVideoRenderer.waitForFramesRendered(WAIT_TIMEOUT));

  // Stop and restart the video source several times, demanding a fresh batch
  // of frames on both renderers after every restart.
  for (int attempt = 0; attempt < VIDEO_RESTART_ATTEMPTS; attempt++) {
    pcClient.stopVideoSource();
    Thread.sleep(VIDEO_RESTART_TIMEOUT);
    pcClient.startVideoSource();

    localVideoSink.reset(EXPECTED_VIDEO_FRAMES);
    remoteVideoRenderer.reset(EXPECTED_VIDEO_FRAMES);
    assertTrue("Local video frames were not rendered after video restart.",
        localVideoSink.waitForFramesRendered(WAIT_TIMEOUT));
    assertTrue("Remote video frames were not rendered after video restart.",
        remoteVideoRenderer.waitForFramesRendered(WAIT_TIMEOUT));
  }

  pcClient.close();
  assertTrue(waitForPeerConnectionClosed(WAIT_TIMEOUT));
  Log.d(TAG, "testVideoSourceRestart done.");
}
// Verifies that the capture format can be changed on the fly and that the
// decoder is reset properly after every change.
@Test
@SmallTest
public void testCaptureFormatChange() throws InterruptedException {
  Log.d(TAG, "testCaptureFormatChange");
  loopback = true;

  MockSink localVideoSink = new MockSink(EXPECTED_VIDEO_FRAMES, LOCAL_RENDERER_NAME);
  MockRenderer remoteVideoRenderer = new MockRenderer(EXPECTED_VIDEO_FRAMES, REMOTE_RENDERER_NAME);
  pcClient = createPeerConnectionClient(localVideoSink, remoteVideoRenderer,
      createParametersForVideoCall(VIDEO_CODEC_VP8),
      createCameraCapturer(false /* captureToTexture */));

  // Loop the call back onto itself: wait for the locally generated SDP,
  // relabel it as an answer and apply it as the remote description.
  assertTrue("Local SDP was not set.", waitForLocalSDP(WAIT_TIMEOUT));
  pcClient.setRemoteDescription(new SessionDescription(
      SessionDescription.Type.fromCanonicalForm("answer"), localSdp.description));

  // The call must reach the ICE connected state before frames can flow.
  assertTrue("ICE connection failure.", waitForIceConnected(ICE_CONNECTION_WAIT_TIMEOUT));

  // Both directions must deliver frames before the first format change.
  assertTrue("Local video frames were not rendered before camera resolution change.",
      localVideoSink.waitForFramesRendered(WAIT_TIMEOUT));
  assertTrue("Remote video frames were not rendered before camera resolution change.",
      remoteVideoRenderer.waitForFramesRendered(WAIT_TIMEOUT));

  // Alternate between VGA and QVGA output, demanding a fresh batch of frames
  // on both renderers after every capture format change.
  for (int iteration = 0; iteration < 2 * CAPTURE_FORMAT_CHANGE_ATTEMPTS; iteration++) {
    boolean useVga = (iteration % 2 == 0);
    pcClient.changeCaptureFormat(useVga ? WIDTH_VGA : WIDTH_QVGA,
        useVga ? HEIGHT_VGA : HEIGHT_QVGA, MAX_VIDEO_FPS);

    localVideoSink.reset(EXPECTED_VIDEO_FRAMES);
    remoteVideoRenderer.reset(EXPECTED_VIDEO_FRAMES);
    assertTrue("Local video frames were not rendered after capture format change.",
        localVideoSink.waitForFramesRendered(WAIT_TIMEOUT));
    assertTrue("Remote video frames were not rendered after capture format change.",
        remoteVideoRenderer.waitForFramesRendered(WAIT_TIMEOUT));
  }

  pcClient.close();
  assertTrue(waitForPeerConnectionClosed(WAIT_TIMEOUT));
  Log.d(TAG, "testCaptureFormatChange done.");
}
}

View File

@ -0,0 +1,3 @@
# This file is needed for projects that have this directory as a separate Git
# mirror in DEPS. Without it, a lot is wiped and re-downloaded for each sync.
/gradle

View File

@ -0,0 +1,10 @@
The third_party directory contains sources from other projects.
Code in third_party must document the license under which the source is being
used. If the source itself does not include a license header or file, create
an entry in this file that refers to reliable documentation of the project's
license terms on the web (and add a note pointing here in the README file in
that directory).
<Include table of license information here, once it is available>

View File

@ -0,0 +1,218 @@
#!/usr/bin/env python
# Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license
# that can be found in the LICENSE file in the root of the source
# tree. An additional intellectual property rights grant can be found
# in the file PATENTS. All contributing project authors may
# be found in the AUTHORS file in the root of the source tree.
"""
This script is the wrapper that starts a loopback call with stubbed video in
and out. It then analyses the video quality of the output video against the
reference input video.
It expect to be given the webrtc output build directory as the first argument
all other arguments are optional.
It assumes you have a Android device plugged in.
"""
import argparse
import json
import logging
import os
import shutil
import subprocess
import sys
import tempfile
import time
SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
SRC_DIR = os.path.normpath(os.path.join(SCRIPT_DIR, os.pardir, os.pardir,
os.pardir))
BAD_DEVICES_JSON = os.path.join(SRC_DIR,
os.environ.get('CHROMIUM_OUT_DIR', 'out'),
'bad_devices.json')
class Error(Exception):
  """Base class for all exceptions raised by this script."""
class VideoQualityTestError(Error):
  """Raised when the video quality loopback test cannot be run."""
def _RunCommand(argv, cwd=SRC_DIR, **kwargs):
  """Runs |argv| synchronously in |cwd|.

  Raises:
    subprocess.CalledProcessError: If the command exits with a non-zero code.
  """
  logging.info('Running %r', argv)
  subprocess.check_call(argv, cwd=cwd, **kwargs)
def _RunCommandWithOutput(argv, cwd=SRC_DIR, **kwargs):
  """Runs |argv| synchronously in |cwd| and returns its captured stdout.

  Raises:
    subprocess.CalledProcessError: If the command exits with a non-zero code.
  """
  logging.info('Running %r', argv)
  output = subprocess.check_output(argv, cwd=cwd, **kwargs)
  return output
def _RunBackgroundCommand(argv, cwd=SRC_DIR):
  """Starts |argv| in |cwd| without waiting for it to finish.

  The process is given half a second to start up; if it has already exited
  with a non-zero status by then, the startup is treated as a failure.

  Returns:
    The subprocess.Popen handle of the (still running) child process.

  Raises:
    subprocess.CalledProcessError: If the child exits early with a non-zero
        status.
  """
  logging.info('Running %r', argv)
  child = subprocess.Popen(argv, cwd=cwd)
  time.sleep(0.5)
  early_status = child.poll()
  # poll() returns None while the child is alive and 0 on a clean early exit;
  # only a non-zero early status is reported as a startup failure.
  if early_status:
    raise subprocess.CalledProcessError(early_status, argv)
  return child
def _ParseArgs():
  """Parses and returns this script's command line arguments."""
  parser = argparse.ArgumentParser(description='Start loopback video analysis.')
  parser.add_argument('build_dir_android',
                      help='The path to the build directory for Android.')
  parser.add_argument('--build_dir_x86',
                      help='The path to the build directory for building locally.')
  parser.add_argument('--temp_dir',
                      help='A temporary directory to put the output.')
  parser.add_argument('--adb-path', help='Path to adb binary.', default='adb')
  return parser.parse_args()
def main():
  """Runs a loopback video quality test against a connected Android device.

  Builds (or reuses) a local frame_analyzer, downloads the ffmpeg/zxing
  toolchain and AppRTC, starts the AppRTC server, Collider and an adb reverse
  forwarder in the background, runs the Espresso test on the device, pulls
  the recorded video and compares it against the reference video.
  """
  logging.basicConfig(level=logging.INFO)

  args = _ParseArgs()
  build_dir_android = args.build_dir_android
  build_dir_x86 = args.build_dir_x86
  temp_dir = args.temp_dir
  adb_path = args.adb_path
  # Use a scratch directory for all intermediate artifacts; note that it is
  # deleted in the finally clause below even when supplied by the caller.
  if not temp_dir:
    temp_dir = tempfile.mkdtemp()
  else:
    if not os.path.exists(temp_dir):
      os.makedirs(temp_dir)

  # Without an explicit local build dir, build frame_analyzer for the host.
  if not build_dir_x86:
    build_dir_x86 = os.path.join(temp_dir, 'LocalBuild')
    _RunCommand(['gn', 'gen', build_dir_x86])
    _RunCommand(['ninja', '-C', build_dir_x86, 'frame_analyzer'])

  tools_dir = os.path.join(SRC_DIR, 'tools_webrtc')
  toolchain_dir = os.path.join(tools_dir, 'video_quality_toolchain')

  # Download ffmpeg and zxing.
  download_tools_script = os.path.join(tools_dir, 'download_tools.py')
  _RunCommand([sys.executable, download_tools_script, toolchain_dir])

  testing_tools_dir = os.path.join(SRC_DIR, 'webrtc', 'rtc_tools', 'testing')

  # Download, extract and build AppRTC.
  setup_apprtc_script = os.path.join(testing_tools_dir, 'setup_apprtc.py')
  _RunCommand([sys.executable, setup_apprtc_script, temp_dir])

  # Select an Android device in case multiple are connected, skipping any
  # listed in BAD_DEVICES_JSON. A missing/unreadable file means no exclusions.
  try:
    with open(BAD_DEVICES_JSON) as bad_devices_file:
      bad_devices = json.load(bad_devices_file)
  except IOError:
    if os.environ.get('CHROME_HEADLESS'):
      # Only warn on bots; locally the file is usually absent by design.
      logging.warning('Cannot read %r', BAD_DEVICES_JSON)
    bad_devices = {}

  # `adb devices` lines look like "<serial>\tdevice"; take the first usable
  # serial. The for/else raises when no acceptable device was found.
  for line in _RunCommandWithOutput([adb_path, 'devices']).splitlines():
    if line.endswith('\tdevice'):
      android_device = line.split('\t')[0]
      if android_device not in bad_devices:
        break
  else:
    raise VideoQualityTestError('Cannot find any connected Android device.')

  processes = []
  try:
    # Start AppRTC Server
    dev_appserver = os.path.join(temp_dir, 'apprtc', 'temp', 'google-cloud-sdk',
                                 'bin', 'dev_appserver.py')
    appengine_dir = os.path.join(temp_dir, 'apprtc', 'out', 'app_engine')
    processes.append(_RunBackgroundCommand([
        'python', dev_appserver, appengine_dir,
        '--port=9999', '--admin_port=9998',
        '--skip_sdk_update_check', '--clear_datastore=yes']))

    # Start Collider (the AppRTC WebSocket signaling server).
    collider_path = os.path.join(temp_dir, 'collider', 'collidermain')
    processes.append(_RunBackgroundCommand([
        collider_path, '-tls=false', '-port=8089',
        '-room-server=http://localhost:9999']))

    # Start adb reverse forwarder so the device can reach the local servers.
    reverseforwarder_path = os.path.join(
        SRC_DIR, 'build', 'android', 'adb_reverse_forwarder.py')
    processes.append(_RunBackgroundCommand([
        reverseforwarder_path, '--device', android_device,
        '9999', '9999', '8089', '8089']))

    # Run the Espresso code.
    test_script = os.path.join(build_dir_android,
        'bin', 'run_AppRTCMobileTestStubbedVideoIO')
    _RunCommand([test_script, '--device', android_device])

    # Pull the output video recorded by the test on the device.
    test_video = os.path.join(temp_dir, 'test_video.y4m')
    _RunCommand([adb_path, '-s', android_device,
                 'pull', '/sdcard/output.y4m', test_video])

    test_video_yuv = os.path.join(temp_dir, 'test_video.yuv')

    # NOTE(review): the 'linux' toolchain subdirectory is hard-coded, so this
    # script presumably only runs on Linux hosts — confirm before porting.
    ffmpeg_path = os.path.join(toolchain_dir, 'linux', 'ffmpeg')

    def ConvertVideo(input_video, output_video):
      # Convert (e.g. .y4m -> raw .yuv) with ffmpeg, overwriting the output.
      _RunCommand([ffmpeg_path, '-y', '-i', input_video, output_video])

    ConvertVideo(test_video, test_video_yuv)

    reference_video = os.path.join(SRC_DIR,
        'resources', 'reference_video_640x360_30fps.y4m')

    reference_video_yuv = os.path.join(temp_dir,
        'reference_video_640x360_30fps.yuv')

    ConvertVideo(reference_video, reference_video_yuv)

    # Run compare script.
    compare_script = os.path.join(SRC_DIR, 'webrtc', 'rtc_tools',
                                  'compare_videos.py')
    zxing_path = os.path.join(toolchain_dir, 'linux', 'zxing')

    # The frame_analyzer binary should be built for local computer and not for
    # Android
    frame_analyzer = os.path.join(build_dir_x86, 'frame_analyzer')

    # Frame size must match the reference video (640x360, per its filename).
    frame_width = 640
    frame_height = 360

    stats_file_ref = os.path.join(temp_dir, 'stats_ref.txt')
    stats_file_test = os.path.join(temp_dir, 'stats_test.txt')

    _RunCommand([
        sys.executable, compare_script, '--ref_video', reference_video_yuv,
        '--test_video', test_video_yuv, '--yuv_frame_width', str(frame_width),
        '--yuv_frame_height', str(frame_height),
        '--stats_file_ref', stats_file_ref,
        '--stats_file_test', stats_file_test,
        '--frame_analyzer', frame_analyzer,
        '--ffmpeg_path', ffmpeg_path, '--zxing_path', zxing_path])

  finally:
    # Tear down the background servers and remove all intermediate artifacts.
    for process in processes:
      if process:
        process.terminate()
        process.wait()
    shutil.rmtree(temp_dir)
if __name__ == '__main__':
  # main() has no return statement, so the process exit status is always 0
  # unless an exception escapes (which Python reports as a non-zero exit).
  sys.exit(main())