Moving src/webrtc into src/.

In order to eliminate the WebRTC Subtree mirror in Chromium, 
WebRTC is moving the content of the src/webrtc directory up
to the src/ directory.

NOPRESUBMIT=true
NOTREECHECKS=true
NOTRY=true
TBR=tommi@webrtc.org

Bug: chromium:611808
Change-Id: Iac59c5b51b950f174119565bac87955a7994bc38
Reviewed-on: https://webrtc-review.googlesource.com/1560
Commit-Queue: Mirko Bonadei <mbonadei@webrtc.org>
Reviewed-by: Henrik Kjellander <kjellander@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#19845}
Author:    Mirko Bonadei
Date:      2017-09-15 06:15:48 +02:00
Committer: Commit Bot
Parent:    6674846b4a
Commit:    bb547203bf

4576 files changed, 1092 insertions(+), 1196 deletions(-)

@@ -0,0 +1 @@
andresp@webrtc.org

@@ -0,0 +1,12 @@
Loopback test

This is a simple HTML test framework that runs a loopback test, which can go
via TURN. For now the test is used to analyse bandwidth estimation and to
capture recordings of bad scenarios.

How to run:
./run-server.sh (starts a Python server that serves the test pages)
Access http://localhost:8080/loopback_test.html to run the test.

How to record:
You can use record-test.sh to get a tcpdump of a test run.

@@ -0,0 +1,211 @@
/**
* Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
// This file is cloned from samples/js/base/adapter.js.
// Modify the original and make a new copy instead of changing this file.
var RTCPeerConnection = null;
var getUserMedia = null;
var attachMediaStream = null;
var reattachMediaStream = null;
var webrtcDetectedBrowser = null;
var webrtcDetectedVersion = null;
function trace(text) {
// This function is used for logging.
if (text[text.length - 1] == '\n') {
text = text.substring(0, text.length - 1);
}
console.log((performance.now() / 1000).toFixed(3) + ": " + text);
}
function maybeFixConfiguration(pcConfig) {
if (pcConfig == null) {
return;
}
for (var i = 0; i < pcConfig.iceServers.length; i++) {
if (pcConfig.iceServers[i].hasOwnProperty('urls')){
pcConfig.iceServers[i]['url'] = pcConfig.iceServers[i]['urls'];
delete pcConfig.iceServers[i]['urls'];
}
}
}
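// Illustrative example (hypothetical server name, not from this project):
//   {iceServers: [{urls: 'turn:turn.example.org', username: 'u', credential: 'p'}]}
// is rewritten in place to
//   {iceServers: [{url: 'turn:turn.example.org', username: 'u', credential: 'p'}]}
// so browsers that only understand the legacy .url field still accept it.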
if (navigator.mozGetUserMedia) {
console.log("This appears to be Firefox");
webrtcDetectedBrowser = "firefox";
webrtcDetectedVersion =
parseInt(navigator.userAgent.match(/Firefox\/([0-9]+)\./)[1], 10);
// The RTCPeerConnection object.
var RTCPeerConnection = function(pcConfig, pcConstraints) {
// .urls is not supported in FF yet.
maybeFixConfiguration(pcConfig);
return new mozRTCPeerConnection(pcConfig, pcConstraints);
}
// The RTCSessionDescription object.
RTCSessionDescription = mozRTCSessionDescription;
// The RTCIceCandidate object.
RTCIceCandidate = mozRTCIceCandidate;
// Get UserMedia (only difference is the prefix).
// Code from Adam Barth.
getUserMedia = navigator.mozGetUserMedia.bind(navigator);
navigator.getUserMedia = getUserMedia;
// Creates iceServer from the url for FF.
createIceServer = function(url, username, password) {
var iceServer = null;
var url_parts = url.split(':');
if (url_parts[0].indexOf('stun') === 0) {
// Create iceServer with stun url.
iceServer = { 'url': url };
} else if (url_parts[0].indexOf('turn') === 0) {
if (webrtcDetectedVersion < 27) {
// Create iceServer with turn url.
// Ignore the transport parameter in the TURN url for FF versions earlier than 27.
var turn_url_parts = url.split("?");
// Return null for createIceServer if transport=tcp.
if (turn_url_parts.length === 1 ||
turn_url_parts[1].indexOf('transport=udp') === 0) {
iceServer = {'url': turn_url_parts[0],
'credential': password,
'username': username};
}
} else {
// FF 27 and above support transport parameters in the TURN url,
// so pass the full url in to create the iceServer.
iceServer = {'url': url,
'credential': password,
'username': username};
}
}
return iceServer;
};
createIceServers = function(urls, username, password) {
var iceServers = [];
// Use .url for Firefox.
for (var i = 0; i < urls.length; i++) {
var iceServer = createIceServer(urls[i],
username,
password);
if (iceServer !== null) {
iceServers.push(iceServer);
}
}
return iceServers;
}
// Attach a media stream to an element.
attachMediaStream = function(element, stream) {
console.log("Attaching media stream");
element.mozSrcObject = stream;
element.play();
};
reattachMediaStream = function(to, from) {
console.log("Reattaching media stream");
to.mozSrcObject = from.mozSrcObject;
to.play();
};
// Fake get{Video,Audio}Tracks
if (!MediaStream.prototype.getVideoTracks) {
MediaStream.prototype.getVideoTracks = function() {
return [];
};
}
if (!MediaStream.prototype.getAudioTracks) {
MediaStream.prototype.getAudioTracks = function() {
return [];
};
}
} else if (navigator.webkitGetUserMedia) {
console.log("This appears to be Chrome");
webrtcDetectedBrowser = "chrome";
webrtcDetectedVersion =
parseInt(navigator.userAgent.match(/Chrom(e|ium)\/([0-9]+)\./)[2], 10);
// Creates iceServer from the url for Chrome M33 and earlier.
createIceServer = function(url, username, password) {
var iceServer = null;
var url_parts = url.split(':');
if (url_parts[0].indexOf('stun') === 0) {
// Create iceServer with stun url.
iceServer = { 'url': url };
} else if (url_parts[0].indexOf('turn') === 0) {
// Chrome M28 and above use the TURN format below.
iceServer = {'url': url,
'credential': password,
'username': username};
}
return iceServer;
};
// Creates iceServers from the urls for Chrome M34 and above.
createIceServers = function(urls, username, password) {
var iceServers = [];
if (webrtcDetectedVersion >= 34) {
// .urls is supported since Chrome M34.
iceServers = {'urls': urls,
'credential': password,
'username': username };
} else {
for (var i = 0; i < urls.length; i++) {
var iceServer = createIceServer(urls[i],
username,
password);
if (iceServer !== null) {
iceServers.push(iceServer);
}
}
}
return iceServers;
};
// The RTCPeerConnection object.
var RTCPeerConnection = function(pcConfig, pcConstraints) {
// .urls is supported since Chrome M34.
if (webrtcDetectedVersion < 34) {
maybeFixConfiguration(pcConfig);
}
return new webkitRTCPeerConnection(pcConfig, pcConstraints);
}
// Get UserMedia (only difference is the prefix).
// Code from Adam Barth.
getUserMedia = navigator.webkitGetUserMedia.bind(navigator);
navigator.getUserMedia = getUserMedia;
// Attach a media stream to an element.
attachMediaStream = function(element, stream) {
if (typeof element.srcObject !== 'undefined') {
element.srcObject = stream;
} else if (typeof element.mozSrcObject !== 'undefined') {
element.mozSrcObject = stream;
} else if (typeof element.src !== 'undefined') {
element.src = URL.createObjectURL(stream);
} else {
console.log('Error attaching stream to element.');
}
};
reattachMediaStream = function(to, from) {
to.src = from.src;
};
} else {
console.log("Browser does not appear to be WebRTC-capable");
}
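// A minimal usage sketch of the polyfill above (commented out; the element id,
// TURN url and credentials are placeholders, not values from this project):
//
//   var server = createIceServer('turn:turn.example.org:3478?transport=udp',
//                                'user', 'secret');
//   var pc = new RTCPeerConnection(server ? {iceServers: [server]} : null, null);
//   getUserMedia({audio: true, video: true}, function(stream) {
//     attachMediaStream(document.getElementById('localVideo'), stream);
//     pc.addStream(stream);
//   }, function(error) { trace('getUserMedia failed: ' + error); });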

@@ -0,0 +1,227 @@
<!DOCTYPE html>
<!--
This page was created to help debug and study WebRTC issues such as
bandwidth estimation problems. It allows one to easily launch a test
case that establishes a connection between 2 peer connections.
-->
<html>
<head>
<title>Loopback test</title>
<!-- In order to plot graphs, this tool uses the Google Visualization API,
which is loaded via google.load, provided by the Google JS API (jsapi). -->
<script src="//www.google.com/jsapi"></script>
<!-- This file is included to allow loopback_test.js to instantiate an
RTCPeerConnection in a browser- and version-agnostic way. -->
<script src="adapter.js"></script>
<!-- Provides the StatTracker class, used by loopback_test.js to keep track of
RTCPeerConnection stats. -->
<script src="stat_tracker.js"></script>
<!-- Provides the LoopbackTest class, which has the core logic for the test
itself, such as: create 2 peer connections, establish a call, filter TURN
candidates, constrain the video bitrate, etc.
-->
<script src="loopback_test.js"></script>
<style>
#chart {
height: 400px;
}
#control-range {
height: 100px;
}
</style>
</head>
<body>
<div id="test-launcher">
<p>Duration (s): <input id="duration" type="text"></p>
<p>Max video bitrate (kbps): <input id="max-video-bitrate" type="text"></p>
<p>Peer connection constraints: <input id="pc-constraints" type="text"></p>
<p>Force TURN: <input id="force-turn" type="checkbox" checked></p>
<p><input id="launcher-button" type="button" value="Run test">
<div id="test-status" style="display:none"></div>
<div id="dashboard">
<div id="control-category"></div>
<div id="chart"></div>
<div id="control-range"></div>
</div>
</div>
<script>
google.load('visualization', '1.0', {'packages':['controls']});
var durationInput = document.getElementById('duration');
var maxVideoBitrateInput = document.getElementById('max-video-bitrate');
var forceTurnInput = document.getElementById('force-turn');
var launcherButton = document.getElementById('launcher-button');
var autoModeInput = document.createElement('input');
var testStatus = document.getElementById('test-status');
var pcConstraintsInput = document.getElementById('pc-constraints');
launcherButton.onclick = start;
// Load parameters from the url if present. This allows one to link to
// a specific test configuration and is used to automatically pass parameters
// for scripts such as record-test.sh
function getURLParameter(name, default_value) {
var search =
RegExp('(^\\?|&)' + name + '=' + '(.+?)(&|$)').exec(location.search);
if (search)
return decodeURI(search[2]);
else
return default_value;
}
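// Example (illustrative values): a link such as
//   loopback_test.html?duration=60&max-video-bitrate=1000&force-turn=false&auto-mode=true
// pre-fills the input fields and, because auto-mode is set, starts the test
// automatically.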
durationInput.value = getURLParameter('duration', 10);
maxVideoBitrateInput.value = getURLParameter('max-video-bitrate', 2000);
forceTurnInput.checked = (getURLParameter('force-turn', 'true') === 'true');
autoModeInput.checked = (getURLParameter('auto-mode', 'false') === 'true');
pcConstraintsInput.value = getURLParameter('pc-constraints', '');
if (autoModeInput.checked) start();
function start() {
var durationMs = parseInt(durationInput.value) * 1000;
var maxVideoBitrateKbps = parseInt(maxVideoBitrateInput.value);
var forceTurn = forceTurnInput.checked;
var autoClose = autoModeInput.checked;
var pcConstraints = pcConstraintsInput.value == "" ?
null : JSON.parse(pcConstraintsInput.value);
var updateStatusInterval;
var testFinished = false;
function updateStatus() {
if (testFinished) {
testStatus.innerHTML = 'Test finished';
if (updateStatusInterval) {
clearInterval(updateStatusInterval);
updateStatusInterval = null;
}
} else {
if (!updateStatusInterval) {
updateStatusInterval = setInterval(updateStatus, 1000);
testStatus.innerHTML = 'Running';
}
testStatus.innerHTML += '.';
}
}
if (!(isFinite(maxVideoBitrateKbps) && maxVideoBitrateKbps > 0)) {
// TODO(andresp): Get a better way to show errors than alert.
alert("Invalid max video bitrate");
return;
}
if (!(isFinite(durationMs) && durationMs > 0)) {
alert("Invalid duration");
return;
}
durationInput.disabled = true;
forceTurnInput.disabled = true;
maxVideoBitrateInput.disabled = true;
launcherButton.style.display = 'none';
testStatus.style.display = 'block';
getUserMedia({audio:true, video:true},
gotStream, function() {});
function gotStream(stream) {
updateStatus();
var test = new LoopbackTest(stream, durationMs,
forceTurn,
pcConstraints,
maxVideoBitrateKbps);
test.run(onTestFinished.bind(test));
}
function onTestFinished() {
testFinished = true;
updateStatus();
if (autoClose) {
window.close();
} else {
plotStats(this.getResults());
}
}
}
function plotStats(data) {
var dashboard = new google.visualization.Dashboard(
document.getElementById('dashboard'));
var chart = new google.visualization.ChartWrapper({
'containerId': 'chart',
'chartType': 'LineChart',
'options': { 'pointSize': 0, 'lineWidth': 1, 'interpolateNulls': true },
});
var rangeFilter = new google.visualization.ControlWrapper({
'controlType': 'ChartRangeFilter',
'containerId': 'control-range',
'options': {
'filterColumnIndex': 0,
'ui': {
'chartType': 'ScatterChart',
'chartOptions': {
'hAxis': {'baselineColor': 'none'}
},
'chartView': {
'columns': [0, 1]
},
'minRangeSize': 1000 // 1 second
}
},
});
// Create a table with the columns of the dataset.
var columnsTable = new google.visualization.DataTable();
columnsTable.addColumn('number', 'columnIndex');
columnsTable.addColumn('string', 'columnLabel');
var initState = {selectedValues: []};
for (var i = 1; i < data.getNumberOfColumns(); i++) {
columnsTable.addRow([i, data.getColumnLabel(i)]);
initState.selectedValues.push(data.getColumnLabel(i));
}
var columnFilter = new google.visualization.ControlWrapper({
controlType: 'CategoryFilter',
containerId: 'control-category',
dataTable: columnsTable,
options: {
filterColumnLabel: 'columnLabel',
ui: {
label: '',
allowNone: false,
selectedValuesLayout: 'aside'
}
},
state: initState
});
google.visualization.events.addListener(columnFilter, 'statechange',
function () {
var state = columnFilter.getState();
var row;
var columnIndices = [0];
for (var i = 0; i < state.selectedValues.length; i++) {
row = columnsTable.getFilteredRows([{
column: 1,
value: state.selectedValues[i]}])[0];
columnIndices.push(columnsTable.getValue(row, 0));
}
// Sort the indices into their original order
columnIndices.sort(function (a, b) { return (a - b); });
chart.setView({columns: columnIndices});
chart.draw();
});
columnFilter.draw();
dashboard.bind([rangeFilter], [chart]);
dashboard.draw(data);
}
</script>
</body>
</html>

@@ -0,0 +1,240 @@
/**
* Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
// LoopbackTest establishes a one-way loopback call between 2 peer connections
// while continuously monitoring bandwidth stats. The idea is to use this as
// a base for other future tests and to keep track of more than just bandwidth
// stats.
//
// Usage:
// var test = new LoopbackTest(stream, callDurationMs,
// forceTurn, pcConstraints,
// maxVideoBitrateKbps);
// test.run(onDone);
// function onDone() {
// test.getResults(); // return stats recorded during the loopback test.
// }
//
function LoopbackTest(
stream,
callDurationMs,
forceTurn,
pcConstraints,
maxVideoBitrateKbps) {
var pc1StatTracker;
var pc2StatTracker;
// In order to study the effect of the network (e.g. wifi) on a peer connection
// one can establish a loopback call and force it to go via a TURN server. This
// way the call won't switch to local addresses. That is achieved by filtering
// out all non-relay ICE candidates on both peers.
function constrainTurnCandidates(pc) {
var origAddIceCandidate = pc.addIceCandidate;
pc.addIceCandidate = function (candidate, successCallback,
failureCallback) {
if (forceTurn && candidate.candidate.indexOf("typ relay ") == -1) {
trace("Dropping non-turn candidate: " + candidate.candidate);
successCallback();
return;
} else {
origAddIceCandidate.call(this, candidate, successCallback,
failureCallback);
}
}
}
// FEC makes it hard to study bandwidth estimation since there seems to be a
// spike when it is enabled and disabled. Disable it for now. The FEC issue is
// tracked at:
// https://code.google.com/p/webrtc/issues/detail?id=3050
function constrainOfferToRemoveFec(pc) {
var origCreateOffer = pc.createOffer;
pc.createOffer = function (successCallback, failureCallback, options) {
function filteredSuccessCallback(desc) {
desc.sdp = desc.sdp.replace(/(m=video 1 [^\r]+)(116 117)(\r\n)/g,
'$1\r\n');
desc.sdp = desc.sdp.replace(/a=rtpmap:116 red\/90000\r\n/g, '');
desc.sdp = desc.sdp.replace(/a=rtpmap:117 ulpfec\/90000\r\n/g, '');
successCallback(desc);
}
origCreateOffer.call(this, filteredSuccessCallback, failureCallback,
options);
}
}
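// Illustrative effect on an assumed offer SDP: payload types 116 and 117 are
// stripped from the "m=video 1 ..." line and the corresponding
// "a=rtpmap:116 red/90000" and "a=rtpmap:117 ulpfec/90000" lines are removed,
// so RED/ULPFEC is not negotiated.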
// Constrain the max video bitrate by modifying the SDP when creating an answer.
function constrainBitrateAnswer(pc) {
var origCreateAnswer = pc.createAnswer;
pc.createAnswer = function (successCallback, failureCallback, options) {
function filteredSuccessCallback(desc) {
if (maxVideoBitrateKbps) {
desc.sdp = desc.sdp.replace(
/a=mid:video\r\n/g,
'a=mid:video\r\nb=AS:' + maxVideoBitrateKbps + '\r\n');
}
successCallback(desc);
}
origCreateAnswer.call(this, filteredSuccessCallback, failureCallback,
options);
}
}
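// Illustrative effect: with maxVideoBitrateKbps = 1000, every "a=mid:video"
// line in the answer is followed by "b=AS:1000", capping the video bandwidth
// at roughly 1 Mbps.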
// Run the actual LoopbackTest.
this.run = function(doneCallback) {
if (forceTurn) requestTurn(start, fail);
else start();
function start(turnServer) {
var pcConfig = forceTurn ? { iceServers: [turnServer] } : null;
console.log(pcConfig);
var pc1 = new RTCPeerConnection(pcConfig, pcConstraints);
constrainTurnCandidates(pc1);
constrainOfferToRemoveFec(pc1);
pc1StatTracker = new StatTracker(pc1, 50);
pc1StatTracker.recordStat("EstimatedSendBitrate",
"bweforvideo", "googAvailableSendBandwidth");
pc1StatTracker.recordStat("TransmitBitrate",
"bweforvideo", "googTransmitBitrate");
pc1StatTracker.recordStat("TargetEncodeBitrate",
"bweforvideo", "googTargetEncBitrate");
pc1StatTracker.recordStat("ActualEncodedBitrate",
"bweforvideo", "googActualEncBitrate");
var pc2 = new RTCPeerConnection(pcConfig, pcConstraints);
constrainTurnCandidates(pc2);
constrainBitrateAnswer(pc2);
pc2StatTracker = new StatTracker(pc2, 50);
pc2StatTracker.recordStat("REMB",
"bweforvideo", "googAvailableReceiveBandwidth");
pc1.addStream(stream);
var call = new Call(pc1, pc2);
call.start();
setTimeout(function () {
call.stop();
pc1StatTracker.stop();
pc2StatTracker.stop();
success();
}, callDurationMs);
}
function success() {
trace("Success");
doneCallback();
}
function fail(msg) {
trace("Fail: " + msg);
doneCallback();
}
}
// Returns a Google Visualization DataTable with the samples recorded during
// the loopback test.
this.getResults = function () {
return mergeDataTable(pc1StatTracker.dataTable(),
pc2StatTracker.dataTable());
}
// Helper class to establish and manage a call between 2 peer connections.
// Usage:
// var c = new Call(pc1, pc2);
// c.start();
// c.stop();
//
function Call(pc1, pc2) {
pc1.onicecandidate = applyIceCandidate.bind(pc2);
pc2.onicecandidate = applyIceCandidate.bind(pc1);
function applyIceCandidate(e) {
if (e.candidate) {
this.addIceCandidate(new RTCIceCandidate(e.candidate),
onAddIceCandidateSuccess,
onAddIceCandidateError);
}
}
function onAddIceCandidateSuccess() {}
function onAddIceCandidateError(error) {
trace("Failed to add Ice Candidate: " + error.toString());
}
this.start = function() {
pc1.createOffer(gotDescription1, onCreateSessionDescriptionError);
function onCreateSessionDescriptionError(error) {
trace('Failed to create session description: ' + error.toString());
}
function gotDescription1(desc){
trace("Offer: " + desc.sdp);
pc1.setLocalDescription(desc);
pc2.setRemoteDescription(desc);
// Since the "remote" side has no media stream we need
// to pass in the right constraints in order for it to
// accept the incoming offer of audio and video.
pc2.createAnswer(gotDescription2, onCreateSessionDescriptionError);
}
function gotDescription2(desc){
trace("Answer: " + desc.sdp);
pc2.setLocalDescription(desc);
pc1.setRemoteDescription(desc);
}
}
this.stop = function() {
pc1.close();
pc2.close();
}
}
// Request a TURN server. This uses the same servers as apprtc.
function requestTurn(successCallback, failureCallback) {
var currentDomain = document.domain;
if (currentDomain.search('localhost') === -1 &&
currentDomain.search('webrtc.googlecode.com') === -1) {
failureCallback("Domain not authorized for turn server: " +
currentDomain);
return;
}
// Get a turn server from computeengineondemand.appspot.com.
var turnUrl = 'https://computeengineondemand.appspot.com/' +
'turn?username=156547625762562&key=4080218913';
var xmlhttp = new XMLHttpRequest();
xmlhttp.onreadystatechange = onTurnResult;
xmlhttp.open('GET', turnUrl, true);
xmlhttp.send();
function onTurnResult() {
if (this.readyState !== 4) {
return;
}
if (this.status === 200) {
var turnServer = JSON.parse(xmlhttp.responseText);
// Create turnUris using the polyfill (adapter.js).
turnServer.uris = turnServer.uris.filter(
function (e) { return e.search('transport=udp') != -1; }
);
var iceServers = createIceServers(turnServer.uris,
turnServer.username,
turnServer.password);
if (iceServers !== null) {
successCallback(iceServers);
return;
}
}
failureCallback("Failed to get a turn server.");
}
}
}

@@ -0,0 +1,60 @@
#!/bin/sh
#
# Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license
# that can be found in the LICENSE file in the root of the source
# tree. An additional intellectual property rights grant can be found
# in the file PATENTS. All contributing project authors may
# be found in the AUTHORS file in the root of the source tree.
#
# This script is used to record a tcpdump of a loopback test run.
# Example use case:
#
# $ ./run-server.sh & # spawns a server to serve the html pages
# # on localhost:8080
#
# (recording 3 tests of 5 minutes each at bitrates of 1, 2 and 3 Mbps)
# $ sudo -v # Caches sudo credentials needed
# # for tcpdump
# $ export INTERFACE=eth1 # Defines interface to record packets
# $ export CHROME_UNDER_TESTING=./chrome # Define which chrome to run on tests
# $ export TEST="http://localhost:8080/loopback_test.html?auto-mode=true"
# $ record-test.sh "$TEST&duration=300&max-video-bitrate=1000" ./record1.pcap
# $ record-test.sh "$TEST&duration=300&max-video-bitrate=2000" ./record2.pcap
# $ record-test.sh "$TEST&duration=300&max-video-bitrate=3000" ./record3.pcap
# Indicate an error and exit with a nonzero status if any of the required
# environment variables is null or unset.
: ${INTERFACE:?"Need to set INTERFACE env variable"}
: ${CHROME_UNDER_TESTING:?"Need to set CHROME_UNDER_TESTING env variable"}
if [ ! -x "$CHROME_UNDER_TESTING" ]; then
echo "CHROME_UNDER_TESTING=$CHROME_UNDER_TESTING does not seem to exist."
exit 1
fi
if [ "$#" -ne 2 ]; then
echo "Usage: $0 <test-url> <network-dump>"
exit 1
fi
TEST_URL=$1
OUTPUT_RECORDING=$2
sudo -nv > /dev/null 2>&1
if [ $? != 0 ]; then
echo "Run \"sudo -v\" to cache your credentials." \
"They are needed to run tcpdump."
exit 1
fi
echo "Recording $INTERFACE into ${OUTPUT_RECORDING}"
sudo -n tcpdump -i "$INTERFACE" -w - > "${OUTPUT_RECORDING}" &
TCPDUMP_PID=$!
echo "Starting ${CHROME_UNDER_TESTING} with ${TEST_URL}."
# Using a real camera instead of --use-fake-device-for-media-stream as the fake
# device does not produce images complex enough to reach 3 Mbps.
# Flag --use-fake-ui-for-media-stream automatically allows getUserMedia calls.
$CHROME_UNDER_TESTING --use-fake-ui-for-media-stream "${TEST_URL}"
kill ${TCPDUMP_PID}

@@ -0,0 +1,15 @@
#!/bin/sh
#
# Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license
# that can be found in the LICENSE file in the root of the source
# tree. An additional intellectual property rights grant can be found
# in the file PATENTS. All contributing project authors may
# be found in the AUTHORS file in the root of the source tree.
#
# This script is used to launch a simple HTTP server serving the files in the
# same location as the script itself.
cd "`dirname \"$0\"`"
echo "Starting http server in port 8080."
exec python -m SimpleHTTPServer 8080

@@ -0,0 +1,94 @@
/**
* Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
// StatTracker is a helper class to keep track of stats on an RTCPeerConnection
// object. It uses Google Visualization DataTables to keep the recorded samples
// and to simplify plugging them into graphs later.
//
// Usage example:
// var tracker = new StatTracker(pc, pollInterval);
// tracker.recordStat("EstimatedSendBitrate",
// "bweforvideo", "googAvailableSendBandwidth");
// ...
// tracker.stop();
// tracker.dataTable(); // returns the recorded values. In this case
// a table with 2 columns { Time, EstimatedSendBitrate } and a row for each
// sample taken until stop() was called.
//
function StatTracker(pc, pollInterval) {
pollInterval = pollInterval || 250;
var dataTable = new google.visualization.DataTable();
var timeColumnIndex = dataTable.addColumn('datetime', 'Time');
var recording = true;
// Set of sampling functions. Functions registered here are called
// once per getStats with the given report and a rowIndex for the
// sample period so they can extract and record the tracked variables.
var samplingFunctions = {};
// Accessor to the current recorded stats.
this.dataTable = function() { return dataTable; }
// recordStat(varName, recordName, statName) adds a samplingFunction that
// records namedItem(recordName).stat(statName) from RTCStatsReport for each
// sample into a column named varName in the dataTable.
this.recordStat = function (varName, recordName, statName) {
var columnIndex = dataTable.addColumn('number', varName);
samplingFunctions[varName] = function (report, rowIndex) {
var sample;
var record = report.namedItem(recordName);
if (record) sample = record.stat(statName);
dataTable.setCell(rowIndex, columnIndex, sample);
}
}
// Stops the polling of stats from the peer connection.
this.stop = function() {
recording = false;
}
// RTCPeerConnection.getStats is asynchronous. In order to avoid having
// too many pending getStats requests going, this code only queues the
// next getStats with setTimeout after the previous one returns, instead
// of using setInterval.
function poll() {
pc.getStats(function (report) {
if (!recording) return;
setTimeout(poll, pollInterval);
var result = report.result();
if (result.length < 1) return;
var rowIndex = dataTable.addRow();
dataTable.setCell(rowIndex, timeColumnIndex, result[0].timestamp);
for (var v in samplingFunctions)
samplingFunctions[v](report, rowIndex);
});
}
setTimeout(poll, pollInterval);
}
/**
* Utility method to perform a full join between data tables from StatTracker.
*/
function mergeDataTable(dataTable1, dataTable2) {
function allColumns(cols) {
var a = [];
for (var i = 1; i < cols; ++i) a.push(i);
return a;
}
return google.visualization.data.join(
dataTable1,
dataTable2,
'full',
[[0, 0]],
allColumns(dataTable1.getNumberOfColumns()),
allColumns(dataTable2.getNumberOfColumns()));
}
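// Illustrative (assumed column names): joining a table with columns
// {Time, EstimatedSendBitrate} and one with {Time, REMB} yields
// {Time, EstimatedSendBitrate, REMB}, with rows matched on the Time key and
// nulls where one tracker has no sample for a given timestamp.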