Reformat python files checked by pylint (part 1/2).

After recently changing .pylintrc (see [1]) we discovered that
the presubmit check re-checks all the Python files whenever just
one Python file is updated.

This CL moves all these files one step closer to what the linter
wants.

Autogenerated with:

# Added all the files under pylint control to ~/Desktop/to-reformat
cat ~/Desktop/to-reformat | xargs sed -i '1i\\'
git cl format --python --full

This is part 1 out of 2. The second part will fix function names and
will not be automated.

[1] - https://webrtc-review.googlesource.com/c/src/+/186664

No-Presubmit: True
Bug: webrtc:12114
Change-Id: Idfec4d759f209a2090440d0af2413a1ddc01b841
Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/190980
Commit-Queue: Mirko Bonadei <mbonadei@webrtc.org>
Reviewed-by: Karl Wiberg <kwiberg@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#32530}
This commit is contained in:
Mirko Bonadei
2020-10-30 10:13:45 +01:00
committed by Commit Bot
parent d3a3e9ef36
commit 8cc6695652
93 changed files with 9936 additions and 9285 deletions

View File

@ -18,7 +18,6 @@ import subprocess
import sys
import tempfile
SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
# Chrome browsertests will throw away stderr; redirect it so that output is not lost.
@ -26,131 +25,154 @@ sys.stderr = sys.stdout
def _ParseArgs():
    """Registers the command-line options.

    Returns:
        The parsed optparse options object. Exits the process (via
        parser.error) when a required option is missing or a given path does
        not exist.
    """
    parser = optparse.OptionParser(usage='usage: %prog [options]')
    parser.add_option('--label',
                      type='string',
                      default='MY_TEST',
                      help=('Label of the test, used to identify different '
                            'tests. Default: %default'))
    parser.add_option('--ref_video',
                      type='string',
                      help='Reference video to compare with (YUV).')
    parser.add_option('--test_video',
                      type='string',
                      help=('Test video to be compared with the reference '
                            'video (YUV).'))
    parser.add_option('--frame_analyzer',
                      type='string',
                      help='Path to the frame analyzer executable.')
    parser.add_option('--aligned_output_file',
                      type='string',
                      help='Path for output aligned YUV or Y4M file.')
    parser.add_option('--vmaf', type='string', help='Path to VMAF executable.')
    parser.add_option('--vmaf_model',
                      type='string',
                      help='Path to VMAF model.')
    parser.add_option('--vmaf_phone_model',
                      action='store_true',
                      help='Whether to use phone model in VMAF.')
    parser.add_option('--yuv_frame_width',
                      type='int',
                      default=640,
                      help='Width of the YUV file\'s frames. Default: %default')
    parser.add_option('--yuv_frame_height',
                      type='int',
                      default=480,
                      help='Height of the YUV file\'s frames. Default: %default')
    parser.add_option('--chartjson_result_file',
                      type='str',
                      default=None,
                      help='Where to store perf results in chartjson format.')
    options, _ = parser.parse_args()

    # Validate every required path option: it must be present and exist on
    # disk before any subprocess is launched.
    if not options.ref_video:
        parser.error('You must provide a path to the reference video!')
    if not os.path.exists(options.ref_video):
        parser.error('Cannot find the reference video at %s' %
                     options.ref_video)

    if not options.test_video:
        parser.error('You must provide a path to the test video!')
    if not os.path.exists(options.test_video):
        parser.error('Cannot find the test video at %s' % options.test_video)

    if not options.frame_analyzer:
        parser.error(
            'You must provide the path to the frame analyzer executable!')
    if not os.path.exists(options.frame_analyzer):
        parser.error('Cannot find frame analyzer executable at %s!' %
                     options.frame_analyzer)

    if options.vmaf and not options.vmaf_model:
        parser.error('You must provide a path to a VMAF model to use VMAF.')

    return options
def _DevNull():
"""On Windows, sometimes the inherited stdin handle from the parent process
"""On Windows, sometimes the inherited stdin handle from the parent process
fails. Workaround this by passing null to stdin to the subprocesses commands.
This function can be used to create the null file handler.
"""
return open(os.devnull, 'r')
return open(os.devnull, 'r')
def _RunFrameAnalyzer(options, yuv_directory=None):
    """Run frame analyzer to compare the videos and print output.

    Args:
        options: parsed options with the analyzer path and video settings.
        yuv_directory: optional directory where the analyzer should dump
            intermediate YUV files (used when VMAF runs afterwards).

    Returns:
        The frame analyzer subprocess's return code (0 on success).
    """
    cmd = [
        options.frame_analyzer,
        '--label=%s' % options.label,
        '--reference_file=%s' % options.ref_video,
        '--test_file=%s' % options.test_video,
        '--width=%d' % options.yuv_frame_width,
        '--height=%d' % options.yuv_frame_height,
    ]
    # Optional outputs are requested only when the caller asked for them.
    if options.chartjson_result_file:
        cmd.append('--chartjson_result_file=%s' % options.chartjson_result_file)
    if options.aligned_output_file:
        cmd.append('--aligned_output_file=%s' % options.aligned_output_file)
    if yuv_directory:
        cmd.append('--yuv_directory=%s' % yuv_directory)

    analyzer_process = subprocess.Popen(cmd,
                                        stdin=_DevNull(),
                                        stdout=sys.stdout,
                                        stderr=sys.stderr)
    analyzer_process.wait()
    if analyzer_process.returncode != 0:
        print('Failed to run frame analyzer.')
    return analyzer_process.returncode
def _RunVmaf(options, yuv_directory, logfile):
    """ Run VMAF to compare videos and print output.

    The yuv_directory is assumed to have been populated with a reference and
    test video in .yuv format, with names according to the label.

    Returns:
        0 on success, 1 if the VMAF subprocess failed.
    """
    cmd = [
        options.vmaf,
        'yuv420p',
        str(options.yuv_frame_width),
        str(options.yuv_frame_height),
        os.path.join(yuv_directory, "ref.yuv"),
        os.path.join(yuv_directory, "test.yuv"),
        options.vmaf_model,
        '--log',
        logfile,
        '--log-fmt',
        'json',
    ]
    if options.vmaf_phone_model:
        cmd.append('--phone-model')

    vmaf_process = subprocess.Popen(cmd,
                                    stdin=_DevNull(),
                                    stdout=sys.stdout,
                                    stderr=sys.stderr)
    vmaf_process.wait()
    if vmaf_process.returncode != 0:
        print('Failed to run VMAF.')
        return 1

    # Read per-frame scores from VMAF output and print.
    with open(logfile) as log_handle:
        vmaf_data = json.load(log_handle)
    vmaf_scores = [frame['metrics']['vmaf'] for frame in vmaf_data['frames']]
    print('RESULT VMAF: %s=' % options.label, vmaf_scores)
    return 0
def main():
    """The main function.

    A simple invocation is:
    ./webrtc/rtc_tools/compare_videos.py

    Running vmaf requires the following arguments:
    --vmaf, --vmaf_model, --yuv_frame_width, --yuv_frame_height

    Returns:
        0 on success, non-zero when any of the comparison tools failed.
    """
    options = _ParseArgs()

    if options.vmaf:
        try:
            # Directory to save temporary YUV files for VMAF in frame_analyzer.
            yuv_directory = tempfile.mkdtemp()
            _, vmaf_logfile = tempfile.mkstemp()

            # Run frame analyzer to compare the videos and print output.
            if _RunFrameAnalyzer(options, yuv_directory=yuv_directory) != 0:
                return 1

            # Run VMAF for further video comparison and print output.
            return _RunVmaf(options, yuv_directory, vmaf_logfile)
        finally:
            # Clean up the temporary artifacts even when a step failed.
            shutil.rmtree(yuv_directory)
            os.remove(vmaf_logfile)
    else:
        return _RunFrameAnalyzer(options)


if __name__ == '__main__':
    sys.exit(main())

View File

@ -39,52 +39,59 @@ MICROSECONDS_IN_SECOND = 1e6
def main():
    """Plots metrics read from stdin.

    Reads lines from stdin, collects those tagged with LINE_PREFIX as JSON
    metric dictionaries (echoing all other lines through unchanged), then
    renders one matplotlib figure per selected metric.
    """
    parser = argparse.ArgumentParser(
        description='Plots metrics exported from WebRTC perf tests')
    parser.add_argument(
        '-m',
        '--metrics',
        type=str,
        nargs='*',
        help='Metrics to plot. If nothing specified then will plot all '
        'available')
    args = parser.parse_args()

    metrics_to_plot = set()
    if args.metrics:
        for metric in args.metrics:
            metrics_to_plot.add(metric)

    metrics = []
    for line in fileinput.input('-'):
        line = line.strip()
        if line.startswith(LINE_PREFIX):
            line = line.replace(LINE_PREFIX, '')
            metrics.append(json.loads(line))
        else:
            # Bug fix: the original used the Python-2-only `print line`
            # statement, a syntax error under Python 3; print() works on both.
            print(line)

    for metric in metrics:
        # Empty metrics_to_plot means "plot everything".
        if metrics_to_plot and metric[GRAPH_NAME] not in metrics_to_plot:
            continue

        figure = plt.figure()
        figure.canvas.set_window_title(metric[TRACE_NAME])

        x_values = []
        y_values = []
        start_x = None
        samples = metric['samples']
        samples.sort(key=lambda x: x['time'])
        for sample in samples:
            if start_x is None:
                start_x = sample['time']
            # Time is us, we want to show it in seconds.
            x_values.append(
                (sample['time'] - start_x) / MICROSECONDS_IN_SECOND)
            y_values.append(sample['value'])

        plt.ylabel('%s (%s)' % (metric[GRAPH_NAME], metric[UNITS]))
        plt.xlabel('time (s)')
        plt.title(metric[GRAPH_NAME])
        plt.plot(x_values, y_values)

    plt.show()


if __name__ == '__main__':
    main()

View File

@ -10,21 +10,21 @@
import network_tester_config_pb2
def AddConfig(all_configs, packet_send_interval_ms, packet_size,
              execution_time_ms):
    """Appends one configuration entry to the NetworkTester config proto.

    Args:
        all_configs: proto message with a repeated `configs` field.
        packet_send_interval_ms: interval between sent packets, in ms.
        packet_size: size of each packet.
        execution_time_ms: how long this configuration runs, in ms.
    """
    new_config = all_configs.configs.add()
    new_config.packet_send_interval_ms = packet_send_interval_ms
    new_config.packet_size = packet_size
    new_config.execution_time_ms = execution_time_ms
def main():
    """Builds a two-entry NetworkTester config and serializes it to disk."""
    all_configs = network_tester_config_pb2.NetworkTesterAllConfigs()
    AddConfig(all_configs, 10, 50, 200)
    AddConfig(all_configs, 10, 100, 200)
    with open("network_tester_config.dat", 'wb') as output_file:
        output_file.write(all_configs.SerializeToString())


if __name__ == "__main__":
    main()

View File

@ -20,128 +20,131 @@ import matplotlib.pyplot as plt
import network_tester_packet_pb2
def GetSize(file_to_parse):
    """Reads the one-byte signed length prefix of the next packet record.

    Args:
        file_to_parse: a binary file object positioned at a length prefix.

    Returns:
        The record length as a signed int, or 0 at end of file.
    """
    data = file_to_parse.read(1)
    # Bug fix: the original compared `data == ''`, which never matches the
    # b'' returned at EOF by a binary file under Python 3, so struct.unpack
    # would raise on an empty buffer. `not data` covers both str and bytes.
    if not data:
        return 0
    return struct.unpack('<b', data)[0]
def ParsePacketLog(packet_log_file_to_parse):
    """Parses a packet log into a list of NetworkTesterPacket protos.

    The log is a sequence of records, each a one-byte length prefix followed
    by a serialized NetworkTesterPacket.
    """
    packets = []
    with open(packet_log_file_to_parse, 'rb') as log_file:
        while True:
            size = GetSize(log_file)
            if size == 0:
                # A zero size marks the end of the log.
                break
            try:
                packet = network_tester_packet_pb2.NetworkTesterPacket()
                packet.ParseFromString(log_file.read(size))
                packets.append(packet)
            except IOError:
                # Truncated record at the end of the file; stop parsing.
                break
    return packets
def GetTimeAxis(packets):
    """Returns arrival times in seconds, relative to the first packet."""
    base_time = packets[0].arrival_timestamp
    # Timestamps are in microseconds; convert the offsets to seconds.
    return [(packet.arrival_timestamp - base_time) / 1000000.0
            for packet in packets]
def CreateSendTimeDiffPlot(packets, plot):
    """Plots (arrival - send) time deltas, normalized to the first packet."""
    baseline_diff = (packets[0].arrival_timestamp -
                     packets[0].send_timestamp)
    deltas = [(packet.arrival_timestamp - packet.send_timestamp) -
              baseline_diff for packet in packets]
    plot.grid(True)
    plot.set_title("SendTime difference [us]")
    plot.plot(GetTimeAxis(packets), deltas)
class MovingAverageBitrate(object):
    """Accumulates packets and reports bitrate over a 1-second window."""

    def __init__(self):
        self.packet_window = []  # Packets currently inside the window.
        self.window_time = 1000000  # Window length, in microseconds.
        self.bytes = 0  # Sum of packet sizes currently in the window.
        self.latest_packet_time = 0
        self.send_interval = 0

    def RemoveOldPackets(self):
        """Evicts packets that have fallen outside the averaging window."""
        # Bug fix: iterate over a copy. The original removed elements from
        # self.packet_window while iterating it, which skips the element
        # following each removal and can leave expired packets (and their
        # byte counts) in the window.
        for packet in list(self.packet_window):
            if (self.latest_packet_time - packet.arrival_timestamp >
                    self.window_time):
                self.bytes = self.bytes - packet.packet_size
                self.packet_window.remove(packet)

    def AddPacket(self, packet):
        """This functions returns bits / second"""
        self.send_interval = packet.arrival_timestamp - self.latest_packet_time
        self.latest_packet_time = packet.arrival_timestamp
        self.RemoveOldPackets()
        self.packet_window.append(packet)
        self.bytes = self.bytes + packet.packet_size
        return self.bytes * 8
def CreateReceiveBiratePlot(packets, plot):
    """Plots the received bitrate (moving average) over time."""
    bitrate = MovingAverageBitrate()
    y = [bitrate.AddPacket(packet) for packet in packets]
    plot.grid(True)
    # Bug fix: corrected the typo "birate" -> "bitrate" in the displayed
    # plot title. (The function name is kept for caller compatibility.)
    plot.set_title("Receive bitrate [bps]")
    plot.plot(GetTimeAxis(packets), y)
def CreatePacketlossPlot(packets, plot):
    """Plots a 0/1 loss indicator across the observed sequence-number range.

    Received sequence numbers are plotted as 0 at their arrival times;
    missing ones are plotted as 1 at a time extrapolated from the most
    recent inter-arrival spacing.
    """
    packets_by_seq = {packet.sequence_number: packet for packet in packets}
    first_seq = packets[0].sequence_number
    last_seq = packets[-1].sequence_number
    y = []
    x = []
    first_arrival_time = 0
    last_arrival_time = 0
    last_arrival_time_diff = 0
    for seq in range(first_seq, last_seq + 1):
        if seq in packets_by_seq:
            y.append(0)
            if first_arrival_time == 0:
                # First received packet anchors the time axis.
                first_arrival_time = packets_by_seq[seq].arrival_timestamp
            x_time = (packets_by_seq[seq].arrival_timestamp -
                      first_arrival_time)
            if last_arrival_time != 0:
                last_arrival_time_diff = x_time - last_arrival_time
            last_arrival_time = x_time
            x.append(x_time / 1000000.0)
        else:
            # Lost packet: place it one spacing after the previous arrival,
            # but only once a spacing has been observed.
            if last_arrival_time != 0 and last_arrival_time_diff != 0:
                x.append(
                    (last_arrival_time + last_arrival_time_diff) / 1000000.0)
            y.append(1)
    plot.grid(True)
    plot.set_title("Lost packets [0/1]")
    plot.plot(x, y)
def main():
    """Parses a packet log file and renders the three diagnostic plots."""
    parser = OptionParser()
    parser.add_option("-f",
                      "--packet_log_file",
                      dest="packet_log_file",
                      help="packet_log file to parse")
    options = parser.parse_args()[0]

    packets = ParsePacketLog(options.packet_log_file)

    figure, plots = plt.subplots(3, sharex=True)
    plt.xlabel('time [sec]')
    CreateSendTimeDiffPlot(packets, plots[0])
    CreateReceiveBiratePlot(packets, plots[1])
    CreatePacketlossPlot(packets, plots[2])
    figure.subplots_adjust(hspace=0.3)
    plt.show()


if __name__ == "__main__":
    main()

View File

@ -5,7 +5,6 @@
# tree. An additional intellectual property rights grant can be found
# in the file PATENTS. All contributing project authors may
# be found in the AUTHORS file in the root of the source tree.
"""Utility functions for calculating statistics.
"""
@ -15,18 +14,17 @@ import sys
def CountReordered(sequence_numbers):
    """Returns number of reordered indices.

    A reordered index is an index `i` for which sequence_numbers[i] >=
    sequence_numbers[i + 1]
    """
    consecutive_pairs = zip(sequence_numbers, sequence_numbers[1:])
    return sum(1 for (current, following) in consecutive_pairs
               if current >= following)
def SsrcNormalizedSizeTable(data_points):
    """Counts proportion of data for every SSRC.

    Args:
        data_points: list of pb_parse.DataPoint

    Returns:
        A dictionary mapping each SSRC `s` to the ratio of the total size of
        packets with SSRC `s` to the total size of all packets.
    """
    size_by_ssrc = collections.defaultdict(int)
    for point in data_points:
        size_by_ssrc[point.ssrc] += point.size
    return NormalizeCounter(size_by_ssrc)
def NormalizeCounter(counter):
    """Returns a normalized version of the dictionary `counter`.

    Does not modify `counter`.

    Returns:
        A new dictionary, in which every value in `counter`
        has been divided by the total to sum up to 1.
    """
    total = sum(counter.values())
    return {key: value / total for key, value in counter.items()}
def Unwrap(data, mod):
    """Returns `data` unwrapped modulo `mod`. Does not modify data.

    Adds integer multiples of mod to all elements of data except the
    first, such that all pairs of consecutive elements (a, b) satisfy
    -mod / 2 <= b - a < mod / 2.

    E.g. Unwrap([0, 1, 2, 0, 1, 2, 7, 8], 3) -> [0, 1, 2, 3,
    4, 5, 4, 5]
    """
    unwrapped = list(data)
    half = mod // 2
    for index in range(1, len(unwrapped)):
        # Map each raw step into [-mod//2, mod//2) and accumulate.
        step = (unwrapped[index] - unwrapped[index - 1] + half) % mod - half
        unwrapped[index] = unwrapped[index - 1] + step
    return unwrapped
def SsrcDirections(data_points):
    """Maps each SSRC to the `incoming` flag of its last data point."""
    return {point.ssrc: point.incoming for point in data_points}
# Python 2/3-compatible input function: on Python 2, `raw_input` is the
# non-evaluating reader; on Python 3 it was renamed to `input`.
if sys.version_info[0] <= 2:
    get_input = raw_input  # pylint: disable=invalid-name
else:
    get_input = input  # pylint: disable=invalid-name

View File

@ -6,7 +6,6 @@
# tree. An additional intellectual property rights grant can be found
# in the file PATENTS. All contributing project authors may
# be found in the AUTHORS file in the root of the source tree.
"""Run the tests with
python misc_test.py
@ -22,51 +21,52 @@ import misc
class TestMisc(unittest.TestCase):
    """Unit tests for misc.Unwrap."""

    def testUnwrapMod3(self):
        data = [0, 1, 2, 0, -1, -2, -3, -4]
        self.assertEqual([0, 1, 2, 3, 2, 1, 0, -1], misc.Unwrap(data, 3))

    def testUnwrapMod4(self):
        data = [0, 1, 2, 0, -1, -2, -3, -4]
        self.assertEqual([0, 1, 2, 0, -1, -2, -3, -4], misc.Unwrap(data, 4))

    def testDataShouldNotChangeAfterUnwrap(self):
        data = [0, 1, 2, 0, -1, -2, -3, -4]
        _ = misc.Unwrap(data, 4)
        self.assertEqual([0, 1, 2, 0, -1, -2, -3, -4], data)

    def testRandomlyMultiplesOfModAdded(self):
        # `unwrap` definition says only multiples of mod are added.
        random_data = [random.randint(0, 9) for _ in range(100)]
        for mod in range(1, 100):
            unwrapped = misc.Unwrap(random_data, mod)
            for (before, after) in zip(random_data, unwrapped):
                self.assertEqual((before - after) % mod, 0)

    def testRandomlyAgainstInequalityDefinition(self):
        # Data has to satisfy -mod/2 <= difference < mod/2 for every
        # difference between consecutive values after unwrap.
        random_data = [random.randint(0, 9) for _ in range(100)]
        for mod in range(1, 100):
            unwrapped = misc.Unwrap(random_data, mod)
            for (a, b) in zip(unwrapped, unwrapped[1:]):
                self.assertTrue(-mod / 2 <= b - a < mod / 2)

    def testRandomlyDataShouldNotChangeAfterUnwrap(self):
        random_data = [random.randint(0, 9) for _ in range(100)]
        random_data_copy = random_data[:]
        for mod in range(1, 100):
            _ = misc.Unwrap(random_data, mod)
        self.assertEqual(random_data, random_data_copy)


if __name__ == "__main__":
    unittest.main()

View File

@ -5,7 +5,6 @@
# tree. An additional intellectual property rights grant can be found
# in the file PATENTS. All contributing project authors may
# be found in the AUTHORS file in the root of the source tree.
"""Parses protobuf RTC dumps."""
from __future__ import division
@ -14,26 +13,26 @@ import pyproto.logging.rtc_event_log.rtc_event_log_pb2 as rtc_pb
class DataPoint(object):
    """Simple container class for RTP events."""

    def __init__(self, rtp_header_str, packet_size, arrival_timestamp_us,
                 incoming):
        """Builds a data point by parsing an RTP header, size and arrival time.

        RTP header structure is defined in RFC 3550 section 5.1.
        """
        self.size = packet_size
        self.arrival_timestamp_ms = arrival_timestamp_us / 1000
        self.incoming = incoming
        # The fixed RTP header: 2 bytes of flags, then sequence number,
        # timestamp and SSRC, all in network byte order.
        (flag_bytes, self.sequence_number, self.timestamp,
         self.ssrc) = struct.unpack_from("!HHII", rtp_header_str, 0)
        self.payload_type = flag_bytes & 0b01111111
        self.marker_bit = (flag_bytes & 0b10000000) >> 7
def ParseProtobuf(file_path):
    """Parses RTC event log from protobuf file.

    Args:
        file_path: path to protobuf file of RTC event stream

    Returns:
        all RTP packet events from the event stream as a list of DataPoints
    """
    event_stream = rtc_pb.EventStream()
    with open(file_path, "rb") as proto_file:
        event_stream.ParseFromString(proto_file.read())

    data_points = []
    for event in event_stream.stream:
        if not event.HasField("rtp_packet"):
            continue
        rtp = event.rtp_packet
        data_points.append(
            DataPoint(rtp.header, rtp.packet_length, event.timestamp_us,
                      rtp.incoming))
    return data_points

View File

@ -5,7 +5,6 @@
# tree. An additional intellectual property rights grant can be found
# in the file PATENTS. All contributing project authors may
# be found in the AUTHORS file in the root of the source tree.
"""Displays statistics and plots graphs from RTC protobuf dump."""
from __future__ import division
@ -24,13 +23,13 @@ import pb_parse
class RTPStatistics(object):
"""Has methods for calculating and plotting RTP stream statistics."""
"""Has methods for calculating and plotting RTP stream statistics."""
BANDWIDTH_SMOOTHING_WINDOW_SIZE = 10
PLOT_RESOLUTION_MS = 50
BANDWIDTH_SMOOTHING_WINDOW_SIZE = 10
PLOT_RESOLUTION_MS = 50
def __init__(self, data_points):
"""Initializes object with data_points and computes simple statistics.
def __init__(self, data_points):
"""Initializes object with data_points and computes simple statistics.
Computes percentages of number of packets and packet sizes by
SSRC.
@ -41,238 +40,245 @@ class RTPStatistics(object):
"""
self.data_points = data_points
self.ssrc_frequencies = misc.NormalizeCounter(
collections.Counter([pt.ssrc for pt in self.data_points]))
self.ssrc_size_table = misc.SsrcNormalizedSizeTable(self.data_points)
self.bandwidth_kbps = None
self.smooth_bw_kbps = None
self.data_points = data_points
self.ssrc_frequencies = misc.NormalizeCounter(
collections.Counter([pt.ssrc for pt in self.data_points]))
self.ssrc_size_table = misc.SsrcNormalizedSizeTable(self.data_points)
self.bandwidth_kbps = None
self.smooth_bw_kbps = None
def PrintHeaderStatistics(self):
print("{:>6}{:>14}{:>14}{:>6}{:>6}{:>3}{:>11}".format(
"SeqNo", "TimeStamp", "SendTime", "Size", "PT", "M", "SSRC"))
for point in self.data_points:
print("{:>6}{:>14}{:>14}{:>6}{:>6}{:>3}{:>11}".format(
point.sequence_number, point.timestamp,
int(point.arrival_timestamp_ms), point.size, point.payload_type,
point.marker_bit, "0x{:x}".format(point.ssrc)))
def PrintHeaderStatistics(self):
print("{:>6}{:>14}{:>14}{:>6}{:>6}{:>3}{:>11}".format(
"SeqNo", "TimeStamp", "SendTime", "Size", "PT", "M", "SSRC"))
for point in self.data_points:
print("{:>6}{:>14}{:>14}{:>6}{:>6}{:>3}{:>11}".format(
point.sequence_number, point.timestamp,
int(point.arrival_timestamp_ms), point.size,
point.payload_type, point.marker_bit,
"0x{:x}".format(point.ssrc)))
def PrintSsrcInfo(self, ssrc_id, ssrc):
"""Prints packet and size statistics for a given SSRC.
def PrintSsrcInfo(self, ssrc_id, ssrc):
"""Prints packet and size statistics for a given SSRC.
Args:
ssrc_id: textual identifier of SSRC printed beside statistics for it.
ssrc: SSRC by which to filter data and display statistics
"""
filtered_ssrc = [point for point in self.data_points if point.ssrc
== ssrc]
payloads = misc.NormalizeCounter(
collections.Counter([point.payload_type for point in
filtered_ssrc]))
filtered_ssrc = [
point for point in self.data_points if point.ssrc == ssrc
]
payloads = misc.NormalizeCounter(
collections.Counter(
[point.payload_type for point in filtered_ssrc]))
payload_info = "payload type(s): {}".format(
", ".join(str(payload) for payload in payloads))
print("{} 0x{:x} {}, {:.2f}% packets, {:.2f}% data".format(
ssrc_id, ssrc, payload_info, self.ssrc_frequencies[ssrc] * 100,
self.ssrc_size_table[ssrc] * 100))
print(" packet sizes:")
(bin_counts, bin_bounds) = numpy.histogram([point.size for point in
filtered_ssrc], bins=5,
density=False)
bin_proportions = bin_counts / sum(bin_counts)
print("\n".join([
" {:.1f} - {:.1f}: {:.2f}%".format(bin_bounds[i], bin_bounds[i + 1],
bin_proportions[i] * 100)
for i in range(len(bin_proportions))
]))
payload_info = "payload type(s): {}".format(", ".join(
str(payload) for payload in payloads))
print("{} 0x{:x} {}, {:.2f}% packets, {:.2f}% data".format(
ssrc_id, ssrc, payload_info, self.ssrc_frequencies[ssrc] * 100,
self.ssrc_size_table[ssrc] * 100))
print(" packet sizes:")
(bin_counts,
bin_bounds) = numpy.histogram([point.size for point in filtered_ssrc],
bins=5,
density=False)
bin_proportions = bin_counts / sum(bin_counts)
print("\n".join([
" {:.1f} - {:.1f}: {:.2f}%".format(bin_bounds[i],
bin_bounds[i + 1],
bin_proportions[i] * 100)
for i in range(len(bin_proportions))
]))
def ChooseSsrc(self):
"""Queries user for SSRC."""
def ChooseSsrc(self):
"""Queries user for SSRC."""
if len(self.ssrc_frequencies) == 1:
chosen_ssrc = self.ssrc_frequencies.keys()[0]
self.PrintSsrcInfo("", chosen_ssrc)
return chosen_ssrc
if len(self.ssrc_frequencies) == 1:
chosen_ssrc = self.ssrc_frequencies.keys()[0]
self.PrintSsrcInfo("", chosen_ssrc)
return chosen_ssrc
ssrc_is_incoming = misc.SsrcDirections(self.data_points)
incoming = [ssrc for ssrc in ssrc_is_incoming if ssrc_is_incoming[ssrc]]
outgoing = [ssrc for ssrc in ssrc_is_incoming if not ssrc_is_incoming[ssrc]]
ssrc_is_incoming = misc.SsrcDirections(self.data_points)
incoming = [
ssrc for ssrc in ssrc_is_incoming if ssrc_is_incoming[ssrc]
]
outgoing = [
ssrc for ssrc in ssrc_is_incoming if not ssrc_is_incoming[ssrc]
]
print("\nIncoming:\n")
for (i, ssrc) in enumerate(incoming):
self.PrintSsrcInfo(i, ssrc)
print("\nIncoming:\n")
for (i, ssrc) in enumerate(incoming):
self.PrintSsrcInfo(i, ssrc)
print("\nOutgoing:\n")
for (i, ssrc) in enumerate(outgoing):
self.PrintSsrcInfo(i + len(incoming), ssrc)
print("\nOutgoing:\n")
for (i, ssrc) in enumerate(outgoing):
self.PrintSsrcInfo(i + len(incoming), ssrc)
while True:
chosen_index = int(misc.get_input("choose one> "))
if 0 <= chosen_index < len(self.ssrc_frequencies):
return (incoming + outgoing)[chosen_index]
else:
print("Invalid index!")
while True:
chosen_index = int(misc.get_input("choose one> "))
if 0 <= chosen_index < len(self.ssrc_frequencies):
return (incoming + outgoing)[chosen_index]
else:
print("Invalid index!")
def FilterSsrc(self, chosen_ssrc):
"""Filters and wraps data points.
def FilterSsrc(self, chosen_ssrc):
"""Filters and wraps data points.
Removes data points with `ssrc != chosen_ssrc`. Unwraps sequence
numbers and timestamps for the chosen selection.
"""
self.data_points = [point for point in self.data_points if
point.ssrc == chosen_ssrc]
unwrapped_sequence_numbers = misc.Unwrap(
[point.sequence_number for point in self.data_points], 2**16 - 1)
for (data_point, sequence_number) in zip(self.data_points,
unwrapped_sequence_numbers):
data_point.sequence_number = sequence_number
self.data_points = [
point for point in self.data_points if point.ssrc == chosen_ssrc
]
unwrapped_sequence_numbers = misc.Unwrap(
[point.sequence_number for point in self.data_points], 2**16 - 1)
for (data_point, sequence_number) in zip(self.data_points,
unwrapped_sequence_numbers):
data_point.sequence_number = sequence_number
unwrapped_timestamps = misc.Unwrap([point.timestamp for point in
self.data_points], 2**32 - 1)
unwrapped_timestamps = misc.Unwrap(
[point.timestamp for point in self.data_points], 2**32 - 1)
for (data_point, timestamp) in zip(self.data_points,
unwrapped_timestamps):
data_point.timestamp = timestamp
for (data_point, timestamp) in zip(self.data_points,
unwrapped_timestamps):
data_point.timestamp = timestamp
def PrintSequenceNumberStatistics(self):
seq_no_set = set(point.sequence_number for point in
self.data_points)
missing_sequence_numbers = max(seq_no_set) - min(seq_no_set) + (
1 - len(seq_no_set))
print("Missing sequence numbers: {} out of {} ({:.2f}%)".format(
missing_sequence_numbers,
len(seq_no_set),
100 * missing_sequence_numbers / len(seq_no_set)
))
print("Duplicated packets: {}".format(len(self.data_points) -
len(seq_no_set)))
print("Reordered packets: {}".format(
misc.CountReordered([point.sequence_number for point in
self.data_points])))
def PrintSequenceNumberStatistics(self):
seq_no_set = set(point.sequence_number for point in self.data_points)
missing_sequence_numbers = max(seq_no_set) - min(seq_no_set) + (
1 - len(seq_no_set))
print("Missing sequence numbers: {} out of {} ({:.2f}%)".format(
missing_sequence_numbers, len(seq_no_set),
100 * missing_sequence_numbers / len(seq_no_set)))
print("Duplicated packets: {}".format(
len(self.data_points) - len(seq_no_set)))
print("Reordered packets: {}".format(
misc.CountReordered(
[point.sequence_number for point in self.data_points])))
def EstimateFrequency(self, always_query_sample_rate):
"""Estimates frequency and updates data.
def EstimateFrequency(self, always_query_sample_rate):
"""Estimates frequency and updates data.
Guesses the most probable frequency by looking at changes in
timestamps (RFC 3550 section 5.1), calculates clock drifts and
sending time of packets. Updates `self.data_points` with changes
in delay and send time.
"""
delta_timestamp = (self.data_points[-1].timestamp -
self.data_points[0].timestamp)
delta_arr_timestamp = float((self.data_points[-1].arrival_timestamp_ms -
self.data_points[0].arrival_timestamp_ms))
freq_est = delta_timestamp / delta_arr_timestamp
delta_timestamp = (self.data_points[-1].timestamp -
self.data_points[0].timestamp)
delta_arr_timestamp = float(
(self.data_points[-1].arrival_timestamp_ms -
self.data_points[0].arrival_timestamp_ms))
freq_est = delta_timestamp / delta_arr_timestamp
freq_vec = [8, 16, 32, 48, 90]
freq = None
for f in freq_vec:
if abs((freq_est - f) / f) < 0.05:
freq = f
freq_vec = [8, 16, 32, 48, 90]
freq = None
for f in freq_vec:
if abs((freq_est - f) / f) < 0.05:
freq = f
print("Estimated frequency: {:.3f}kHz".format(freq_est))
if freq is None or always_query_sample_rate:
if not always_query_sample_rate:
print ("Frequency could not be guessed.", end=" ")
freq = int(misc.get_input("Input frequency (in kHz)> "))
else:
print("Guessed frequency: {}kHz".format(freq))
print("Estimated frequency: {:.3f}kHz".format(freq_est))
if freq is None or always_query_sample_rate:
if not always_query_sample_rate:
print("Frequency could not be guessed.", end=" ")
freq = int(misc.get_input("Input frequency (in kHz)> "))
else:
print("Guessed frequency: {}kHz".format(freq))
for point in self.data_points:
point.real_send_time_ms = (point.timestamp -
self.data_points[0].timestamp) / freq
point.delay = point.arrival_timestamp_ms - point.real_send_time_ms
for point in self.data_points:
point.real_send_time_ms = (point.timestamp -
self.data_points[0].timestamp) / freq
point.delay = point.arrival_timestamp_ms - point.real_send_time_ms
def PrintDurationStatistics(self):
"""Prints delay, clock drift and bitrate statistics."""
def PrintDurationStatistics(self):
"""Prints delay, clock drift and bitrate statistics."""
min_delay = min(point.delay for point in self.data_points)
min_delay = min(point.delay for point in self.data_points)
for point in self.data_points:
point.absdelay = point.delay - min_delay
for point in self.data_points:
point.absdelay = point.delay - min_delay
stream_duration_sender = self.data_points[-1].real_send_time_ms / 1000
print("Stream duration at sender: {:.1f} seconds".format(
stream_duration_sender
))
stream_duration_sender = self.data_points[-1].real_send_time_ms / 1000
print("Stream duration at sender: {:.1f} seconds".format(
stream_duration_sender))
arrival_timestamps_ms = [point.arrival_timestamp_ms for point in
self.data_points]
stream_duration_receiver = (max(arrival_timestamps_ms) -
min(arrival_timestamps_ms)) / 1000
print("Stream duration at receiver: {:.1f} seconds".format(
stream_duration_receiver
))
arrival_timestamps_ms = [
point.arrival_timestamp_ms for point in self.data_points
]
stream_duration_receiver = (max(arrival_timestamps_ms) -
min(arrival_timestamps_ms)) / 1000
print("Stream duration at receiver: {:.1f} seconds".format(
stream_duration_receiver))
print("Clock drift: {:.2f}%".format(
100 * (stream_duration_receiver / stream_duration_sender - 1)
))
print("Clock drift: {:.2f}%".format(
100 * (stream_duration_receiver / stream_duration_sender - 1)))
total_size = sum(point.size for point in self.data_points) * 8 / 1000
print("Send average bitrate: {:.2f} kbps".format(
total_size / stream_duration_sender))
total_size = sum(point.size for point in self.data_points) * 8 / 1000
print("Send average bitrate: {:.2f} kbps".format(
total_size / stream_duration_sender))
print("Receive average bitrate: {:.2f} kbps".format(
total_size / stream_duration_receiver))
print("Receive average bitrate: {:.2f} kbps".format(
total_size / stream_duration_receiver))
def RemoveReordered(self):
last = self.data_points[0]
data_points_ordered = [last]
for point in self.data_points[1:]:
if point.sequence_number > last.sequence_number and (
point.real_send_time_ms > last.real_send_time_ms):
data_points_ordered.append(point)
last = point
self.data_points = data_points_ordered
def RemoveReordered(self):
last = self.data_points[0]
data_points_ordered = [last]
for point in self.data_points[1:]:
if point.sequence_number > last.sequence_number and (
point.real_send_time_ms > last.real_send_time_ms):
data_points_ordered.append(point)
last = point
self.data_points = data_points_ordered
def ComputeBandwidth(self):
"""Computes bandwidth averaged over several consecutive packets.
def ComputeBandwidth(self):
"""Computes bandwidth averaged over several consecutive packets.
The number of consecutive packets used in the average is
BANDWIDTH_SMOOTHING_WINDOW_SIZE. Averaging is done with
numpy.correlate.
"""
start_ms = self.data_points[0].real_send_time_ms
stop_ms = self.data_points[-1].real_send_time_ms
(self.bandwidth_kbps, _) = numpy.histogram(
[point.real_send_time_ms for point in self.data_points],
bins=numpy.arange(start_ms, stop_ms,
RTPStatistics.PLOT_RESOLUTION_MS),
weights=[point.size * 8 / RTPStatistics.PLOT_RESOLUTION_MS
for point in self.data_points]
)
correlate_filter = (numpy.ones(
RTPStatistics.BANDWIDTH_SMOOTHING_WINDOW_SIZE) /
RTPStatistics.BANDWIDTH_SMOOTHING_WINDOW_SIZE)
self.smooth_bw_kbps = numpy.correlate(self.bandwidth_kbps, correlate_filter)
start_ms = self.data_points[0].real_send_time_ms
stop_ms = self.data_points[-1].real_send_time_ms
(self.bandwidth_kbps, _) = numpy.histogram(
[point.real_send_time_ms for point in self.data_points],
bins=numpy.arange(start_ms, stop_ms,
RTPStatistics.PLOT_RESOLUTION_MS),
weights=[
point.size * 8 / RTPStatistics.PLOT_RESOLUTION_MS
for point in self.data_points
])
correlate_filter = (
numpy.ones(RTPStatistics.BANDWIDTH_SMOOTHING_WINDOW_SIZE) /
RTPStatistics.BANDWIDTH_SMOOTHING_WINDOW_SIZE)
self.smooth_bw_kbps = numpy.correlate(self.bandwidth_kbps,
correlate_filter)
def PlotStatistics(self):
"""Plots changes in delay and average bandwidth."""
def PlotStatistics(self):
"""Plots changes in delay and average bandwidth."""
start_ms = self.data_points[0].real_send_time_ms
stop_ms = self.data_points[-1].real_send_time_ms
time_axis = numpy.arange(start_ms / 1000, stop_ms / 1000,
RTPStatistics.PLOT_RESOLUTION_MS / 1000)
start_ms = self.data_points[0].real_send_time_ms
stop_ms = self.data_points[-1].real_send_time_ms
time_axis = numpy.arange(start_ms / 1000, stop_ms / 1000,
RTPStatistics.PLOT_RESOLUTION_MS / 1000)
delay = CalculateDelay(start_ms, stop_ms,
RTPStatistics.PLOT_RESOLUTION_MS,
self.data_points)
delay = CalculateDelay(start_ms, stop_ms,
RTPStatistics.PLOT_RESOLUTION_MS,
self.data_points)
plt.figure(1)
plt.plot(time_axis, delay[:len(time_axis)])
plt.xlabel("Send time [s]")
plt.ylabel("Relative transport delay [ms]")
plt.figure(1)
plt.plot(time_axis, delay[:len(time_axis)])
plt.xlabel("Send time [s]")
plt.ylabel("Relative transport delay [ms]")
plt.figure(2)
plt.plot(time_axis[:len(self.smooth_bw_kbps)], self.smooth_bw_kbps)
plt.xlabel("Send time [s]")
plt.ylabel("Bandwidth [kbps]")
plt.figure(2)
plt.plot(time_axis[:len(self.smooth_bw_kbps)], self.smooth_bw_kbps)
plt.xlabel("Send time [s]")
plt.ylabel("Bandwidth [kbps]")
plt.show()
plt.show()
def CalculateDelay(start, stop, step, points):
"""Quantizes the time coordinates for the delay.
"""Quantizes the time coordinates for the delay.
Quantizes points by rounding the timestamps downwards to the nearest
point in the time sequence start, start+step, start+2*step... Takes
@ -280,61 +286,67 @@ def CalculateDelay(start, stop, step, points):
masked array, in which time points with no value are masked.
"""
grouped_delays = [[] for _ in numpy.arange(start, stop + step, step)]
rounded_value_index = lambda x: int((x - start) / step)
for point in points:
grouped_delays[rounded_value_index(point.real_send_time_ms)
].append(point.absdelay)
regularized_delays = [numpy.average(arr) if arr else -1 for arr in
grouped_delays]
return numpy.ma.masked_values(regularized_delays, -1)
grouped_delays = [[] for _ in numpy.arange(start, stop + step, step)]
rounded_value_index = lambda x: int((x - start) / step)
for point in points:
grouped_delays[rounded_value_index(point.real_send_time_ms)].append(
point.absdelay)
regularized_delays = [
numpy.average(arr) if arr else -1 for arr in grouped_delays
]
return numpy.ma.masked_values(regularized_delays, -1)
def main():
usage = "Usage: %prog [options] <filename of rtc event log>"
parser = optparse.OptionParser(usage=usage)
parser.add_option("--dump_header_to_stdout",
default=False, action="store_true",
help="print header info to stdout; similar to rtp_analyze")
parser.add_option("--query_sample_rate",
default=False, action="store_true",
help="always query user for real sample rate")
usage = "Usage: %prog [options] <filename of rtc event log>"
parser = optparse.OptionParser(usage=usage)
parser.add_option(
"--dump_header_to_stdout",
default=False,
action="store_true",
help="print header info to stdout; similar to rtp_analyze")
parser.add_option("--query_sample_rate",
default=False,
action="store_true",
help="always query user for real sample rate")
parser.add_option("--working_directory",
default=None, action="store",
help="directory in which to search for relative paths")
parser.add_option("--working_directory",
default=None,
action="store",
help="directory in which to search for relative paths")
(options, args) = parser.parse_args()
(options, args) = parser.parse_args()
if len(args) < 1:
parser.print_help()
sys.exit(0)
if len(args) < 1:
parser.print_help()
sys.exit(0)
input_file = args[0]
input_file = args[0]
if options.working_directory and not os.path.isabs(input_file):
input_file = os.path.join(options.working_directory, input_file)
if options.working_directory and not os.path.isabs(input_file):
input_file = os.path.join(options.working_directory, input_file)
data_points = pb_parse.ParseProtobuf(input_file)
rtp_stats = RTPStatistics(data_points)
data_points = pb_parse.ParseProtobuf(input_file)
rtp_stats = RTPStatistics(data_points)
if options.dump_header_to_stdout:
print("Printing header info to stdout.", file=sys.stderr)
rtp_stats.PrintHeaderStatistics()
sys.exit(0)
if options.dump_header_to_stdout:
print("Printing header info to stdout.", file=sys.stderr)
rtp_stats.PrintHeaderStatistics()
sys.exit(0)
chosen_ssrc = rtp_stats.ChooseSsrc()
print("Chosen SSRC: 0X{:X}".format(chosen_ssrc))
chosen_ssrc = rtp_stats.ChooseSsrc()
print("Chosen SSRC: 0X{:X}".format(chosen_ssrc))
rtp_stats.FilterSsrc(chosen_ssrc)
rtp_stats.FilterSsrc(chosen_ssrc)
print("Statistics:")
rtp_stats.PrintSequenceNumberStatistics()
rtp_stats.EstimateFrequency(options.query_sample_rate)
rtp_stats.PrintDurationStatistics()
rtp_stats.RemoveReordered()
rtp_stats.ComputeBandwidth()
rtp_stats.PlotStatistics()
print("Statistics:")
rtp_stats.PrintSequenceNumberStatistics()
rtp_stats.EstimateFrequency(options.query_sample_rate)
rtp_stats.PrintDurationStatistics()
rtp_stats.RemoveReordered()
rtp_stats.ComputeBandwidth()
rtp_stats.PlotStatistics()
if __name__ == "__main__":
main()
main()

View File

@ -6,7 +6,6 @@
# tree. An additional intellectual property rights grant can be found
# in the file PATENTS. All contributing project authors may
# be found in the AUTHORS file in the root of the source tree.
"""Run the tests with
python rtp_analyzer_test.py
@ -19,43 +18,43 @@ import unittest
MISSING_NUMPY = False # pylint: disable=invalid-name
try:
import numpy
import rtp_analyzer
import numpy
import rtp_analyzer
except ImportError:
MISSING_NUMPY = True
MISSING_NUMPY = True
FakePoint = collections.namedtuple("FakePoint",
["real_send_time_ms", "absdelay"])
class TestDelay(unittest.TestCase):
def AssertMaskEqual(self, masked_array, data, mask):
self.assertEqual(list(masked_array.data), data)
def AssertMaskEqual(self, masked_array, data, mask):
self.assertEqual(list(masked_array.data), data)
if isinstance(masked_array.mask, numpy.bool_):
array_mask = masked_array.mask
else:
array_mask = list(masked_array.mask)
self.assertEqual(array_mask, mask)
if isinstance(masked_array.mask, numpy.bool_):
array_mask = masked_array.mask
else:
array_mask = list(masked_array.mask)
self.assertEqual(array_mask, mask)
def testCalculateDelaySimple(self):
points = [FakePoint(0, 0), FakePoint(1, 0)]
mask = rtp_analyzer.CalculateDelay(0, 1, 1, points)
self.AssertMaskEqual(mask, [0, 0], False)
def testCalculateDelaySimple(self):
points = [FakePoint(0, 0), FakePoint(1, 0)]
mask = rtp_analyzer.CalculateDelay(0, 1, 1, points)
self.AssertMaskEqual(mask, [0, 0], False)
def testCalculateDelayMissing(self):
points = [FakePoint(0, 0), FakePoint(2, 0)]
mask = rtp_analyzer.CalculateDelay(0, 2, 1, points)
self.AssertMaskEqual(mask, [0, -1, 0], [False, True, False])
def testCalculateDelayMissing(self):
points = [FakePoint(0, 0), FakePoint(2, 0)]
mask = rtp_analyzer.CalculateDelay(0, 2, 1, points)
self.AssertMaskEqual(mask, [0, -1, 0], [False, True, False])
def testCalculateDelayBorders(self):
points = [FakePoint(0, 0), FakePoint(2, 0)]
mask = rtp_analyzer.CalculateDelay(0, 3, 2, points)
self.AssertMaskEqual(mask, [0, 0, -1], [False, False, True])
def testCalculateDelayBorders(self):
points = [FakePoint(0, 0), FakePoint(2, 0)]
mask = rtp_analyzer.CalculateDelay(0, 3, 2, points)
self.AssertMaskEqual(mask, [0, 0, -1], [False, False, True])
if __name__ == "__main__":
if MISSING_NUMPY:
print "Missing numpy, skipping test."
else:
unittest.main()
if MISSING_NUMPY:
print "Missing numpy, skipping test."
else:
unittest.main()

View File

@ -6,7 +6,6 @@
# tree. An additional intellectual property rights grant can be found
# in the file PATENTS. All contributing project authors may
# be found in the AUTHORS file in the root of the source tree.
"""Builds the AppRTC collider using the golang toolchain.
The golang toolchain is downloaded by download_apprtc.py. We use that here
@ -24,44 +23,44 @@ import sys
import utils
USAGE_STR = "Usage: {} <apprtc_dir> <go_dir> <output_dir>"
def _ConfigureApprtcServerToDeveloperMode(app_yaml_path):
for line in fileinput.input(app_yaml_path, inplace=True):
# We can't click past these in browser-based tests, so disable them.
line = line.replace('BYPASS_JOIN_CONFIRMATION: false',
'BYPASS_JOIN_CONFIRMATION: true')
sys.stdout.write(line)
for line in fileinput.input(app_yaml_path, inplace=True):
# We can't click past these in browser-based tests, so disable them.
line = line.replace('BYPASS_JOIN_CONFIRMATION: false',
'BYPASS_JOIN_CONFIRMATION: true')
sys.stdout.write(line)
def main(argv):
if len(argv) != 4:
return USAGE_STR.format(argv[0])
if len(argv) != 4:
return USAGE_STR.format(argv[0])
apprtc_dir = os.path.abspath(argv[1])
go_root_dir = os.path.abspath(argv[2])
golang_workspace = os.path.abspath(argv[3])
apprtc_dir = os.path.abspath(argv[1])
go_root_dir = os.path.abspath(argv[2])
golang_workspace = os.path.abspath(argv[3])
app_yaml_path = os.path.join(apprtc_dir, 'out', 'app_engine', 'app.yaml')
_ConfigureApprtcServerToDeveloperMode(app_yaml_path)
app_yaml_path = os.path.join(apprtc_dir, 'out', 'app_engine', 'app.yaml')
_ConfigureApprtcServerToDeveloperMode(app_yaml_path)
utils.RemoveDirectory(golang_workspace)
utils.RemoveDirectory(golang_workspace)
collider_dir = os.path.join(apprtc_dir, 'src', 'collider')
shutil.copytree(collider_dir, os.path.join(golang_workspace, 'src'))
collider_dir = os.path.join(apprtc_dir, 'src', 'collider')
shutil.copytree(collider_dir, os.path.join(golang_workspace, 'src'))
golang_path = os.path.join(go_root_dir, 'bin',
'go' + utils.GetExecutableExtension())
golang_env = os.environ.copy()
golang_env['GOROOT'] = go_root_dir
golang_env['GOPATH'] = golang_workspace
collider_out = os.path.join(golang_workspace,
'collidermain' + utils.GetExecutableExtension())
subprocess.check_call([golang_path, 'build', '-o', collider_out,
'collidermain'], env=golang_env)
golang_path = os.path.join(go_root_dir, 'bin',
'go' + utils.GetExecutableExtension())
golang_env = os.environ.copy()
golang_env['GOROOT'] = go_root_dir
golang_env['GOPATH'] = golang_workspace
collider_out = os.path.join(
golang_workspace, 'collidermain' + utils.GetExecutableExtension())
subprocess.check_call(
[golang_path, 'build', '-o', collider_out, 'collidermain'],
env=golang_env)
if __name__ == '__main__':
sys.exit(main(sys.argv))
sys.exit(main(sys.argv))

View File

@ -6,7 +6,6 @@
# tree. An additional intellectual property rights grant can be found
# in the file PATENTS. All contributing project authors may
# be found in the AUTHORS file in the root of the source tree.
"""Downloads prebuilt AppRTC and Go from WebRTC storage and unpacks it.
Requires that depot_tools is installed and in the PATH.
@ -21,38 +20,37 @@ import sys
import utils
SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
def _GetGoArchivePathForPlatform():
archive_extension = 'zip' if utils.GetPlatform() == 'win' else 'tar.gz'
return os.path.join(utils.GetPlatform(), 'go.%s' % archive_extension)
archive_extension = 'zip' if utils.GetPlatform() == 'win' else 'tar.gz'
return os.path.join(utils.GetPlatform(), 'go.%s' % archive_extension)
def main(argv):
if len(argv) > 2:
return 'Usage: %s [output_dir]' % argv[0]
if len(argv) > 2:
return 'Usage: %s [output_dir]' % argv[0]
output_dir = os.path.abspath(argv[1]) if len(argv) > 1 else None
output_dir = os.path.abspath(argv[1]) if len(argv) > 1 else None
apprtc_zip_path = os.path.join(SCRIPT_DIR, 'prebuilt_apprtc.zip')
if os.path.isfile(apprtc_zip_path + '.sha1'):
utils.DownloadFilesFromGoogleStorage(SCRIPT_DIR, auto_platform=False)
apprtc_zip_path = os.path.join(SCRIPT_DIR, 'prebuilt_apprtc.zip')
if os.path.isfile(apprtc_zip_path + '.sha1'):
utils.DownloadFilesFromGoogleStorage(SCRIPT_DIR, auto_platform=False)
if output_dir is not None:
utils.RemoveDirectory(os.path.join(output_dir, 'apprtc'))
utils.UnpackArchiveTo(apprtc_zip_path, output_dir)
if output_dir is not None:
utils.RemoveDirectory(os.path.join(output_dir, 'apprtc'))
utils.UnpackArchiveTo(apprtc_zip_path, output_dir)
golang_path = os.path.join(SCRIPT_DIR, 'golang')
golang_zip_path = os.path.join(golang_path, _GetGoArchivePathForPlatform())
if os.path.isfile(golang_zip_path + '.sha1'):
utils.DownloadFilesFromGoogleStorage(golang_path)
golang_path = os.path.join(SCRIPT_DIR, 'golang')
golang_zip_path = os.path.join(golang_path, _GetGoArchivePathForPlatform())
if os.path.isfile(golang_zip_path + '.sha1'):
utils.DownloadFilesFromGoogleStorage(golang_path)
if output_dir is not None:
utils.RemoveDirectory(os.path.join(output_dir, 'go'))
utils.UnpackArchiveTo(golang_zip_path, output_dir)
if output_dir is not None:
utils.RemoveDirectory(os.path.join(output_dir, 'go'))
utils.UnpackArchiveTo(golang_zip_path, output_dir)
if __name__ == '__main__':
sys.exit(main(sys.argv))
sys.exit(main(sys.argv))

View File

@ -6,7 +6,6 @@
# tree. An additional intellectual property rights grant can be found
# in the file PATENTS. All contributing project authors may
# be found in the AUTHORS file in the root of the source tree.
"""This script sets up AppRTC and its dependencies.
Requires that depot_tools is installed and in the PATH.
@ -19,27 +18,26 @@ import sys
import utils
SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
def main(argv):
if len(argv) == 1:
return 'Usage %s <output_dir>' % argv[0]
if len(argv) == 1:
return 'Usage %s <output_dir>' % argv[0]
output_dir = os.path.abspath(argv[1])
output_dir = os.path.abspath(argv[1])
download_apprtc_path = os.path.join(SCRIPT_DIR, 'download_apprtc.py')
utils.RunSubprocessWithRetry([sys.executable, download_apprtc_path,
output_dir])
download_apprtc_path = os.path.join(SCRIPT_DIR, 'download_apprtc.py')
utils.RunSubprocessWithRetry(
[sys.executable, download_apprtc_path, output_dir])
build_apprtc_path = os.path.join(SCRIPT_DIR, 'build_apprtc.py')
apprtc_dir = os.path.join(output_dir, 'apprtc')
go_dir = os.path.join(output_dir, 'go')
collider_dir = os.path.join(output_dir, 'collider')
utils.RunSubprocessWithRetry([sys.executable, build_apprtc_path,
apprtc_dir, go_dir, collider_dir])
build_apprtc_path = os.path.join(SCRIPT_DIR, 'build_apprtc.py')
apprtc_dir = os.path.join(output_dir, 'apprtc')
go_dir = os.path.join(output_dir, 'go')
collider_dir = os.path.join(output_dir, 'collider')
utils.RunSubprocessWithRetry(
[sys.executable, build_apprtc_path, apprtc_dir, go_dir, collider_dir])
if __name__ == '__main__':
sys.exit(main(sys.argv))
sys.exit(main(sys.argv))

View File

@ -6,7 +6,6 @@
# tree. An additional intellectual property rights grant can be found
# in the file PATENTS. All contributing project authors may
# be found in the AUTHORS file in the root of the source tree.
"""Utilities for all our deps-management stuff."""
from __future__ import absolute_import
@ -23,36 +22,37 @@ import zipfile
def RunSubprocessWithRetry(cmd):
"""Invokes the subprocess and backs off exponentially on fail."""
for i in range(5):
try:
subprocess.check_call(cmd)
return
except subprocess.CalledProcessError as exception:
backoff = pow(2, i)
print('Got %s, retrying in %d seconds...' % (exception, backoff))
time.sleep(backoff)
"""Invokes the subprocess and backs off exponentially on fail."""
for i in range(5):
try:
subprocess.check_call(cmd)
return
except subprocess.CalledProcessError as exception:
backoff = pow(2, i)
print('Got %s, retrying in %d seconds...' % (exception, backoff))
time.sleep(backoff)
print('Giving up.')
raise exception
print('Giving up.')
raise exception
def DownloadFilesFromGoogleStorage(path, auto_platform=True):
print('Downloading files in %s...' % path)
print('Downloading files in %s...' % path)
extension = 'bat' if 'win32' in sys.platform else 'py'
cmd = ['download_from_google_storage.%s' % extension,
'--bucket=chromium-webrtc-resources',
'--directory', path]
if auto_platform:
cmd += ['--auto_platform', '--recursive']
subprocess.check_call(cmd)
extension = 'bat' if 'win32' in sys.platform else 'py'
cmd = [
'download_from_google_storage.%s' % extension,
'--bucket=chromium-webrtc-resources', '--directory', path
]
if auto_platform:
cmd += ['--auto_platform', '--recursive']
subprocess.check_call(cmd)
# Code partially copied from
# https://cs.chromium.org#chromium/build/scripts/common/chromium_utils.py
def RemoveDirectory(*path):
"""Recursively removes a directory, even if it's marked read-only.
"""Recursively removes a directory, even if it's marked read-only.
Remove the directory located at *path, if it exists.
@ -67,62 +67,63 @@ def RemoveDirectory(*path):
bit and try again, so we do that too. It's hand-waving, but sometimes it
works. :/
"""
file_path = os.path.join(*path)
print('Deleting `{}`.'.format(file_path))
if not os.path.exists(file_path):
print('`{}` does not exist.'.format(file_path))
return
file_path = os.path.join(*path)
print('Deleting `{}`.'.format(file_path))
if not os.path.exists(file_path):
print('`{}` does not exist.'.format(file_path))
return
if sys.platform == 'win32':
# Give up and use cmd.exe's rd command.
file_path = os.path.normcase(file_path)
for _ in range(3):
print('RemoveDirectory running %s' % (' '.join(
['cmd.exe', '/c', 'rd', '/q', '/s', file_path])))
if not subprocess.call(['cmd.exe', '/c', 'rd', '/q', '/s', file_path]):
break
print(' Failed')
time.sleep(3)
return
else:
shutil.rmtree(file_path, ignore_errors=True)
if sys.platform == 'win32':
# Give up and use cmd.exe's rd command.
file_path = os.path.normcase(file_path)
for _ in range(3):
print('RemoveDirectory running %s' %
(' '.join(['cmd.exe', '/c', 'rd', '/q', '/s', file_path])))
if not subprocess.call(
['cmd.exe', '/c', 'rd', '/q', '/s', file_path]):
break
print(' Failed')
time.sleep(3)
return
else:
shutil.rmtree(file_path, ignore_errors=True)
def UnpackArchiveTo(archive_path, output_dir):
extension = os.path.splitext(archive_path)[1]
if extension == '.zip':
_UnzipArchiveTo(archive_path, output_dir)
else:
_UntarArchiveTo(archive_path, output_dir)
extension = os.path.splitext(archive_path)[1]
if extension == '.zip':
_UnzipArchiveTo(archive_path, output_dir)
else:
_UntarArchiveTo(archive_path, output_dir)
def _UnzipArchiveTo(archive_path, output_dir):
print('Unzipping {} in {}.'.format(archive_path, output_dir))
zip_file = zipfile.ZipFile(archive_path)
try:
zip_file.extractall(output_dir)
finally:
zip_file.close()
print('Unzipping {} in {}.'.format(archive_path, output_dir))
zip_file = zipfile.ZipFile(archive_path)
try:
zip_file.extractall(output_dir)
finally:
zip_file.close()
def _UntarArchiveTo(archive_path, output_dir):
print('Untarring {} in {}.'.format(archive_path, output_dir))
tar_file = tarfile.open(archive_path, 'r:gz')
try:
tar_file.extractall(output_dir)
finally:
tar_file.close()
print('Untarring {} in {}.'.format(archive_path, output_dir))
tar_file = tarfile.open(archive_path, 'r:gz')
try:
tar_file.extractall(output_dir)
finally:
tar_file.close()
def GetPlatform():
if sys.platform.startswith('win'):
return 'win'
if sys.platform.startswith('linux'):
return 'linux'
if sys.platform.startswith('darwin'):
return 'mac'
raise Exception("Can't run on platform %s." % sys.platform)
if sys.platform.startswith('win'):
return 'win'
if sys.platform.startswith('linux'):
return 'linux'
if sys.platform.startswith('darwin'):
return 'mac'
raise Exception("Can't run on platform %s." % sys.platform)
def GetExecutableExtension():
return '.exe' if GetPlatform() == 'win' else ''
return '.exe' if GetPlatform() == 'win' else ''