Fix lint errors to enable stricter PyLint rules
These fixes are needed to avoid errors after submitting
https://codereview.webrtc.org/2737963003

BUG=webrtc:7303
NOTRY=True

Review-Url: https://codereview.webrtc.org/2812273002
Cr-Commit-Position: refs/heads/master@{#17679}
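For context: the stricter configuration mostly tightens PyLint's invalid-name check to the Chromium/WebRTC conventions, i.e. CapWords function and method names, UPPER_CASE module-level constants, and lower_case local variables. A minimal illustration of the convention the hunks below converge on (an illustrative snippet, not code from this CL):

# Flagged by invalid-name under the stricter configuration:
_default_flags = ['-x', 'c++']          # module constant should be UPPER_CASE

def cat_files(file_list, output_file):  # function should be CapWords
  pass

# Accepted spellings:
_DEFAULT_FLAGS = ['-x', 'c++']

def CatFiles(file_list, output_file):
  pass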
@@ -53,7 +53,8 @@ def print_landmines():  # pylint: disable=invalid-name
   # is no longer the case.
   print 'Clobber due to iOS compile errors (crbug.com/694721)'
   print 'Clobber to unblock https://codereview.webrtc.org/2709573003'
-  print 'Clobber to fix https://codereview.webrtc.org/2709573003 after landing'
+  print ('Clobber to fix https://codereview.webrtc.org/2709573003 after '
+         'landing')
   print ('Clobber to fix https://codereview.webrtc.org/2767383005 before'
          'landing (changing rtc_executable -> rtc_test on iOS)')

@@ -8,6 +8,7 @@
 # in the file PATENTS.  All contributing project authors may
 # be found in the AUTHORS file in the root of the source tree.

+# pylint: disable=invalid-name
 """
 This script acts as an interface between the Chromium infrastructure and
 gtest-parallel, renaming options and translating environment variables into
@@ -43,7 +44,7 @@ import subprocess
 import sys


-def cat_files(file_list, output_file):
+def CatFiles(file_list, output_file):
   with open(output_file, 'w') as output_file:
     for filename in file_list:
       with open(filename) as input_file:
@@ -127,7 +128,7 @@ def main():
       continue
     logs = [os.path.join(logs_dir, log) for log in os.listdir(logs_dir)]
     log_file = os.path.join(options.output_dir, '%s-tests.log' % test_status)
-    cat_files(logs, log_file)
+    CatFiles(logs, log_file)
     os.rmdir(logs_dir)

   return exit_code

@@ -37,16 +37,18 @@ SCRIPT_DIR = os.path.dirname(os.path.realpath(sys.argv[0]))
 CHECKOUT_ROOT = os.path.abspath(os.path.join(SCRIPT_DIR, os.pardir, os.pardir))
 WEBRTC_ROOT = os.path.join(CHECKOUT_ROOT, 'webrtc')


 def GetThirdPartyLibraries(buildfile_dir, target_name):
-  def extractLibName(s):
+  def ExtractLibName(string_list):
     # Sample input:
     # [" //third_party/usrsctp:usrsctp", " //webrtc:webrtc_common"]
     # Sample output:
     # ["usrsctp"]
-    return re.sub(r'\(.*\)', '', s).strip().split(os.path.sep)[-1].split(':')[0]
+    return re.sub(r'\(.*\)', '', string_list).strip().split(
+        os.path.sep)[-1].split(':')[0]
   output = subprocess.check_output(
       ["gn", "desc", buildfile_dir, target_name, '--all']).split(os.linesep)
-  return [extractLibName(x) for x in output if re.search(r'third_party', x)]
+  return [ExtractLibName(x) for x in output if re.search(r'third_party', x)]


 class LicenseBuilder(object):
@@ -112,7 +114,7 @@ class LicenseBuilder(object):
     return 0


-if __name__ == '__main__':
+def main():
   parser = argparse.ArgumentParser(description='Generate WebRTC LICENSE.html')
   parser.add_argument('target_name',
                       help='Name of the GN target to generate a license for')
@@ -123,3 +125,7 @@ if __name__ == '__main__':
   args = parser.parse_args()
   builder = LicenseBuilder(args.buildfile_dirs, args.target_name)
   sys.exit(builder.GenerateLicenseText(args.output_dir))
+
+
+if __name__ == '__main__':
+  main()

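To make the sample-input comment in the hunk above concrete, here is the renamed helper exercised standalone (a sketch: the gn invocation is stubbed out with a literal dependency line):

import os
import re

def ExtractLibName(dep_line):
  # "  //third_party/usrsctp:usrsctp" -> "usrsctp"
  return re.sub(r'\(.*\)', '', dep_line).strip().split(
      os.path.sep)[-1].split(':')[0]

print(ExtractLibName("  //third_party/usrsctp:usrsctp"))  # usrsctp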
@@ -61,16 +61,16 @@ import subprocess
 import sys

 # Flags from YCM's default config.
-_default_flags = [
+_DEFAULT_FLAGS = [
   '-DUSE_CLANG_COMPLETER',
   '-std=c++11',
   '-x',
   'c++',
 ]

-_header_alternates = ('.cc', '.cpp', '.c', '.mm', '.m')
+_HEADER_ALTERNATES = ('.cc', '.cpp', '.c', '.mm', '.m')

-_extension_flags = {
+_EXTENSION_FLAGS = {
   '.m': ['-x', 'objective-c'],
   '.mm': ['-x', 'objective-c++'],
 }
@@ -302,7 +302,7 @@ def GetClangOptionsFromNinjaForFilename(webrtc_root, filename):

   basename, extension = os.path.splitext(filename)
   if extension == '.h':
-    candidates = [basename + ext for ext in _header_alternates]
+    candidates = [basename + ext for ext in _HEADER_ALTERNATES]
   else:
     candidates = [filename]

@@ -314,7 +314,7 @@ def GetClangOptionsFromNinjaForFilename(webrtc_root, filename):
       buildable_extension = os.path.splitext(candidate)[1]
       break

-  additional_flags += _extension_flags.get(buildable_extension, [])
+  additional_flags += _EXTENSION_FLAGS.get(buildable_extension, [])

   if not clang_line:
     # If ninja didn't know about filename or it's companion files, then try a
@@ -349,7 +349,7 @@ def FlagsForFile(filename):
   # determine the flags again.
   should_cache_flags_for_file = bool(clang_flags)

-  final_flags = _default_flags + clang_flags
+  final_flags = _DEFAULT_FLAGS + clang_flags

   return {
     'flags': final_flags,

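The renamed constants above drive YCM's header fallback: for a .h file the config probes sibling implementation files, then applies per-extension flags. A reduced sketch of that selection logic (a standalone rewrite under assumed names, not the file's exact code):

import os

_HEADER_ALTERNATES = ('.cc', '.cpp', '.c', '.mm', '.m')
_EXTENSION_FLAGS = {
    '.m': ['-x', 'objective-c'],
    '.mm': ['-x', 'objective-c++'],
}

def BuildCandidates(filename):
  # Headers are not compiled directly; try their companion sources instead.
  basename, extension = os.path.splitext(filename)
  if extension == '.h':
    return [basename + ext for ext in _HEADER_ALTERNATES]
  return [filename]

print(BuildCandidates('webrtc/api/peerconnection.h')[0])  # webrtc/api/peerconnection.cc
print(_EXTENSION_FLAGS.get('.mm', []))                    # ['-x', 'objective-c++']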
@@ -19,13 +19,13 @@ import low_bandwidth_audio_test


 class TestExtractTestRuns(unittest.TestCase):
-  def _testLog(self, log, *expected):
+  def _TestLog(self, log, *expected):
     self.assertEqual(
         tuple(low_bandwidth_audio_test.ExtractTestRuns(log.splitlines(True))),
         expected)

   def testLinux(self):
-    self._testLog(LINUX_LOG,
+    self._TestLog(LINUX_LOG,
         (None, 'GoodNetworkHighBitrate',
          '/webrtc/src/resources/voice_engine/audio_tiny16.wav',
          '/webrtc/src/out/LowBandwidth_GoodNetworkHighBitrate.wav'),
@@ -34,7 +34,7 @@ class TestExtractTestRuns(unittest.TestCase):
          '/webrtc/src/out/LowBandwidth_Mobile2GNetwork.wav'))

   def testAndroid(self):
-    self._testLog(ANDROID_LOG,
+    self._TestLog(ANDROID_LOG,
         ('ddfa6149', 'Mobile2GNetwork',
          '/sdcard/chromium_tests_root/resources/voice_engine/audio_tiny16.wav',
          '/sdcard/chromium_tests_root/LowBandwidth_Mobile2GNetwork.wav'),

@@ -61,7 +61,7 @@ def _GenerateDefaultOverridden(config_override):
 def _GenerateAllDefaultButOne():
   """Disables the flags enabled by default one-by-one.
   """
-  CONFIG_SETS = {
+  config_sets = {
       'no_AEC': {'-aec': 0,},
       'no_AGC': {'-agc': 0,},
       'no_HP_filter': {'-hpf': 0,},
@@ -70,13 +70,13 @@ def _GenerateAllDefaultButOne():
       'no_transient_suppressor': {'-ts': 0,},
       'no_vad': {'-vad': 0,},
   }
-  _GenerateDefaultOverridden(CONFIG_SETS)
+  _GenerateDefaultOverridden(config_sets)


 def _GenerateAllDefaultPlusOne():
   """Enables the flags disabled by default one-by-one.
   """
-  CONFIG_SETS = {
+  config_sets = {
       'with_AECM': {'-aec': 0, '-aecm': 1,},  # AEC and AECM are exclusive.
       'with_AGC_limiter': {'-agc_limiter': 1,},
       'with_AEC_delay_agnostic': {'-delay_agnostic': 1,},
@@ -87,7 +87,7 @@ def _GenerateAllDefaultPlusOne():
       'with_LC': {'-lc': 1,},
       'with_refined_adaptive_filter': {'-refined_adaptive_filter': 1,},
   }
-  _GenerateDefaultOverridden(CONFIG_SETS)
+  _GenerateDefaultOverridden(config_sets)


 def main():

@@ -17,7 +17,7 @@ class TestSimulationScript(unittest.TestCase):
   """Unit tests for the apm_quality_assessment module.
   """

-  def test_main(self):
+  def testMain(self):
     # Exit with error code if no arguments are passed.
     with self.assertRaises(SystemExit) as cm:
       apm_quality_assessment.main()

@@ -51,7 +51,7 @@ class TestEvalScores(unittest.TestCase):
     """Recursively delete temporary folder."""
     shutil.rmtree(self._output_path)

-  def test_registered_classes(self):
+  def testRegisteredClasses(self):
     # Preliminary check.
     self.assertTrue(os.path.exists(self._output_path))

@@ -145,15 +145,14 @@ class ApmModuleSimulator(object):
         base_output_path=output_path)

     # For each test data pair, simulate a call and evaluate.
-    for test_data_generators_config_name in test_data_generators.config_names:
-      logging.info(' - test data generator config: <%s>',
-                   test_data_generators_config_name)
+    for config_name in test_data_generators.config_names:
+      logging.info(' - test data generator config: <%s>', config_name)

       # APM input and output signal paths.
       noisy_signal_filepath = test_data_generators.noisy_signal_filepaths[
-          test_data_generators_config_name]
+          config_name]
       evaluation_output_path = test_data_generators.apm_output_paths[
-          test_data_generators_config_name]
+          config_name]

       # Simulate a call using the audio processing module.
       self._audioproc_wrapper.Run(
@@ -164,7 +163,7 @@ class ApmModuleSimulator(object):
       # Reference signal path for the evaluation step.
       reference_signal_filepath = (
           test_data_generators.reference_signal_filepaths[
-              test_data_generators_config_name])
+              config_name])

       # Evaluate.
       self._evaluator.Run(

@@ -191,10 +191,10 @@ class IdentityTestDataGenerator(TestDataGenerator):

   def _Generate(
       self, input_signal_filepath, input_noise_cache_path, base_output_path):
-    CONFIG_NAME = 'default'
-    output_path = self._MakeDir(base_output_path, CONFIG_NAME)
+    config_name = 'default'
+    output_path = self._MakeDir(base_output_path, config_name)
     self._AddNoiseReferenceFilesPair(
-        config_name=CONFIG_NAME,
+        config_name=config_name,
         noisy_signal_filepath=input_signal_filepath,
         reference_signal_filepath=input_signal_filepath,
         output_path=output_path)

@@ -22,7 +22,7 @@ import re
 import sys

 # Change this to True to save the figure to a file. Look below for details.
-save_figure = False
+SAVE_FIGURE = False

 class ParsePlotLineException(Exception):
   def __init__(self, reason, line):
@@ -31,7 +31,7 @@ class ParsePlotLineException(Exception):
     self.line = line


-def parse_plot_line(line):
+def ParsePlotLine(line):
   split_line = line.split()
   if len(split_line) != 5:
     raise ParsePlotLineException("Expected 5 arguments on line", line)
@@ -51,7 +51,7 @@ def parse_plot_line(line):
   return (var_name, ssrc, alg_name, time, value)


-def generate_label(var_name, ssrc, ssrc_count, alg_name):
+def GenerateLabel(var_name, ssrc, ssrc_count, alg_name):
   label = var_name
   if ssrc_count > 1 or ssrc != "0":
     label = label + " flow " + ssrc
@@ -65,18 +65,18 @@ class Figure(object):
     self.name = name
     self.subplots = []

-  def addSubplot(self, var_names, xlabel, ylabel):
+  def AddSubplot(self, var_names, xlabel, ylabel):
     self.subplots.append(Subplot(var_names, xlabel, ylabel))

-  def addSample(self, var_name, ssrc, alg_name, time, value):
+  def AddSample(self, var_name, ssrc, alg_name, time, value):
     for s in self.subplots:
-      s.addSample(var_name, ssrc, alg_name, time, value)
+      s.AddSample(var_name, ssrc, alg_name, time, value)

-  def plotFigure(self, fig):
+  def PlotFigure(self, fig):
     n = len(self.subplots)
     for i in range(n):
-      ax = fig.add_subplot(n, 1, i+1)
-      self.subplots[i].plotSubplot(ax)
+      axis = fig.add_subplot(n, 1, i+1)
+      self.subplots[i].PlotSubplot(axis)


 class Subplot(object):
@@ -86,7 +86,7 @@ class Subplot(object):
     self.var_names = var_names
     self.samples = dict()

-  def addSample(self, var_name, ssrc, alg_name, time, value):
+  def AddSample(self, var_name, ssrc, alg_name, time, value):
     if var_name not in self.var_names:
       return

@@ -99,9 +99,9 @@ class Subplot(object):

     self.samples[alg_name][ssrc][var_name].append((time, value))

-  def plotSubplot(self, ax):
-    ax.set_xlabel(self.xlabel)
-    ax.set_ylabel(self.ylabel)
+  def PlotSubplot(self, axis):
+    axis.set_xlabel(self.xlabel)
+    axis.set_ylabel(self.ylabel)

     count = 0
     for alg_name in self.samples.keys():
@@ -113,7 +113,7 @@ class Subplot(object):
       y = numpy.array(y)

       ssrc_count = len(self.samples[alg_name].keys())
-      l = generate_label(var_name, ssrc, ssrc_count, alg_name)
+      l = GenerateLabel(var_name, ssrc, ssrc_count, alg_name)
       plt.plot(x, y, label=l, linewidth=2.0)
       count += 1

@@ -124,29 +124,29 @@ class Subplot(object):

 def main():
   receiver = Figure("PacketReceiver")
-  receiver.addSubplot(['Throughput_kbps', 'MaxThroughput_', 'Capacity_kbps',
+  receiver.AddSubplot(['Throughput_kbps', 'MaxThroughput_', 'Capacity_kbps',
                        'PerFlowCapacity_kbps', 'MetricRecorderThroughput_kbps'],
                       "Time (s)", "Throughput (kbps)")
-  receiver.addSubplot(['Delay_ms_', 'Delay_ms'], "Time (s)",
+  receiver.AddSubplot(['Delay_ms_', 'Delay_ms'], "Time (s)",
                       "One-way delay (ms)")
-  receiver.addSubplot(['Packet_Loss_'], "Time (s)", "Packet Loss Ratio")
+  receiver.AddSubplot(['Packet_Loss_'], "Time (s)", "Packet Loss Ratio")

   kalman_state = Figure("KalmanState")
-  kalman_state.addSubplot(['kc', 'km'], "Time (s)", "Kalman gain")
-  kalman_state.addSubplot(['slope_1/bps'], "Time (s)", "Slope")
-  kalman_state.addSubplot(['var_noise'], "Time (s)", "Var noise")
+  kalman_state.AddSubplot(['kc', 'km'], "Time (s)", "Kalman gain")
+  kalman_state.AddSubplot(['slope_1/bps'], "Time (s)", "Slope")
+  kalman_state.AddSubplot(['var_noise'], "Time (s)", "Var noise")

   detector_state = Figure("DetectorState")
-  detector_state.addSubplot(['offset_ms'], "Time (s)", "Offset")
-  detector_state.addSubplot(['gamma_ms'], "Time (s)", "Gamma")
+  detector_state.AddSubplot(['offset_ms'], "Time (s)", "Offset")
+  detector_state.AddSubplot(['gamma_ms'], "Time (s)", "Gamma")

   trendline_state = Figure("TrendlineState")
-  trendline_state.addSubplot(["accumulated_delay_ms", "smoothed_delay_ms"],
+  trendline_state.AddSubplot(["accumulated_delay_ms", "smoothed_delay_ms"],
                              "Time (s)", "Delay (ms)")
-  trendline_state.addSubplot(["trendline_slope"], "Time (s)", "Slope")
+  trendline_state.AddSubplot(["trendline_slope"], "Time (s)", "Slope")

   target_bitrate = Figure("TargetBitrate")
-  target_bitrate.addSubplot(['target_bitrate_bps'], "Time (s)", "Bitrate (bps)")
+  target_bitrate.AddSubplot(['target_bitrate_bps'], "Time (s)", "Bitrate (bps)")

   # Select which figures to plot here.
   figures = [receiver, detector_state, trendline_state, target_bitrate]
@@ -157,10 +157,10 @@ def main():
       test_name = re.search(r'\.(\w+)', line).group(1)
     if line.startswith("PLOT"):
       try:
-        (var_name, ssrc, alg_name, time, value) = parse_plot_line(line)
+        (var_name, ssrc, alg_name, time, value) = ParsePlotLine(line)
         for f in figures:
           # The sample will be ignored by the figures that don't need it.
-          f.addSample(var_name, ssrc, alg_name, time, value)
+          f.AddSample(var_name, ssrc, alg_name, time, value)
       except ParsePlotLineException as e:
         print e.reason
         print e.line
@@ -168,8 +168,8 @@ def main():
   # Plot figures.
   for f in figures:
     fig = plt.figure(f.name)
-    f.plotFigure(fig)
-    if save_figure:
+    f.PlotFigure(fig)
+    if SAVE_FIGURE:
       fig.savefig(test_name + f.name + ".png")
   plt.show()

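The Figure/Subplot classes in the hunks above fan every parsed sample out to all subplots, and each subplot keeps only the variables it was configured with. A stripped-down sketch of that routing (illustrative names, not the script's actual classes):

class MiniSubplot(object):
  def __init__(self, var_names):
    self.var_names = var_names
    self.samples = []

  def AddSample(self, var_name, time, value):
    if var_name not in self.var_names:
      return  # Not one of this subplot's variables; ignore it.
    self.samples.append((var_name, time, value))

class MiniFigure(object):
  def __init__(self):
    self.subplots = []

  def AddSubplot(self, var_names):
    self.subplots.append(MiniSubplot(var_names))

  def AddSample(self, var_name, time, value):
    # Fan the sample out; each subplot filters for itself.
    for subplot in self.subplots:
      subplot.AddSample(var_name, time, value)

fig = MiniFigure()
fig.AddSubplot(['Throughput_kbps'])
fig.AddSample('Throughput_kbps', 0.0, 512.0)  # kept by the subplot
fig.AddSample('Delay_ms', 0.0, 40.0)          # silently ignored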
@@ -116,16 +116,16 @@ def ParseSetting(filename, setting):

   settings = []

-  f = open(filename)
+  settings_file = open(filename)
   while True:
-    line = f.readline()
+    line = settings_file.readline()
     if not line:
       break
     if re.search(r'%s' % EVENT_START, line):
       # Parse event.
       parsed = {}
       while True:
-        line = f.readline()
+        line = settings_file.readline()
         if not line:
           break
         if re.search(r'%s' % EVENT_END, line):
@@ -136,9 +136,9 @@ def ParseSetting(filename, setting):
           settings.append(s)
           break

-        TryFindMetric(parsed, line, f)
+        TryFindMetric(parsed, line, settings_file)

-  f.close()
+  settings_file.close()
   return settings


@@ -181,16 +181,16 @@ def ParseMetrics(filename, setting1, setting2):
   metrics = {}

   # Parse events.
-  f = open(filename)
+  settings_file = open(filename)
   while True:
-    line = f.readline()
+    line = settings_file.readline()
     if not line:
       break
     if re.search(r'%s' % EVENT_START, line):
       # Parse event.
       parsed = {}
       while True:
-        line = f.readline()
+        line = settings_file.readline()
         if not line:
           break
         if re.search(r'%s' % EVENT_END, line):
@@ -209,13 +209,13 @@ def ParseMetrics(filename, setting1, setting2):

           break

-        TryFindMetric(parsed, line, f)
+        TryFindMetric(parsed, line, settings_file)

-  f.close()
+  settings_file.close()
   return metrics


-def TryFindMetric(parsed, line, f):
+def TryFindMetric(parsed, line, settings_file):
   for metric in METRICS_TO_PARSE:
     name = metric[0]
     label = metric[1]
@@ -224,13 +224,13 @@ def TryFindMetric(parsed, line, f):
     if not found:
       # TODO(asapersson): Change format.
       # Try find min, max, average stats.
-      found, minimum = GetMetric("Min", f.readline())
+      found, minimum = GetMetric("Min", settings_file.readline())
       if not found:
         return
-      found, maximum = GetMetric("Max", f.readline())
+      found, maximum = GetMetric("Max", settings_file.readline())
       if not found:
         return
-      found, average = GetMetric("Average", f.readline())
+      found, average = GetMetric("Average", settings_file.readline())
       if not found:
         return

@@ -14,7 +14,7 @@ import collections
 import sys


-def count_reordered(sequence_numbers):
+def CountReordered(sequence_numbers):
   """Returns number of reordered indices.

   A reordered index is an index `i` for which sequence_numbers[i] >=
@@ -25,7 +25,7 @@ def count_reordered(sequence_numbers):
              s1 >= s2)


-def ssrc_normalized_size_table(data_points):
+def SsrcNormalizedSizeTable(data_points):
   """Counts proportion of data for every SSRC.

   Args:
@@ -40,10 +40,10 @@ def ssrc_normalized_size_table(data_points):
   mapping = collections.defaultdict(int)
   for point in data_points:
     mapping[point.ssrc] += point.size
-  return normalize_counter(mapping)
+  return NormalizeCounter(mapping)


-def normalize_counter(counter):
+def NormalizeCounter(counter):
   """Returns a normalized version of the dictionary `counter`.

   Does not modify `counter`.
@@ -56,14 +56,14 @@ def normalize_counter(counter):
   return {key: counter[key] / total for key in counter}


-def unwrap(data, mod):
+def Unwrap(data, mod):
   """Returns `data` unwrapped modulo `mod`. Does not modify data.

   Adds integer multiples of mod to all elements of data except the
   first, such that all pairs of consecutive elements (a, b) satisfy
   -mod / 2 <= b - a < mod / 2.

-  E.g. unwrap([0, 1, 2, 0, 1, 2, 7, 8], 3) -> [0, 1, 2, 3,
+  E.g. Unwrap([0, 1, 2, 0, 1, 2, 7, 8], 3) -> [0, 1, 2, 3,
   4, 5, 4, 5]
   """
   lst = data[:]
@@ -73,7 +73,7 @@ def unwrap(data, mod):
   return lst


-def ssrc_directions(data_points):
+def SsrcDirections(data_points):
   ssrc_is_incoming = {}
   for point in data_points:
     ssrc_is_incoming[point.ssrc] = point.incoming
@@ -82,6 +82,6 @@ def ssrc_directions(data_points):

 # Python 2/3-compatible input function
 if sys.version_info[0] <= 2:
-  get_input = raw_input
+  get_input = raw_input  # pylint: disable=invalid-name
 else:
-  get_input = input
+  get_input = input  # pylint: disable=invalid-name

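The Unwrap docstring above fully specifies the algorithm, so a compact implementation that satisfies it, and the misc_test expectations in the next file, would be (a sketch for intuition, not necessarily the repository's exact body):

def Unwrap(data, mod):
  """Returns `data` unwrapped modulo `mod`. Does not modify data."""
  lst = data[:]
  for i in range(1, len(lst)):
    # Pick the representative of lst[i] (mod `mod`) closest to its
    # predecessor, so every consecutive step lies in [-mod/2, mod/2).
    lst[i] = lst[i - 1] + (lst[i] - lst[i - 1] + mod // 2) % mod - mod // 2
  return lst

assert Unwrap([0, 1, 2, 0, 1, 2, 7, 8], 3) == [0, 1, 2, 3, 4, 5, 4, 5]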
@@ -24,17 +24,17 @@ import misc
 class TestMisc(unittest.TestCase):
   def testUnwrapMod3(self):
     data = [0, 1, 2, 0, -1, -2, -3, -4]
-    unwrapped_3 = misc.unwrap(data, 3)
+    unwrapped_3 = misc.Unwrap(data, 3)
     self.assertEqual([0, 1, 2, 3, 2, 1, 0, -1], unwrapped_3)

   def testUnwrapMod4(self):
     data = [0, 1, 2, 0, -1, -2, -3, -4]
-    unwrapped_4 = misc.unwrap(data, 4)
+    unwrapped_4 = misc.Unwrap(data, 4)
     self.assertEqual([0, 1, 2, 0, -1, -2, -3, -4], unwrapped_4)

   def testDataShouldNotChangeAfterUnwrap(self):
     data = [0, 1, 2, 0, -1, -2, -3, -4]
-    _ = misc.unwrap(data, 4)
+    _ = misc.Unwrap(data, 4)

     self.assertEqual([0, 1, 2, 0, -1, -2, -3, -4], data)

@@ -43,7 +43,7 @@ class TestMisc(unittest.TestCase):
     random_data = [random.randint(0, 9) for _ in range(100)]

     for mod in range(1, 100):
-      random_data_unwrapped_mod = misc.unwrap(random_data, mod)
+      random_data_unwrapped_mod = misc.Unwrap(random_data, mod)

       for (old_a, a) in zip(random_data, random_data_unwrapped_mod):
         self.assertEqual((old_a - a) % mod, 0)
@@ -54,7 +54,7 @@ class TestMisc(unittest.TestCase):
     random_data = [random.randint(0, 9) for _ in range(100)]

     for mod in range(1, 100):
-      random_data_unwrapped_mod = misc.unwrap(random_data, mod)
+      random_data_unwrapped_mod = misc.Unwrap(random_data, mod)

       for (a, b) in zip(random_data_unwrapped_mod,
                         random_data_unwrapped_mod[1:]):
@@ -64,7 +64,7 @@ class TestMisc(unittest.TestCase):
     random_data = [random.randint(0, 9) for _ in range(100)]
     random_data_copy = random_data[:]
     for mod in range(1, 100):
-      _ = misc.unwrap(random_data, mod)
+      _ = misc.Unwrap(random_data, mod)

     self.assertEqual(random_data, random_data_copy)

@@ -32,7 +32,7 @@ class DataPoint(object):
     self.marker_bit = (first2header_bytes & 0b10000000) >> 7


-def parse_protobuf(file_path):
+def ParseProtobuf(file_path):
  """Parses RTC event log from protobuf file.

   Args:

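The marker_bit line in the hunk above masks bit 7 of the first two (big-endian) RTP header bytes; per RFC 3550 that is the M bit at the top of the second byte, just ahead of the 7-bit payload type. A quick standalone check of the masking:

# byte 0 = V(2) P(1) X(1) CC(4); byte 1 = M(1) PT(7).
first2header_bytes = (0x80 << 8) | 0xE0  # version 2, marker set, PT 96
marker_bit = (first2header_bytes & 0b10000000) >> 7
payload_type = first2header_bytes & 0b01111111
print(marker_bit, payload_type)  # prints (1, 96) on Python 2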
@@ -42,13 +42,13 @@ class RTPStatistics(object):
     """

     self.data_points = data_points
-    self.ssrc_frequencies = misc.normalize_counter(
+    self.ssrc_frequencies = misc.NormalizeCounter(
         collections.Counter([pt.ssrc for pt in self.data_points]))
-    self.ssrc_size_table = misc.ssrc_normalized_size_table(self.data_points)
+    self.ssrc_size_table = misc.SsrcNormalizedSizeTable(self.data_points)
     self.bandwidth_kbps = None
     self.smooth_bw_kbps = None

-  def print_header_statistics(self):
+  def PrintHeaderStatistics(self):
     print("{:>6}{:>14}{:>14}{:>6}{:>6}{:>3}{:>11}".format(
         "SeqNo", "TimeStamp", "SendTime", "Size", "PT", "M", "SSRC"))
     for point in self.data_points:
@@ -57,7 +57,7 @@ class RTPStatistics(object):
           int(point.arrival_timestamp_ms), point.size, point.payload_type,
           point.marker_bit, "0x{:x}".format(point.ssrc)))

-  def print_ssrc_info(self, ssrc_id, ssrc):
+  def PrintSsrcInfo(self, ssrc_id, ssrc):
     """Prints packet and size statistics for a given SSRC.

     Args:
@@ -66,7 +66,7 @@ class RTPStatistics(object):
     """
     filtered_ssrc = [point for point in self.data_points if point.ssrc
                      == ssrc]
-    payloads = misc.normalize_counter(
+    payloads = misc.NormalizeCounter(
         collections.Counter([point.payload_type for point in
                              filtered_ssrc]))

@@ -86,25 +86,25 @@ class RTPStatistics(object):
         for i in range(len(bin_proportions))
     ]))

-  def choose_ssrc(self):
+  def ChooseSsrc(self):
     """Queries user for SSRC."""

     if len(self.ssrc_frequencies) == 1:
       chosen_ssrc = self.ssrc_frequencies[0][-1]
-      self.print_ssrc_info("", chosen_ssrc)
+      self.PrintSsrcInfo("", chosen_ssrc)
       return chosen_ssrc

-    ssrc_is_incoming = misc.ssrc_directions(self.data_points)
+    ssrc_is_incoming = misc.SsrcDirections(self.data_points)
     incoming = [ssrc for ssrc in ssrc_is_incoming if ssrc_is_incoming[ssrc]]
     outgoing = [ssrc for ssrc in ssrc_is_incoming if not ssrc_is_incoming[ssrc]]

     print("\nIncoming:\n")
     for (i, ssrc) in enumerate(incoming):
-      self.print_ssrc_info(i, ssrc)
+      self.PrintSsrcInfo(i, ssrc)

     print("\nOutgoing:\n")
     for (i, ssrc) in enumerate(outgoing):
-      self.print_ssrc_info(i + len(incoming), ssrc)
+      self.PrintSsrcInfo(i + len(incoming), ssrc)

     while True:
       chosen_index = int(misc.get_input("choose one> "))
@@ -113,7 +113,7 @@ class RTPStatistics(object):
       else:
         print("Invalid index!")

-  def filter_ssrc(self, chosen_ssrc):
+  def FilterSsrc(self, chosen_ssrc):
     """Filters and wraps data points.

     Removes data points with `ssrc != chosen_ssrc`. Unwraps sequence
@@ -121,20 +121,20 @@ class RTPStatistics(object):
     """
     self.data_points = [point for point in self.data_points if
                         point.ssrc == chosen_ssrc]
-    unwrapped_sequence_numbers = misc.unwrap(
+    unwrapped_sequence_numbers = misc.Unwrap(
         [point.sequence_number for point in self.data_points], 2**16 - 1)
     for (data_point, sequence_number) in zip(self.data_points,
                                              unwrapped_sequence_numbers):
       data_point.sequence_number = sequence_number

-    unwrapped_timestamps = misc.unwrap([point.timestamp for point in
+    unwrapped_timestamps = misc.Unwrap([point.timestamp for point in
                                         self.data_points], 2**32 - 1)

     for (data_point, timestamp) in zip(self.data_points,
                                        unwrapped_timestamps):
       data_point.timestamp = timestamp

-  def print_sequence_number_statistics(self):
+  def PrintSequenceNumberStatistics(self):
     seq_no_set = set(point.sequence_number for point in
                      self.data_points)
     missing_sequence_numbers = max(seq_no_set) - min(seq_no_set) + (
@@ -147,10 +147,10 @@ class RTPStatistics(object):
     print("Duplicated packets: {}".format(len(self.data_points) -
                                           len(seq_no_set)))
     print("Reordered packets: {}".format(
-        misc.count_reordered([point.sequence_number for point in
+        misc.CountReordered([point.sequence_number for point in
                               self.data_points])))

-  def estimate_frequency(self, always_query_sample_rate):
+  def EstimateFrequency(self, always_query_sample_rate):
     """Estimates frequency and updates data.

     Guesses the most probable frequency by looking at changes in
@@ -183,7 +183,7 @@ class RTPStatistics(object):
                              self.data_points[0].timestamp) / freq
       point.delay = point.arrival_timestamp_ms - point.real_send_time_ms

-  def print_duration_statistics(self):
+  def PrintDurationStatistics(self):
     """Prints delay, clock drift and bitrate statistics."""

     min_delay = min(point.delay for point in self.data_points)
@@ -215,7 +215,7 @@ class RTPStatistics(object):
     print("Receive average bitrate: {:.2f} kbps".format(
         total_size / stream_duration_receiver))

-  def remove_reordered(self):
+  def RemoveReordered(self):
     last = self.data_points[0]
     data_points_ordered = [last]
     for point in self.data_points[1:]:
@@ -225,7 +225,7 @@ class RTPStatistics(object):
       last = point
     self.data_points = data_points_ordered

-  def compute_bandwidth(self):
+  def ComputeBandwidth(self):
     """Computes bandwidth averaged over several consecutive packets.

     The number of consecutive packets used in the average is
@@ -246,7 +246,7 @@ class RTPStatistics(object):
         RTPStatistics.BANDWIDTH_SMOOTHING_WINDOW_SIZE)
     self.smooth_bw_kbps = numpy.correlate(self.bandwidth_kbps, correlate_filter)

-  def plot_statistics(self):
+  def PlotStatistics(self):
     """Plots changes in delay and average bandwidth."""

     start_ms = self.data_points[0].real_send_time_ms
@@ -254,7 +254,7 @@ class RTPStatistics(object):
     time_axis = numpy.arange(start_ms / 1000, stop_ms / 1000,
                              RTPStatistics.PLOT_RESOLUTION_MS / 1000)

-    delay = calculate_delay(start_ms, stop_ms,
+    delay = CalculateDelay(start_ms, stop_ms,
                             RTPStatistics.PLOT_RESOLUTION_MS,
                             self.data_points)

@@ -271,7 +271,7 @@ class RTPStatistics(object):
     plt.show()


-def calculate_delay(start, stop, step, points):
+def CalculateDelay(start, stop, step, points):
   """Quantizes the time coordinates for the delay.

   Quantizes points by rounding the timestamps downwards to the nearest
@@ -315,26 +315,26 @@ def main():
   if options.working_directory and not os.path.isabs(input_file):
     input_file = os.path.join(options.working_directory, input_file)

-  data_points = pb_parse.parse_protobuf(input_file)
+  data_points = pb_parse.ParseProtobuf(input_file)
   rtp_stats = RTPStatistics(data_points)

   if options.dump_header_to_stdout:
     print("Printing header info to stdout.", file=sys.stderr)
-    rtp_stats.print_header_statistics()
+    rtp_stats.PrintHeaderStatistics()
     sys.exit(0)

-  chosen_ssrc = rtp_stats.choose_ssrc()
+  chosen_ssrc = rtp_stats.ChooseSsrc()
   print("Chosen SSRC: 0X{:X}".format(chosen_ssrc))

-  rtp_stats.filter_ssrc(chosen_ssrc)
+  rtp_stats.FilterSsrc(chosen_ssrc)

   print("Statistics:")
-  rtp_stats.print_sequence_number_statistics()
-  rtp_stats.estimate_frequency(options.query_sample_rate)
-  rtp_stats.print_duration_statistics()
-  rtp_stats.remove_reordered()
-  rtp_stats.compute_bandwidth()
-  rtp_stats.plot_statistics()
+  rtp_stats.PrintSequenceNumberStatistics()
+  rtp_stats.EstimateFrequency(options.query_sample_rate)
+  rtp_stats.PrintDurationStatistics()
+  rtp_stats.RemoveReordered()
+  rtp_stats.ComputeBandwidth()
+  rtp_stats.PlotStatistics()

 if __name__ == "__main__":
   main()

@@ -17,19 +17,19 @@ or
 import collections
 import unittest

-missing_numpy = False
+MISSING_NUMPY = False  # pylint: disable=invalid-name
 try:
   import numpy
   import rtp_analyzer
 except ImportError:
-  missing_numpy = True
+  MISSING_NUMPY = True

 FakePoint = collections.namedtuple("FakePoint",
                                    ["real_send_time_ms", "absdelay"])


 class TestDelay(unittest.TestCase):
-  def assertMaskEqual(self, masked_array, data, mask):
+  def AssertMaskEqual(self, masked_array, data, mask):
     self.assertEqual(list(masked_array.data), data)

     if isinstance(masked_array.mask, numpy.bool_):
@@ -40,23 +40,22 @@ class TestDelay(unittest.TestCase):

   def testCalculateDelaySimple(self):
     points = [FakePoint(0, 0), FakePoint(1, 0)]
-    mask = rtp_analyzer.calculate_delay(0, 1, 1, points)
-    self.assertMaskEqual(mask, [0, 0], False)
+    mask = rtp_analyzer.CalculateDelay(0, 1, 1, points)
+    self.AssertMaskEqual(mask, [0, 0], False)

   def testCalculateDelayMissing(self):
     points = [FakePoint(0, 0), FakePoint(2, 0)]
-    mask = rtp_analyzer.calculate_delay(0, 2, 1, points)
-    self.assertMaskEqual(mask, [0, -1, 0], [False, True, False])
+    mask = rtp_analyzer.CalculateDelay(0, 2, 1, points)
+    self.AssertMaskEqual(mask, [0, -1, 0], [False, True, False])

   def testCalculateDelayBorders(self):
     points = [FakePoint(0, 0), FakePoint(2, 0)]
-    mask = rtp_analyzer.calculate_delay(0, 3, 2, points)
-    self.assertMaskEqual(mask, [0, 0, -1], [False, False, True])
+    mask = rtp_analyzer.CalculateDelay(0, 3, 2, points)
+    self.AssertMaskEqual(mask, [0, 0, -1], [False, False, True])


 if __name__ == "__main__":
-  if missing_numpy:
-    # pylint: disable=superfluous-parens
-    print("Missing numpy, skipping test.")
+  if MISSING_NUMPY:
+    print "Missing numpy, skipping test."
   else:
     unittest.main()

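A side note on the last hunk: under Python 2, print("one string") is a print statement applied to a parenthesized expression, not a function call, which is exactly what PyLint's superfluous-parens check flags; dropping the parentheses removes the warning and the disable comment with it. Illustration (Python 2 semantics):

# Both statements print the same text; the first merely parenthesizes
# its operand, which is what superfluous-parens complains about.
print("Missing numpy, skipping test.")
print "Missing numpy, skipping test."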
@@ -13,28 +13,28 @@ from video_analysis import FindUsbPortForV4lDevices


 class RunVideoAnalysisTest(unittest.TestCase):
-  def setGlobPath(self, path1, path2):
+  def SetGlobPath(self, path1, path2):
     self.path1 = path1
     self.path2 = path2

   def setUp(self):
     self.path1 = ''
     self.path2 = ''
-    self.requestNbr = 1
+    self.request_nbr = 1

-    def glob_mock(string):
+    def GlobMock(string):
       # Eat incoming string.
       del string
-      if self.requestNbr == 1:
-        self.requestNbr += 1
+      if self.request_nbr == 1:
+        self.request_nbr += 1
         return self.path1
       else:
-        self.requestNbr = 1
+        self.request_nbr = 1
         return self.path2

     # Override the glob function with our own that returns a string set by the
     # test.
-    glob.glob = glob_mock
+    glob.glob = GlobMock

   # Verifies that the correct USB id is returned.
   def testFindUSBPortForV4lDevices(self):
@@ -42,7 +42,7 @@ class RunVideoAnalysisTest(unittest.TestCase):
                    'video4linux/video0')
     short_path2 = ('/sys/bus/usb/devices/usb1/1-1/driver/4-3/4-3:1.0/'
                    'video4linux/video1')
-    self.setGlobPath(short_path1, short_path2)
+    self.SetGlobPath(short_path1, short_path2)
     short_usb_ids = ['4-4', '4-3']
     self.assertEqual(FindUsbPortForV4lDevices('video0', 'video1'),
                      short_usb_ids)
@@ -51,16 +51,16 @@ class RunVideoAnalysisTest(unittest.TestCase):
                   'video4linux/video0')
     long_path2 = ('/sys/bus/usb/devices/usb1/1-1/driver/3-2/3-2.1:1.0/'
                   'video4linux/video1')
-    self.setGlobPath(long_path1, long_path2)
+    self.SetGlobPath(long_path1, long_path2)
     long_usb_ids = ['3-3.1', '3-2.1']
     self.assertEqual(FindUsbPortForV4lDevices('video0', 'video1'), long_usb_ids)


   def testFindUSBPortForV4lDevicesNoDevice(self):
-    noDeviceFound = ('')
-    V4lDevice = ('/sys/bus/usb/devices/usb1/1-1/driver/3-2/3-2.1:1.0/'
+    no_device_found = ('')
+    v4l_device = ('/sys/bus/usb/devices/usb1/1-1/driver/3-2/3-2.1:1.0/'
                   'video4linux/video1')
-    self.setGlobPath(noDeviceFound, V4lDevice)
+    self.SetGlobPath(no_device_found, v4l_device)
     empty_list = []
     self.assertEqual(FindUsbPortForV4lDevices('video0', 'video1'), empty_list)

@@ -57,7 +57,7 @@ HIDE_DROPPED = 256
 RIGHT_Y_AXIS = 512

 # internal field id, field name, title
-_fields = [
+_FIELDS = [
   # Raw
   (DROPPED, "dropped", "dropped"),
   (INPUT_TIME, "input_time_ms", "input time"),
@@ -75,16 +75,16 @@ _fields = [
   (RENDERED_DELTA, "rendered_delta", "rendered delta"),
 ]

-name_to_id = {field[1]: field[0] for field in _fields}
-id_to_title = {field[0]: field[2] for field in _fields}
+NAME_TO_ID = {field[1]: field[0] for field in _FIELDS}
+ID_TO_TITLE = {field[0]: field[2] for field in _FIELDS}

-def field_arg_to_id(arg):
+def FieldArgToId(arg):
   if arg == "none":
     return None
-  if arg in name_to_id:
-    return name_to_id[arg]
-  if arg + "_ms" in name_to_id:
-    return name_to_id[arg + "_ms"]
+  if arg in NAME_TO_ID:
+    return NAME_TO_ID[arg]
+  if arg + "_ms" in NAME_TO_ID:
+    return NAME_TO_ID[arg + "_ms"]
   raise Exception("Unrecognized field name \"{}\"".format(arg))


@@ -105,9 +105,9 @@ class Data(object):
     self.length = 0
     self.samples = defaultdict(list)

-    self._read_samples(filename)
+    self._ReadSamples(filename)

-  def _read_samples(self, filename):
+  def _ReadSamples(self, filename):
     """Reads graph data from the given file."""
     f = open(filename)
     it = iter(f)
@@ -115,7 +115,7 @@ class Data(object):
     self.title = it.next().strip()
     self.length = int(it.next())
     field_names = [name.strip() for name in it.next().split()]
-    field_ids = [name_to_id[name] for name in field_names]
+    field_ids = [NAME_TO_ID[name] for name in field_names]

     for field_id in field_ids:
       self.samples[field_id] = [0.0] * self.length
@@ -124,18 +124,18 @@ class Data(object):
       for col, value in enumerate(it.next().split()):
         self.samples[field_ids[col]][sample_id] = float(value)

-    self._subtract_first_input_time()
-    self._generate_additional_data()
+    self._SubtractFirstInputTime()
+    self._GenerateAdditionalData()

     f.close()

-  def _subtract_first_input_time(self):
+  def _SubtractFirstInputTime(self):
     offset = self.samples[INPUT_TIME][0]
     for field in [INPUT_TIME, SEND_TIME, RECV_TIME, RENDER_TIME]:
       if field in self.samples:
         self.samples[field] = [x - offset for x in self.samples[field]]

-  def _generate_additional_data(self):
+  def _GenerateAdditionalData(self):
     """Calculates sender time, receiver time etc. from the raw data."""
     s = self.samples
     last_render_time = 0
@@ -153,16 +153,16 @@ class Data(object):
         s[RENDERED_DELTA][k] = decoded_time - last_render_time
         last_render_time = decoded_time

-  def _hide(self, values):
+  def _Hide(self, values):
     """
     Replaces values for dropped frames with None.
-    These values are then skipped by the plot() method.
+    These values are then skipped by the Plot() method.
     """

     return [None if self.samples[DROPPED][k] else values[k]
             for k in range(len(values))]

-  def add_samples(self, config, target_lines_list):
+  def AddSamples(self, config, target_lines_list):
     """Creates graph lines from the current data set with given config."""
     for field in config.fields:
       # field is None means the user wants just to skip the color.
@@ -174,14 +174,14 @@ class Data(object):
       values = self.samples[field_id]

       if field & HIDE_DROPPED:
-        values = self._hide(values)
+        values = self._Hide(values)

       target_lines_list.append(PlotLine(
-          self.title + " " + id_to_title[field_id],
+          self.title + " " + ID_TO_TITLE[field_id],
           values, field & ~FIELD_MASK))


-def average_over_cycle(values, length):
+def AverageOverCycle(values, length):
   """
   Returns the list:
   [
@@ -220,16 +220,16 @@ class PlotConfig(object):
     self.output_filename = output_filename
     self.title = title

-  def plot(self, ax1):
+  def Plot(self, ax1):
     lines = []
     for data in self.data_list:
       if not data:
         # Add None lines to skip the colors.
         lines.extend([None] * len(self.fields))
       else:
-        data.add_samples(self, lines)
+        data.AddSamples(self, lines)

-    def _slice_values(values):
+    def _SliceValues(values):
       if self.offset:
         values = values[self.offset:]
       if self.frames:
@@ -241,9 +241,9 @@ class PlotConfig(object):
       if line is None:
         continue

-      line.values = _slice_values(line.values)
+      line.values = _SliceValues(line.values)
       if self.cycle_length:
-        line.values = average_over_cycle(line.values, self.cycle_length)
+        line.values = AverageOverCycle(line.values, self.cycle_length)

       if length is None:
         length = len(line.values)
@@ -272,7 +272,7 @@ class PlotConfig(object):
       x = numpy.array(range(self.offset, self.offset + len(line.values)))
       y = numpy.array(line.values)
       ax = ax2 if line.flags & RIGHT_Y_AXIS else ax1
-      ax.plot(x, y, "o-", label=line.label, markersize=3.0, linewidth=1.0,
+      ax.Plot(x, y, "o-", label=line.label, markersize=3.0, linewidth=1.0,
               color=color_iter.next())

     ax1.grid(True)
@@ -283,20 +283,20 @@ class PlotConfig(object):
     ax1.legend(loc="best", shadow=True, fontsize="large")


-def load_files(filenames):
+def LoadFiles(filenames):
   result = []
   for filename in filenames:
-    if filename in load_files.cache:
-      result.append(load_files.cache[filename])
+    if filename in LoadFiles.cache:
+      result.append(LoadFiles.cache[filename])
     else:
       data = Data(filename)
-      load_files.cache[filename] = data
+      LoadFiles.cache[filename] = data
       result.append(data)
   return result
-load_files.cache = {}
+LoadFiles.cache = {}


-def get_parser():
+def GetParser():
   class CustomAction(argparse.Action):
     def __call__(self, parser, namespace, values, option_string=None):
       if "ordered_args" not in namespace:
@@ -335,7 +335,7 @@ def get_parser():
   return parser


-def _plot_config_from_args(args, graph_num):
+def _PlotConfigFromArgs(args, graph_num):
   # Pylint complains about using kwargs, so have to do it this way.
   cycle_length = None
   frames = None
@@ -362,7 +362,7 @@ def _plot_config_from_args(args, graph_num):
     elif key == "right":
       mask |= RIGHT_Y_AXIS
     elif key == "field":
-      field_id = field_arg_to_id(values[0])
+      field_id = FieldArgToId(values[0])
      fields.append(field_id | mask if field_id is not None else None)
       mask = 0  # Reset mask after the field argument.
     elif key == "files":
@@ -373,12 +373,12 @@ def _plot_config_from_args(args, graph_num):
   if not fields:
     raise Exception("Missing field argument(s) for graph #{}".format(graph_num))

-  return PlotConfig(fields, load_files(files), cycle_length=cycle_length,
+  return PlotConfig(fields, LoadFiles(files), cycle_length=cycle_length,
                     frames=frames, offset=offset, output_filename=output_filename,
                     title=title)


-def plot_configs_from_args(args):
+def PlotConfigsFromArgs(args):
   """Generates plot configs for given command line arguments."""
   # The way it works:
   # First we detect separators -n/--next and split arguments into groups, one
@@ -388,21 +388,21 @@ def plot_configs_from_args(args):
   args = itertools.groupby(args, lambda x: x in ["-n", "--next"])
   args = list(list(group) for match, group in args if not match)

-  parser = get_parser()
+  parser = GetParser()
   plot_configs = []
   for index, raw_args in enumerate(args):
     graph_args = parser.parse_args(raw_args).ordered_args
-    plot_configs.append(_plot_config_from_args(graph_args, index))
+    plot_configs.append(_PlotConfigFromArgs(graph_args, index))
   return plot_configs


-def show_or_save_plots(plot_configs):
+def ShowOrSavePlots(plot_configs):
   for config in plot_configs:
     fig = plt.figure(figsize=(14.0, 10.0))
-    ax = fig.add_subplot(1, 1, 1)
+    ax = fig.add_subPlot(1, 1, 1)

     plt.title(config.title)
-    config.plot(ax)
+    config.Plot(ax)
     if config.output_filename:
       print "Saving to", config.output_filename
       fig.savefig(config.output_filename)
@@ -411,4 +411,4 @@ def show_or_save_plots(plot_configs):
   plt.show()

 if __name__ == "__main__":
-  show_or_save_plots(plot_configs_from_args(sys.argv[1:]))
+  ShowOrSavePlots(PlotConfigsFromArgs(sys.argv[1:]))