New rtc dump analyzing tool in Python
R=henrik.lundin@webrtc.org, ivoc@webrtc.org, kwiberg@webrtc.org, peah@webrtc.org, phoglund@webrtc.org
Review-Url: https://codereview.webrtc.org/1999113002
Cr-Commit-Position: refs/heads/master@{#13218}
webrtc/tools/py_event_log_analyzer/rtp_analyzer.py (new file, 260 lines added)
# Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license
# that can be found in the LICENSE file in the root of the source
# tree. An additional intellectual property rights grant can be found
# in the file PATENTS. All contributing project authors may
# be found in the AUTHORS file in the root of the source tree.

"""Displays statistics and plots graphs from RTC protobuf dump."""

from __future__ import division
from __future__ import print_function

import collections
import sys

import matplotlib.pyplot as plt
import numpy

import misc
import pb_parse

class RTPStatistics(object):
  """Has methods for calculating and plotting RTP stream statistics."""

  BANDWIDTH_SMOOTHING_WINDOW_SIZE = 10

  def __init__(self, data_points):
    """Initializes object with data_points and computes simple statistics.

    Computes percentages of number of packets and packet sizes by
    SSRC.

    Args:
        data_points: list of pb_parse.DataPoints on which statistics are
          calculated.
    """

    self.data_points = data_points
    self.ssrc_frequencies = misc.normalize_counter(
        collections.Counter([pt.ssrc for pt in self.data_points]))
    self.ssrc_size_table = misc.ssrc_normalized_size_table(self.data_points)
    self.bandwidth_kbps = None
    self.smooth_bw_kbps = None

  def print_ssrc_info(self, ssrc_id, ssrc):
    """Prints packet and size statistics for a given SSRC.

    Args:
        ssrc_id: textual identifier of SSRC printed beside statistics for it.
        ssrc: SSRC by which to filter data and display statistics
    """
    filtered_ssrc = [point for point in self.data_points
                     if point.ssrc == ssrc]
    payloads = misc.normalize_counter(
        collections.Counter([point.payload_type for point in filtered_ssrc]))

    payload_info = "payload type(s): {}".format(
        ", ".join(str(payload) for payload in payloads))
    print("{} 0x{:x} {}, {:.2f}% packets, {:.2f}% data".format(
        ssrc_id, ssrc, payload_info, self.ssrc_frequencies[ssrc] * 100,
        self.ssrc_size_table[ssrc] * 100))
    print(" packet sizes:")
    (bin_counts, bin_bounds) = numpy.histogram(
        [point.size for point in filtered_ssrc], bins=5, density=False)
    bin_proportions = bin_counts / sum(bin_counts)
    print("\n".join([
        " {:.1f} - {:.1f}: {:.2f}%".format(bin_bounds[i], bin_bounds[i + 1],
                                           bin_proportions[i] * 100)
        for i in range(len(bin_proportions))
    ]))

  def choose_ssrc(self):
    """Queries the user for an SSRC to analyze."""

    if len(self.ssrc_frequencies) == 1:
      chosen_ssrc = list(self.ssrc_frequencies)[0]
      self.print_ssrc_info("", chosen_ssrc)
      return chosen_ssrc

    for (i, ssrc) in enumerate(self.ssrc_frequencies):
      self.print_ssrc_info(i, ssrc)

    while True:
      chosen_index = int(misc.get_input("choose one> "))
      if 0 <= chosen_index < len(self.ssrc_frequencies):
        return list(self.ssrc_frequencies)[chosen_index]
      else:
        print("Invalid index!")

  def filter_ssrc(self, chosen_ssrc):
    """Filters data points and unwraps sequence numbers and timestamps.

    Removes data points with `ssrc != chosen_ssrc`. Unwraps sequence
    numbers and timestamps for the chosen selection.
    """
    self.data_points = [point for point in self.data_points
                        if point.ssrc == chosen_ssrc]
    unwrapped_sequence_numbers = misc.unwrap(
        [point.sequence_number for point in self.data_points], 2**16 - 1)
    for (data_point, sequence_number) in zip(self.data_points,
                                             unwrapped_sequence_numbers):
      data_point.sequence_number = sequence_number

    unwrapped_timestamps = misc.unwrap(
        [point.timestamp for point in self.data_points], 2**32 - 1)

    for (data_point, timestamp) in zip(self.data_points,
                                       unwrapped_timestamps):
      data_point.timestamp = timestamp

  def print_sequence_number_statistics(self):
    """Prints the number of missing, duplicated and reordered packets."""
    seq_no_set = set(point.sequence_number for point in
                     self.data_points)
    print("Missing sequence numbers: {} out of {}".format(
        max(seq_no_set) - min(seq_no_set) + 1 - len(seq_no_set),
        len(seq_no_set)
    ))
    print("Duplicated packets: {}".format(len(self.data_points) -
                                          len(seq_no_set)))
    print("Reordered packets: {}".format(
        misc.count_reordered([point.sequence_number for point in
                              self.data_points])))

  def estimate_frequency(self):
    """Estimates frequency and updates data.

    Guesses the most probable frequency by looking at changes in
    timestamps (RFC 3550 section 5.1), calculates clock drifts and
    sending time of packets. Updates `self.data_points` with changes
    in delay and send time.
    """
    delta_timestamp = (self.data_points[-1].timestamp -
                       self.data_points[0].timestamp)
    delta_arr_timestamp = float((self.data_points[-1].arrival_timestamp_ms -
                                 self.data_points[0].arrival_timestamp_ms))
    # RTP timestamp ticks per millisecond of arrival time, i.e. kHz.
    freq_est = delta_timestamp / delta_arr_timestamp

    # Common RTP clock rates in kHz. Accept the estimate if it is within 5%
    # of one of them.
    freq_vec = [8, 16, 32, 48, 90]
    freq = None
    for f in freq_vec:
      if abs((freq_est - f) / f) < 0.05:
        freq = f

    print("Estimated frequency: {} kHz".format(freq_est))
    if freq is None:
      freq = int(misc.get_input(
          "Frequency could not be guessed. Input frequency (in kHz)> "))
    else:
      print("Guessed frequency: {} kHz".format(freq))

    for point in self.data_points:
      point.real_send_time_ms = (point.timestamp -
                                 self.data_points[0].timestamp) / freq
      point.delay = point.arrival_timestamp_ms - point.real_send_time_ms

  def print_duration_statistics(self):
    """Prints delay, clock drift and bitrate statistics."""

    min_delay = min(point.delay for point in self.data_points)

    for point in self.data_points:
      point.absdelay = point.delay - min_delay

    stream_duration_sender = self.data_points[-1].real_send_time_ms / 1000
    print("Stream duration at sender: {:.1f} seconds".format(
        stream_duration_sender
    ))

    arrival_timestamps_ms = [point.arrival_timestamp_ms for point in
                             self.data_points]
    stream_duration_receiver = (max(arrival_timestamps_ms) -
                                min(arrival_timestamps_ms)) / 1000
    print("Stream duration at receiver: {:.1f} seconds".format(
        stream_duration_receiver
    ))

    print("Clock drift: {:.2f}%".format(
        100 * (stream_duration_receiver / stream_duration_sender - 1)
    ))

    total_size = sum(point.size for point in self.data_points) * 8 / 1000
    print("Send average bitrate: {:.2f} kbps".format(
        total_size / stream_duration_sender))

    print("Receive average bitrate: {:.2f} kbps".format(
        total_size / stream_duration_receiver))

  def remove_reordered(self):
    """Removes data points that arrived out of order."""
    last = self.data_points[0]
    data_points_ordered = [last]
    for point in self.data_points[1:]:
      if point.sequence_number > last.sequence_number and (
          point.real_send_time_ms > last.real_send_time_ms):
        data_points_ordered.append(point)
        last = point
    self.data_points = data_points_ordered

  def compute_bandwidth(self):
    """Computes bandwidth averaged over several consecutive packets.

    The number of consecutive packets used in the average is
    BANDWIDTH_SMOOTHING_WINDOW_SIZE. Averaging is done with
    numpy.correlate.
    """
    self.bandwidth_kbps = []
    for i in range(len(self.data_points) - 1):
      self.bandwidth_kbps.append(
          self.data_points[i].size * 8 /
          (self.data_points[i + 1].real_send_time_ms -
           self.data_points[i].real_send_time_ms))
    correlate_filter = (
        numpy.ones(RTPStatistics.BANDWIDTH_SMOOTHING_WINDOW_SIZE) /
        RTPStatistics.BANDWIDTH_SMOOTHING_WINDOW_SIZE)
    self.smooth_bw_kbps = numpy.correlate(self.bandwidth_kbps, correlate_filter)
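    # Note: correlating with a flat window of ones(N) / N is an N-packet
    # moving average. numpy.correlate's default 'valid' mode returns
    # len(self.bandwidth_kbps) - N + 1 samples, which is why plot_statistics
    # below truncates the time axis to len(self.smooth_bw_kbps).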

  def plot_statistics(self):
    """Plots changes in delay and average bandwidth."""
    plt.figure(1)
    plt.plot([f.real_send_time_ms / 1000 for f in self.data_points],
             [f.absdelay for f in self.data_points])
    plt.xlabel("Send time [s]")
    plt.ylabel("Relative transport delay [ms]")

    plt.figure(2)
    plt.plot([f.real_send_time_ms / 1000 for f in
              self.data_points][:len(self.smooth_bw_kbps)],
             self.smooth_bw_kbps[:len(self.data_points)])
    plt.xlabel("Send time [s]")
    plt.ylabel("Bandwidth [kbps]")

    plt.show()


def main():
  if len(sys.argv) < 2:
    print("Usage: python rtp_analyzer.py <filename of rtc event log>")
    sys.exit(0)

  data_points = pb_parse.parse_protobuf(sys.argv[1])
  rtp_stats = RTPStatistics(data_points)
  chosen_ssrc = rtp_stats.choose_ssrc()
  print("Chosen SSRC: 0X{:X}".format(chosen_ssrc))

  rtp_stats.filter_ssrc(chosen_ssrc)
  print("Statistics:")
  rtp_stats.print_sequence_number_statistics()
  rtp_stats.estimate_frequency()
  rtp_stats.print_duration_statistics()
  rtp_stats.remove_reordered()
  rtp_stats.compute_bandwidth()
  rtp_stats.plot_statistics()


if __name__ == "__main__":
  main()
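
# Example invocation (hypothetical log file name; misc.py and pb_parse.py must
# be importable, e.g. by running from this directory):
#   python rtp_analyzer.py rtc_event_log.pb
# The tool prints statistics for a chosen SSRC and opens two matplotlib plots.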