Moving src/webrtc into src/.

In order to eliminate the WebRTC Subtree mirror in Chromium, 
WebRTC is moving the content of the src/webrtc directory up
to the src/ directory.

NOPRESUBMIT=true
NOTREECHECKS=true
NOTRY=true
TBR=tommi@webrtc.org

Bug: chromium:611808
Change-Id: Iac59c5b51b950f174119565bac87955a7994bc38
Reviewed-on: https://webrtc-review.googlesource.com/1560
Commit-Queue: Mirko Bonadei <mbonadei@webrtc.org>
Reviewed-by: Henrik Kjellander <kjellander@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#19845}
Author:       Mirko Bonadei
Committed by: Commit Bot on 2017-09-15 06:15:48 +02:00
Parent:       6674846b4a
Commit:       bb547203bf
4576 changed files with 1092 additions and 1196 deletions

@@ -0,0 +1 @@
terelius@webrtc.org

File diff suppressed because it is too large.

@@ -0,0 +1,207 @@
/*
* Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_RTC_TOOLS_EVENT_LOG_VISUALIZER_ANALYZER_H_
#define WEBRTC_RTC_TOOLS_EVENT_LOG_VISUALIZER_ANALYZER_H_
#include <map>
#include <memory>
#include <set>
#include <string>
#include <utility>
#include <vector>
#include "webrtc/logging/rtc_event_log/rtc_event_log_parser.h"
#include "webrtc/modules/audio_coding/audio_network_adaptor/include/audio_network_adaptor.h"
#include "webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h"
#include "webrtc/modules/rtp_rtcp/source/rtcp_packet.h"
#include "webrtc/rtc_base/function_view.h"
#include "webrtc/rtc_tools/event_log_visualizer/plot_base.h"
namespace webrtc {
namespace plotting {
struct LoggedRtpPacket {
LoggedRtpPacket(uint64_t timestamp, RTPHeader header, size_t total_length)
: timestamp(timestamp), header(header), total_length(total_length) {}
uint64_t timestamp;
// TODO(terelius): This allocates space for 15 CSRCs even if none are used.
RTPHeader header;
size_t total_length;
};
struct LoggedRtcpPacket {
LoggedRtcpPacket(uint64_t timestamp,
RTCPPacketType rtcp_type,
std::unique_ptr<rtcp::RtcpPacket> rtcp_packet)
: timestamp(timestamp), type(rtcp_type), packet(std::move(rtcp_packet)) {}
uint64_t timestamp;
RTCPPacketType type;
std::unique_ptr<rtcp::RtcpPacket> packet;
};
struct LossBasedBweUpdate {
uint64_t timestamp;
int32_t new_bitrate;
uint8_t fraction_loss;
int32_t expected_packets;
};
struct AudioNetworkAdaptationEvent {
uint64_t timestamp;
AudioEncoderRuntimeConfig config;
};
class EventLogAnalyzer {
public:
// The EventLogAnalyzer keeps a reference to the ParsedRtcEventLog for the
// duration of its lifetime. The ParsedRtcEventLog must not be destroyed or
// modified while the EventLogAnalyzer is being used.
explicit EventLogAnalyzer(const ParsedRtcEventLog& log);
void CreatePacketGraph(PacketDirection desired_direction, Plot* plot);
void CreateAccumulatedPacketsGraph(PacketDirection desired_direction,
Plot* plot);
void CreatePlayoutGraph(Plot* plot);
void CreateAudioLevelGraph(Plot* plot);
void CreateSequenceNumberGraph(Plot* plot);
void CreateIncomingPacketLossGraph(Plot* plot);
void CreateIncomingDelayDeltaGraph(Plot* plot);
void CreateIncomingDelayGraph(Plot* plot);
void CreateFractionLossGraph(Plot* plot);
void CreateTotalBitrateGraph(PacketDirection desired_direction,
Plot* plot,
bool show_detector_state = false);
void CreateStreamBitrateGraph(PacketDirection desired_direction, Plot* plot);
void CreateBweSimulationGraph(Plot* plot);
void CreateNetworkDelayFeedbackGraph(Plot* plot);
void CreateTimestampGraph(Plot* plot);
void CreateAudioEncoderTargetBitrateGraph(Plot* plot);
void CreateAudioEncoderFrameLengthGraph(Plot* plot);
void CreateAudioEncoderPacketLossGraph(Plot* plot);
void CreateAudioEncoderEnableFecGraph(Plot* plot);
void CreateAudioEncoderEnableDtxGraph(Plot* plot);
void CreateAudioEncoderNumChannelsGraph(Plot* plot);
void CreateAudioJitterBufferGraph(const std::string& replacement_file_name,
int file_sample_rate_hz,
Plot* plot);
// Returns a vector of capture and arrival timestamps for the video frames
// of the stream with the greatest number of frames.
std::vector<std::pair<int64_t, int64_t>> GetFrameTimestamps() const;
private:
class StreamId {
public:
StreamId(uint32_t ssrc, webrtc::PacketDirection direction)
: ssrc_(ssrc), direction_(direction) {}
bool operator<(const StreamId& other) const {
return std::tie(ssrc_, direction_) <
std::tie(other.ssrc_, other.direction_);
}
bool operator==(const StreamId& other) const {
return std::tie(ssrc_, direction_) ==
std::tie(other.ssrc_, other.direction_);
}
uint32_t GetSsrc() const { return ssrc_; }
webrtc::PacketDirection GetDirection() const { return direction_; }
private:
uint32_t ssrc_;
webrtc::PacketDirection direction_;
};
template <typename T>
void CreateAccumulatedPacketsTimeSeries(
PacketDirection desired_direction,
Plot* plot,
const std::map<StreamId, std::vector<T>>& packets,
const std::string& label_prefix);
bool IsRtxSsrc(StreamId stream_id) const;
bool IsVideoSsrc(StreamId stream_id) const;
bool IsAudioSsrc(StreamId stream_id) const;
std::string GetStreamName(StreamId) const;
const ParsedRtcEventLog& parsed_log_;
// A list of SSRCs we are interested in analysing.
// If left empty, all SSRCs will be considered relevant.
std::vector<uint32_t> desired_ssrc_;
// Tracks what each stream is configured for. Note that a single SSRC can be
// in several sets. For example, the SSRC used for sending video over RTX
// will appear in both video_ssrcs_ and rtx_ssrcs_. In the unlikely case that
// an SSRC is reconfigured to a different media type mid-call, it will also
// appear in multiple sets.
std::set<StreamId> rtx_ssrcs_;
std::set<StreamId> video_ssrcs_;
std::set<StreamId> audio_ssrcs_;
// Maps a stream identifier consisting of ssrc and direction to the parsed
// RTP headers in that stream. Header extensions are parsed if the stream
// has been configured.
std::map<StreamId, std::vector<LoggedRtpPacket>> rtp_packets_;
std::map<StreamId, std::vector<LoggedRtcpPacket>> rtcp_packets_;
// Maps an SSRC to the timestamps of parsed audio playout events.
std::map<uint32_t, std::vector<uint64_t>> audio_playout_events_;
// Stores the timestamps for all log segments, in the form of associated start
// and end events.
std::vector<std::pair<uint64_t, uint64_t>> log_segments_;
// A list of all updates from the send-side loss-based bandwidth estimator.
std::vector<LossBasedBweUpdate> bwe_loss_updates_;
std::vector<AudioNetworkAdaptationEvent> audio_network_adaptation_events_;
std::vector<ParsedRtcEventLog::BweProbeClusterCreatedEvent>
bwe_probe_cluster_created_events_;
std::vector<ParsedRtcEventLog::BweProbeResultEvent> bwe_probe_result_events_;
std::vector<ParsedRtcEventLog::BweDelayBasedUpdate> bwe_delay_updates_;
// Window and step size used for calculating moving averages, e.g. bitrate.
// The generated data points will be |step_| microseconds apart.
// Only events occurring at most |window_duration_| microseconds before the
// current data point will be part of the average.
uint64_t window_duration_;
uint64_t step_;
// Timestamps of the first and last events in the log.
uint64_t begin_time_;
uint64_t end_time_;
// Duration (in seconds) of log file.
float call_duration_s_;
};
} // namespace plotting
} // namespace webrtc
#endif // WEBRTC_RTC_TOOLS_EVENT_LOG_VISUALIZER_ANALYZER_H_
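
The class comment above pins down the lifetime contract: the ParsedRtcEventLog must outlive the EventLogAnalyzer. A minimal usage sketch, condensed from the main.cc further down in this commit (flag parsing and error handling omitted):

#include "webrtc/logging/rtc_event_log/rtc_event_log_parser.h"
#include "webrtc/rtc_tools/event_log_visualizer/analyzer.h"
#include "webrtc/rtc_tools/event_log_visualizer/plot_python.h"

int main(int argc, char* argv[]) {
  // The analyzer only keeps a reference, so |parsed_log| must stay alive for
  // as long as |analyzer| is in use.
  webrtc::ParsedRtcEventLog parsed_log;
  parsed_log.ParseFile(argv[1]);
  webrtc::plotting::EventLogAnalyzer analyzer(parsed_log);

  // Each Create*Graph() call fills one Plot obtained from a PlotCollection.
  webrtc::plotting::PythonPlotCollection collection;
  analyzer.CreateTotalBitrateGraph(webrtc::PacketDirection::kIncomingPacket,
                                   collection.AppendNewPlot());
  collection.Draw();  // Writes a matplotlib script to stdout.
  return 0;
}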

@@ -0,0 +1,38 @@
// Describes a chart generated from WebRTC event log data.
syntax = "proto3";
option optimize_for = LITE_RUNTIME;
package webrtc.analytics;
message ChartStyle {
enum Type {
UNDEFINED = 0;
LINE_CHART = 1;
BAR_CHART = 2;
LINE_STEP_CHART = 3;
SCATTER_CHART = 4;
}
}
message DataSet {
repeated float x_values = 1;
repeated float y_values = 2;
string label = 3;
ChartStyle.Type style = 4;
bool highlight_points = 5;
}
message Chart {
repeated DataSet data_sets = 1;
float xaxis_min = 2;
float xaxis_max = 3;
string xaxis_label = 4;
float yaxis_min = 5;
float yaxis_max = 6;
string yaxis_label = 7;
string title = 8;
}
message ChartCollection {
repeated Chart charts = 1;
}
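
Because chart.proto is built with the lite runtime, the generated messages expose the usual MessageLite API. A small sketch of consuming a serialized ChartCollection from C++; it assumes the generated header chart.pb.h (which plot_protobuf.h below includes), and PrintChartSummary is an illustrative name, not part of this commit:

#include <stdio.h>
#include <string>

#include "webrtc/rtc_tools/event_log_visualizer/chart.pb.h"

void PrintChartSummary(const std::string& serialized) {
  webrtc::analytics::ChartCollection collection;
  if (!collection.ParseFromString(serialized))
    return;  // Not a valid ChartCollection.
  for (const webrtc::analytics::Chart& chart : collection.charts()) {
    printf("%s: %d data set(s), x in [%G, %G]\n", chart.title().c_str(),
           chart.data_sets_size(), chart.xaxis_min(), chart.xaxis_max());
    for (const webrtc::analytics::DataSet& data_set : chart.data_sets()) {
      printf("  %s: %d point(s)\n", data_set.label().c_str(),
             data_set.x_values_size());
    }
  }
}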

@@ -0,0 +1,304 @@
/*
* Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include <iostream>
#include "webrtc/logging/rtc_event_log/rtc_event_log_parser.h"
#include "webrtc/rtc_base/flags.h"
#include "webrtc/rtc_tools/event_log_visualizer/analyzer.h"
#include "webrtc/rtc_tools/event_log_visualizer/plot_base.h"
#include "webrtc/rtc_tools/event_log_visualizer/plot_python.h"
#include "webrtc/test/field_trial.h"
#include "webrtc/test/testsupport/fileutils.h"
DEFINE_string(plot_profile,
"default",
"A profile that selects a certain subset of the plots. Currently "
"defined profiles are \"all\", \"none\" and \"default\"");
DEFINE_bool(plot_incoming_packet_sizes,
false,
"Plot bar graph showing the size of each incoming packet.");
DEFINE_bool(plot_outgoing_packet_sizes,
false,
"Plot bar graph showing the size of each outgoing packet.");
DEFINE_bool(plot_incoming_packet_count,
false,
"Plot the accumulated number of packets for each incoming stream.");
DEFINE_bool(plot_outgoing_packet_count,
false,
"Plot the accumulated number of packets for each outgoing stream.");
DEFINE_bool(plot_audio_playout,
false,
"Plot bar graph showing the time between each audio playout.");
DEFINE_bool(plot_audio_level,
false,
"Plot line graph showing the audio level of incoming audio.");
DEFINE_bool(plot_incoming_sequence_number_delta,
false,
"Plot the sequence number difference between consecutive incoming "
"packets.");
DEFINE_bool(
plot_incoming_delay_delta,
false,
"Plot the difference in 1-way path delay between consecutive packets.");
DEFINE_bool(plot_incoming_delay,
true,
"Plot the 1-way path delay for incoming packets, normalized so "
"that the first packet has delay 0.");
DEFINE_bool(plot_incoming_loss_rate,
true,
"Compute the loss rate for incoming packets using a method that's "
"similar to the one used for RTCP SR and RR fraction lost. Note "
"that the loss rate can be negative if packets are duplicated or "
"reordered.");
DEFINE_bool(plot_incoming_bitrate,
true,
"Plot the total bitrate used by all incoming streams.");
DEFINE_bool(plot_outgoing_bitrate,
true,
"Plot the total bitrate used by all outgoing streams.");
DEFINE_bool(plot_incoming_stream_bitrate,
true,
"Plot the bitrate used by each incoming stream.");
DEFINE_bool(plot_outgoing_stream_bitrate,
true,
"Plot the bitrate used by each outgoing stream.");
DEFINE_bool(plot_simulated_sendside_bwe,
false,
"Run the send-side bandwidth estimator with the outgoing rtp and "
"incoming rtcp and plot the resulting estimate.");
DEFINE_bool(plot_network_delay_feedback,
true,
"Compute network delay based on sent packets and the received "
"transport feedback.");
DEFINE_bool(plot_fraction_loss_feedback,
true,
"Plot packet loss in percent for outgoing packets (as perceived by "
"the send-side bandwidth estimator).");
DEFINE_bool(plot_timestamps,
false,
"Plot the rtp timestamps of all rtp and rtcp packets over time.");
DEFINE_bool(plot_audio_encoder_bitrate_bps,
false,
"Plot the audio encoder target bitrate.");
DEFINE_bool(plot_audio_encoder_frame_length_ms,
false,
"Plot the audio encoder frame length.");
DEFINE_bool(
plot_audio_encoder_packet_loss,
false,
"Plot the uplink packet loss fraction which is sent to the audio encoder.");
DEFINE_bool(plot_audio_encoder_fec, false, "Plot the audio encoder FEC.");
DEFINE_bool(plot_audio_encoder_dtx, false, "Plot the audio encoder DTX.");
DEFINE_bool(plot_audio_encoder_num_channels,
false,
"Plot the audio encoder number of channels.");
DEFINE_bool(plot_audio_jitter_buffer,
false,
"Plot the audio jitter buffer delay profile.");
DEFINE_string(
force_fieldtrials,
"",
"Field trials control experimental feature code which can be forced. "
"E.g. running with --force_fieldtrials=WebRTC-FooFeature/Enabled/"
" will assign the group Enabled to field trial WebRTC-FooFeature. Multiple "
"trials are separated by \"/\"");
DEFINE_string(wav_filename,
"",
"Path to wav file used for simulation of jitter buffer");
DEFINE_bool(help, false, "prints this message");
DEFINE_bool(show_detector_state,
false,
"Show the state of the delay based BWE detector on the total "
"bitrate graph");
void SetAllPlotFlags(bool setting);
int main(int argc, char* argv[]) {
std::string program_name = argv[0];
std::string usage =
"A tool for visualizing WebRTC event logs.\n"
"Example usage:\n" +
program_name + " <logfile> | python\n" + "Run " + program_name +
" --help for a list of command line options\n";
// Parse command line flags without removing them. We're only interested in
// the |plot_profile| flag.
rtc::FlagList::SetFlagsFromCommandLine(&argc, argv, false);
if (strcmp(FLAG_plot_profile, "all") == 0) {
SetAllPlotFlags(true);
} else if (strcmp(FLAG_plot_profile, "none") == 0) {
SetAllPlotFlags(false);
} else if (strcmp(FLAG_plot_profile, "default") == 0) {
// Do nothing.
} else {
rtc::Flag* plot_profile_flag = rtc::FlagList::Lookup("plot_profile");
RTC_CHECK(plot_profile_flag);
plot_profile_flag->Print(false);
}
// Parse the remaining flags. They are applied relative to the chosen profile.
rtc::FlagList::SetFlagsFromCommandLine(&argc, argv, true);
if (argc != 2 || FLAG_help) {
// Print usage information.
std::cout << usage;
if (FLAG_help)
rtc::FlagList::Print(nullptr, false);
return 0;
}
webrtc::test::SetExecutablePath(argv[0]);
webrtc::test::InitFieldTrialsFromString(FLAG_force_fieldtrials);
std::string filename = argv[1];
webrtc::ParsedRtcEventLog parsed_log;
if (!parsed_log.ParseFile(filename)) {
std::cerr << "Could not parse the entire log file." << std::endl;
std::cerr << "Proceeding to analyze the first "
<< parsed_log.GetNumberOfEvents() << " events in the file."
<< std::endl;
}
webrtc::plotting::EventLogAnalyzer analyzer(parsed_log);
std::unique_ptr<webrtc::plotting::PlotCollection> collection(
new webrtc::plotting::PythonPlotCollection());
if (FLAG_plot_incoming_packet_sizes) {
analyzer.CreatePacketGraph(webrtc::PacketDirection::kIncomingPacket,
collection->AppendNewPlot());
}
if (FLAG_plot_outgoing_packet_sizes) {
analyzer.CreatePacketGraph(webrtc::PacketDirection::kOutgoingPacket,
collection->AppendNewPlot());
}
if (FLAG_plot_incoming_packet_count) {
analyzer.CreateAccumulatedPacketsGraph(
webrtc::PacketDirection::kIncomingPacket, collection->AppendNewPlot());
}
if (FLAG_plot_outgoing_packet_count) {
analyzer.CreateAccumulatedPacketsGraph(
webrtc::PacketDirection::kOutgoingPacket, collection->AppendNewPlot());
}
if (FLAG_plot_audio_playout) {
analyzer.CreatePlayoutGraph(collection->AppendNewPlot());
}
if (FLAG_plot_audio_level) {
analyzer.CreateAudioLevelGraph(collection->AppendNewPlot());
}
if (FLAG_plot_incoming_sequence_number_delta) {
analyzer.CreateSequenceNumberGraph(collection->AppendNewPlot());
}
if (FLAG_plot_incoming_delay_delta) {
analyzer.CreateIncomingDelayDeltaGraph(collection->AppendNewPlot());
}
if (FLAG_plot_incoming_delay) {
analyzer.CreateIncomingDelayGraph(collection->AppendNewPlot());
}
if (FLAG_plot_incoming_loss_rate) {
analyzer.CreateIncomingPacketLossGraph(collection->AppendNewPlot());
}
if (FLAG_plot_incoming_bitrate) {
analyzer.CreateTotalBitrateGraph(webrtc::PacketDirection::kIncomingPacket,
collection->AppendNewPlot(),
FLAG_show_detector_state);
}
if (FLAG_plot_outgoing_bitrate) {
analyzer.CreateTotalBitrateGraph(webrtc::PacketDirection::kOutgoingPacket,
collection->AppendNewPlot(),
FLAG_show_detector_state);
}
if (FLAG_plot_incoming_stream_bitrate) {
analyzer.CreateStreamBitrateGraph(webrtc::PacketDirection::kIncomingPacket,
collection->AppendNewPlot());
}
if (FLAG_plot_outgoing_stream_bitrate) {
analyzer.CreateStreamBitrateGraph(webrtc::PacketDirection::kOutgoingPacket,
collection->AppendNewPlot());
}
if (FLAG_plot_simulated_sendside_bwe) {
analyzer.CreateBweSimulationGraph(collection->AppendNewPlot());
}
if (FLAG_plot_network_delay_feedback) {
analyzer.CreateNetworkDelayFeedbackGraph(collection->AppendNewPlot());
}
if (FLAG_plot_fraction_loss_feedback) {
analyzer.CreateFractionLossGraph(collection->AppendNewPlot());
}
if (FLAG_plot_timestamps) {
analyzer.CreateTimestampGraph(collection->AppendNewPlot());
}
if (FLAG_plot_audio_encoder_bitrate_bps) {
analyzer.CreateAudioEncoderTargetBitrateGraph(collection->AppendNewPlot());
}
if (FLAG_plot_audio_encoder_frame_length_ms) {
analyzer.CreateAudioEncoderFrameLengthGraph(collection->AppendNewPlot());
}
if (FLAG_plot_audio_encoder_packet_loss) {
analyzer.CreateAudioEncoderPacketLossGraph(collection->AppendNewPlot());
}
if (FLAG_plot_audio_encoder_fec) {
analyzer.CreateAudioEncoderEnableFecGraph(collection->AppendNewPlot());
}
if (FLAG_plot_audio_encoder_dtx) {
analyzer.CreateAudioEncoderEnableDtxGraph(collection->AppendNewPlot());
}
if (FLAG_plot_audio_encoder_num_channels) {
analyzer.CreateAudioEncoderNumChannelsGraph(collection->AppendNewPlot());
}
if (FLAG_plot_audio_jitter_buffer) {
std::string wav_path;
if (FLAG_wav_filename[0] != '\0') {
wav_path = FLAG_wav_filename;
} else {
wav_path = webrtc::test::ResourcePath(
"audio_processing/conversational_speech/EN_script2_F_sp2_B1", "wav");
}
analyzer.CreateAudioJitterBufferGraph(wav_path, 48000,
collection->AppendNewPlot());
}
collection->Draw();
return 0;
}
void SetAllPlotFlags(bool setting) {
FLAG_plot_incoming_packet_sizes = setting;
FLAG_plot_outgoing_packet_sizes = setting;
FLAG_plot_incoming_packet_count = setting;
FLAG_plot_outgoing_packet_count = setting;
FLAG_plot_audio_playout = setting;
FLAG_plot_audio_level = setting;
FLAG_plot_incoming_sequence_number_delta = setting;
FLAG_plot_incoming_delay_delta = setting;
FLAG_plot_incoming_delay = setting;
FLAG_plot_incoming_loss_rate = setting;
FLAG_plot_incoming_bitrate = setting;
FLAG_plot_outgoing_bitrate = setting;
FLAG_plot_incoming_stream_bitrate = setting;
FLAG_plot_outgoing_stream_bitrate = setting;
FLAG_plot_simulated_sendside_bwe = setting;
FLAG_plot_network_delay_feedback = setting;
FLAG_plot_fraction_loss_feedback = setting;
FLAG_plot_timestamps = setting;
FLAG_plot_audio_encoder_bitrate_bps = setting;
FLAG_plot_audio_encoder_frame_length_ms = setting;
FLAG_plot_audio_encoder_packet_loss = setting;
FLAG_plot_audio_encoder_fec = setting;
FLAG_plot_audio_encoder_dtx = setting;
FLAG_plot_audio_encoder_num_channels = setting;
FLAG_plot_audio_jitter_buffer = setting;
}

@@ -0,0 +1,89 @@
/*
* Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "webrtc/rtc_tools/event_log_visualizer/plot_base.h"
#include <algorithm>
#include "webrtc/rtc_base/checks.h"
namespace webrtc {
namespace plotting {
void Plot::SetXAxis(float min_value,
float max_value,
std::string label,
float left_margin,
float right_margin) {
RTC_DCHECK_LE(min_value, max_value);
xaxis_min_ = min_value - left_margin * (max_value - min_value);
xaxis_max_ = max_value + right_margin * (max_value - min_value);
xaxis_label_ = label;
}
void Plot::SetSuggestedXAxis(float min_value,
float max_value,
std::string label,
float left_margin,
float right_margin) {
for (const auto& series : series_list_) {
for (const auto& point : series.points) {
min_value = std::min(min_value, point.x);
max_value = std::max(max_value, point.x);
}
}
SetXAxis(min_value, max_value, label, left_margin, right_margin);
}
void Plot::SetYAxis(float min_value,
float max_value,
std::string label,
float bottom_margin,
float top_margin) {
RTC_DCHECK_LE(min_value, max_value);
yaxis_min_ = min_value - bottom_margin * (max_value - min_value);
yaxis_max_ = max_value + top_margin * (max_value - min_value);
yaxis_label_ = label;
}
void Plot::SetSuggestedYAxis(float min_value,
float max_value,
std::string label,
float bottom_margin,
float top_margin) {
for (const auto& series : series_list_) {
for (const auto& point : series.points) {
min_value = std::min(min_value, point.y);
max_value = std::max(max_value, point.y);
}
}
SetYAxis(min_value, max_value, label, bottom_margin, top_margin);
}
void Plot::SetTitle(std::string title) {
title_ = title;
}
void Plot::AppendTimeSeries(TimeSeries&& time_series) {
series_list_.emplace_back(std::move(time_series));
}
void Plot::AppendIntervalSeries(IntervalSeries&& interval_series) {
interval_list_.emplace_back(std::move(interval_series));
}
void Plot::AppendTimeSeriesIfNotEmpty(TimeSeries&& time_series) {
if (time_series.points.size() > 0) {
series_list_.emplace_back(std::move(time_series));
}
}
} // namespace plotting
} // namespace webrtc
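
A quick worked example of the margin arithmetic in SetXAxis() and SetYAxis() above: each limit is pushed outwards by margin * (max_value - min_value). The helper below is only an illustration, not part of this commit:

#include "webrtc/rtc_tools/event_log_visualizer/plot_base.h"

void ConfigureAxes(webrtc::plotting::Plot* plot) {
  // With 5% margins, an x range of [0, 10] becomes [-0.5, 10.5]:
  //   xaxis_min_ = 0  - 0.05 * (10 - 0) = -0.5
  //   xaxis_max_ = 10 + 0.05 * (10 - 0) = 10.5
  plot->SetXAxis(0.0f, 10.0f, "Time (s)", 0.05f, 0.05f);
  // Likewise, a y range of [0, 100] becomes [-5, 105].
  plot->SetYAxis(0.0f, 100.0f, "Bitrate (kbps)", 0.05f, 0.05f);
}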

@@ -0,0 +1,167 @@
/*
* Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_RTC_TOOLS_EVENT_LOG_VISUALIZER_PLOT_BASE_H_
#define WEBRTC_RTC_TOOLS_EVENT_LOG_VISUALIZER_PLOT_BASE_H_
#include <memory>
#include <string>
#include <utility>
#include <vector>
namespace webrtc {
namespace plotting {
enum PlotStyle {
LINE_GRAPH,
LINE_DOT_GRAPH,
BAR_GRAPH,
LINE_STEP_GRAPH,
DOT_GRAPH
};
struct TimeSeriesPoint {
TimeSeriesPoint(float x, float y) : x(x), y(y) {}
float x;
float y;
};
struct TimeSeries {
TimeSeries() = default;
TimeSeries(const char* label, PlotStyle style) : label(label), style(style) {}
TimeSeries(const std::string& label, PlotStyle style)
: label(label), style(style) {}
TimeSeries(TimeSeries&& other)
: label(std::move(other.label)),
style(other.style),
points(std::move(other.points)) {}
TimeSeries& operator=(TimeSeries&& other) {
label = std::move(other.label);
style = other.style;
points = std::move(other.points);
return *this;
}
std::string label;
PlotStyle style;
std::vector<TimeSeriesPoint> points;
};
struct Interval {
Interval() = default;
Interval(double begin, double end) : begin(begin), end(end) {}
double begin;
double end;
};
struct IntervalSeries {
enum Orientation { kHorizontal, kVertical };
IntervalSeries() = default;
IntervalSeries(const std::string& label,
const std::string& color,
IntervalSeries::Orientation orientation)
: label(label), color(color), orientation(orientation) {}
std::string label;
std::string color;
Orientation orientation;
std::vector<Interval> intervals;
};
// A container that represents a general graph, with axes, title and one or
// more data series. A subclass should define the output format by overriding
// the Draw() method.
class Plot {
public:
virtual ~Plot() {}
// Overridden by subclasses to draw the plot.
virtual void Draw() = 0;
// Sets the lower x-axis limit to min_value (if left_margin == 0).
// Sets the upper x-axis limit to max_value (if right_margin == 0).
// The margins are measured as fractions of the interval
// (max_value - min_value) and are added to either side of the plot.
void SetXAxis(float min_value,
float max_value,
std::string label,
float left_margin = 0,
float right_margin = 0);
// Sets the lower and upper x-axis limits based on min_value and max_value,
// but modified such that all points in the data series can be represented
// on the x-axis. The margins are measured as fractions of the range of
// x-values and are added to either side of the plot.
void SetSuggestedXAxis(float min_value,
float max_value,
std::string label,
float left_margin = 0,
float right_margin = 0);
// Sets the lower y-axis limit to min_value (if bottom_margin == 0).
// Sets the upper y-axis limit to max_value (if top_margin == 0).
// The margins are measured as fractions of the interval
// (max_value - min_value) and are added to either side of the plot.
void SetYAxis(float min_value,
float max_value,
std::string label,
float bottom_margin = 0,
float top_margin = 0);
// Sets the lower and upper y-axis limits based on min_value and max_value,
// but modified such that all points in the data series can be represented
// on the y-axis. The margins are measured as fractions of the range of
// y-values and are added to either side of the plot.
void SetSuggestedYAxis(float min_value,
float max_value,
std::string label,
float bottom_margin = 0,
float top_margin = 0);
// Sets the title of the plot.
void SetTitle(std::string title);
// Add a new TimeSeries to the plot.
void AppendTimeSeries(TimeSeries&& time_series);
// Add a new IntervalSeries to the plot.
void AppendIntervalSeries(IntervalSeries&& interval_series);
// Add a new TimeSeries to the plot if the series contains data.
// Otherwise, the call has no effect and the timeseries is destroyed.
void AppendTimeSeriesIfNotEmpty(TimeSeries&& time_series);
protected:
float xaxis_min_;
float xaxis_max_;
std::string xaxis_label_;
float yaxis_min_;
float yaxis_max_;
std::string yaxis_label_;
std::string title_;
std::vector<TimeSeries> series_list_;
std::vector<IntervalSeries> interval_list_;
};
class PlotCollection {
public:
virtual ~PlotCollection() {}
virtual void Draw() = 0;
virtual Plot* AppendNewPlot() = 0;
protected:
std::vector<std::unique_ptr<Plot> > plots_;
};
} // namespace plotting
} // namespace webrtc
#endif // WEBRTC_RTC_TOOLS_EVENT_LOG_VISUALIZER_PLOT_BASE_H_
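
The class comment above says a subclass defines the output format by overriding Draw(); the protobuf and Python backends elsewhere in this commit do exactly that. As an illustration of the extension point only, a hypothetical CSV backend could look like the sketch below (CsvPlot and CsvPlotCollection are made-up names, not part of the tool):

#include <stdio.h>
#include <memory>

#include "webrtc/rtc_tools/event_log_visualizer/plot_base.h"

namespace webrtc {
namespace plotting {

class CsvPlot final : public Plot {
 public:
  void Draw() override {
    // Emit one "label,x,y" row per data point, using the protected
    // series_list_ inherited from Plot.
    for (const TimeSeries& series : series_list_) {
      for (const TimeSeriesPoint& point : series.points)
        printf("%s,%G,%G\n", series.label.c_str(), point.x, point.y);
    }
  }
};

class CsvPlotCollection final : public PlotCollection {
 public:
  void Draw() override {
    for (const auto& plot : plots_)
      plot->Draw();
  }
  Plot* AppendNewPlot() override {
    plots_.push_back(std::unique_ptr<Plot>(new CsvPlot()));
    return plots_.back().get();
  }
};

}  // namespace plotting
}  // namespace webrtc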

@@ -0,0 +1,88 @@
/*
* Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "webrtc/rtc_tools/event_log_visualizer/plot_protobuf.h"
#include <memory>
namespace webrtc {
namespace plotting {
ProtobufPlot::ProtobufPlot() {}
ProtobufPlot::~ProtobufPlot() {}
void ProtobufPlot::Draw() {}
void ProtobufPlot::ExportProtobuf(webrtc::analytics::Chart* chart) {
for (size_t i = 0; i < series_list_.size(); i++) {
webrtc::analytics::DataSet* data_set = chart->add_data_sets();
for (const auto& point : series_list_[i].points) {
data_set->add_x_values(point.x);
}
for (const auto& point : series_list_[i].points) {
data_set->add_y_values(point.y);
}
if (series_list_[i].style == BAR_GRAPH) {
data_set->set_style(webrtc::analytics::ChartStyle::BAR_CHART);
} else if (series_list_[i].style == LINE_GRAPH) {
data_set->set_style(webrtc::analytics::ChartStyle::LINE_CHART);
} else if (series_list_[i].style == LINE_DOT_GRAPH) {
data_set->set_style(webrtc::analytics::ChartStyle::LINE_CHART);
data_set->set_highlight_points(true);
} else if (series_list_[i].style == LINE_STEP_GRAPH) {
data_set->set_style(webrtc::analytics::ChartStyle::LINE_STEP_CHART);
} else if (series_list_[i].style == DOT_GRAPH) {
data_set->set_style(webrtc::analytics::ChartStyle::SCATTER_CHART);
data_set->set_highlight_points(true);
} else {
data_set->set_style(webrtc::analytics::ChartStyle::UNDEFINED);
}
data_set->set_label(series_list_[i].label);
}
chart->set_xaxis_min(xaxis_min_);
chart->set_xaxis_max(xaxis_max_);
chart->set_yaxis_min(yaxis_min_);
chart->set_yaxis_max(yaxis_max_);
chart->set_xaxis_label(xaxis_label_);
chart->set_yaxis_label(yaxis_label_);
chart->set_title(title_);
}
ProtobufPlotCollection::ProtobufPlotCollection() {}
ProtobufPlotCollection::~ProtobufPlotCollection() {}
void ProtobufPlotCollection::Draw() {}
void ProtobufPlotCollection::ExportProtobuf(
webrtc::analytics::ChartCollection* collection) {
for (const auto& plot : plots_) {
// TODO(terelius): Ensure that there is no way to insert plots other than
// ProtobufPlots in a ProtobufPlotCollection. This is needed to safely
// static_cast here.
webrtc::analytics::Chart* protobuf_representation
= collection->add_charts();
static_cast<ProtobufPlot*>(plot.get())
->ExportProtobuf(protobuf_representation);
}
}
Plot* ProtobufPlotCollection::AppendNewPlot() {
Plot* plot = new ProtobufPlot();
plots_.push_back(std::unique_ptr<Plot>(plot));
return plot;
}
} // namespace plotting
} // namespace webrtc
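
main.cc in this commit only exercises the Python backend. A hedged sketch of driving the protobuf backend end to end is shown below; WriteChartsToFile, the output path handling and the SerializeToString call are illustrative, while ExportProtobuf and ChartCollection come from this commit:

#include <fstream>
#include <string>

#include "webrtc/rtc_tools/event_log_visualizer/analyzer.h"
#include "webrtc/rtc_tools/event_log_visualizer/plot_protobuf.h"

void WriteChartsToFile(webrtc::plotting::EventLogAnalyzer* analyzer,
                       const std::string& output_path) {
  // Collect one or more plots, then export them as a ChartCollection proto.
  webrtc::plotting::ProtobufPlotCollection collection;
  analyzer->CreateTotalBitrateGraph(webrtc::PacketDirection::kIncomingPacket,
                                    collection.AppendNewPlot());
  webrtc::analytics::ChartCollection charts;
  collection.ExportProtobuf(&charts);

  // The messages use the lite runtime, so serialize to a string and write
  // that to disk.
  std::string serialized;
  charts.SerializeToString(&serialized);
  std::ofstream output(output_path, std::ios::binary);
  output << serialized;
}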

@@ -0,0 +1,42 @@
/*
* Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_RTC_TOOLS_EVENT_LOG_VISUALIZER_PLOT_PROTOBUF_H_
#define WEBRTC_RTC_TOOLS_EVENT_LOG_VISUALIZER_PLOT_PROTOBUF_H_
#include "webrtc/rtc_base/ignore_wundef.h"
RTC_PUSH_IGNORING_WUNDEF()
#include "webrtc/rtc_tools/event_log_visualizer/chart.pb.h"
RTC_POP_IGNORING_WUNDEF()
#include "webrtc/rtc_tools/event_log_visualizer/plot_base.h"
namespace webrtc {
namespace plotting {
class ProtobufPlot final : public Plot {
public:
ProtobufPlot();
~ProtobufPlot() override;
void Draw() override;
void ExportProtobuf(webrtc::analytics::Chart* chart);
};
class ProtobufPlotCollection final : public PlotCollection {
public:
ProtobufPlotCollection();
~ProtobufPlotCollection() override;
void Draw() override;
Plot* AppendNewPlot() override;
void ExportProtobuf(webrtc::analytics::ChartCollection* collection);
};
} // namespace plotting
} // namespace webrtc
#endif // WEBRTC_RTC_TOOLS_EVENT_LOG_VISUALIZER_PLOT_PROTOBUF_H_

@@ -0,0 +1,172 @@
/*
* Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "webrtc/rtc_tools/event_log_visualizer/plot_python.h"
#include <stdio.h>
#include <memory>
#include "webrtc/rtc_base/checks.h"
namespace webrtc {
namespace plotting {
PythonPlot::PythonPlot() {}
PythonPlot::~PythonPlot() {}
void PythonPlot::Draw() {
// Write python commands to stdout. Intended program usage is
// ./event_log_visualizer event_log160330.dump | python
if (!series_list_.empty()) {
printf("color_count = %zu\n", series_list_.size());
printf(
"hls_colors = [(i*1.0/color_count, 0.25+i*0.5/color_count, 0.8) for i "
"in range(color_count)]\n");
printf("rgb_colors = [colorsys.hls_to_rgb(*hls) for hls in hls_colors]\n");
for (size_t i = 0; i < series_list_.size(); i++) {
printf("\n# === Series: %s ===\n", series_list_[i].label.c_str());
// List x coordinates
printf("x%zu = [", i);
if (series_list_[i].points.size() > 0)
printf("%G", series_list_[i].points[0].x);
for (size_t j = 1; j < series_list_[i].points.size(); j++)
printf(", %G", series_list_[i].points[j].x);
printf("]\n");
// List y coordinates
printf("y%zu = [", i);
if (series_list_[i].points.size() > 0)
printf("%G", series_list_[i].points[0].y);
for (size_t j = 1; j < series_list_[i].points.size(); j++)
printf(", %G", series_list_[i].points[j].y);
printf("]\n");
if (series_list_[i].style == BAR_GRAPH) {
// There is a plt.bar function that draws bar plots,
// but it is *way* too slow to be useful.
printf(
"plt.vlines(x%zu, map(lambda t: min(t,0), y%zu), map(lambda t: "
"max(t,0), y%zu), color=rgb_colors[%zu], "
"label=\'%s\')\n",
i, i, i, i, series_list_[i].label.c_str());
} else if (series_list_[i].style == LINE_GRAPH) {
printf("plt.plot(x%zu, y%zu, color=rgb_colors[%zu], label=\'%s\')\n", i,
i, i, series_list_[i].label.c_str());
} else if (series_list_[i].style == LINE_DOT_GRAPH) {
printf(
"plt.plot(x%zu, y%zu, color=rgb_colors[%zu], label=\'%s\', "
"marker='.')\n",
i, i, i, series_list_[i].label.c_str());
} else if (series_list_[i].style == LINE_STEP_GRAPH) {
// Draw lines from (x[0],y[0]) to (x[1],y[0]) to (x[1],y[1]) and so on
// to illustrate the "steps". This can be expressed by duplicating all
// elements except the first in x and the last in y.
printf("x%zu = [v for dup in x%zu for v in [dup, dup]]\n", i, i);
printf("y%zu = [v for dup in y%zu for v in [dup, dup]]\n", i, i);
printf(
"plt.plot(x%zu[1:], y%zu[:-1], color=rgb_colors[%zu], "
"path_effects=[pe.Stroke(linewidth=2, foreground='black'), "
"pe.Normal()], "
"label=\'%s\')\n",
i, i, i, series_list_[i].label.c_str());
} else if (series_list_[i].style == DOT_GRAPH) {
printf(
"plt.plot(x%zu, y%zu, color=rgb_colors[%zu], label=\'%s\', "
"marker='o', ls=' ')\n",
i, i, i, series_list_[i].label.c_str());
} else {
printf("raise Exception(\"Unknown graph type\")\n");
}
}
// IntervalSeries
printf("interval_colors = ['#ff8e82','#5092fc','#c4ffc4']\n");
RTC_CHECK_LE(interval_list_.size(), 3);
// To get the intervals to show up in the legend we have to create patches
// for them.
printf("legend_patches = []\n");
for (size_t i = 0; i < interval_list_.size(); i++) {
// List intervals
printf("\n# === IntervalSeries: %s ===\n",
interval_list_[i].label.c_str());
printf("ival%zu = [", i);
if (interval_list_[i].intervals.size() > 0) {
printf("(%G, %G)", interval_list_[i].intervals[0].begin,
interval_list_[i].intervals[0].end);
}
for (size_t j = 1; j < interval_list_[i].intervals.size(); j++) {
printf(", (%G, %G)", interval_list_[i].intervals[j].begin,
interval_list_[i].intervals[j].end);
}
printf("]\n");
printf("for i in range(0, %zu):\n", interval_list_[i].intervals.size());
if (interval_list_[i].orientation == IntervalSeries::kVertical) {
printf(
" plt.axhspan(ival%zu[i][0], ival%zu[i][1], "
"facecolor=interval_colors[%zu], "
"alpha=0.3)\n",
i, i, i);
} else {
printf(
" plt.axvspan(ival%zu[i][0], ival%zu[i][1], "
"facecolor=interval_colors[%zu], "
"alpha=0.3)\n",
i, i, i);
}
printf(
"legend_patches.append(mpatches.Patch(ec=\'black\', "
"fc=interval_colors[%zu], label='%s'))\n",
i, interval_list_[i].label.c_str());
}
}
printf("plt.xlim(%f, %f)\n", xaxis_min_, xaxis_max_);
printf("plt.ylim(%f, %f)\n", yaxis_min_, yaxis_max_);
printf("plt.xlabel(\'%s\')\n", xaxis_label_.c_str());
printf("plt.ylabel(\'%s\')\n", yaxis_label_.c_str());
printf("plt.title(\'%s\')\n", title_.c_str());
if (!series_list_.empty() || !interval_list_.empty()) {
printf("handles, labels = plt.gca().get_legend_handles_labels()\n");
printf("for lp in legend_patches:\n");
printf(" handles.append(lp)\n");
printf(" labels.append(lp.get_label())\n");
printf("plt.legend(handles, labels, loc=\'best\', fontsize=\'small\')\n");
}
}
PythonPlotCollection::PythonPlotCollection() {}
PythonPlotCollection::~PythonPlotCollection() {}
void PythonPlotCollection::Draw() {
printf("import matplotlib.pyplot as plt\n");
printf("import matplotlib.patches as mpatches\n");
printf("import matplotlib.patheffects as pe\n");
printf("import colorsys\n");
for (size_t i = 0; i < plots_.size(); i++) {
printf("plt.figure(%zu)\n", i);
plots_[i]->Draw();
}
printf("plt.show()\n");
}
Plot* PythonPlotCollection::AppendNewPlot() {
Plot* plot = new PythonPlot();
plots_.push_back(std::unique_ptr<Plot>(plot));
return plot;
}
} // namespace plotting
} // namespace webrtc

@@ -0,0 +1,36 @@
/*
* Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_RTC_TOOLS_EVENT_LOG_VISUALIZER_PLOT_PYTHON_H_
#define WEBRTC_RTC_TOOLS_EVENT_LOG_VISUALIZER_PLOT_PYTHON_H_
#include "webrtc/rtc_tools/event_log_visualizer/plot_base.h"
namespace webrtc {
namespace plotting {
class PythonPlot final : public Plot {
public:
PythonPlot();
~PythonPlot() override;
void Draw() override;
};
class PythonPlotCollection final : public PlotCollection {
public:
PythonPlotCollection();
~PythonPlotCollection() override;
void Draw() override;
Plot* AppendNewPlot() override;
};
} // namespace plotting
} // namespace webrtc
#endif // WEBRTC_RTC_TOOLS_EVENT_LOG_VISUALIZER_PLOT_PYTHON_H_