Merge remote-tracking branch 'upstream-master'

Bug: 153469641
Test: run cuttlefish locally
Change-Id: Ida3bfe62ef5c6549278f4c155a1f690b008e9b9d
This commit is contained in:
Jorge E. Moreira
2020-07-23 13:07:40 -07:00
1236 changed files with 50564 additions and 32463 deletions

View File

@ -0,0 +1,227 @@
/*
* Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "rtc_tools/rtc_event_log_visualizer/alerts.h"
#include <stdio.h>
#include <algorithm>
#include <limits>
#include <map>
#include <string>
#include "logging/rtc_event_log/rtc_event_processor.h"
#include "rtc_base/checks.h"
#include "rtc_base/format_macros.h"
#include "rtc_base/logging.h"
#include "rtc_base/numerics/sequence_number_util.h"
#include "rtc_base/strings/string_builder.h"
namespace webrtc {
void TriageHelper::Print(FILE* file) {
fprintf(file, "========== TRIAGE NOTIFICATIONS ==========\n");
for (const auto& alert : triage_alerts_) {
fprintf(file, "%d %s. First occurrence at %3.3lf\n", alert.second.count,
alert.second.explanation.c_str(), alert.second.first_occurrence);
}
fprintf(file, "========== END TRIAGE NOTIFICATIONS ==========\n");
}
// Scans all RTP streams in |direction| (skipping RTX streams) for sequence
// number jumps and capture-timestamp jumps within the first
// (LOG_START, LOG_END) segment of the log, recording an Alert for each jump
// that exceeds the thresholds below.
void TriageHelper::AnalyzeStreamGaps(const ParsedRtcEventLog& parsed_log,
                                     PacketDirection direction) {
  // With 100 packets/s (~800kbps), false positives would require 10 s without
  // data.
  constexpr int64_t kMaxSeqNumJump = 1000;
  // With a 90 kHz clock, false positives would require 10 s without data.
  constexpr int64_t kTicksPerMillisec = 90;
  constexpr int64_t kCaptureTimeGraceMs = 10000;
  // Direction-specific explanation strings and alert types.
  std::string seq_num_explanation =
      direction == kIncomingPacket
          ? "Incoming RTP sequence number jumps more than 1000. Counter may "
            "have been reset or rewritten incorrectly in a group call."
          : "Outgoing RTP sequence number jumps more than 1000. Counter may "
            "have been reset.";
  std::string capture_time_explanation =
      direction == kIncomingPacket ? "Incoming capture time jumps more than "
                                     "10s. Clock might have been reset."
                                   : "Outgoing capture time jumps more than "
                                     "10s. Clock might have been reset.";
  TriageAlertType seq_num_alert = direction == kIncomingPacket
                                      ? TriageAlertType::kIncomingSeqNumJump
                                      : TriageAlertType::kOutgoingSeqNumJump;
  TriageAlertType capture_time_alert =
      direction == kIncomingPacket ? TriageAlertType::kIncomingCaptureTimeJump
                                   : TriageAlertType::kOutgoingCaptureTimeJump;
  const int64_t segment_end_us = parsed_log.first_log_segment().stop_time_us();
  // Check for gaps in sequence numbers and capture timestamps.
  for (const auto& stream : parsed_log.rtp_packets_by_ssrc(direction)) {
    if (IsRtxSsrc(parsed_log, direction, stream.ssrc)) {
      // Retransmission streams reuse sequence numbers; skip them.
      continue;
    }
    auto packets = stream.packet_view;
    if (packets.empty()) {
      continue;
    }
    // Unwrap the 16-bit sequence numbers and 32-bit RTP timestamps into
    // monotonically comparable 64-bit values.
    SeqNumUnwrapper<uint16_t> seq_num_unwrapper;
    int64_t last_seq_num =
        seq_num_unwrapper.Unwrap(packets[0].header.sequenceNumber);
    SeqNumUnwrapper<uint32_t> capture_time_unwrapper;
    int64_t last_capture_time =
        capture_time_unwrapper.Unwrap(packets[0].header.timestamp);
    int64_t last_log_time_ms = packets[0].log_time_ms();
    for (const auto& packet : packets) {
      if (packet.log_time_us() > segment_end_us) {
        // Only process the first (LOG_START, LOG_END) segment.
        break;
      }
      int64_t seq_num = seq_num_unwrapper.Unwrap(packet.header.sequenceNumber);
      if (std::abs(seq_num - last_seq_num) > kMaxSeqNumJump) {
        Alert(seq_num_alert, config_.GetCallTimeSec(packet.log_time_us()),
              seq_num_explanation);
      }
      last_seq_num = seq_num;
      int64_t capture_time =
          capture_time_unwrapper.Unwrap(packet.header.timestamp);
      // Allow the capture clock to advance by the elapsed log time plus a
      // 10 s grace period, measured in 90 kHz ticks, before flagging a jump.
      // NOTE(review): last_log_time_ms is never updated inside this loop, so
      // the allowance grows with time since the first packet of the stream
      // rather than since the previous packet — confirm this is intended.
      if (std::abs(capture_time - last_capture_time) >
          kTicksPerMillisec *
              (kCaptureTimeGraceMs + packet.log_time_ms() - last_log_time_ms)) {
        Alert(capture_time_alert, config_.GetCallTimeSec(packet.log_time_us()),
              capture_time_explanation);
      }
      last_capture_time = capture_time;
    }
  }
}
void TriageHelper::AnalyzeTransmissionGaps(const ParsedRtcEventLog& parsed_log,
PacketDirection direction) {
constexpr int64_t kMaxRtpTransmissionGap = 500000;
constexpr int64_t kMaxRtcpTransmissionGap = 3000000;
std::string rtp_explanation =
direction == kIncomingPacket
? "No RTP packets received for more than 500ms. This indicates a "
"network problem. Temporary video freezes and choppy or robotic "
"audio is unavoidable. Unnecessary BWE drops is a known issue."
: "No RTP packets sent for more than 500 ms. This might be an issue "
"with the pacer.";
std::string rtcp_explanation =
direction == kIncomingPacket
? "No RTCP packets received for more than 3 s. Could be a longer "
"connection outage"
: "No RTCP packets sent for more than 3 s. This is most likely a "
"bug.";
TriageAlertType rtp_alert = direction == kIncomingPacket
? TriageAlertType::kIncomingRtpGap
: TriageAlertType::kOutgoingRtpGap;
TriageAlertType rtcp_alert = direction == kIncomingPacket
? TriageAlertType::kIncomingRtcpGap
: TriageAlertType::kOutgoingRtcpGap;
const int64_t segment_end_us = parsed_log.first_log_segment().stop_time_us();
// TODO(terelius): The parser could provide a list of all packets, ordered
// by time, for each direction.
std::multimap<int64_t, const LoggedRtpPacket*> rtp_in_direction;
for (const auto& stream : parsed_log.rtp_packets_by_ssrc(direction)) {
for (const LoggedRtpPacket& rtp_packet : stream.packet_view)
rtp_in_direction.emplace(rtp_packet.log_time_us(), &rtp_packet);
}
absl::optional<int64_t> last_rtp_time;
for (const auto& kv : rtp_in_direction) {
int64_t timestamp = kv.first;
if (timestamp > segment_end_us) {
// Only process the first (LOG_START, LOG_END) segment.
break;
}
int64_t duration = timestamp - last_rtp_time.value_or(0);
if (last_rtp_time.has_value() && duration > kMaxRtpTransmissionGap) {
// No packet sent/received for more than 500 ms.
Alert(rtp_alert, config_.GetCallTimeSec(timestamp), rtp_explanation);
}
last_rtp_time.emplace(timestamp);
}
absl::optional<int64_t> last_rtcp_time;
if (direction == kIncomingPacket) {
for (const auto& rtcp : parsed_log.incoming_rtcp_packets()) {
if (rtcp.log_time_us() > segment_end_us) {
// Only process the first (LOG_START, LOG_END) segment.
break;
}
int64_t duration = rtcp.log_time_us() - last_rtcp_time.value_or(0);
if (last_rtcp_time.has_value() && duration > kMaxRtcpTransmissionGap) {
// No feedback sent/received for more than 2000 ms.
Alert(rtcp_alert, config_.GetCallTimeSec(rtcp.log_time_us()),
rtcp_explanation);
}
last_rtcp_time.emplace(rtcp.log_time_us());
}
} else {
for (const auto& rtcp : parsed_log.outgoing_rtcp_packets()) {
if (rtcp.log_time_us() > segment_end_us) {
// Only process the first (LOG_START, LOG_END) segment.
break;
}
int64_t duration = rtcp.log_time_us() - last_rtcp_time.value_or(0);
if (last_rtcp_time.has_value() && duration > kMaxRtcpTransmissionGap) {
// No feedback sent/received for more than 2000 ms.
Alert(rtcp_alert, config_.GetCallTimeSec(rtcp.log_time_us()),
rtcp_explanation);
}
last_rtcp_time.emplace(rtcp.log_time_us());
}
}
}
// TODO(terelius): Notifications could possibly be generated by the same code
// that produces the graphs. There is some code duplication that could be
// avoided, but that might be solved anyway when we move functionality from the
// analyzer to the parser.
void TriageHelper::AnalyzeLog(const ParsedRtcEventLog& parsed_log) {
AnalyzeStreamGaps(parsed_log, kIncomingPacket);
AnalyzeStreamGaps(parsed_log, kOutgoingPacket);
AnalyzeTransmissionGaps(parsed_log, kIncomingPacket);
AnalyzeTransmissionGaps(parsed_log, kOutgoingPacket);
const int64_t segment_end_us = parsed_log.first_log_segment().stop_time_us();
int64_t first_occurrence = parsed_log.last_timestamp();
constexpr double kMaxLossFraction = 0.05;
// Loss feedback
int64_t total_lost_packets = 0;
int64_t total_expected_packets = 0;
for (auto& bwe_update : parsed_log.bwe_loss_updates()) {
if (bwe_update.log_time_us() > segment_end_us) {
// Only process the first (LOG_START, LOG_END) segment.
break;
}
int64_t lost_packets = static_cast<double>(bwe_update.fraction_lost) / 255 *
bwe_update.expected_packets;
total_lost_packets += lost_packets;
total_expected_packets += bwe_update.expected_packets;
if (bwe_update.fraction_lost >= 255 * kMaxLossFraction) {
first_occurrence = std::min(first_occurrence, bwe_update.log_time_us());
}
}
double avg_outgoing_loss =
static_cast<double>(total_lost_packets) / total_expected_packets;
if (avg_outgoing_loss > kMaxLossFraction) {
Alert(TriageAlertType::kOutgoingHighLoss, first_occurrence,
"More than 5% of outgoing packets lost.");
}
}
} // namespace webrtc

View File

@ -0,0 +1,86 @@
/*
* Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef RTC_TOOLS_RTC_EVENT_LOG_VISUALIZER_ALERTS_H_
#define RTC_TOOLS_RTC_EVENT_LOG_VISUALIZER_ALERTS_H_
#include <stdio.h>
#include <map>
#include <string>
#include <utility>
#include "absl/strings/string_view.h"
#include "logging/rtc_event_log/rtc_event_log_parser.h"
#include "rtc_base/constructor_magic.h"
#include "rtc_tools/rtc_event_log_visualizer/analyzer_common.h"
namespace webrtc {
// The distinct categories of triage alerts; used as the aggregation key in
// TriageHelper. Thresholds refer to the constants in alerts.cc.
enum class TriageAlertType {
  kUnknown = 0,
  // Gap of more than 500 ms between RTP packets.
  kIncomingRtpGap,
  kOutgoingRtpGap,
  // Gap of more than 3 s between RTCP packets.
  kIncomingRtcpGap,
  kOutgoingRtcpGap,
  // RTP sequence number jumped by more than 1000.
  kIncomingSeqNumJump,
  kOutgoingSeqNumJump,
  // RTP capture timestamp jumped by more than the 10 s grace period.
  kIncomingCaptureTimeJump,
  kOutgoingCaptureTimeJump,
  // Average outgoing loss above 5%.
  kOutgoingHighLoss,
  kLast,  // Sentinel; keep as the last entry.
};
// Aggregated information about all occurrences of one alert type.
struct TriageAlert {
  TriageAlertType type = TriageAlertType::kUnknown;
  // Number of times this alert has fired.
  int count = 0;
  // Call time (seconds) of the first occurrence; -1 until set.
  float first_occurrence = -1;
  // Human-readable description of the alert condition.
  std::string explanation;
};
class TriageHelper {
public:
explicit TriageHelper(const AnalyzerConfig& config) : config_(config) {}
void AnalyzeLog(const ParsedRtcEventLog& parsed_log);
void AnalyzeStreamGaps(const ParsedRtcEventLog& parsed_log,
PacketDirection direction);
void AnalyzeTransmissionGaps(const ParsedRtcEventLog& parsed_log,
PacketDirection direction);
void Print(FILE* file);
private:
AnalyzerConfig config_;
std::map<TriageAlertType, TriageAlert> triage_alerts_;
void Alert(TriageAlertType type,
float time_seconds,
absl::string_view explanation) {
std::map<TriageAlertType, TriageAlert>::iterator it =
triage_alerts_.find(type);
if (it == triage_alerts_.end()) {
TriageAlert alert;
alert.type = type;
alert.first_occurrence = time_seconds;
alert.count = 1;
alert.explanation = std::string(explanation);
triage_alerts_.insert(std::make_pair(type, alert));
} else {
it->second.count += 1;
}
}
RTC_DISALLOW_COPY_AND_ASSIGN(TriageHelper);
};
} // namespace webrtc
#endif // RTC_TOOLS_RTC_EVENT_LOG_VISUALIZER_ALERTS_H_

View File

@ -0,0 +1,503 @@
/*
* Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "rtc_tools/rtc_event_log_visualizer/analyze_audio.h"
#include <memory>
#include <set>
#include <utility>
#include <vector>
#include "modules/audio_coding/neteq/tools/audio_sink.h"
#include "modules/audio_coding/neteq/tools/fake_decode_from_file.h"
#include "modules/audio_coding/neteq/tools/neteq_delay_analyzer.h"
#include "modules/audio_coding/neteq/tools/neteq_replacement_input.h"
#include "modules/audio_coding/neteq/tools/neteq_test.h"
#include "modules/audio_coding/neteq/tools/resample_input_audio_file.h"
#include "rtc_base/ref_counted_object.h"
namespace webrtc {
void CreateAudioEncoderTargetBitrateGraph(const ParsedRtcEventLog& parsed_log,
const AnalyzerConfig& config,
Plot* plot) {
TimeSeries time_series("Audio encoder target bitrate", LineStyle::kLine,
PointStyle::kHighlight);
auto GetAnaBitrateBps = [](const LoggedAudioNetworkAdaptationEvent& ana_event)
-> absl::optional<float> {
if (ana_event.config.bitrate_bps)
return absl::optional<float>(
static_cast<float>(*ana_event.config.bitrate_bps));
return absl::nullopt;
};
auto ToCallTime = [config](const LoggedAudioNetworkAdaptationEvent& packet) {
return config.GetCallTimeSec(packet.log_time_us());
};
ProcessPoints<LoggedAudioNetworkAdaptationEvent>(
ToCallTime, GetAnaBitrateBps,
parsed_log.audio_network_adaptation_events(), &time_series);
plot->AppendTimeSeries(std::move(time_series));
plot->SetXAxis(config.CallBeginTimeSec(), config.CallEndTimeSec(), "Time (s)",
kLeftMargin, kRightMargin);
plot->SetSuggestedYAxis(0, 1, "Bitrate (bps)", kBottomMargin, kTopMargin);
plot->SetTitle("Reported audio encoder target bitrate");
}
void CreateAudioEncoderFrameLengthGraph(const ParsedRtcEventLog& parsed_log,
const AnalyzerConfig& config,
Plot* plot) {
TimeSeries time_series("Audio encoder frame length", LineStyle::kLine,
PointStyle::kHighlight);
auto GetAnaFrameLengthMs =
[](const LoggedAudioNetworkAdaptationEvent& ana_event) {
if (ana_event.config.frame_length_ms)
return absl::optional<float>(
static_cast<float>(*ana_event.config.frame_length_ms));
return absl::optional<float>();
};
auto ToCallTime = [config](const LoggedAudioNetworkAdaptationEvent& packet) {
return config.GetCallTimeSec(packet.log_time_us());
};
ProcessPoints<LoggedAudioNetworkAdaptationEvent>(
ToCallTime, GetAnaFrameLengthMs,
parsed_log.audio_network_adaptation_events(), &time_series);
plot->AppendTimeSeries(std::move(time_series));
plot->SetXAxis(config.CallBeginTimeSec(), config.CallEndTimeSec(), "Time (s)",
kLeftMargin, kRightMargin);
plot->SetSuggestedYAxis(0, 1, "Frame length (ms)", kBottomMargin, kTopMargin);
plot->SetTitle("Reported audio encoder frame length");
}
void CreateAudioEncoderPacketLossGraph(const ParsedRtcEventLog& parsed_log,
const AnalyzerConfig& config,
Plot* plot) {
TimeSeries time_series("Audio encoder uplink packet loss fraction",
LineStyle::kLine, PointStyle::kHighlight);
auto GetAnaPacketLoss =
[](const LoggedAudioNetworkAdaptationEvent& ana_event) {
if (ana_event.config.uplink_packet_loss_fraction)
return absl::optional<float>(static_cast<float>(
*ana_event.config.uplink_packet_loss_fraction));
return absl::optional<float>();
};
auto ToCallTime = [config](const LoggedAudioNetworkAdaptationEvent& packet) {
return config.GetCallTimeSec(packet.log_time_us());
};
ProcessPoints<LoggedAudioNetworkAdaptationEvent>(
ToCallTime, GetAnaPacketLoss,
parsed_log.audio_network_adaptation_events(), &time_series);
plot->AppendTimeSeries(std::move(time_series));
plot->SetXAxis(config.CallBeginTimeSec(), config.CallEndTimeSec(), "Time (s)",
kLeftMargin, kRightMargin);
plot->SetSuggestedYAxis(0, 10, "Percent lost packets", kBottomMargin,
kTopMargin);
plot->SetTitle("Reported audio encoder lost packets");
}
void CreateAudioEncoderEnableFecGraph(const ParsedRtcEventLog& parsed_log,
const AnalyzerConfig& config,
Plot* plot) {
TimeSeries time_series("Audio encoder FEC", LineStyle::kLine,
PointStyle::kHighlight);
auto GetAnaFecEnabled =
[](const LoggedAudioNetworkAdaptationEvent& ana_event) {
if (ana_event.config.enable_fec)
return absl::optional<float>(
static_cast<float>(*ana_event.config.enable_fec));
return absl::optional<float>();
};
auto ToCallTime = [config](const LoggedAudioNetworkAdaptationEvent& packet) {
return config.GetCallTimeSec(packet.log_time_us());
};
ProcessPoints<LoggedAudioNetworkAdaptationEvent>(
ToCallTime, GetAnaFecEnabled,
parsed_log.audio_network_adaptation_events(), &time_series);
plot->AppendTimeSeries(std::move(time_series));
plot->SetXAxis(config.CallBeginTimeSec(), config.CallEndTimeSec(), "Time (s)",
kLeftMargin, kRightMargin);
plot->SetSuggestedYAxis(0, 1, "FEC (false/true)", kBottomMargin, kTopMargin);
plot->SetTitle("Reported audio encoder FEC");
}
void CreateAudioEncoderEnableDtxGraph(const ParsedRtcEventLog& parsed_log,
const AnalyzerConfig& config,
Plot* plot) {
TimeSeries time_series("Audio encoder DTX", LineStyle::kLine,
PointStyle::kHighlight);
auto GetAnaDtxEnabled =
[](const LoggedAudioNetworkAdaptationEvent& ana_event) {
if (ana_event.config.enable_dtx)
return absl::optional<float>(
static_cast<float>(*ana_event.config.enable_dtx));
return absl::optional<float>();
};
auto ToCallTime = [config](const LoggedAudioNetworkAdaptationEvent& packet) {
return config.GetCallTimeSec(packet.log_time_us());
};
ProcessPoints<LoggedAudioNetworkAdaptationEvent>(
ToCallTime, GetAnaDtxEnabled,
parsed_log.audio_network_adaptation_events(), &time_series);
plot->AppendTimeSeries(std::move(time_series));
plot->SetXAxis(config.CallBeginTimeSec(), config.CallEndTimeSec(), "Time (s)",
kLeftMargin, kRightMargin);
plot->SetSuggestedYAxis(0, 1, "DTX (false/true)", kBottomMargin, kTopMargin);
plot->SetTitle("Reported audio encoder DTX");
}
void CreateAudioEncoderNumChannelsGraph(const ParsedRtcEventLog& parsed_log,
const AnalyzerConfig& config,
Plot* plot) {
TimeSeries time_series("Audio encoder number of channels", LineStyle::kLine,
PointStyle::kHighlight);
auto GetAnaNumChannels =
[](const LoggedAudioNetworkAdaptationEvent& ana_event) {
if (ana_event.config.num_channels)
return absl::optional<float>(
static_cast<float>(*ana_event.config.num_channels));
return absl::optional<float>();
};
auto ToCallTime = [config](const LoggedAudioNetworkAdaptationEvent& packet) {
return config.GetCallTimeSec(packet.log_time_us());
};
ProcessPoints<LoggedAudioNetworkAdaptationEvent>(
ToCallTime, GetAnaNumChannels,
parsed_log.audio_network_adaptation_events(), &time_series);
plot->AppendTimeSeries(std::move(time_series));
plot->SetXAxis(config.CallBeginTimeSec(), config.CallEndTimeSec(), "Time (s)",
kLeftMargin, kRightMargin);
plot->SetSuggestedYAxis(0, 1, "Number of channels (1 (mono)/2 (stereo))",
kBottomMargin, kTopMargin);
plot->SetTitle("Reported audio encoder number of channels");
}
// Feeds logged incoming RTP packets and audio playout events into a NetEq
// simulation by implementing the test::NetEqInput interface. Both input
// sequences are cut off at |end_time_ms_| when that limit is set.
class NetEqStreamInput : public test::NetEqInput {
 public:
  // Does not take any ownership, and all pointers must refer to valid objects
  // that outlive the one constructed.
  NetEqStreamInput(const std::vector<LoggedRtpPacketIncoming>* packet_stream,
                   const std::vector<LoggedAudioPlayoutEvent>* output_events,
                   absl::optional<int64_t> end_time_ms)
      : packet_stream_(*packet_stream),
        packet_stream_it_(packet_stream_.begin()),
        output_events_it_(output_events->begin()),
        output_events_end_(output_events->end()),
        end_time_ms_(end_time_ms) {
    RTC_DCHECK(packet_stream);
    RTC_DCHECK(output_events);
  }
  // Log time (ms) of the next packet, or nullopt when the stream is exhausted
  // or the next packet falls past |end_time_ms_|.
  absl::optional<int64_t> NextPacketTime() const override {
    if (packet_stream_it_ == packet_stream_.end()) {
      return absl::nullopt;
    }
    if (end_time_ms_ && packet_stream_it_->rtp.log_time_ms() > *end_time_ms_) {
      return absl::nullopt;
    }
    return packet_stream_it_->rtp.log_time_ms();
  }
  // Log time (ms) of the next playout event, with the same exhaustion and
  // end-time rules as NextPacketTime().
  absl::optional<int64_t> NextOutputEventTime() const override {
    if (output_events_it_ == output_events_end_) {
      return absl::nullopt;
    }
    if (end_time_ms_ && output_events_it_->log_time_ms() > *end_time_ms_) {
      return absl::nullopt;
    }
    return output_events_it_->log_time_ms();
  }
  // Returns the next packet (or null when exhausted) and advances the packet
  // iterator. Only headers were logged, so the payload is synthesized.
  std::unique_ptr<PacketData> PopPacket() override {
    if (packet_stream_it_ == packet_stream_.end()) {
      return std::unique_ptr<PacketData>();
    }
    std::unique_ptr<PacketData> packet_data(new PacketData());
    packet_data->header = packet_stream_it_->rtp.header;
    packet_data->time_ms = packet_stream_it_->rtp.log_time_ms();
    // This is a header-only "dummy" packet. Set the payload to all zeros, with
    // length according to the virtual length.
    packet_data->payload.SetSize(packet_stream_it_->rtp.total_length -
                                 packet_stream_it_->rtp.header_length);
    std::fill_n(packet_data->payload.data(), packet_data->payload.size(), 0);
    ++packet_stream_it_;
    return packet_data;
  }
  void AdvanceOutputEvent() override {
    if (output_events_it_ != output_events_end_) {
      ++output_events_it_;
    }
  }
  // Ended when NextEventTime() (provided by the base class) yields no value.
  bool ended() const override { return !NextEventTime(); }
  // Header of the next packet without consuming it; nullopt when exhausted.
  absl::optional<RTPHeader> NextHeader() const override {
    if (packet_stream_it_ == packet_stream_.end()) {
      return absl::nullopt;
    }
    return packet_stream_it_->rtp.header;
  }

 private:
  const std::vector<LoggedRtpPacketIncoming>& packet_stream_;
  std::vector<LoggedRtpPacketIncoming>::const_iterator packet_stream_it_;
  std::vector<LoggedAudioPlayoutEvent>::const_iterator output_events_it_;
  const std::vector<LoggedAudioPlayoutEvent>::const_iterator output_events_end_;
  const absl::optional<int64_t> end_time_ms_;
};
namespace {
// Factory to create a "replacement decoder" that produces the decoded audio
// by reading from a file rather than from the encoded payloads.
class ReplacementAudioDecoderFactory : public AudioDecoderFactory {
 public:
  ReplacementAudioDecoderFactory(const absl::string_view replacement_file_name,
                                 int file_sample_rate_hz)
      : replacement_file_name_(replacement_file_name),
        file_sample_rate_hz_(file_sample_rate_hz) {}
  // Not expected to be called in the NetEq simulation.
  std::vector<AudioCodecSpec> GetSupportedDecoders() override {
    RTC_NOTREACHED();
    return {};
  }
  // Claims support for every format; the actual payload is replaced anyway.
  bool IsSupportedDecoder(const SdpAudioFormat& format) override {
    return true;
  }
  // Creates a decoder that reads from the replacement file (resampled to
  // 48 kHz output) instead of decoding |format|.
  std::unique_ptr<AudioDecoder> MakeAudioDecoder(
      const SdpAudioFormat& format,
      absl::optional<AudioCodecPairId> codec_pair_id) override {
    auto replacement_file = std::make_unique<test::ResampleInputAudioFile>(
        replacement_file_name_, file_sample_rate_hz_);
    replacement_file->set_output_rate_hz(48000);
    return std::make_unique<test::FakeDecodeFromFile>(
        std::move(replacement_file), 48000, false);
  }

 private:
  const std::string replacement_file_name_;
  const int file_sample_rate_hz_;
};

// Creates a NetEq test object and all necessary input and output helpers. Runs
// the test and returns the NetEqDelayAnalyzer object that was used to
// instrument the test.
std::unique_ptr<test::NetEqStatsGetter> CreateNetEqTestAndRun(
    const std::vector<LoggedRtpPacketIncoming>* packet_stream,
    const std::vector<LoggedAudioPlayoutEvent>* output_events,
    absl::optional<int64_t> end_time_ms,
    const std::string& replacement_file_name,
    int file_sample_rate_hz) {
  std::unique_ptr<test::NetEqInput> input(
      new NetEqStreamInput(packet_stream, output_events, end_time_ms));
  // Rewrite every packet to a single replacement payload type, with empty
  // comfort-noise and forbidden payload-type sets.
  constexpr int kReplacementPt = 127;
  std::set<uint8_t> cn_types;
  std::set<uint8_t> forbidden_types;
  input.reset(new test::NetEqReplacementInput(std::move(input), kReplacementPt,
                                              cn_types, forbidden_types));
  NetEq::Config config;
  config.max_packets_in_buffer = 200;
  config.enable_fast_accelerate = true;
  // The decoded audio is discarded; only timing statistics are collected.
  std::unique_ptr<test::VoidAudioSink> output(new test::VoidAudioSink());
  rtc::scoped_refptr<AudioDecoderFactory> decoder_factory =
      new rtc::RefCountedObject<ReplacementAudioDecoderFactory>(
          replacement_file_name, file_sample_rate_hz);
  // Map the replacement payload type to 48 kHz mono L16.
  test::NetEqTest::DecoderMap codecs = {
      {kReplacementPt, SdpAudioFormat("l16", 48000, 1)}};
  std::unique_ptr<test::NetEqDelayAnalyzer> delay_cb(
      new test::NetEqDelayAnalyzer);
  std::unique_ptr<test::NetEqStatsGetter> neteq_stats_getter(
      new test::NetEqStatsGetter(std::move(delay_cb)));
  test::DefaultNetEqTestErrorCallback error_cb;
  test::NetEqTest::Callbacks callbacks;
  callbacks.error_callback = &error_cb;
  callbacks.post_insert_packet = neteq_stats_getter->delay_analyzer();
  callbacks.get_audio_callback = neteq_stats_getter.get();
  test::NetEqTest test(config, decoder_factory, codecs, /*text_log=*/nullptr,
                       /*factory=*/nullptr, std::move(input), std::move(output),
                       callbacks);
  test.Run();
  return neteq_stats_getter;
}
}  // namespace
// Runs a NetEq simulation for every incoming audio stream in |parsed_log|,
// replacing payloads with audio from |replacement_file_name|, and returns a
// stats getter per SSRC.
NetEqStatsGetterMap SimulateNetEq(const ParsedRtcEventLog& parsed_log,
                                  const AnalyzerConfig& config,
                                  const std::string& replacement_file_name,
                                  int file_sample_rate_hz) {
  NetEqStatsGetterMap neteq_stats;
  for (const auto& stream : parsed_log.incoming_rtp_packets_by_ssrc()) {
    const uint32_t ssrc = stream.ssrc;
    if (!IsAudioSsrc(parsed_log, kIncomingPacket, ssrc))
      continue;
    // Note: this is the address of a member of |stream| and therefore never
    // null; the previous null check here was dead code and has been removed.
    const std::vector<LoggedRtpPacketIncoming>* audio_packets =
        &stream.incoming_packets;
    RTC_DCHECK(neteq_stats.find(ssrc) == neteq_stats.end());
    std::map<uint32_t, std::vector<LoggedAudioPlayoutEvent>>::const_iterator
        output_events_it = parsed_log.audio_playout_events().find(ssrc);
    if (output_events_it == parsed_log.audio_playout_events().end()) {
      // Could not find output events with SSRC matching the input audio
      // stream. Fall back to the first available stream of output events.
      output_events_it = parsed_log.audio_playout_events().cbegin();
      if (output_events_it == parsed_log.audio_playout_events().cend()) {
        // No playout events in the log at all; skip this stream instead of
        // dereferencing an end iterator.
        continue;
      }
    }
    int64_t end_time_ms = parsed_log.first_log_segment().stop_time_ms();
    neteq_stats[ssrc] = CreateNetEqTestAndRun(
        audio_packets, &output_events_it->second, end_time_ms,
        replacement_file_name, file_sample_rate_hz);
  }
  return neteq_stats;
}
// Given a NetEqStatsGetter and the SSRC that the NetEqStatsGetter was created
// for, this method generates a plot for the jitter buffer delay profile.
void CreateAudioJitterBufferGraph(const ParsedRtcEventLog& parsed_log,
                                  const AnalyzerConfig& config,
                                  uint32_t ssrc,
                                  const test::NetEqStatsGetter* stats_getter,
                                  Plot* plot) {
  test::NetEqDelayAnalyzer::Delays arrival_delay_ms;
  test::NetEqDelayAnalyzer::Delays corrected_arrival_delay_ms;
  test::NetEqDelayAnalyzer::Delays playout_delay_ms;
  test::NetEqDelayAnalyzer::Delays target_delay_ms;
  stats_getter->delay_analyzer()->CreateGraphs(
      &arrival_delay_ms, &corrected_arrival_delay_ms, &playout_delay_ms,
      &target_delay_ms);
  // All four delay profiles are converted to time series identically, so the
  // previously copy-pasted loops are factored into one helper.
  auto fill_series = [&config](const test::NetEqDelayAnalyzer::Delays& delays,
                               TimeSeries* series) {
    for (const auto& data : delays) {
      const float x = config.GetCallTimeSec(data.first * 1000);  // ms to us.
      const float y = data.second;
      series->points.emplace_back(TimeSeriesPoint(x, y));
    }
  };
  TimeSeries time_series_packet_arrival("packet arrival delay",
                                        LineStyle::kLine);
  TimeSeries time_series_relative_packet_arrival(
      "Relative packet arrival delay", LineStyle::kLine);
  TimeSeries time_series_play_time("Playout delay", LineStyle::kLine);
  TimeSeries time_series_target_time("Target delay", LineStyle::kLine,
                                     PointStyle::kHighlight);
  fill_series(arrival_delay_ms, &time_series_packet_arrival);
  fill_series(corrected_arrival_delay_ms, &time_series_relative_packet_arrival);
  fill_series(playout_delay_ms, &time_series_play_time);
  fill_series(target_delay_ms, &time_series_target_time);
  plot->AppendTimeSeries(std::move(time_series_packet_arrival));
  plot->AppendTimeSeries(std::move(time_series_relative_packet_arrival));
  plot->AppendTimeSeries(std::move(time_series_play_time));
  plot->AppendTimeSeries(std::move(time_series_target_time));
  plot->SetXAxis(config.CallBeginTimeSec(), config.CallEndTimeSec(), "Time (s)",
                 kLeftMargin, kRightMargin);
  plot->SetSuggestedYAxis(0, 1, "Relative delay (ms)", kBottomMargin,
                          kTopMargin);
  plot->SetTitle("NetEq timing for " +
                 GetStreamName(parsed_log, kIncomingPacket, ssrc));
}
template <typename NetEqStatsType>
void CreateNetEqStatsGraphInternal(
const ParsedRtcEventLog& parsed_log,
const AnalyzerConfig& config,
const NetEqStatsGetterMap& neteq_stats,
rtc::FunctionView<const std::vector<std::pair<int64_t, NetEqStatsType>>*(
const test::NetEqStatsGetter*)> data_extractor,
rtc::FunctionView<float(const NetEqStatsType&)> stats_extractor,
const std::string& plot_name,
Plot* plot) {
std::map<uint32_t, TimeSeries> time_series;
for (const auto& st : neteq_stats) {
const uint32_t ssrc = st.first;
const std::vector<std::pair<int64_t, NetEqStatsType>>* data_vector =
data_extractor(st.second.get());
for (const auto& data : *data_vector) {
const float time = config.GetCallTimeSec(data.first * 1000); // ms to us.
const float value = stats_extractor(data.second);
time_series[ssrc].points.emplace_back(TimeSeriesPoint(time, value));
}
}
for (auto& series : time_series) {
series.second.label =
GetStreamName(parsed_log, kIncomingPacket, series.first);
series.second.line_style = LineStyle::kLine;
plot->AppendTimeSeries(std::move(series.second));
}
plot->SetXAxis(config.CallBeginTimeSec(), config.CallEndTimeSec(), "Time (s)",
kLeftMargin, kRightMargin);
plot->SetSuggestedYAxis(0, 1, plot_name, kBottomMargin, kTopMargin);
plot->SetTitle(plot_name);
}
void CreateNetEqNetworkStatsGraph(
const ParsedRtcEventLog& parsed_log,
const AnalyzerConfig& config,
const NetEqStatsGetterMap& neteq_stats,
rtc::FunctionView<float(const NetEqNetworkStatistics&)> stats_extractor,
const std::string& plot_name,
Plot* plot) {
CreateNetEqStatsGraphInternal<NetEqNetworkStatistics>(
parsed_log, config, neteq_stats,
[](const test::NetEqStatsGetter* stats_getter) {
return stats_getter->stats();
},
stats_extractor, plot_name, plot);
}
void CreateNetEqLifetimeStatsGraph(
const ParsedRtcEventLog& parsed_log,
const AnalyzerConfig& config,
const NetEqStatsGetterMap& neteq_stats,
rtc::FunctionView<float(const NetEqLifetimeStatistics&)> stats_extractor,
const std::string& plot_name,
Plot* plot) {
CreateNetEqStatsGraphInternal<NetEqLifetimeStatistics>(
parsed_log, config, neteq_stats,
[](const test::NetEqStatsGetter* stats_getter) {
return stats_getter->lifetime_stats();
},
stats_extractor, plot_name, plot);
}
} // namespace webrtc

View File

@ -0,0 +1,75 @@
/*
* Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef RTC_TOOLS_RTC_EVENT_LOG_VISUALIZER_ANALYZE_AUDIO_H_
#define RTC_TOOLS_RTC_EVENT_LOG_VISUALIZER_ANALYZE_AUDIO_H_
#include <cstdint>
#include <map>
#include <memory>
#include <string>
#include "api/function_view.h"
#include "logging/rtc_event_log/rtc_event_log_parser.h"
#include "modules/audio_coding/neteq/tools/neteq_stats_getter.h"
#include "rtc_tools/rtc_event_log_visualizer/analyzer_common.h"
#include "rtc_tools/rtc_event_log_visualizer/plot_base.h"
namespace webrtc {

// Each of the CreateAudioEncoder*Graph functions below appends to |plot| a
// time series of one field of the logged audio-network-adaptation (ANA)
// events in |parsed_log|.
void CreateAudioEncoderTargetBitrateGraph(const ParsedRtcEventLog& parsed_log,
                                          const AnalyzerConfig& config,
                                          Plot* plot);
void CreateAudioEncoderFrameLengthGraph(const ParsedRtcEventLog& parsed_log,
                                        const AnalyzerConfig& config,
                                        Plot* plot);
void CreateAudioEncoderPacketLossGraph(const ParsedRtcEventLog& parsed_log,
                                       const AnalyzerConfig& config,
                                       Plot* plot);
void CreateAudioEncoderEnableFecGraph(const ParsedRtcEventLog& parsed_log,
                                      const AnalyzerConfig& config,
                                      Plot* plot);
void CreateAudioEncoderEnableDtxGraph(const ParsedRtcEventLog& parsed_log,
                                      const AnalyzerConfig& config,
                                      Plot* plot);
void CreateAudioEncoderNumChannelsGraph(const ParsedRtcEventLog& parsed_log,
                                        const AnalyzerConfig& config,
                                        Plot* plot);

// Maps an incoming-audio SSRC to the stats getter produced by the NetEq
// simulation of that stream.
using NetEqStatsGetterMap =
    std::map<uint32_t, std::unique_ptr<test::NetEqStatsGetter>>;

// Runs a NetEq simulation for each incoming audio stream in |parsed_log|,
// replacing the payloads with audio read from |replacement_file_name|.
NetEqStatsGetterMap SimulateNetEq(const ParsedRtcEventLog& parsed_log,
                                  const AnalyzerConfig& config,
                                  const std::string& replacement_file_name,
                                  int file_sample_rate_hz);

// Given a NetEqStatsGetter and the SSRC it was created for, plots the jitter
// buffer delay profile.
void CreateAudioJitterBufferGraph(const ParsedRtcEventLog& parsed_log,
                                  const AnalyzerConfig& config,
                                  uint32_t ssrc,
                                  const test::NetEqStatsGetter* stats_getter,
                                  Plot* plot);
// Plots one float statistic, selected by |stats_extractor|, from the NetEq
// network statistics of each simulated stream.
void CreateNetEqNetworkStatsGraph(
    const ParsedRtcEventLog& parsed_log,
    const AnalyzerConfig& config,
    const NetEqStatsGetterMap& neteq_stats_getters,
    rtc::FunctionView<float(const NetEqNetworkStatistics&)> stats_extractor,
    const std::string& plot_name,
    Plot* plot);
// Plots one float statistic, selected by |stats_extractor|, from the NetEq
// lifetime statistics of each simulated stream.
void CreateNetEqLifetimeStatsGraph(
    const ParsedRtcEventLog& parsed_log,
    const AnalyzerConfig& config,
    const NetEqStatsGetterMap& neteq_stats_getters,
    rtc::FunctionView<float(const NetEqLifetimeStatistics&)> stats_extractor,
    const std::string& plot_name,
    Plot* plot);

}  // namespace webrtc
#endif // RTC_TOOLS_RTC_EVENT_LOG_VISUALIZER_ANALYZE_AUDIO_H_

File diff suppressed because it is too large Load Diff

View File

@ -21,41 +21,18 @@
#include "logging/rtc_event_log/rtc_event_log_parser.h"
#include "modules/audio_coding/neteq/tools/neteq_stats_getter.h"
#include "rtc_base/strings/string_builder.h"
#include "rtc_tools/rtc_event_log_visualizer/analyzer_common.h"
#include "rtc_tools/rtc_event_log_visualizer/plot_base.h"
#include "rtc_tools/rtc_event_log_visualizer/triage_notifications.h"
namespace webrtc {
class AnalyzerConfig {
 public:
  // Maps a log timestamp (microseconds) onto the plotted time axis
  // (seconds). With |normalize_time_| set, time is measured from the first
  // logged event; otherwise the raw timestamp origin is kept.
  float GetCallTimeSec(int64_t timestamp_us) const {
    if (normalize_time_)
      timestamp_us -= begin_time_;
    return timestamp_us / 1000000.f;
  }

  float CallBeginTimeSec() const { return GetCallTimeSec(begin_time_); }
  float CallEndTimeSec() const { return GetCallTimeSec(end_time_); }

  // Window and step size used for calculating moving averages, e.g. bitrate.
  // The generated data points will be |step_| microseconds apart.
  // Only events occurring at most |window_duration_| microseconds before the
  // current data point will be part of the average.
  int64_t window_duration_;
  int64_t step_;
  // First and last events of the log.
  int64_t begin_time_;
  int64_t end_time_;
  bool normalize_time_;
};
class EventLogAnalyzer {
public:
// The EventLogAnalyzer keeps a reference to the ParsedRtcEventLogNew for the
// duration of its lifetime. The ParsedRtcEventLogNew must not be destroyed or
// modified while the EventLogAnalyzer is being used.
EventLogAnalyzer(const ParsedRtcEventLog& log, bool normalize_time);
EventLogAnalyzer(const ParsedRtcEventLog& log, const AnalyzerConfig& config);
void CreatePacketGraph(PacketDirection direction, Plot* plot);
@ -102,32 +79,6 @@ class EventLogAnalyzer {
std::string yaxis_label,
Plot* plot);
void CreateAudioEncoderTargetBitrateGraph(Plot* plot);
void CreateAudioEncoderFrameLengthGraph(Plot* plot);
void CreateAudioEncoderPacketLossGraph(Plot* plot);
void CreateAudioEncoderEnableFecGraph(Plot* plot);
void CreateAudioEncoderEnableDtxGraph(Plot* plot);
void CreateAudioEncoderNumChannelsGraph(Plot* plot);
using NetEqStatsGetterMap =
std::map<uint32_t, std::unique_ptr<test::NetEqStatsGetter>>;
NetEqStatsGetterMap SimulateNetEq(const std::string& replacement_file_name,
int file_sample_rate_hz) const;
void CreateAudioJitterBufferGraph(uint32_t ssrc,
const test::NetEqStatsGetter* stats_getter,
Plot* plot) const;
void CreateNetEqNetworkStatsGraph(
const NetEqStatsGetterMap& neteq_stats_getters,
rtc::FunctionView<float(const NetEqNetworkStatistics&)> stats_extractor,
const std::string& plot_name,
Plot* plot) const;
void CreateNetEqLifetimeStatsGraph(
const NetEqStatsGetterMap& neteq_stats_getters,
rtc::FunctionView<float(const NetEqLifetimeStatistics&)> stats_extractor,
const std::string& plot_name,
Plot* plot) const;
void CreateIceCandidatePairConfigGraph(Plot* plot);
void CreateIceConnectivityCheckGraph(Plot* plot);
@ -138,145 +89,11 @@ class EventLogAnalyzer {
void PrintNotifications(FILE* file);
private:
// Identifies one simulcast/SVC layer of a stream.
struct LayerDescription {
  LayerDescription(uint32_t ssrc,
                   uint8_t spatial_layer,
                   uint8_t temporal_layer)
      : ssrc(ssrc),
        spatial_layer(spatial_layer),
        temporal_layer(temporal_layer) {}

  // Lexicographic order on (ssrc, spatial_layer, temporal_layer), so the
  // struct can be used as a map/set key.
  bool operator<(const LayerDescription& other) const {
    if (ssrc < other.ssrc)
      return true;
    if (other.ssrc < ssrc)
      return false;
    if (spatial_layer < other.spatial_layer)
      return true;
    if (other.spatial_layer < spatial_layer)
      return false;
    return temporal_layer < other.temporal_layer;
  }

  uint32_t ssrc;
  uint8_t spatial_layer;
  uint8_t temporal_layer;
};
// True iff |ssrc| is a retransmission (RTX) stream in |direction|.
bool IsRtxSsrc(PacketDirection direction, uint32_t ssrc) const {
  const auto& rtx_ssrcs = direction == kIncomingPacket
                              ? parsed_log_.incoming_rtx_ssrcs()
                              : parsed_log_.outgoing_rtx_ssrcs();
  return rtx_ssrcs.count(ssrc) > 0;
}
// True iff |ssrc| carries video in |direction|.
bool IsVideoSsrc(PacketDirection direction, uint32_t ssrc) const {
  const auto& video_ssrcs = direction == kIncomingPacket
                                ? parsed_log_.incoming_video_ssrcs()
                                : parsed_log_.outgoing_video_ssrcs();
  return video_ssrcs.count(ssrc) > 0;
}
// True iff |ssrc| carries audio in |direction|.
bool IsAudioSsrc(PacketDirection direction, uint32_t ssrc) const {
  const auto& audio_ssrcs = direction == kIncomingPacket
                                ? parsed_log_.incoming_audio_ssrcs()
                                : parsed_log_.outgoing_audio_ssrcs();
  return audio_ssrcs.count(ssrc) > 0;
}
template <typename NetEqStatsType>
void CreateNetEqStatsGraphInternal(
const NetEqStatsGetterMap& neteq_stats,
rtc::FunctionView<const std::vector<std::pair<int64_t, NetEqStatsType>>*(
const test::NetEqStatsGetter*)> data_extractor,
rtc::FunctionView<float(const NetEqStatsType&)> stats_extractor,
const std::string& plot_name,
Plot* plot) const;
template <typename IterableType>
void CreateAccumulatedPacketsTimeSeries(Plot* plot,
const IterableType& packets,
const std::string& label);
void CreateStreamGapAlerts(PacketDirection direction);
void CreateTransmissionGapAlerts(PacketDirection direction);
// Builds a human-readable label for a stream, e.g. "Video RTX (In) SSRC 42".
std::string GetStreamName(PacketDirection direction, uint32_t ssrc) const {
  // Media-type prefix; audio is checked before video, matching the stream
  // classification order used elsewhere.
  const char* media = "Unknown ";
  if (IsAudioSsrc(direction, ssrc)) {
    media = "Audio ";
  } else if (IsVideoSsrc(direction, ssrc)) {
    media = "Video ";
  }
  char buffer[200];
  rtc::SimpleStringBuilder name(buffer);
  name << media;
  if (IsRtxSsrc(direction, ssrc))
    name << "RTX ";
  name << (direction == kIncomingPacket ? "(In) " : "(Out) ");
  name << "SSRC " << ssrc;
  return name.str();
}
// Returns a human-readable label for a simulcast/SVC layer,
// e.g. "SSRC 123 sl 1, tl 0".
// NOTE(review): spatial_layer/temporal_layer are uint8_t; this assumes
// SimpleStringBuilder formats them numerically, not as characters — confirm.
std::string GetLayerName(LayerDescription layer) const {
  char buffer[100];
  rtc::SimpleStringBuilder name(buffer);
  name << "SSRC " << layer.ssrc << " sl " << layer.spatial_layer << ", tl "
       << layer.temporal_layer;
  return name.str();
}
// Records a gap in logged RTP activity ending at |time_seconds|: no RTP
// packet was received (incoming) or sent (outgoing) for |duration|.
// Units of |duration| are not visible here — presumably milliseconds;
// TODO confirm at the call site.
void Alert_RtpLogTimeGap(PacketDirection direction,
                         float time_seconds,
                         int64_t duration) {
  if (direction == kIncomingPacket) {
    incoming_rtp_recv_time_gaps_.emplace_back(time_seconds, duration);
  } else {
    outgoing_rtp_send_time_gaps_.emplace_back(time_seconds, duration);
  }
}
// Same as Alert_RtpLogTimeGap, but for gaps in logged RTCP traffic.
void Alert_RtcpLogTimeGap(PacketDirection direction,
                          float time_seconds,
                          int64_t duration) {
  if (direction == kIncomingPacket) {
    incoming_rtcp_recv_time_gaps_.emplace_back(time_seconds, duration);
  } else {
    outgoing_rtcp_send_time_gaps_.emplace_back(time_seconds, duration);
  }
}
// Records a suspiciously large jump in RTP sequence numbers on stream |ssrc|
// at |time_seconds| (e.g. a counter that was reset or rewritten).
void Alert_SeqNumJump(PacketDirection direction,
                      float time_seconds,
                      uint32_t ssrc) {
  if (direction == kIncomingPacket) {
    incoming_seq_num_jumps_.emplace_back(time_seconds, ssrc);
  } else {
    outgoing_seq_num_jumps_.emplace_back(time_seconds, ssrc);
  }
}
// Records a suspiciously large jump in the RTP capture timestamp on stream
// |ssrc| at |time_seconds|.
void Alert_CaptureTimeJump(PacketDirection direction,
                           float time_seconds,
                           uint32_t ssrc) {
  if (direction == kIncomingPacket) {
    incoming_capture_time_jumps_.emplace_back(time_seconds, ssrc);
  } else {
    outgoing_capture_time_jumps_.emplace_back(time_seconds, ssrc);
  }
}
// Records that the average outgoing loss fraction |avg_loss_fraction| was
// high enough to warrant a triage notification.
void Alert_OutgoingHighLoss(double avg_loss_fraction) {
  outgoing_high_loss_alerts_.emplace_back(avg_loss_fraction);
}
std::string GetCandidatePairLogDescriptionFromId(uint32_t candidate_pair_id);
const ParsedRtcEventLog& parsed_log_;
@ -285,20 +102,6 @@ class EventLogAnalyzer {
// If left empty, all SSRCs will be considered relevant.
std::vector<uint32_t> desired_ssrc_;
// Stores the timestamps for all log segments, in the form of associated start
// and end events.
std::vector<std::pair<int64_t, int64_t>> log_segments_;
std::vector<IncomingRtpReceiveTimeGap> incoming_rtp_recv_time_gaps_;
std::vector<IncomingRtcpReceiveTimeGap> incoming_rtcp_recv_time_gaps_;
std::vector<OutgoingRtpSendTimeGap> outgoing_rtp_send_time_gaps_;
std::vector<OutgoingRtcpSendTimeGap> outgoing_rtcp_send_time_gaps_;
std::vector<IncomingSeqNumJump> incoming_seq_num_jumps_;
std::vector<IncomingCaptureTimeJump> incoming_capture_time_jumps_;
std::vector<OutgoingSeqNoJump> outgoing_seq_num_jumps_;
std::vector<OutgoingCaptureTimeJump> outgoing_capture_time_jumps_;
std::vector<OutgoingHighLoss> outgoing_high_loss_alerts_;
std::map<uint32_t, std::string> candidate_pair_desc_by_id_;
AnalyzerConfig config_;

View File

@ -0,0 +1,83 @@
/*
* Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "rtc_tools/rtc_event_log_visualizer/analyzer_common.h"
namespace webrtc {
// True iff |ssrc| is a retransmission (RTX) stream in |direction|.
bool IsRtxSsrc(const ParsedRtcEventLog& parsed_log,
               PacketDirection direction,
               uint32_t ssrc) {
  const auto& rtx_ssrcs = direction == kIncomingPacket
                              ? parsed_log.incoming_rtx_ssrcs()
                              : parsed_log.outgoing_rtx_ssrcs();
  return rtx_ssrcs.count(ssrc) > 0;
}
// True iff |ssrc| carries video in |direction|.
bool IsVideoSsrc(const ParsedRtcEventLog& parsed_log,
                 PacketDirection direction,
                 uint32_t ssrc) {
  const auto& video_ssrcs = direction == kIncomingPacket
                                ? parsed_log.incoming_video_ssrcs()
                                : parsed_log.outgoing_video_ssrcs();
  return video_ssrcs.count(ssrc) > 0;
}
// True iff |ssrc| carries audio in |direction|.
bool IsAudioSsrc(const ParsedRtcEventLog& parsed_log,
                 PacketDirection direction,
                 uint32_t ssrc) {
  const auto& audio_ssrcs = direction == kIncomingPacket
                                ? parsed_log.incoming_audio_ssrcs()
                                : parsed_log.outgoing_audio_ssrcs();
  return audio_ssrcs.count(ssrc) > 0;
}
// Builds a human-readable label for a stream, e.g. "Video RTX (In) SSRC 42".
std::string GetStreamName(const ParsedRtcEventLog& parsed_log,
                          PacketDirection direction,
                          uint32_t ssrc) {
  // Media-type prefix; audio is checked before video, matching the stream
  // classification order used elsewhere.
  const char* media = "Unknown ";
  if (IsAudioSsrc(parsed_log, direction, ssrc)) {
    media = "Audio ";
  } else if (IsVideoSsrc(parsed_log, direction, ssrc)) {
    media = "Video ";
  }
  char buffer[200];
  rtc::SimpleStringBuilder name(buffer);
  name << media;
  if (IsRtxSsrc(parsed_log, direction, ssrc))
    name << "RTX ";
  name << (direction == kIncomingPacket ? "(In) " : "(Out) ");
  name << "SSRC " << ssrc;
  return name.str();
}
// Returns a human-readable label for a simulcast/SVC layer,
// e.g. "SSRC 123 sl 1, tl 0".
// NOTE(review): spatial_layer/temporal_layer are uint8_t; this assumes
// SimpleStringBuilder formats them numerically, not as characters — confirm.
std::string GetLayerName(LayerDescription layer) {
  char buffer[100];
  rtc::SimpleStringBuilder name(buffer);
  name << "SSRC " << layer.ssrc << " sl " << layer.spatial_layer << ", tl "
       << layer.temporal_layer;
  return name.str();
}
} // namespace webrtc

View File

@ -0,0 +1,182 @@
/*
* Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef RTC_TOOLS_RTC_EVENT_LOG_VISUALIZER_ANALYZER_COMMON_H_
#define RTC_TOOLS_RTC_EVENT_LOG_VISUALIZER_ANALYZER_COMMON_H_
#include <cstdint>
#include <string>
#include "absl/types/optional.h"
#include "api/function_view.h"
#include "logging/rtc_event_log/rtc_event_log_parser.h"
#include "rtc_tools/rtc_event_log_visualizer/plot_base.h"
namespace webrtc {
constexpr int kNumMicrosecsPerSec = 1000000;
constexpr float kLeftMargin = 0.01f;
constexpr float kRightMargin = 0.02f;
constexpr float kBottomMargin = 0.02f;
constexpr float kTopMargin = 0.05f;
class AnalyzerConfig {
 public:
  // Maps a log timestamp (microseconds) onto the plotted time axis
  // (seconds). With |normalize_time_| set, time is measured from the first
  // logged event; otherwise the raw timestamp origin is kept.
  float GetCallTimeSec(int64_t timestamp_us) const {
    if (normalize_time_)
      timestamp_us -= begin_time_;
    return timestamp_us / 1000000.f;
  }

  float CallBeginTimeSec() const { return GetCallTimeSec(begin_time_); }
  float CallEndTimeSec() const { return GetCallTimeSec(end_time_); }

  // Window and step size used for calculating moving averages, e.g. bitrate.
  // The generated data points will be |step_| microseconds apart.
  // Only events occurring at most |window_duration_| microseconds before the
  // current data point will be part of the average.
  int64_t window_duration_;
  int64_t step_;
  // First and last events of the log.
  int64_t begin_time_;
  int64_t end_time_;
  bool normalize_time_;
};
// Identifies one simulcast/SVC layer of a stream.
struct LayerDescription {
  LayerDescription(uint32_t ssrc, uint8_t spatial_layer, uint8_t temporal_layer)
      : ssrc(ssrc),
        spatial_layer(spatial_layer),
        temporal_layer(temporal_layer) {}

  // Lexicographic order on (ssrc, spatial_layer, temporal_layer), so the
  // struct can be used as a map/set key.
  bool operator<(const LayerDescription& other) const {
    if (ssrc < other.ssrc)
      return true;
    if (other.ssrc < ssrc)
      return false;
    if (spatial_layer < other.spatial_layer)
      return true;
    if (other.spatial_layer < spatial_layer)
      return false;
    return temporal_layer < other.temporal_layer;
  }

  uint32_t ssrc;
  uint8_t spatial_layer;
  uint8_t temporal_layer;
};
bool IsRtxSsrc(const ParsedRtcEventLog& parsed_log,
PacketDirection direction,
uint32_t ssrc);
bool IsVideoSsrc(const ParsedRtcEventLog& parsed_log,
PacketDirection direction,
uint32_t ssrc);
bool IsAudioSsrc(const ParsedRtcEventLog& parsed_log,
PacketDirection direction,
uint32_t ssrc);
std::string GetStreamName(const ParsedRtcEventLog& parsed_log,
PacketDirection direction,
uint32_t ssrc);
std::string GetLayerName(LayerDescription layer);
// For each element in |data_view|, use |fx| to extract the x-coordinate and
// |fy| to extract an optional y-coordinate, storing the resulting points in
// |result|. Elements for which |fy| returns nullopt are skipped.
template <typename DataType, typename IterableType>
void ProcessPoints(rtc::FunctionView<float(const DataType&)> fx,
                   rtc::FunctionView<absl::optional<float>(const DataType&)> fy,
                   const IterableType& data_view,
                   TimeSeries* result) {
  for (size_t i = 0; i < data_view.size(); i++) {
    const DataType& elem = data_view[i];
    float x = fx(elem);
    absl::optional<float> y = fy(elem);
    if (y)
      result->points.emplace_back(x, *y);
  }
}
// For each pair of adjacent elements in |data|, use |fy| to extract an
// optional y-coordinate and store the result in |result|. Note that the
// x-coordinate (from |fx|) is the time of the second element of each pair;
// pairs for which |fy| returns nullopt produce no point.
template <typename DataType, typename ResultType, typename IterableType>
void ProcessPairs(
    rtc::FunctionView<float(const DataType&)> fx,
    rtc::FunctionView<absl::optional<ResultType>(const DataType&,
                                                 const DataType&)> fy,
    const IterableType& data,
    TimeSeries* result) {
  for (size_t i = 1; i < data.size(); i++) {
    float x = fx(data[i]);
    absl::optional<ResultType> y = fy(data[i - 1], data[i]);
    if (y)
      result->points.emplace_back(x, static_cast<float>(*y));
  }
}
// Like ProcessPairs, but each stored y-coordinate is the running sum of all
// values extracted so far, not the per-pair value. The x-coordinate is the
// time of the second element of each pair; pairs for which |fy| returns
// nullopt neither contribute to the sum nor produce a point.
template <typename DataType, typename ResultType, typename IterableType>
void AccumulatePairs(
    rtc::FunctionView<float(const DataType&)> fx,
    rtc::FunctionView<absl::optional<ResultType>(const DataType&,
                                                 const DataType&)> fy,
    const IterableType& data,
    TimeSeries* result) {
  ResultType sum = 0;
  for (size_t i = 1; i < data.size(); i++) {
    float x = fx(data[i]);
    absl::optional<ResultType> y = fy(data[i - 1], data[i]);
    if (y) {
      sum += *y;
      result->points.emplace_back(x, static_cast<float>(sum));
    }
  }
}
// Calculates a moving average of |data_view| and stores the result in a
// TimeSeries. A data point is generated every |config.step_| microseconds
// from |config.begin_time_| to |config.end_time_|. The value of each data
// point is the average of the data during the preceding
// |config.window_duration_| microseconds.
template <typename DataType, typename ResultType, typename IterableType>
void MovingAverage(
    rtc::FunctionView<absl::optional<ResultType>(const DataType&)> fy,
    const IterableType& data_view,
    AnalyzerConfig config,
    TimeSeries* result) {
  size_t window_index_begin = 0;
  size_t window_index_end = 0;
  ResultType sum_in_window = 0;

  for (int64_t t = config.begin_time_; t < config.end_time_ + config.step_;
       t += config.step_) {
    // Add values whose timestamps have entered the window ending at |t|.
    while (window_index_end < data_view.size() &&
           data_view[window_index_end].log_time_us() < t) {
      absl::optional<ResultType> value = fy(data_view[window_index_end]);
      if (value)
        sum_in_window += *value;
      ++window_index_end;
    }
    // Remove values that have fallen out of the trailing window.
    while (window_index_begin < data_view.size() &&
           data_view[window_index_begin].log_time_us() <
               t - config.window_duration_) {
      absl::optional<ResultType> value = fy(data_view[window_index_begin]);
      if (value)
        sum_in_window -= *value;
      ++window_index_begin;
    }
    float window_duration_s =
        static_cast<float>(config.window_duration_) / kNumMicrosecsPerSec;
    float x = config.GetCallTimeSec(t);
    float y = sum_in_window / window_duration_s;
    result->points.emplace_back(x, y);
  }
}
} // namespace webrtc
#endif // RTC_TOOLS_RTC_EVENT_LOG_VISUALIZER_ANALYZER_COMMON_H_

View File

@ -30,10 +30,10 @@
#include "modules/rtp_rtcp/source/rtcp_packet/report_block.h"
#include "rtc_base/checks.h"
#include "rtc_base/logging.h"
#include "rtc_tools/rtc_event_log_visualizer/alerts.h"
#include "rtc_tools/rtc_event_log_visualizer/analyze_audio.h"
#include "rtc_tools/rtc_event_log_visualizer/analyzer.h"
#include "rtc_tools/rtc_event_log_visualizer/plot_base.h"
#include "rtc_tools/rtc_event_log_visualizer/plot_protobuf.h"
#include "rtc_tools/rtc_event_log_visualizer/plot_python.h"
#include "system_wrappers/include/field_trial.h"
#include "test/field_trial.h"
#include "test/testsupport/file_utils.h"
@ -77,7 +77,7 @@ ABSL_FLAG(bool,
ABSL_FLAG(bool,
print_triage_alerts,
false,
true,
"Print triage alerts, i.e. a list of potential problems.");
ABSL_FLAG(bool,
@ -194,9 +194,9 @@ int main(int argc, char* argv[]) {
"A tool for visualizing WebRTC event logs.\n"
"Example usage:\n"
"./event_log_visualizer <logfile> | python\n");
absl::FlagsUsageConfig config;
config.contains_help_flags = &ContainsHelppackageFlags;
absl::SetFlagsUsageConfig(config);
absl::FlagsUsageConfig flag_config;
flag_config.contains_help_flags = &ContainsHelppackageFlags;
absl::SetFlagsUsageConfig(flag_config);
std::vector<char*> args = absl::ParseCommandLine(argc, argv);
// Print RTC_LOG warnings and errors even in release builds.
@ -261,16 +261,22 @@ int main(int argc, char* argv[]) {
}
}
webrtc::EventLogAnalyzer analyzer(parsed_log,
absl::GetFlag(FLAGS_normalize_time));
std::unique_ptr<webrtc::PlotCollection> collection;
if (absl::GetFlag(FLAGS_protobuf_output)) {
collection.reset(new webrtc::ProtobufPlotCollection());
} else {
collection.reset(
new webrtc::PythonPlotCollection(absl::GetFlag(FLAGS_shared_xaxis)));
webrtc::AnalyzerConfig config;
config.window_duration_ = 250000;
config.step_ = 10000;
config.normalize_time_ = absl::GetFlag(FLAGS_normalize_time);
config.begin_time_ = parsed_log.first_timestamp();
config.end_time_ = parsed_log.last_timestamp();
if (config.end_time_ < config.begin_time_) {
RTC_LOG(LS_WARNING) << "Log end time " << config.end_time_
<< " not after begin time " << config.begin_time_
<< ". Nothing to analyze. Is the log broken?";
return -1;
}
webrtc::EventLogAnalyzer analyzer(parsed_log, config);
webrtc::PlotCollection collection;
PlotMap plots;
plots.RegisterPlot("incoming_packet_sizes", [&](Plot* plot) {
analyzer.CreatePacketGraph(webrtc::kIncomingPacket, plot);
@ -423,22 +429,22 @@ int main(int argc, char* argv[]) {
plots.RegisterPlot("pacer_delay",
[&](Plot* plot) { analyzer.CreatePacerDelayGraph(plot); });
plots.RegisterPlot("audio_encoder_bitrate", [&](Plot* plot) {
analyzer.CreateAudioEncoderTargetBitrateGraph(plot);
CreateAudioEncoderTargetBitrateGraph(parsed_log, config, plot);
});
plots.RegisterPlot("audio_encoder_frame_length", [&](Plot* plot) {
analyzer.CreateAudioEncoderFrameLengthGraph(plot);
CreateAudioEncoderFrameLengthGraph(parsed_log, config, plot);
});
plots.RegisterPlot("audio_encoder_packet_loss", [&](Plot* plot) {
analyzer.CreateAudioEncoderPacketLossGraph(plot);
CreateAudioEncoderPacketLossGraph(parsed_log, config, plot);
});
plots.RegisterPlot("audio_encoder_fec", [&](Plot* plot) {
analyzer.CreateAudioEncoderEnableFecGraph(plot);
CreateAudioEncoderEnableFecGraph(parsed_log, config, plot);
});
plots.RegisterPlot("audio_encoder_dtx", [&](Plot* plot) {
analyzer.CreateAudioEncoderEnableDtxGraph(plot);
CreateAudioEncoderEnableDtxGraph(parsed_log, config, plot);
});
plots.RegisterPlot("audio_encoder_num_channels", [&](Plot* plot) {
analyzer.CreateAudioEncoderNumChannelsGraph(plot);
CreateAudioEncoderNumChannelsGraph(parsed_log, config, plot);
});
plots.RegisterPlot("ice_candidate_pair_config", [&](Plot* plot) {
@ -461,14 +467,14 @@ int main(int argc, char* argv[]) {
wav_path = webrtc::test::ResourcePath(
"audio_processing/conversational_speech/EN_script2_F_sp2_B1", "wav");
}
absl::optional<webrtc::EventLogAnalyzer::NetEqStatsGetterMap> neteq_stats;
absl::optional<webrtc::NetEqStatsGetterMap> neteq_stats;
plots.RegisterPlot("simulated_neteq_expand_rate", [&](Plot* plot) {
if (!neteq_stats) {
neteq_stats = analyzer.SimulateNetEq(wav_path, 48000);
neteq_stats = webrtc::SimulateNetEq(parsed_log, config, wav_path, 48000);
}
analyzer.CreateNetEqNetworkStatsGraph(
*neteq_stats,
webrtc::CreateNetEqNetworkStatsGraph(
parsed_log, config, *neteq_stats,
[](const webrtc::NetEqNetworkStatistics& stats) {
return stats.expand_rate / 16384.f;
},
@ -477,10 +483,10 @@ int main(int argc, char* argv[]) {
plots.RegisterPlot("simulated_neteq_speech_expand_rate", [&](Plot* plot) {
if (!neteq_stats) {
neteq_stats = analyzer.SimulateNetEq(wav_path, 48000);
neteq_stats = webrtc::SimulateNetEq(parsed_log, config, wav_path, 48000);
}
analyzer.CreateNetEqNetworkStatsGraph(
*neteq_stats,
webrtc::CreateNetEqNetworkStatsGraph(
parsed_log, config, *neteq_stats,
[](const webrtc::NetEqNetworkStatistics& stats) {
return stats.speech_expand_rate / 16384.f;
},
@ -489,10 +495,10 @@ int main(int argc, char* argv[]) {
plots.RegisterPlot("simulated_neteq_accelerate_rate", [&](Plot* plot) {
if (!neteq_stats) {
neteq_stats = analyzer.SimulateNetEq(wav_path, 48000);
neteq_stats = webrtc::SimulateNetEq(parsed_log, config, wav_path, 48000);
}
analyzer.CreateNetEqNetworkStatsGraph(
*neteq_stats,
webrtc::CreateNetEqNetworkStatsGraph(
parsed_log, config, *neteq_stats,
[](const webrtc::NetEqNetworkStatistics& stats) {
return stats.accelerate_rate / 16384.f;
},
@ -501,10 +507,10 @@ int main(int argc, char* argv[]) {
plots.RegisterPlot("simulated_neteq_preemptive_rate", [&](Plot* plot) {
if (!neteq_stats) {
neteq_stats = analyzer.SimulateNetEq(wav_path, 48000);
neteq_stats = webrtc::SimulateNetEq(parsed_log, config, wav_path, 48000);
}
analyzer.CreateNetEqNetworkStatsGraph(
*neteq_stats,
webrtc::CreateNetEqNetworkStatsGraph(
parsed_log, config, *neteq_stats,
[](const webrtc::NetEqNetworkStatistics& stats) {
return stats.preemptive_rate / 16384.f;
},
@ -513,10 +519,10 @@ int main(int argc, char* argv[]) {
plots.RegisterPlot("simulated_neteq_packet_loss_rate", [&](Plot* plot) {
if (!neteq_stats) {
neteq_stats = analyzer.SimulateNetEq(wav_path, 48000);
neteq_stats = webrtc::SimulateNetEq(parsed_log, config, wav_path, 48000);
}
analyzer.CreateNetEqNetworkStatsGraph(
*neteq_stats,
webrtc::CreateNetEqNetworkStatsGraph(
parsed_log, config, *neteq_stats,
[](const webrtc::NetEqNetworkStatistics& stats) {
return stats.packet_loss_rate / 16384.f;
},
@ -525,10 +531,10 @@ int main(int argc, char* argv[]) {
plots.RegisterPlot("simulated_neteq_concealment_events", [&](Plot* plot) {
if (!neteq_stats) {
neteq_stats = analyzer.SimulateNetEq(wav_path, 48000);
neteq_stats = webrtc::SimulateNetEq(parsed_log, config, wav_path, 48000);
}
analyzer.CreateNetEqLifetimeStatsGraph(
*neteq_stats,
webrtc::CreateNetEqLifetimeStatsGraph(
parsed_log, config, *neteq_stats,
[](const webrtc::NetEqLifetimeStatistics& stats) {
return static_cast<float>(stats.concealment_events);
},
@ -537,10 +543,10 @@ int main(int argc, char* argv[]) {
plots.RegisterPlot("simulated_neteq_preferred_buffer_size", [&](Plot* plot) {
if (!neteq_stats) {
neteq_stats = analyzer.SimulateNetEq(wav_path, 48000);
neteq_stats = webrtc::SimulateNetEq(parsed_log, config, wav_path, 48000);
}
analyzer.CreateNetEqNetworkStatsGraph(
*neteq_stats,
webrtc::CreateNetEqNetworkStatsGraph(
parsed_log, config, *neteq_stats,
[](const webrtc::NetEqNetworkStatistics& stats) {
return stats.preferred_buffer_size_ms;
},
@ -587,7 +593,7 @@ int main(int argc, char* argv[]) {
for (const auto& plot : plots) {
if (plot.enabled) {
Plot* output = collection->AppendNewPlot();
Plot* output = collection.AppendNewPlot();
plot.plot_func(output);
output->SetId(plot.label);
}
@ -601,21 +607,28 @@ int main(int argc, char* argv[]) {
if (absl::c_find(plot_flags, "simulated_neteq_jitter_buffer_delay") !=
plot_flags.end()) {
if (!neteq_stats) {
neteq_stats = analyzer.SimulateNetEq(wav_path, 48000);
neteq_stats = webrtc::SimulateNetEq(parsed_log, config, wav_path, 48000);
}
for (webrtc::EventLogAnalyzer::NetEqStatsGetterMap::const_iterator it =
neteq_stats->cbegin();
for (webrtc::NetEqStatsGetterMap::const_iterator it = neteq_stats->cbegin();
it != neteq_stats->cend(); ++it) {
analyzer.CreateAudioJitterBufferGraph(it->first, it->second.get(),
collection->AppendNewPlot());
webrtc::CreateAudioJitterBufferGraph(parsed_log, config, it->first,
it->second.get(),
collection.AppendNewPlot());
}
}
collection->Draw();
if (absl::GetFlag(FLAGS_protobuf_output)) {
webrtc::analytics::ChartCollection proto_charts;
collection.ExportProtobuf(&proto_charts);
std::cout << proto_charts.SerializeAsString();
} else {
collection.PrintPythonCode(absl::GetFlag(FLAGS_shared_xaxis));
}
if (absl::GetFlag(FLAGS_print_triage_alerts)) {
analyzer.CreateTriageNotifications();
analyzer.PrintNotifications(stderr);
webrtc::TriageHelper triage_alerts(config);
triage_alerts.AnalyzeLog(parsed_log);
triage_alerts.Print(stderr);
}
return 0;

View File

@ -11,6 +11,7 @@
#include "rtc_tools/rtc_event_log_visualizer/plot_base.h"
#include <algorithm>
#include <memory>
#include "rtc_base/checks.h"
@ -93,4 +94,232 @@ void Plot::AppendTimeSeriesIfNotEmpty(TimeSeries&& time_series) {
}
}
// Emits a matplotlib script for this plot to stdout. Intended program usage:
// ./event_log_visualizer event_log160330.dump | python
// Assumes the caller (PlotCollection::PrintPythonCode) has already printed
// the required Python imports.
void Plot::PrintPythonCode() const {
  // Write python commands to stdout. Intended program usage is
  // ./event_log_visualizer event_log160330.dump | python

  if (!series_list_.empty()) {
    // One distinct HLS-derived color per series.
    printf("color_count = %zu\n", series_list_.size());
    printf(
        "hls_colors = [(i*1.0/color_count, 0.25+i*0.5/color_count, 0.8) for i "
        "in range(color_count)]\n");
    printf("colors = [colorsys.hls_to_rgb(*hls) for hls in hls_colors]\n");

    for (size_t i = 0; i < series_list_.size(); i++) {
      printf("\n# === Series: %s ===\n", series_list_[i].label.c_str());
      // List x coordinates
      printf("x%zu = [", i);
      if (!series_list_[i].points.empty())
        printf("%.3f", series_list_[i].points[0].x);
      for (size_t j = 1; j < series_list_[i].points.size(); j++)
        printf(", %.3f", series_list_[i].points[j].x);
      printf("]\n");
      // List y coordinates
      printf("y%zu = [", i);
      if (!series_list_[i].points.empty())
        printf("%G", series_list_[i].points[0].y);
      for (size_t j = 1; j < series_list_[i].points.size(); j++)
        printf(", %G", series_list_[i].points[j].y);
      printf("]\n");

      if (series_list_[i].line_style == LineStyle::kBar) {
        // There is a plt.bar function that draws bar plots,
        // but it is *way* too slow to be useful.
        printf(
            "plt.vlines(x%zu, map(lambda t: min(t,0), y%zu), map(lambda t: "
            "max(t,0), y%zu), color=colors[%zu], "
            "label=\'%s\')\n",
            i, i, i, i, series_list_[i].label.c_str());
        if (series_list_[i].point_style == PointStyle::kHighlight) {
          printf(
              "plt.plot(x%zu, y%zu, color=colors[%zu], "
              "marker='.', ls=' ')\n",
              i, i, i);
        }
      } else if (series_list_[i].line_style == LineStyle::kLine) {
        if (series_list_[i].point_style == PointStyle::kHighlight) {
          printf(
              "plt.plot(x%zu, y%zu, color=colors[%zu], label=\'%s\', "
              "marker='.')\n",
              i, i, i, series_list_[i].label.c_str());
        } else {
          printf("plt.plot(x%zu, y%zu, color=colors[%zu], label=\'%s\')\n", i,
                 i, i, series_list_[i].label.c_str());
        }
      } else if (series_list_[i].line_style == LineStyle::kStep) {
        // Draw lines from (x[0],y[0]) to (x[1],y[0]) to (x[1],y[1]) and so on
        // to illustrate the "steps". This can be expressed by duplicating all
        // elements except the first in x and the last in y.
        printf("xd%zu = [dup for v in x%zu for dup in [v, v]]\n", i, i);
        printf("yd%zu = [dup for v in y%zu for dup in [v, v]]\n", i, i);
        printf(
            "plt.plot(xd%zu[1:], yd%zu[:-1], color=colors[%zu], "
            "label=\'%s\')\n",
            i, i, i, series_list_[i].label.c_str());
        if (series_list_[i].point_style == PointStyle::kHighlight) {
          printf(
              "plt.plot(x%zu, y%zu, color=colors[%zu], "
              "marker='.', ls=' ')\n",
              i, i, i);
        }
      } else if (series_list_[i].line_style == LineStyle::kNone) {
        printf(
            "plt.plot(x%zu, y%zu, color=colors[%zu], label=\'%s\', "
            "marker='o', ls=' ')\n",
            i, i, i, series_list_[i].label.c_str());
      } else {
        printf("raise Exception(\"Unknown graph type\")\n");
      }
    }

    // IntervalSeries
    // NOTE(review): intervals (and the Python variable legend_patches) are
    // only emitted when series_list_ is non-empty; the legend code below
    // references legend_patches whenever interval_list_ is non-empty, which
    // would fail for a plot with intervals but no series — confirm intended.
    printf("interval_colors = ['#ff8e82','#5092fc','#c4ffc4','#aaaaaa']\n");
    RTC_CHECK_LE(interval_list_.size(), 4);
    // To get the intervals to show up in the legend we have to create patches
    // for them.
    printf("legend_patches = []\n");
    for (size_t i = 0; i < interval_list_.size(); i++) {
      // List intervals
      printf("\n# === IntervalSeries: %s ===\n",
             interval_list_[i].label.c_str());
      printf("ival%zu = [", i);
      if (!interval_list_[i].intervals.empty()) {
        printf("(%G, %G)", interval_list_[i].intervals[0].begin,
               interval_list_[i].intervals[0].end);
      }
      for (size_t j = 1; j < interval_list_[i].intervals.size(); j++) {
        printf(", (%G, %G)", interval_list_[i].intervals[j].begin,
               interval_list_[i].intervals[j].end);
      }
      printf("]\n");
      printf("for i in range(0, %zu):\n", interval_list_[i].intervals.size());
      if (interval_list_[i].orientation == IntervalSeries::kVertical) {
        printf(
            " plt.axhspan(ival%zu[i][0], ival%zu[i][1], "
            "facecolor=interval_colors[%zu], "
            "alpha=0.3)\n",
            i, i, i);
      } else {
        printf(
            " plt.axvspan(ival%zu[i][0], ival%zu[i][1], "
            "facecolor=interval_colors[%zu], "
            "alpha=0.3)\n",
            i, i, i);
      }
      printf(
          "legend_patches.append(mpatches.Patch(ec=\'black\', "
          "fc=interval_colors[%zu], label='%s'))\n",
          i, interval_list_[i].label.c_str());
    }
  }

  // Axis limits, labels, title and window title.
  printf("plt.xlim(%f, %f)\n", xaxis_min_, xaxis_max_);
  printf("plt.ylim(%f, %f)\n", yaxis_min_, yaxis_max_);
  printf("plt.xlabel(\'%s\')\n", xaxis_label_.c_str());
  printf("plt.ylabel(\'%s\')\n", yaxis_label_.c_str());
  printf("plt.title(\'%s\')\n", title_.c_str());
  printf("fig = plt.gcf()\n");
  printf("fig.canvas.set_window_title(\'%s\')\n", id_.c_str());
  if (!yaxis_tick_labels_.empty()) {
    // Custom tick labels on the y axis, given as (value, label) pairs.
    printf("yaxis_tick_labels = [");
    for (const auto& kv : yaxis_tick_labels_) {
      printf("(%f,\"%s\"),", kv.first, kv.second.c_str());
    }
    printf("]\n");
    printf("yaxis_tick_labels = list(zip(*yaxis_tick_labels))\n");
    printf("plt.yticks(*yaxis_tick_labels)\n");
  }
  if (!series_list_.empty() || !interval_list_.empty()) {
    printf("handles, labels = plt.gca().get_legend_handles_labels()\n");
    printf("for lp in legend_patches:\n");
    printf(" handles.append(lp)\n");
    printf(" labels.append(lp.get_label())\n");
    printf("plt.legend(handles, labels, loc=\'best\', fontsize=\'small\')\n");
  }
}
// Serializes this plot into a webrtc::analytics::Chart protobuf message.
void Plot::ExportProtobuf(webrtc::analytics::Chart* chart) const {
  for (const auto& series : series_list_) {
    webrtc::analytics::DataSet* data_set = chart->add_data_sets();
    for (const auto& point : series.points)
      data_set->add_x_values(point.x);
    for (const auto& point : series.points)
      data_set->add_y_values(point.y);

    // Translate the internal line style into the protobuf chart style.
    switch (series.line_style) {
      case LineStyle::kBar:
        data_set->set_style(webrtc::analytics::ChartStyle::BAR_CHART);
        break;
      case LineStyle::kLine:
        data_set->set_style(webrtc::analytics::ChartStyle::LINE_CHART);
        break;
      case LineStyle::kStep:
        data_set->set_style(webrtc::analytics::ChartStyle::LINE_STEP_CHART);
        break;
      case LineStyle::kNone:
        data_set->set_style(webrtc::analytics::ChartStyle::SCATTER_CHART);
        break;
      default:
        data_set->set_style(webrtc::analytics::ChartStyle::UNDEFINED);
        break;
    }
    if (series.point_style == PointStyle::kHighlight)
      data_set->set_highlight_points(true);
    data_set->set_label(series.label);
  }

  chart->set_xaxis_min(xaxis_min_);
  chart->set_xaxis_max(xaxis_max_);
  chart->set_yaxis_min(yaxis_min_);
  chart->set_yaxis_max(yaxis_max_);
  chart->set_xaxis_label(xaxis_label_);
  chart->set_yaxis_label(yaxis_label_);
  chart->set_title(title_);
  chart->set_id(id_);
  for (const auto& kv : yaxis_tick_labels_) {
    webrtc::analytics::TickLabel* tick = chart->add_yaxis_tick_labels();
    tick->set_value(kv.first);
    tick->set_label(kv.second);
  }
}
void PlotCollection::PrintPythonCode(bool shared_xaxis) const {
printf("import matplotlib.pyplot as plt\n");
printf("plt.rcParams.update({'figure.max_open_warning': 0})\n");
printf("import matplotlib.patches as mpatches\n");
printf("import matplotlib.patheffects as pe\n");
printf("import colorsys\n");
for (size_t i = 0; i < plots_.size(); i++) {
printf("plt.figure(%zu)\n", i);
if (shared_xaxis) {
// Link x-axes across all figures for synchronized zooming.
if (i == 0) {
printf("axis0 = plt.subplot(111)\n");
} else {
printf("plt.subplot(111, sharex=axis0)\n");
}
}
plots_[i]->PrintPythonCode();
}
printf("plt.show()\n");
}
void PlotCollection::ExportProtobuf(
    webrtc::analytics::ChartCollection* collection) const {
  // Appends one Chart message to |collection| for every plot and delegates
  // the per-plot serialization to Plot::ExportProtobuf().
  for (const auto& plot : plots_) {
    plot->ExportProtobuf(collection->add_charts());
  }
}
Plot* PlotCollection::AppendNewPlot() {
  // Creates an empty plot, stores it in the collection (which keeps
  // ownership) and returns a non-owning pointer for the caller to fill in.
  auto new_plot = std::make_unique<Plot>();
  Plot* raw_plot = new_plot.get();
  plots_.push_back(std::move(new_plot));
  return raw_plot;
}
} // namespace webrtc

View File

@ -15,6 +15,13 @@
#include <utility>
#include <vector>
#include "rtc_base/deprecation.h"
#include "rtc_base/ignore_wundef.h"
RTC_PUSH_IGNORING_WUNDEF()
#include "rtc_tools/rtc_event_log_visualizer/proto/chart.pb.h"
RTC_POP_IGNORING_WUNDEF()
namespace webrtc {
enum class LineStyle {
@ -94,8 +101,8 @@ class Plot {
public:
virtual ~Plot() {}
// Overloaded to draw the plot.
virtual void Draw() = 0;
// Deprecated. Use PrintPythonCode() or ExportProtobuf() instead.
RTC_DEPRECATED virtual void Draw() {}
// Sets the lower x-axis limit to min_value (if left_margin == 0).
// Sets the upper x-axis limit to max_value (if right_margin == 0).
@ -158,6 +165,12 @@ class Plot {
// Otherwise, the call has no effect and the timeseries is destroyed.
void AppendTimeSeriesIfNotEmpty(TimeSeries&& time_series);
// Replaces PythonPlot::Draw()
void PrintPythonCode() const;
// Replaces ProtobufPlot::Draw()
void ExportProtobuf(webrtc::analytics::Chart* chart) const;
protected:
float xaxis_min_;
float xaxis_max_;
@ -175,8 +188,17 @@ class Plot {
class PlotCollection {
public:
virtual ~PlotCollection() {}
virtual void Draw() = 0;
virtual Plot* AppendNewPlot() = 0;
// Deprecated. Use PrintPythonCode() or ExportProtobuf() instead.
RTC_DEPRECATED virtual void Draw() {}
virtual Plot* AppendNewPlot();
// Replaces PythonPlotCollection::Draw()
void PrintPythonCode(bool shared_xaxis) const;
// Replaces ProtobufPlotCollections::Draw()
void ExportProtobuf(webrtc::analytics::ChartCollection* collection) const;
protected:
std::vector<std::unique_ptr<Plot>> plots_;

View File

@ -24,49 +24,7 @@ ProtobufPlot::~ProtobufPlot() {}
void ProtobufPlot::Draw() {}
void ProtobufPlot::ExportProtobuf(webrtc::analytics::Chart* chart) {
for (size_t i = 0; i < series_list_.size(); i++) {
webrtc::analytics::DataSet* data_set = chart->add_data_sets();
for (const auto& point : series_list_[i].points) {
data_set->add_x_values(point.x);
}
for (const auto& point : series_list_[i].points) {
data_set->add_y_values(point.y);
}
if (series_list_[i].line_style == LineStyle::kBar) {
data_set->set_style(webrtc::analytics::ChartStyle::BAR_CHART);
} else if (series_list_[i].line_style == LineStyle::kLine) {
data_set->set_style(webrtc::analytics::ChartStyle::LINE_CHART);
} else if (series_list_[i].line_style == LineStyle::kStep) {
data_set->set_style(webrtc::analytics::ChartStyle::LINE_STEP_CHART);
} else if (series_list_[i].line_style == LineStyle::kNone) {
data_set->set_style(webrtc::analytics::ChartStyle::SCATTER_CHART);
} else {
data_set->set_style(webrtc::analytics::ChartStyle::UNDEFINED);
}
if (series_list_[i].point_style == PointStyle::kHighlight)
data_set->set_highlight_points(true);
data_set->set_label(series_list_[i].label);
}
chart->set_xaxis_min(xaxis_min_);
chart->set_xaxis_max(xaxis_max_);
chart->set_yaxis_min(yaxis_min_);
chart->set_yaxis_max(yaxis_max_);
chart->set_xaxis_label(xaxis_label_);
chart->set_yaxis_label(yaxis_label_);
chart->set_title(title_);
chart->set_id(id_);
for (const auto& kv : yaxis_tick_labels_) {
webrtc::analytics::TickLabel* tick = chart->add_yaxis_tick_labels();
tick->set_value(kv.first);
tick->set_label(kv.second);
}
}
ProtobufPlotCollection::ProtobufPlotCollection() {}
@ -78,19 +36,6 @@ void ProtobufPlotCollection::Draw() {
std::cout << collection.SerializeAsString();
}
void ProtobufPlotCollection::ExportProtobuf(
webrtc::analytics::ChartCollection* collection) {
for (const auto& plot : plots_) {
// TODO(terelius): Ensure that there is no way to insert plots other than
// ProtobufPlots in a ProtobufPlotCollection. Needed to safely static_cast
// here.
webrtc::analytics::Chart* protobuf_representation =
collection->add_charts();
static_cast<ProtobufPlot*>(plot.get())
->ExportProtobuf(protobuf_representation);
}
}
Plot* ProtobufPlotCollection::AppendNewPlot() {
Plot* plot = new ProtobufPlot();
plots_.push_back(std::unique_ptr<Plot>(plot));

View File

@ -23,16 +23,15 @@ class ProtobufPlot final : public Plot {
ProtobufPlot();
~ProtobufPlot() override;
void Draw() override;
void ExportProtobuf(webrtc::analytics::Chart* chart);
};
class ProtobufPlotCollection final : public PlotCollection {
public:
ProtobufPlotCollection();
// This class is deprecated. Use PlotCollection and ExportProtobuf() instead.
RTC_DEPRECATED ProtobufPlotCollection();
~ProtobufPlotCollection() override;
void Draw() override;
Plot* AppendNewPlot() override;
void ExportProtobuf(webrtc::analytics::ChartCollection* collection);
};
} // namespace webrtc

View File

@ -25,149 +25,7 @@ PythonPlot::PythonPlot() {}
PythonPlot::~PythonPlot() {}
void PythonPlot::Draw() {
// Write python commands to stdout. Intended program usage is
// ./event_log_visualizer event_log160330.dump | python
if (!series_list_.empty()) {
printf("color_count = %zu\n", series_list_.size());
printf(
"hls_colors = [(i*1.0/color_count, 0.25+i*0.5/color_count, 0.8) for i "
"in range(color_count)]\n");
printf("colors = [colorsys.hls_to_rgb(*hls) for hls in hls_colors]\n");
for (size_t i = 0; i < series_list_.size(); i++) {
printf("\n# === Series: %s ===\n", series_list_[i].label.c_str());
// List x coordinates
printf("x%zu = [", i);
if (!series_list_[i].points.empty())
printf("%.3f", series_list_[i].points[0].x);
for (size_t j = 1; j < series_list_[i].points.size(); j++)
printf(", %.3f", series_list_[i].points[j].x);
printf("]\n");
// List y coordinates
printf("y%zu = [", i);
if (!series_list_[i].points.empty())
printf("%G", series_list_[i].points[0].y);
for (size_t j = 1; j < series_list_[i].points.size(); j++)
printf(", %G", series_list_[i].points[j].y);
printf("]\n");
if (series_list_[i].line_style == LineStyle::kBar) {
// There is a plt.bar function that draws bar plots,
// but it is *way* too slow to be useful.
printf(
"plt.vlines(x%zu, map(lambda t: min(t,0), y%zu), map(lambda t: "
"max(t,0), y%zu), color=colors[%zu], "
"label=\'%s\')\n",
i, i, i, i, series_list_[i].label.c_str());
if (series_list_[i].point_style == PointStyle::kHighlight) {
printf(
"plt.plot(x%zu, y%zu, color=colors[%zu], "
"marker='.', ls=' ')\n",
i, i, i);
}
} else if (series_list_[i].line_style == LineStyle::kLine) {
if (series_list_[i].point_style == PointStyle::kHighlight) {
printf(
"plt.plot(x%zu, y%zu, color=colors[%zu], label=\'%s\', "
"marker='.')\n",
i, i, i, series_list_[i].label.c_str());
} else {
printf("plt.plot(x%zu, y%zu, color=colors[%zu], label=\'%s\')\n", i,
i, i, series_list_[i].label.c_str());
}
} else if (series_list_[i].line_style == LineStyle::kStep) {
// Draw lines from (x[0],y[0]) to (x[1],y[0]) to (x[1],y[1]) and so on
// to illustrate the "steps". This can be expressed by duplicating all
// elements except the first in x and the last in y.
printf("xd%zu = [dup for v in x%zu for dup in [v, v]]\n", i, i);
printf("yd%zu = [dup for v in y%zu for dup in [v, v]]\n", i, i);
printf(
"plt.plot(xd%zu[1:], yd%zu[:-1], color=colors[%zu], "
"label=\'%s\')\n",
i, i, i, series_list_[i].label.c_str());
if (series_list_[i].point_style == PointStyle::kHighlight) {
printf(
"plt.plot(x%zu, y%zu, color=colors[%zu], "
"marker='.', ls=' ')\n",
i, i, i);
}
} else if (series_list_[i].line_style == LineStyle::kNone) {
printf(
"plt.plot(x%zu, y%zu, color=colors[%zu], label=\'%s\', "
"marker='o', ls=' ')\n",
i, i, i, series_list_[i].label.c_str());
} else {
printf("raise Exception(\"Unknown graph type\")\n");
}
}
// IntervalSeries
printf("interval_colors = ['#ff8e82','#5092fc','#c4ffc4','#aaaaaa']\n");
RTC_CHECK_LE(interval_list_.size(), 4);
// To get the intervals to show up in the legend we have to create patches
// for them.
printf("legend_patches = []\n");
for (size_t i = 0; i < interval_list_.size(); i++) {
// List intervals
printf("\n# === IntervalSeries: %s ===\n",
interval_list_[i].label.c_str());
printf("ival%zu = [", i);
if (!interval_list_[i].intervals.empty()) {
printf("(%G, %G)", interval_list_[i].intervals[0].begin,
interval_list_[i].intervals[0].end);
}
for (size_t j = 1; j < interval_list_[i].intervals.size(); j++) {
printf(", (%G, %G)", interval_list_[i].intervals[j].begin,
interval_list_[i].intervals[j].end);
}
printf("]\n");
printf("for i in range(0, %zu):\n", interval_list_[i].intervals.size());
if (interval_list_[i].orientation == IntervalSeries::kVertical) {
printf(
" plt.axhspan(ival%zu[i][0], ival%zu[i][1], "
"facecolor=interval_colors[%zu], "
"alpha=0.3)\n",
i, i, i);
} else {
printf(
" plt.axvspan(ival%zu[i][0], ival%zu[i][1], "
"facecolor=interval_colors[%zu], "
"alpha=0.3)\n",
i, i, i);
}
printf(
"legend_patches.append(mpatches.Patch(ec=\'black\', "
"fc=interval_colors[%zu], label='%s'))\n",
i, interval_list_[i].label.c_str());
}
}
printf("plt.xlim(%f, %f)\n", xaxis_min_, xaxis_max_);
printf("plt.ylim(%f, %f)\n", yaxis_min_, yaxis_max_);
printf("plt.xlabel(\'%s\')\n", xaxis_label_.c_str());
printf("plt.ylabel(\'%s\')\n", yaxis_label_.c_str());
printf("plt.title(\'%s\')\n", title_.c_str());
printf("fig = plt.gcf()\n");
printf("fig.canvas.set_window_title(\'%s\')\n", id_.c_str());
if (!yaxis_tick_labels_.empty()) {
printf("yaxis_tick_labels = [");
for (const auto& kv : yaxis_tick_labels_) {
printf("(%f,\"%s\"),", kv.first, kv.second.c_str());
}
printf("]\n");
printf("yaxis_tick_labels = list(zip(*yaxis_tick_labels))\n");
printf("plt.yticks(*yaxis_tick_labels)\n");
}
if (!series_list_.empty() || !interval_list_.empty()) {
printf("handles, labels = plt.gca().get_legend_handles_labels()\n");
printf("for lp in legend_patches:\n");
printf(" handles.append(lp)\n");
printf(" labels.append(lp.get_label())\n");
printf("plt.legend(handles, labels, loc=\'best\', fontsize=\'small\')\n");
}
PrintPythonCode();
}
PythonPlotCollection::PythonPlotCollection(bool shared_xaxis)
@ -176,24 +34,7 @@ PythonPlotCollection::PythonPlotCollection(bool shared_xaxis)
PythonPlotCollection::~PythonPlotCollection() {}
void PythonPlotCollection::Draw() {
printf("import matplotlib.pyplot as plt\n");
printf("plt.rcParams.update({'figure.max_open_warning': 0})\n");
printf("import matplotlib.patches as mpatches\n");
printf("import matplotlib.patheffects as pe\n");
printf("import colorsys\n");
for (size_t i = 0; i < plots_.size(); i++) {
printf("plt.figure(%zu)\n", i);
if (shared_xaxis_) {
// Link x-axes across all figures for synchronized zooming.
if (i == 0) {
printf("axis0 = plt.subplot(111)\n");
} else {
printf("plt.subplot(111, sharex=axis0)\n");
}
}
plots_[i]->Draw();
}
printf("plt.show()\n");
PrintPythonCode(shared_xaxis_);
}
Plot* PythonPlotCollection::AppendNewPlot() {

View File

@ -23,7 +23,8 @@ class PythonPlot final : public Plot {
class PythonPlotCollection final : public PlotCollection {
public:
explicit PythonPlotCollection(bool shared_xaxis = false);
// This class is deprecated. Use PlotCollection and PrintPythonCode() instead.
RTC_DEPRECATED explicit PythonPlotCollection(bool shared_xaxis = false);
~PythonPlotCollection() override;
void Draw() override;
Plot* AppendNewPlot() override;

View File

@ -1,158 +0,0 @@
/*
* Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef RTC_TOOLS_RTC_EVENT_LOG_VISUALIZER_TRIAGE_NOTIFICATIONS_H_
#define RTC_TOOLS_RTC_EVENT_LOG_VISUALIZER_TRIAGE_NOTIFICATIONS_H_
#include <string>
namespace webrtc {
class IncomingRtpReceiveTimeGap {
 public:
  // Triage notification: no incoming RTP packets for |duration| ms, first
  // detected |time_seconds| seconds into the log.
  IncomingRtpReceiveTimeGap(float time_seconds, int64_t duration)
      : time_s_(time_seconds), gap_ms_(duration) {}
  // Log time (seconds) of the occurrence.
  float Time() const { return time_s_; }
  // Human-readable description for the triage report.
  std::string ToString() const {
    return "No RTP packets received for " + std::to_string(gap_ms_) + " ms";
  }

 private:
  float time_s_;
  int64_t gap_ms_;
};
class IncomingRtcpReceiveTimeGap {
 public:
  // Triage notification: no incoming RTCP packets for |duration| ms, first
  // detected |time_seconds| seconds into the log.
  IncomingRtcpReceiveTimeGap(float time_seconds, int64_t duration)
      : time_s_(time_seconds), gap_ms_(duration) {}
  // Log time (seconds) of the occurrence.
  float Time() const { return time_s_; }
  // Human-readable description for the triage report.
  std::string ToString() const {
    return "No RTCP packets received for " + std::to_string(gap_ms_) + " ms";
  }

 private:
  float time_s_;
  int64_t gap_ms_;
};
class OutgoingRtpSendTimeGap {
 public:
  // Triage notification: no outgoing RTP packets for |duration| ms, first
  // detected |time_seconds| seconds into the log.
  OutgoingRtpSendTimeGap(float time_seconds, int64_t duration)
      : time_s_(time_seconds), gap_ms_(duration) {}
  // Log time (seconds) of the occurrence.
  float Time() const { return time_s_; }
  // Human-readable description for the triage report.
  std::string ToString() const {
    return "No RTP packets sent for " + std::to_string(gap_ms_) + " ms";
  }

 private:
  float time_s_;
  int64_t gap_ms_;
};
class OutgoingRtcpSendTimeGap {
 public:
  // Triage notification: no outgoing RTCP packets for |duration| ms, first
  // detected |time_seconds| seconds into the log.
  OutgoingRtcpSendTimeGap(float time_seconds, int64_t duration)
      : time_s_(time_seconds), gap_ms_(duration) {}
  // Log time (seconds) of the occurrence.
  float Time() const { return time_s_; }
  // Human-readable description for the triage report.
  std::string ToString() const {
    return "No RTCP packets sent for " + std::to_string(gap_ms_) + " ms";
  }

 private:
  float time_s_;
  int64_t gap_ms_;
};
class IncomingSeqNumJump {
 public:
  // Triage notification: the RTP sequence number jumped on an incoming
  // stream, first detected |time_seconds| seconds into the log.
  IncomingSeqNumJump(float time_seconds, uint32_t ssrc)
      : time_s_(time_seconds), stream_ssrc_(ssrc) {}
  // Log time (seconds) of the occurrence.
  float Time() const { return time_s_; }
  // Human-readable description for the triage report.
  std::string ToString() const {
    return "Sequence number jumps on incoming SSRC " +
           std::to_string(stream_ssrc_);
  }

 private:
  float time_s_;
  uint32_t stream_ssrc_;
};
class IncomingCaptureTimeJump {
 public:
  // Triage notification: the RTP capture timestamp jumped on an incoming
  // stream, first detected |time_seconds| seconds into the log.
  IncomingCaptureTimeJump(float time_seconds, uint32_t ssrc)
      : time_s_(time_seconds), stream_ssrc_(ssrc) {}
  // Log time (seconds) of the occurrence.
  float Time() const { return time_s_; }
  // Human-readable description for the triage report.
  std::string ToString() const {
    return "Capture timestamp jumps on incoming SSRC " +
           std::to_string(stream_ssrc_);
  }

 private:
  float time_s_;
  uint32_t stream_ssrc_;
};
class OutgoingSeqNoJump {
 public:
  // Triage notification: the RTP sequence number jumped on an outgoing
  // stream, first detected |time_seconds| seconds into the log.
  OutgoingSeqNoJump(float time_seconds, uint32_t ssrc)
      : time_s_(time_seconds), stream_ssrc_(ssrc) {}
  // Log time (seconds) of the occurrence.
  float Time() const { return time_s_; }
  // Human-readable description for the triage report.
  std::string ToString() const {
    return "Sequence number jumps on outgoing SSRC " +
           std::to_string(stream_ssrc_);
  }

 private:
  float time_s_;
  uint32_t stream_ssrc_;
};
class OutgoingCaptureTimeJump {
 public:
  // Triage notification: the RTP capture timestamp jumped on an outgoing
  // stream, first detected |time_seconds| seconds into the log.
  OutgoingCaptureTimeJump(float time_seconds, uint32_t ssrc)
      : time_s_(time_seconds), stream_ssrc_(ssrc) {}
  // Log time (seconds) of the occurrence.
  float Time() const { return time_s_; }
  // Human-readable description for the triage report.
  std::string ToString() const {
    return "Capture timestamp jumps on outgoing SSRC " +
           std::to_string(stream_ssrc_);
  }

 private:
  float time_s_;
  uint32_t stream_ssrc_;
};
class OutgoingHighLoss {
 public:
  // Triage notification: the average outgoing loss fraction across the whole
  // call was high. |avg_loss_fraction| is a fraction, not a percentage.
  explicit OutgoingHighLoss(double avg_loss_fraction)
      : loss_fraction_(avg_loss_fraction) {}
  // Human-readable description for the triage report.
  std::string ToString() const {
    // The stored fraction is reported as a percentage, hence the factor 100.
    return "High average loss (" + std::to_string(loss_fraction_ * 100) +
           "%) across the call.";
  }

 private:
  double loss_fraction_;
};
} // namespace webrtc
#endif // RTC_TOOLS_RTC_EVENT_LOG_VISUALIZER_TRIAGE_NOTIFICATIONS_H_