Remove gflags dependency for event_log_visualizer and activity_metric

This is the first step towards getting rid of gflags as a dependency.
It has been causing us extra work for a long time; for example, it is
not present in Chromium.

BUG=webrtc:7644
NOTRY=True

Review-Url: https://codereview.webrtc.org/2874403003
Cr-Commit-Position: refs/heads/master@{#18157}
This commit is contained in:
kjellander
2017-05-16 00:01:23 -07:00
committed by Commit bot
parent 09f0561675
commit 4fa5be451f
3 changed files with 72 additions and 59 deletions

View File

@ -244,8 +244,8 @@ if (rtc_include_tests) {
defines = [ "ENABLE_RTC_EVENT_LOG" ] defines = [ "ENABLE_RTC_EVENT_LOG" ]
deps = [ deps = [
":event_log_visualizer_utils", ":event_log_visualizer_utils",
"../base:rtc_base_approved",
"../test:field_trial", "../test:field_trial",
"//third_party/gflags",
] ]
} }
} }
@ -262,13 +262,13 @@ if (rtc_include_tests) {
} }
deps = [ deps = [
"../base:rtc_base_approved",
"../modules:module_api", "../modules:module_api",
"../modules/audio_processing", "../modules/audio_processing",
"../system_wrappers:metrics_default", "../system_wrappers:metrics_default",
"../test:test_support", "../test:test_support",
"//build/win:default_exe_manifest", "//build/win:default_exe_manifest",
"//testing/gtest", "//testing/gtest",
"//third_party/gflags",
] ]
} }

View File

@ -16,7 +16,7 @@
#include <algorithm> #include <algorithm>
#include <memory> #include <memory>
#include "gflags/gflags.h" #include "webrtc/base/flags.h"
#include "webrtc/modules/audio_processing/agc/agc.h" #include "webrtc/modules/audio_processing/agc/agc.h"
#include "webrtc/modules/audio_processing/agc/loudness_histogram.h" #include "webrtc/modules/audio_processing/agc/loudness_histogram.h"
#include "webrtc/modules/audio_processing/agc/utility.h" #include "webrtc/modules/audio_processing/agc/utility.h"
@ -28,7 +28,7 @@
#include "webrtc/test/gtest.h" #include "webrtc/test/gtest.h"
static const int kAgcAnalWindowSamples = 100; static const int kAgcAnalWindowSamples = 100;
static const double kDefaultActivityThreshold = 0.3; static const float kDefaultActivityThreshold = 0.3f;
DEFINE_bool(standalone_vad, true, "enable stand-alone VAD"); DEFINE_bool(standalone_vad, true, "enable stand-alone VAD");
DEFINE_string(true_vad, "", "name of a file containing true VAD in 'int'" DEFINE_string(true_vad, "", "name of a file containing true VAD in 'int'"
@ -44,8 +44,9 @@ DEFINE_string(result, "", "name of a file to write the results. The results"
" will be appended to the end of the file. This is optional."); " will be appended to the end of the file. This is optional.");
DEFINE_string(audio_content, "", "name of a file where audio content is written" DEFINE_string(audio_content, "", "name of a file where audio content is written"
" to, in double format."); " to, in double format.");
DEFINE_double(activity_threshold, kDefaultActivityThreshold, DEFINE_float(activity_threshold, kDefaultActivityThreshold,
"Activity threshold"); "Activity threshold");
DEFINE_bool(help, false, "prints this message");
namespace webrtc { namespace webrtc {
@ -105,13 +106,13 @@ class AgcStat {
AudioFeatures features; AudioFeatures features;
audio_processing_->ExtractFeatures( audio_processing_->ExtractFeatures(
frame.data_, frame.samples_per_channel_, &features); frame.data_, frame.samples_per_channel_, &features);
if (FLAGS_standalone_vad) { if (FLAG_standalone_vad) {
standalone_vad_->AddAudio(frame.data_, standalone_vad_->AddAudio(frame.data_,
frame.samples_per_channel_); frame.samples_per_channel_);
} }
if (features.num_frames > 0) { if (features.num_frames > 0) {
double p[kMaxNumFrames] = {0.5, 0.5, 0.5, 0.5}; double p[kMaxNumFrames] = {0.5, 0.5, 0.5, 0.5};
if (FLAGS_standalone_vad) { if (FLAG_standalone_vad) {
standalone_vad_->GetActivity(p, kMaxNumFrames); standalone_vad_->GetActivity(p, kMaxNumFrames);
} }
// TODO(turajs) combining and limiting are used in the source files as // TODO(turajs) combining and limiting are used in the source files as
@ -175,20 +176,20 @@ void void_main(int argc, char* argv[]) {
} }
FILE* true_vad_fid = NULL; FILE* true_vad_fid = NULL;
ASSERT_GT(FLAGS_true_vad.size(), 0u) << "Specify the file containing true " ASSERT_GT(strlen(FLAG_true_vad), 0u) << "Specify the file containing true "
"VADs using --true_vad flag."; "VADs using --true_vad flag.";
true_vad_fid = fopen(FLAGS_true_vad.c_str(), "rb"); true_vad_fid = fopen(FLAG_true_vad, "rb");
ASSERT_TRUE(true_vad_fid != NULL) << "Cannot open the active list " << ASSERT_TRUE(true_vad_fid != NULL) << "Cannot open the active list " <<
FLAGS_true_vad; FLAG_true_vad;
FILE* results_fid = NULL; FILE* results_fid = NULL;
if (FLAGS_result.size() > 0) { if (strlen(FLAG_result) > 0) {
// True if this is the first time writing to this function and we add a // True if this is the first time writing to this function and we add a
// header to the beginning of the file. // header to the beginning of the file.
bool write_header; bool write_header;
// Open in the read mode. If it fails, the file doesn't exist and has to // Open in the read mode. If it fails, the file doesn't exist and has to
// write a header for it. Otherwise no need to write a header. // write a header for it. Otherwise no need to write a header.
results_fid = fopen(FLAGS_result.c_str(), "r"); results_fid = fopen(FLAG_result, "r");
if (results_fid == NULL) { if (results_fid == NULL) {
write_header = true; write_header = true;
} else { } else {
@ -196,9 +197,9 @@ void void_main(int argc, char* argv[]) {
write_header = false; write_header = false;
} }
// Open in append mode. // Open in append mode.
results_fid = fopen(FLAGS_result.c_str(), "a"); results_fid = fopen(FLAG_result, "a");
ASSERT_TRUE(results_fid != NULL) << "Cannot open the file, " << ASSERT_TRUE(results_fid != NULL) << "Cannot open the file, " <<
FLAGS_result << ", to write the results."; FLAG_result << ", to write the results.";
// Write the header if required. // Write the header if required.
if (write_header) { if (write_header) {
fprintf(results_fid, "%% Total Active, Misdetection, " fprintf(results_fid, "%% Total Active, Misdetection, "
@ -208,19 +209,19 @@ void void_main(int argc, char* argv[]) {
} }
FILE* video_vad_fid = NULL; FILE* video_vad_fid = NULL;
if (FLAGS_video_vad.size() > 0) { if (strlen(FLAG_video_vad) > 0) {
video_vad_fid = fopen(FLAGS_video_vad.c_str(), "rb"); video_vad_fid = fopen(FLAG_video_vad, "rb");
ASSERT_TRUE(video_vad_fid != NULL) << "Cannot open the file, " << ASSERT_TRUE(video_vad_fid != NULL) << "Cannot open the file, " <<
FLAGS_video_vad << " to read video-based VAD decisions.\n"; FLAG_video_vad << " to read video-based VAD decisions.\n";
} }
// AgsStat will be the owner of this file and will close it at its // AgsStat will be the owner of this file and will close it at its
// destructor. // destructor.
FILE* audio_content_fid = NULL; FILE* audio_content_fid = NULL;
if (FLAGS_audio_content.size() > 0) { if (strlen(FLAG_audio_content) > 0) {
audio_content_fid = fopen(FLAGS_audio_content.c_str(), "wb"); audio_content_fid = fopen(FLAG_audio_content, "wb");
ASSERT_TRUE(audio_content_fid != NULL) << "Cannot open file, " << ASSERT_TRUE(audio_content_fid != NULL) << "Cannot open file, " <<
FLAGS_audio_content << " to write audio-content.\n"; FLAG_audio_content << " to write audio-content.\n";
agc_stat.set_audio_content_file(audio_content_fid); agc_stat.set_audio_content_file(audio_content_fid);
} }
@ -231,7 +232,7 @@ void void_main(int argc, char* argv[]) {
const size_t kSamplesToRead = frame.num_channels_ * const size_t kSamplesToRead = frame.num_channels_ *
frame.samples_per_channel_; frame.samples_per_channel_;
agc_stat.SetActivityThreshold(FLAGS_activity_threshold); agc_stat.SetActivityThreshold(FLAG_activity_threshold);
int ret_val = 0; int ret_val = 0;
int num_frames = 0; int num_frames = 0;
@ -369,17 +370,25 @@ void void_main(int argc, char* argv[]) {
} // namespace webrtc } // namespace webrtc
int main(int argc, char* argv[]) { int main(int argc, char* argv[]) {
char kUsage[] = if (argc == 1) {
// Print usage information.
std::cout <<
"\nCompute the number of misdetected and false-positive frames. Not\n" "\nCompute the number of misdetected and false-positive frames. Not\n"
" that for each frame of audio (10 ms) there should be one true\n" " that for each frame of audio (10 ms) there should be one true\n"
" activity. If any video-based activity is given, there should also be\n" " activity. If any video-based activity is given, there should also be\n"
" one probability per frame.\n" " one probability per frame.\n"
"Run with --help for more details on available flags.\n"
"\nUsage:\n\n" "\nUsage:\n\n"
"activity_metric input_pcm [options]\n" "activity_metric input_pcm [options]\n"
"where 'input_pcm' is the input audio sampled at 16 kHz in 16 bits " "where 'input_pcm' is the input audio sampled at 16 kHz in 16 bits "
"format.\n\n"; "format.\n\n";
google::SetUsageMessage(kUsage); return 0;
google::ParseCommandLineFlags(&argc, &argv, true); }
rtc::FlagList::SetFlagsFromCommandLine(&argc, argv, true);
if (FLAG_help) {
rtc::FlagList::Print(nullptr, false);
return 0;
}
webrtc::void_main(argc, argv); webrtc::void_main(argc, argv);
return 0; return 0;
} }

View File

@ -10,7 +10,7 @@
#include <iostream> #include <iostream>
#include "gflags/gflags.h" #include "webrtc/base/flags.h"
#include "webrtc/logging/rtc_event_log/rtc_event_log_parser.h" #include "webrtc/logging/rtc_event_log/rtc_event_log_parser.h"
#include "webrtc/test/field_trial.h" #include "webrtc/test/field_trial.h"
#include "webrtc/tools/event_log_visualizer/analyzer.h" #include "webrtc/tools/event_log_visualizer/analyzer.h"
@ -84,6 +84,7 @@ DEFINE_string(
"E.g. running with --force_fieldtrials=WebRTC-FooFeature/Enabled/" "E.g. running with --force_fieldtrials=WebRTC-FooFeature/Enabled/"
" will assign the group Enabled to field trial WebRTC-FooFeature. Multiple " " will assign the group Enabled to field trial WebRTC-FooFeature. Multiple "
"trials are separated by \"/\""); "trials are separated by \"/\"");
DEFINE_bool(help, false, "prints this message");
int main(int argc, char* argv[]) { int main(int argc, char* argv[]) {
std::string program_name = argv[0]; std::string program_name = argv[0];
@ -92,16 +93,19 @@ int main(int argc, char* argv[]) {
"Example usage:\n" + "Example usage:\n" +
program_name + " <logfile> | python\n" + "Run " + program_name + program_name + " <logfile> | python\n" + "Run " + program_name +
" --help for a list of command line options\n"; " --help for a list of command line options\n";
google::SetUsageMessage(usage); rtc::FlagList::SetFlagsFromCommandLine(&argc, argv, true);
google::ParseCommandLineFlags(&argc, &argv, true); if (FLAG_help) {
rtc::FlagList::Print(nullptr, false);
if (argc != 2) {
// Print usage information.
std::cout << google::ProgramUsage();
return 0; return 0;
} }
webrtc::test::InitFieldTrialsFromString(FLAGS_force_fieldtrials); if (argc != 2) {
// Print usage information.
std::cout << usage;
return 0;
}
webrtc::test::InitFieldTrialsFromString(FLAG_force_fieldtrials);
std::string filename = argv[1]; std::string filename = argv[1];
@ -118,15 +122,15 @@ int main(int argc, char* argv[]) {
std::unique_ptr<webrtc::plotting::PlotCollection> collection( std::unique_ptr<webrtc::plotting::PlotCollection> collection(
new webrtc::plotting::PythonPlotCollection()); new webrtc::plotting::PythonPlotCollection());
if (FLAGS_plot_all || FLAGS_plot_packets) { if (FLAG_plot_all || FLAG_plot_packets) {
if (FLAGS_incoming) { if (FLAG_incoming) {
analyzer.CreatePacketGraph(webrtc::PacketDirection::kIncomingPacket, analyzer.CreatePacketGraph(webrtc::PacketDirection::kIncomingPacket,
collection->AppendNewPlot()); collection->AppendNewPlot());
analyzer.CreateAccumulatedPacketsGraph( analyzer.CreateAccumulatedPacketsGraph(
webrtc::PacketDirection::kIncomingPacket, webrtc::PacketDirection::kIncomingPacket,
collection->AppendNewPlot()); collection->AppendNewPlot());
} }
if (FLAGS_outgoing) { if (FLAG_outgoing) {
analyzer.CreatePacketGraph(webrtc::PacketDirection::kOutgoingPacket, analyzer.CreatePacketGraph(webrtc::PacketDirection::kOutgoingPacket,
collection->AppendNewPlot()); collection->AppendNewPlot());
analyzer.CreateAccumulatedPacketsGraph( analyzer.CreateAccumulatedPacketsGraph(
@ -135,95 +139,95 @@ int main(int argc, char* argv[]) {
} }
} }
if (FLAGS_plot_all || FLAGS_plot_audio_playout) { if (FLAG_plot_all || FLAG_plot_audio_playout) {
analyzer.CreatePlayoutGraph(collection->AppendNewPlot()); analyzer.CreatePlayoutGraph(collection->AppendNewPlot());
} }
if (FLAGS_plot_all || FLAGS_plot_audio_level) { if (FLAG_plot_all || FLAG_plot_audio_level) {
analyzer.CreateAudioLevelGraph(collection->AppendNewPlot()); analyzer.CreateAudioLevelGraph(collection->AppendNewPlot());
} }
if (FLAGS_plot_all || FLAGS_plot_sequence_number) { if (FLAG_plot_all || FLAG_plot_sequence_number) {
if (FLAGS_incoming) { if (FLAG_incoming) {
analyzer.CreateSequenceNumberGraph(collection->AppendNewPlot()); analyzer.CreateSequenceNumberGraph(collection->AppendNewPlot());
} }
} }
if (FLAGS_plot_all || FLAGS_plot_delay_change) { if (FLAG_plot_all || FLAG_plot_delay_change) {
if (FLAGS_incoming) { if (FLAG_incoming) {
analyzer.CreateDelayChangeGraph(collection->AppendNewPlot()); analyzer.CreateDelayChangeGraph(collection->AppendNewPlot());
} }
} }
if (FLAGS_plot_all || FLAGS_plot_accumulated_delay_change) { if (FLAG_plot_all || FLAG_plot_accumulated_delay_change) {
if (FLAGS_incoming) { if (FLAG_incoming) {
analyzer.CreateAccumulatedDelayChangeGraph(collection->AppendNewPlot()); analyzer.CreateAccumulatedDelayChangeGraph(collection->AppendNewPlot());
} }
} }
if (FLAGS_plot_all || FLAGS_plot_fraction_loss) { if (FLAG_plot_all || FLAG_plot_fraction_loss) {
analyzer.CreateFractionLossGraph(collection->AppendNewPlot()); analyzer.CreateFractionLossGraph(collection->AppendNewPlot());
analyzer.CreateIncomingPacketLossGraph(collection->AppendNewPlot()); analyzer.CreateIncomingPacketLossGraph(collection->AppendNewPlot());
} }
if (FLAGS_plot_all || FLAGS_plot_total_bitrate) { if (FLAG_plot_all || FLAG_plot_total_bitrate) {
if (FLAGS_incoming) { if (FLAG_incoming) {
analyzer.CreateTotalBitrateGraph(webrtc::PacketDirection::kIncomingPacket, analyzer.CreateTotalBitrateGraph(webrtc::PacketDirection::kIncomingPacket,
collection->AppendNewPlot()); collection->AppendNewPlot());
} }
if (FLAGS_outgoing) { if (FLAG_outgoing) {
analyzer.CreateTotalBitrateGraph(webrtc::PacketDirection::kOutgoingPacket, analyzer.CreateTotalBitrateGraph(webrtc::PacketDirection::kOutgoingPacket,
collection->AppendNewPlot()); collection->AppendNewPlot());
} }
} }
if (FLAGS_plot_all || FLAGS_plot_stream_bitrate) { if (FLAG_plot_all || FLAG_plot_stream_bitrate) {
if (FLAGS_incoming) { if (FLAG_incoming) {
analyzer.CreateStreamBitrateGraph( analyzer.CreateStreamBitrateGraph(
webrtc::PacketDirection::kIncomingPacket, webrtc::PacketDirection::kIncomingPacket,
collection->AppendNewPlot()); collection->AppendNewPlot());
} }
if (FLAGS_outgoing) { if (FLAG_outgoing) {
analyzer.CreateStreamBitrateGraph( analyzer.CreateStreamBitrateGraph(
webrtc::PacketDirection::kOutgoingPacket, webrtc::PacketDirection::kOutgoingPacket,
collection->AppendNewPlot()); collection->AppendNewPlot());
} }
} }
if (FLAGS_plot_all || FLAGS_plot_bwe) { if (FLAG_plot_all || FLAG_plot_bwe) {
analyzer.CreateBweSimulationGraph(collection->AppendNewPlot()); analyzer.CreateBweSimulationGraph(collection->AppendNewPlot());
} }
if (FLAGS_plot_all || FLAGS_plot_network_delay_feedback) { if (FLAG_plot_all || FLAG_plot_network_delay_feedback) {
analyzer.CreateNetworkDelayFeedbackGraph(collection->AppendNewPlot()); analyzer.CreateNetworkDelayFeedbackGraph(collection->AppendNewPlot());
} }
if (FLAGS_plot_all || FLAGS_plot_timestamps) { if (FLAG_plot_all || FLAG_plot_timestamps) {
analyzer.CreateTimestampGraph(collection->AppendNewPlot()); analyzer.CreateTimestampGraph(collection->AppendNewPlot());
} }
if (FLAGS_plot_all || FLAGS_audio_encoder_bitrate_bps) { if (FLAG_plot_all || FLAG_audio_encoder_bitrate_bps) {
analyzer.CreateAudioEncoderTargetBitrateGraph(collection->AppendNewPlot()); analyzer.CreateAudioEncoderTargetBitrateGraph(collection->AppendNewPlot());
} }
if (FLAGS_plot_all || FLAGS_audio_encoder_frame_length_ms) { if (FLAG_plot_all || FLAG_audio_encoder_frame_length_ms) {
analyzer.CreateAudioEncoderFrameLengthGraph(collection->AppendNewPlot()); analyzer.CreateAudioEncoderFrameLengthGraph(collection->AppendNewPlot());
} }
if (FLAGS_plot_all || FLAGS_audio_encoder_uplink_packet_loss_fraction) { if (FLAG_plot_all || FLAG_audio_encoder_uplink_packet_loss_fraction) {
analyzer.CreateAudioEncoderUplinkPacketLossFractionGraph( analyzer.CreateAudioEncoderUplinkPacketLossFractionGraph(
collection->AppendNewPlot()); collection->AppendNewPlot());
} }
if (FLAGS_plot_all || FLAGS_audio_encoder_fec) { if (FLAG_plot_all || FLAG_audio_encoder_fec) {
analyzer.CreateAudioEncoderEnableFecGraph(collection->AppendNewPlot()); analyzer.CreateAudioEncoderEnableFecGraph(collection->AppendNewPlot());
} }
if (FLAGS_plot_all || FLAGS_audio_encoder_dtx) { if (FLAG_plot_all || FLAG_audio_encoder_dtx) {
analyzer.CreateAudioEncoderEnableDtxGraph(collection->AppendNewPlot()); analyzer.CreateAudioEncoderEnableDtxGraph(collection->AppendNewPlot());
} }
if (FLAGS_plot_all || FLAGS_audio_encoder_num_channels) { if (FLAG_plot_all || FLAG_audio_encoder_num_channels) {
analyzer.CreateAudioEncoderNumChannelsGraph(collection->AppendNewPlot()); analyzer.CreateAudioEncoderNumChannelsGraph(collection->AppendNewPlot());
} }