refactor: Move from global state to functions (#53)

This commit represents a few experiments of features I've used in Cobra

1. Uses cli.GenericFlag to encapsulate parsing and validation of flag
   values at parse time. This removes the burden from the individual
   CLI commands to parse and validate args and options.

2. Add influxid.ID that may be used by any flag that requires an
   Influx ID. influxid.ID parses and validates string value is a valid
   ID, removing this burden from individual commands and ensuring valid
   values before the command action begins.

3. Binds cli.Flags directly to params structures to directly capture
   the values when parsing flags.

4. Moves from global state to local builder functions for the majority
   of the commands. This allows the commands to bind to flag variables
   reducing the repeated ctx.String(), ctx.Int(), etc

5. Leverages the BeforeFunc to create middleware and inject the CLI and
   API client into commands, saving the repeated boilerplate across
   all of the instantiated commands. This is extensible, so additional
   middleware can be appended using middleware.WithBeforeFns.
This commit is contained in:
Stuart Carnie
2021-05-03 23:31:45 +10:00
committed by GitHub
parent 0b4d753728
commit 3414e1a983
13 changed files with 1058 additions and 569 deletions

View File

@ -11,271 +11,236 @@ import (
"github.com/influxdata/influx-cli/v2/internal/batcher"
"github.com/influxdata/influx-cli/v2/internal/linereader"
"github.com/influxdata/influx-cli/v2/internal/throttler"
"github.com/influxdata/influx-cli/v2/pkg/cli/middleware"
"github.com/urfave/cli/v2"
)
var writeFlags = append(
commonFlagsNoPrint,
&cli.StringFlag{
Name: "bucket-id",
Usage: "The ID of destination bucket",
EnvVars: []string{"INFLUX_BUCKET_ID"},
},
&cli.StringFlag{
Name: "bucket",
Usage: "The name of destination bucket",
Aliases: []string{"b"},
EnvVars: []string{"INFLUX_BUCKET_NAME"},
},
&cli.StringFlag{
Name: "org-id",
Usage: "The ID of the organization",
EnvVars: []string{"INFLUX_ORG_ID"},
},
&cli.StringFlag{
Name: "org",
Usage: "The name of the organization",
Aliases: []string{"o"},
EnvVars: []string{"INFLUX_ORG"},
},
&cli.StringFlag{
Name: "precision",
Usage: "Precision of the timestamps of the lines",
Aliases: []string{"p"},
EnvVars: []string{"INFLUX_PRECISION"},
Value: "ns",
},
&cli.StringFlag{
Name: "format",
Usage: "Input format, either 'lp' (Line Protocol) or 'csv' (Comma Separated Values)",
DefaultText: "'lp' unless '.csv' extension",
},
&cli.StringSliceFlag{
Name: "header",
Usage: "Header prepends lines to input data",
},
&cli.StringSliceFlag{
Name: "file",
Usage: "The path to the file to import",
Aliases: []string{"f"},
TakesFile: true,
},
&cli.StringSliceFlag{
Name: "url",
Usage: "The URL to import data from",
Aliases: []string{"u"},
},
&cli.BoolFlag{
Name: "debug",
Usage: "Log CSV columns to stderr before reading data rows",
},
&cli.BoolFlag{
Name: "skipRowOnError",
Usage: "Log CSV data errors to stderr and continue with CSV processing",
},
// NOTE: The old CLI allowed this flag to be used as an int _or_ a bool, with the bool form being
// short-hand for N=1. urfave/cli isn't that flexible.
&cli.IntFlag{
Name: "skipHeader",
Usage: "Skip the first <n> rows from input data",
},
&cli.IntFlag{
Name: "max-line-length",
Usage: "Specifies the maximum number of bytes that can be read for a single line",
Value: 16_000_000,
},
&cli.BoolFlag{
Name: "xIgnoreDataTypeInColumnName",
Usage: "Ignores dataType which could be specified after ':' in column name",
Hidden: true,
},
&cli.StringFlag{
Name: "encoding",
Usage: "Character encoding of input files or stdin",
Value: "UTF-8",
},
&cli.StringFlag{
Name: "errors-file",
Usage: "The path to the file to write rejected rows to",
TakesFile: true,
},
&cli.StringFlag{
Name: "rate-limit",
Usage: `Throttles write, examples: "5 MB / 5 min" , "17kBs"`,
DefaultText: "no throttling",
},
&cli.StringFlag{
Name: "compression",
Usage: "Input compression, either 'none' or 'gzip'",
DefaultText: "'none' unless an input has a '.gz' extension",
},
)
type writeParams struct {
Files cli.StringSlice
URLs cli.StringSlice
Format linereader.InputFormat
Compression linereader.InputCompression
Encoding string
var writeCmd = cli.Command{
Name: "write",
Usage: "Write points to InfluxDB",
Description: "Write data to InfluxDB via stdin, or add an entire file specified with the -f flag",
Flags: writeFlags,
Action: func(ctx *cli.Context) error {
format, err := parseFormat(ctx.String("format"))
if err != nil {
return err
}
compression, err := parseCompression(ctx.String("compression"))
if err != nil {
return err
}
precision, err := parsePrecision(ctx.String("precision"))
if err != nil {
return err
}
// CSV-specific options.
Headers cli.StringSlice
SkipRowOnError bool
SkipHeader int
IgnoreDataTypeInColumnName bool
Debug bool
var errorOut io.Writer
if ctx.IsSet("errors-file") {
errorFile, err := os.Open(ctx.String("errors-file"))
if err != nil {
return fmt.Errorf("failed to open errors-file: %w", err)
}
defer errorFile.Close()
errorOut = errorFile
}
ErrorsFile string
MaxLineLength int
RateLimit throttler.BytesPerSec
throttler, err := throttler.NewThrottler(ctx.String("rate-limit"))
if err != nil {
return err
}
cli, err := newCli(ctx)
if err != nil {
return err
}
client, err := newApiClient(ctx, cli, true)
if err != nil {
return err
}
writeClients := &internal.WriteClients{
Client: client.WriteApi,
Reader: &linereader.MultiInputLineReader{
StdIn: os.Stdin,
HttpClient: http.DefaultClient,
ErrorOut: errorOut,
Args: ctx.Args().Slice(),
Files: ctx.StringSlice("file"),
URLs: ctx.StringSlice("url"),
Format: format,
Compression: compression,
Encoding: ctx.String("encoding"),
Headers: ctx.StringSlice("header"),
SkipRowOnError: ctx.Bool("skipRowOnError"),
SkipHeader: ctx.Int("skipHeader"),
IgnoreDataTypeInColumnName: ctx.Bool("xIgnoreDataTypeInColumnName"),
Debug: ctx.Bool("debug"),
},
Throttler: throttler,
Writer: &batcher.BufferBatcher{
MaxFlushBytes: batcher.DefaultMaxBytes,
MaxFlushInterval: batcher.DefaultInterval,
MaxLineLength: ctx.Int("max-line-length"),
},
}
internal.WriteParams
}
return cli.Write(standardCtx(ctx), writeClients, &internal.WriteParams{
BucketID: ctx.String("bucket-id"),
BucketName: ctx.String("bucket"),
OrgID: ctx.String("org-id"),
OrgName: ctx.String("org"),
Precision: precision,
})
},
Subcommands: []*cli.Command{
{
Name: "dryrun",
Usage: "Write to stdout instead of InfluxDB",
Description: "Write protocol lines to stdout instead of InfluxDB. Troubleshoot conversion from CSV to line protocol",
Flags: writeFlags,
Action: func(ctx *cli.Context) error {
format, err := parseFormat(ctx.String("format"))
if err != nil {
return err
}
compression, err := parseCompression(ctx.String("compression"))
if err != nil {
return err
}
// makeLineReader assembles a MultiInputLineReader that merges input from
// stdin, positional args, files (--file), and URLs (--url) according to the
// parsed write flags. Rejected rows are reported to errorOut, which may be
// nil when no errors-file was configured.
func (wp *writeParams) makeLineReader(args []string, errorOut io.Writer) *linereader.MultiInputLineReader {
	reader := linereader.MultiInputLineReader{
		StdIn:                      os.Stdin,
		HttpClient:                 http.DefaultClient,
		ErrorOut:                   errorOut,
		Args:                       args,
		Files:                      wp.Files.Value(),
		URLs:                       wp.URLs.Value(),
		Format:                     wp.Format,
		Compression:                wp.Compression,
		Encoding:                   wp.Encoding,
		Headers:                    wp.Headers.Value(),
		SkipRowOnError:             wp.SkipRowOnError,
		SkipHeader:                 wp.SkipHeader,
		IgnoreDataTypeInColumnName: wp.IgnoreDataTypeInColumnName,
		Debug:                      wp.Debug,
	}
	return &reader
}
var errorOut io.Writer
if ctx.IsSet("errors-file") {
errorFile, err := os.Open(ctx.String("errors-file"))
if err != nil {
return fmt.Errorf("failed to open errors-file: %w", err)
}
defer errorFile.Close()
errorOut = errorFile
}
// makeErrorFile opens the file configured via --errors-file so rejected rows
// can be written to it. It returns (nil, nil) when no errors-file was
// requested, so callers must tolerate a nil *os.File.
func (p *writeParams) makeErrorFile() (*os.File, error) {
	if p.ErrorsFile == "" {
		return nil, nil
	}
	// The errors-file is a write destination (it is wired up as an io.Writer
	// for rejected rows). os.Open would return a read-only handle and every
	// write to it would fail, so create/truncate the file instead.
	errorFile, err := os.Create(p.ErrorsFile)
	if err != nil {
		return nil, fmt.Errorf("failed to open errors-file: %w", err)
	}
	return errorFile, nil
}
cli, err := newCli(ctx)
if err != nil {
return err
}
reader := &linereader.MultiInputLineReader{
StdIn: os.Stdin,
HttpClient: http.DefaultClient,
ErrorOut: errorOut,
Args: ctx.Args().Slice(),
Files: ctx.StringSlice("file"),
URLs: ctx.StringSlice("url"),
Format: format,
Compression: compression,
Encoding: ctx.String("encoding"),
Headers: ctx.StringSlice("header"),
SkipRowOnError: ctx.Bool("skipRowOnError"),
SkipHeader: ctx.Int("skipHeader"),
IgnoreDataTypeInColumnName: ctx.Bool("xIgnoreDataTypeInColumnName"),
Debug: ctx.Bool("debug"),
}
return cli.WriteDryRun(standardCtx(ctx), reader)
},
func (p *writeParams) Flags() []cli.Flag {
return []cli.Flag{
&cli.StringFlag{
Name: "bucket-id",
Usage: "The ID of destination bucket",
EnvVars: []string{"INFLUX_BUCKET_ID"},
Destination: &p.BucketID,
},
&cli.StringFlag{
Name: "bucket",
Usage: "The name of destination bucket",
Aliases: []string{"b"},
EnvVars: []string{"INFLUX_BUCKET_NAME"},
Destination: &p.BucketName,
},
&cli.StringFlag{
Name: "org-id",
Usage: "The ID of the organization",
EnvVars: []string{"INFLUX_ORG_ID"},
Destination: &p.OrgID,
},
&cli.StringFlag{
Name: "org",
Usage: "The name of the organization",
Aliases: []string{"o"},
EnvVars: []string{"INFLUX_ORG"},
Destination: &p.OrgName,
},
&cli.GenericFlag{
Name: "precision",
Usage: "Precision of the timestamps of the lines",
Aliases: []string{"p"},
EnvVars: []string{"INFLUX_PRECISION"},
Value: &p.Precision,
},
&cli.GenericFlag{
Name: "format",
Usage: "Input format, either 'lp' (Line Protocol) or 'csv' (Comma Separated Values)",
DefaultText: "'lp' unless '.csv' extension",
Value: &p.Format,
},
&cli.StringSliceFlag{
Name: "header",
Usage: "Header prepends lines to input data",
Destination: &p.Headers,
},
&cli.StringSliceFlag{
Name: "file",
Usage: "The path to the file to import",
Aliases: []string{"f"},
TakesFile: true,
Destination: &p.Files,
},
&cli.StringSliceFlag{
Name: "url",
Usage: "The URL to import data from",
Aliases: []string{"u"},
Destination: &p.URLs,
},
&cli.BoolFlag{
Name: "debug",
Usage: "Log CSV columns to stderr before reading data rows",
Destination: &p.Debug,
},
&cli.BoolFlag{
Name: "skipRowOnError",
Usage: "Log CSV data errors to stderr and continue with CSV processing",
Destination: &p.SkipRowOnError,
},
// NOTE: The old CLI allowed this flag to be used as an int _or_ a bool, with the bool form being
// short-hand for N=1. urfave/cli isn't that flexible.
&cli.IntFlag{
Name: "skipHeader",
Usage: "Skip the first <n> rows from input data",
Destination: &p.SkipHeader,
},
&cli.IntFlag{
Name: "max-line-length",
Usage: "Specifies the maximum number of bytes that can be read for a single line",
Value: 16_000_000,
Destination: &p.MaxLineLength,
},
&cli.BoolFlag{
Name: "xIgnoreDataTypeInColumnName",
Usage: "Ignores dataType which could be specified after ':' in column name",
Hidden: true,
Destination: &p.IgnoreDataTypeInColumnName,
},
&cli.StringFlag{
Name: "encoding",
Usage: "Character encoding of input files or stdin",
Value: "UTF-8",
Destination: &p.Encoding,
},
&cli.StringFlag{
Name: "errors-file",
Usage: "The path to the file to write rejected rows to",
TakesFile: true,
Destination: &p.ErrorsFile,
},
&cli.GenericFlag{
Name: "rate-limit",
Usage: `Throttles write, examples: "5 MB / 5 min" , "17kBs"`,
DefaultText: "no throttling",
Value: &p.RateLimit,
},
&cli.GenericFlag{
Name: "compression",
Usage: "Input compression, either 'none' or 'gzip'",
DefaultText: "'none' unless an input has a '.gz' extension",
Value: &p.Compression,
},
},
}
// parseFormat maps the --format flag value to a linereader input format.
// An empty string means "derive the format from the input's extension".
func parseFormat(f string) (linereader.InputFormat, error) {
	known := map[string]linereader.InputFormat{
		"":    linereader.InputFormatDerived,
		"lp":  linereader.InputFormatLP,
		"csv": linereader.InputFormatCSV,
	}
	format, ok := known[f]
	if !ok {
		return 0, fmt.Errorf("unsupported format: %q", f)
	}
	return format, nil
}
func parseCompression(c string) (linereader.InputCompression, error) {
switch c {
case "":
return linereader.InputCompressionDerived, nil
case "none":
return linereader.InputCompressionNone, nil
case "gzip":
return linereader.InputCompressionGZIP, nil
default:
return 0, fmt.Errorf("unsupported compression: %q", c)
// newWriteCmd builds the `influx write` command. All flag values are bound to
// a writeParams captured by the Action closure, so parsing fills them in
// before the action runs; the CLI and API client are injected by the Before
// middleware and fetched with getCLI/getAPI.
func newWriteCmd() *cli.Command {
	p := writeParams{
		WriteParams: internal.WriteParams{
			// Default timestamp precision; overridable via --precision / INFLUX_PRECISION.
			Precision: api.WRITEPRECISION_NS,
		},
	}
	return &cli.Command{
		Name:        "write",
		Usage:       "Write points to InfluxDB",
		Description: "Write data to InfluxDB via stdin, or add an entire file specified with the -f flag",
		Before:      middleware.WithBeforeFns(withCli(), withApi(true)),
		Flags:       append(commonFlagsNoPrint, p.Flags()...),
		Action: func(ctx *cli.Context) error {
			errFile, err := p.makeErrorFile()
			if err != nil {
				return err
			}
			// errFile may be nil (no --errors-file); Close is nil-safe on
			// *os.File and its error is deliberately ignored.
			defer func() { _ = errFile.Close() }()

			batchWriter := &batcher.BufferBatcher{
				MaxFlushBytes:    batcher.DefaultMaxBytes,
				MaxFlushInterval: batcher.DefaultInterval,
				MaxLineLength:    p.MaxLineLength,
			}
			clients := internal.WriteClients{
				Client:    getAPI(ctx).WriteApi,
				Reader:    p.makeLineReader(ctx.Args().Slice(), errFile),
				Throttler: throttler.NewThrottler(p.RateLimit),
				Writer:    batchWriter,
			}
			return getCLI(ctx).Write(ctx.Context, &clients, &p.WriteParams)
		},
		Subcommands: []*cli.Command{
			newWriteDryRun(),
		},
	}
}
func parsePrecision(p string) (api.WritePrecision, error) {
switch p {
case "ms":
return api.WRITEPRECISION_MS, nil
case "s":
return api.WRITEPRECISION_S, nil
case "us":
return api.WRITEPRECISION_US, nil
case "ns":
return api.WRITEPRECISION_NS, nil
default:
return "", fmt.Errorf("unsupported precision: %q", p)
// newWriteDryRun builds the `write dryrun` subcommand, which converts input
// to line protocol and prints it to stdout instead of writing to InfluxDB —
// useful for troubleshooting CSV-to-line-protocol conversion.
func newWriteDryRun() *cli.Command {
	p := writeParams{
		WriteParams: internal.WriteParams{
			// Default timestamp precision; overridable via --precision / INFLUX_PRECISION.
			Precision: api.WRITEPRECISION_NS,
		},
	}
	return &cli.Command{
		Name:        "dryrun",
		Usage:       "Write to stdout instead of InfluxDB",
		Description: "Write protocol lines to stdout instead of InfluxDB. Troubleshoot conversion from CSV to line protocol",
		Before:      middleware.WithBeforeFns(withCli(), withApi(true)),
		Flags:       append(commonFlagsNoPrint, p.Flags()...),
		Action: func(ctx *cli.Context) error {
			errFile, err := p.makeErrorFile()
			if err != nil {
				return err
			}
			// errFile may be nil; Close is nil-safe on *os.File and its
			// error is deliberately ignored.
			defer func() { _ = errFile.Close() }()
			reader := p.makeLineReader(ctx.Args().Slice(), errFile)
			return getCLI(ctx).WriteDryRun(ctx.Context, reader)
		},
	}
}