chore: Fix linter findings for `revive:enforce-repeated-arg-type-style` in `plugins/outputs` and `plugins/parsers`
zak-pawel committed Sep 5, 2024
1 parent fc2d714 commit 6cb40dc
Showing 29 changed files with 39 additions and 78 deletions.
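
For context: revive's `enforce-repeated-arg-type-style` rule flags signatures in which consecutive parameters (or named results) of the same type each repeat the type name; configured for the "short" style, it asks for the shared type to be written once, which is exactly the shape every hunk below takes. A minimal sketch of the before/after, reusing the escapeString helper changed in cratedb.go below (the package wrapper and main function are illustrative additions, not part of this commit):

package main

import (
	"fmt"
	"strings"
)

// Before (flagged by the rule): the string type is repeated for each parameter.
//     func escapeString(s string, quote string) string
//
// After (short style): consecutive parameters share a single type declaration.
func escapeString(s, quote string) string {
	return quote + strings.ReplaceAll(s, quote, quote+quote) + quote
}

func main() {
	fmt.Println(escapeString(`say "hi"`, `"`)) // prints "say ""hi"""
}
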
16 changes: 2 additions & 14 deletions plugins/outputs/application_insights/application_insights_test.go
@@ -389,13 +389,7 @@ func unfinished() <-chan struct{} {
return unfinished
}

- func verifyAggregateTelemetry(
- t *testing.T,
- m telegraf.Metric,
- valueField string,
- countField string,
- telemetry *appinsights.AggregateMetricTelemetry,
- ) {
+ func verifyAggregateTelemetry(t *testing.T, m telegraf.Metric, valueField, countField string, telemetry *appinsights.AggregateMetricTelemetry) {
verifyAggregateField := func(fieldName string, telemetryValue float64) {
metricRawFieldValue, found := m.Fields()[fieldName]
if !found {
@@ -417,13 +411,7 @@ func verifyAggregateTelemetry(
assertMapContains(t, m.Tags(), telemetry.Properties)
}

- func verifySimpleTelemetry(
- t *testing.T,
- m telegraf.Metric,
- valueField string,
- expectedTelemetryName string,
- telemetry *appinsights.MetricTelemetry,
- ) {
+ func verifySimpleTelemetry(t *testing.T, m telegraf.Metric, valueField, expectedTelemetryName string, telemetry *appinsights.MetricTelemetry) {
require.Equal(t, expectedTelemetryName, telemetry.Name, "Telemetry name is not what was expected")
require.InDelta(t, m.Fields()[valueField], telemetry.Value, testutil.DefaultDelta, "Telemetry value does not match metric value field")
require.Equal(t, m.Time(), telemetry.Timestamp, "Telemetry and metric timestamps do not match")
2 changes: 1 addition & 1 deletion plugins/outputs/application_insights/transmitter.go
@@ -8,7 +8,7 @@ type Transmitter struct {
client appinsights.TelemetryClient
}

- func NewTransmitter(ikey string, endpointURL string) *Transmitter {
+ func NewTransmitter(ikey, endpointURL string) *Transmitter {
if len(endpointURL) == 0 {
return &Transmitter{client: appinsights.NewTelemetryClient(ikey)}
}
2 changes: 1 addition & 1 deletion plugins/outputs/azure_data_explorer/azure_data_explorer.go
@@ -256,7 +256,7 @@ func init() {
}

// For each table create the ingestor
- func createIngestorByTable(client *kusto.Client, database string, tableName string, ingestionType string) (ingest.Ingestor, error) {
+ func createIngestorByTable(client *kusto.Client, database, tableName, ingestionType string) (ingest.Ingestor, error) {
switch strings.ToLower(ingestionType) {
case managedIngestion:
mi, err := ingest.NewManaged(client, database, tableName)
2 changes: 1 addition & 1 deletion plugins/outputs/azure_monitor/azure_monitor.go
@@ -184,7 +184,7 @@ func (a *AzureMonitor) initHTTPClient() {
}

// vmMetadata retrieves metadata about the current Azure VM
- func vmInstanceMetadata(c *http.Client) (region string, resourceID string, err error) {
+ func vmInstanceMetadata(c *http.Client) (region, resourceID string, err error) {
req, err := http.NewRequest("GET", vmInstanceMetadataURL, nil)
if err != nil {
return "", "", fmt.Errorf("error creating request: %w", err)
2 changes: 1 addition & 1 deletion plugins/outputs/cloud_pubsub/cloud_pubsub_test.go
@@ -193,7 +193,7 @@ func verifyRawMetricPublished(t *testing.T, m telegraf.Metric, published map[str
return verifyMetricPublished(t, m, published, false, false)
}

- func verifyMetricPublished(t *testing.T, m telegraf.Metric, published map[string]*pubsub.Message, base64Encoded bool, gzipEncoded bool) *pubsub.Message {
+ func verifyMetricPublished(t *testing.T, m telegraf.Metric, published map[string]*pubsub.Message, base64Encoded, gzipEncoded bool) *pubsub.Message {
p := influx.Parser{}
require.NoError(t, p.Init())

2 changes: 1 addition & 1 deletion plugins/outputs/cloudwatch/cloudwatch.go
@@ -251,7 +251,7 @@ func PartitionDatums(size int, datums []types.MetricDatum) [][]types.MetricDatum
// BuildMetricDatum makes a MetricDatum from telegraf.Metric. It would check if all required fields of
// cloudwatch.StatisticSet are available. If so, it would build MetricDatum from statistic values.
// Otherwise, fields would still been built independently.
- func BuildMetricDatum(buildStatistic bool, highResolutionMetrics bool, point telegraf.Metric) []types.MetricDatum {
+ func BuildMetricDatum(buildStatistic, highResolutionMetrics bool, point telegraf.Metric) []types.MetricDatum {
fields := make(map[string]cloudwatchField)
tags := point.Tags()
storageResolution := int64(60)
4 changes: 2 additions & 2 deletions plugins/outputs/cratedb/cratedb.go
@@ -103,7 +103,7 @@ func (c *CrateDB) Write(metrics []telegraf.Metric) error {
return nil
}

- func insertSQL(table string, keyReplacement string, metrics []telegraf.Metric) (string, error) {
+ func insertSQL(table, keyReplacement string, metrics []telegraf.Metric) (string, error) {
rows := make([]string, 0, len(metrics))
for _, m := range metrics {
cols := []interface{}{
@@ -213,7 +213,7 @@ func escapeObject(m map[string]interface{}, keyReplacement string) (string, erro

// escapeString wraps s in the given quote string and replaces all occurrences
// of it inside of s with a double quote.
- func escapeString(s string, quote string) string {
+ func escapeString(s, quote string) string {
return quote + strings.ReplaceAll(s, quote, quote+quote) + quote
}

2 changes: 1 addition & 1 deletion plugins/outputs/datadog/datadog.go
@@ -238,7 +238,7 @@ func verifyValue(v interface{}) bool {
return true
}

- func isRateable(statsDMetricType string, fieldName string) bool {
+ func isRateable(statsDMetricType, fieldName string) bool {
switch statsDMetricType {
case
"counter":
2 changes: 1 addition & 1 deletion plugins/outputs/exec/exec.go
@@ -104,7 +104,7 @@ type CommandRunner struct {
}

// Run runs the command.
- func (c *CommandRunner) Run(timeout time.Duration, command []string, environments []string, buffer io.Reader) error {
+ func (c *CommandRunner) Run(timeout time.Duration, command, environments []string, buffer io.Reader) error {
cmd := exec.Command(command[0], command[1:]...)
if len(environments) > 0 {
cmd.Env = append(os.Environ(), environments...)
2 changes: 1 addition & 1 deletion plugins/outputs/exec/exec_test.go
@@ -25,7 +25,7 @@ type MockRunner struct {
}

// Run runs the command.
- func (c *MockRunner) Run(_ time.Duration, _ []string, _ []string, buffer io.Reader) error {
+ func (c *MockRunner) Run(_ time.Duration, _, _ []string, buffer io.Reader) error {
parser := influxParser.NewStreamParser(buffer)
numMetrics := 0

2 changes: 1 addition & 1 deletion plugins/outputs/graylog/graylog.go
@@ -155,7 +155,7 @@ func (g *gelfUDP) Close() (err error) {
return err
}

- func (g *gelfUDP) createChunkedMessage(index int, chunkCountInt int, id []byte, compressed *bytes.Buffer) (bytes.Buffer, error) {
+ func (g *gelfUDP) createChunkedMessage(index, chunkCountInt int, id []byte, compressed *bytes.Buffer) (bytes.Buffer, error) {
var packet bytes.Buffer

chunksize := g.getChunksize()
2 changes: 1 addition & 1 deletion plugins/outputs/librato/librato.go
@@ -118,7 +118,7 @@ func (l *Librato) Write(metrics []telegraf.Metric) error {
return nil
}

- func (l *Librato) writeBatch(start int, sizeBatch int, metricCounter int, tempGauges []*Gauge) error {
+ func (l *Librato) writeBatch(start, sizeBatch, metricCounter int, tempGauges []*Gauge) error {
lmetrics := LMetrics{}
end := start + sizeBatch
if end > metricCounter {
2 changes: 1 addition & 1 deletion plugins/outputs/mqtt/topic_name_generator.go
@@ -16,7 +16,7 @@ type TopicNameGenerator struct {
template *template.Template
}

- func NewTopicNameGenerator(topicPrefix string, topic string) (*TopicNameGenerator, error) {
+ func NewTopicNameGenerator(topicPrefix, topic string) (*TopicNameGenerator, error) {
tt, err := template.New("topic_name").Parse(topic)
if err != nil {
return nil, err
2 changes: 1 addition & 1 deletion plugins/outputs/parquet/parquet.go
@@ -275,7 +275,7 @@ func (p *Parquet) createSchema(metrics []telegraf.Metric) (*arrow.Schema, error)
return arrow.NewSchema(fields, nil), nil
}

- func (p *Parquet) createWriter(name string, filename string, schema *arrow.Schema) (*pqarrow.FileWriter, error) {
+ func (p *Parquet) createWriter(name, filename string, schema *arrow.Schema) (*pqarrow.FileWriter, error) {
if _, err := os.Stat(filename); err == nil {
now := time.Now()
rotatedFilename := fmt.Sprintf("%s/%s-%s-%s.parquet", p.Directory, name, now.Format("2006-01-02"), strconv.FormatInt(now.Unix(), 10))
2 changes: 1 addition & 1 deletion plugins/outputs/postgresql/sqltemplate/template.go
@@ -404,7 +404,7 @@ func (t *Template) UnmarshalText(text []byte) error {
return nil
}

- func (t *Template) Render(table *Table, newColumns []utils.Column, metricTable *Table, tagTable *Table) ([]byte, error) {
+ func (t *Template) Render(table *Table, newColumns []utils.Column, metricTable, tagTable *Table) ([]byte, error) {
tcs := NewColumns(newColumns).Sorted()
data := map[string]interface{}{
"table": table,
9 changes: 3 additions & 6 deletions plugins/outputs/postgresql/table_manager.go
@@ -154,10 +154,8 @@ func (tm *TableManager) EnsureStructure(
db dbh,
tbl *tableState,
columns []utils.Column,
- createTemplates []*sqltemplate.Template,
- addColumnsTemplates []*sqltemplate.Template,
- metricsTable *tableState,
- tagsTable *tableState,
+ createTemplates, addColumnsTemplates []*sqltemplate.Template,
+ metricsTable, tagsTable *tableState,
) ([]utils.Column, error) {
// Sort so that:
// * When we create/alter the table the columns are in a sane order (telegraf gives us the fields in random order)
@@ -353,8 +351,7 @@ func (tm *TableManager) update(ctx context.Context,
state *tableState,
tmpls []*sqltemplate.Template,
missingCols []utils.Column,
- metricsTable *tableState,
- tagsTable *tableState,
+ metricsTable, tagsTable *tableState,
) error {
tmplTable := sqltemplate.NewTable(tm.Schema, state.name, colMapToSlice(state.columns))
metricsTmplTable := sqltemplate.NewTable(tm.Schema, metricsTable.name, colMapToSlice(metricsTable.columns))
8 changes: 1 addition & 7 deletions plugins/outputs/prometheus_client/v1/collector.go
@@ -62,13 +62,7 @@ type Collector struct {
expireTicker *time.Ticker
}

- func NewCollector(
- expire time.Duration,
- stringsAsLabel bool,
- exportTimestamp bool,
- typeMapping serializer.MetricTypes,
- logger telegraf.Logger,
- ) *Collector {
+ func NewCollector(expire time.Duration, stringsAsLabel, exportTimestamp bool, typeMapping serializer.MetricTypes, logger telegraf.Logger) *Collector {
c := &Collector{
ExpirationInterval: expire,
StringAsLabel: stringsAsLabel,
7 changes: 1 addition & 6 deletions plugins/outputs/prometheus_client/v2/collector.go
@@ -43,12 +43,7 @@ type Collector struct {
coll *serializer.Collection
}

- func NewCollector(
- expire time.Duration,
- stringsAsLabel bool,
- exportTimestamp bool,
- typeMapping serializer.MetricTypes,
- ) *Collector {
+ func NewCollector(expire time.Duration, stringsAsLabel, exportTimestamp bool, typeMapping serializer.MetricTypes) *Collector {
cfg := serializer.FormatConfig{
StringAsLabel: stringsAsLabel,
ExportTimestamp: exportTimestamp,
2 changes: 1 addition & 1 deletion plugins/outputs/riemann/riemann.go
@@ -141,7 +141,7 @@ func (r *Riemann) attributes(name string, tags map[string]string) map[string]str
return tags
}

- func (r *Riemann) service(name string, field string) string {
+ func (r *Riemann) service(name, field string) string {
var serviceStrings []string

// if measurement is not enabled as an attribute then prepend it to service name
2 changes: 1 addition & 1 deletion plugins/outputs/signalfx/signalfx.go
@@ -199,7 +199,7 @@ func (s *SignalFx) isEventIncluded(name string) bool {
}

// getMetricName combines telegraf fields and tags into a full metric name
- func getMetricName(metric string, field string) string {
+ func getMetricName(metric, field string) string {
name := metric

// Include field in metric name when it adds to the metric name
6 changes: 1 addition & 5 deletions plugins/outputs/stackdriver/stackdriver.go
@@ -474,11 +474,7 @@ func getStackdriverIntervalEndpoints(
return startTime, endTime
}

- func getStackdriverTimeInterval(
- m metricpb.MetricDescriptor_MetricKind,
- startTime *timestamppb.Timestamp,
- endTime *timestamppb.Timestamp,
- ) (*monitoringpb.TimeInterval, error) {
+ func getStackdriverTimeInterval(m metricpb.MetricDescriptor_MetricKind, startTime, endTime *timestamppb.Timestamp) (*monitoringpb.TimeInterval, error) {
switch m {
case metricpb.MetricDescriptor_GAUGE:
return &monitoringpb.TimeInterval{
2 changes: 1 addition & 1 deletion plugins/outputs/syslog/syslog_mapper.go
@@ -52,7 +52,7 @@ func (sm *SyslogMapper) mapStructuredData(metric telegraf.Metric, msg *rfc5424.S
}
}

- func (sm *SyslogMapper) mapStructuredDataItem(key string, value string, msg *rfc5424.SyslogMessage) {
+ func (sm *SyslogMapper) mapStructuredDataItem(key, value string, msg *rfc5424.SyslogMessage) {
if sm.reservedKeys[key] {
return
}
2 changes: 1 addition & 1 deletion plugins/outputs/timestream/timestream_test.go
@@ -452,7 +452,7 @@ func TestBuildMultiMeasuresInSingleAndMultiTableMode(t *testing.T) {
"will contain request: %+v\n\n", result, expectedResultSingleTable)
}

- func buildExpectedMultiRecords(multiMeasureName string, tableName string) *timestreamwrite.WriteRecordsInput {
+ func buildExpectedMultiRecords(multiMeasureName, tableName string) *timestreamwrite.WriteRecordsInput {
var recordsMultiTableMode []types.Record
recordDouble := buildMultiRecords([]SimpleInput{
{
6 changes: 1 addition & 5 deletions plugins/outputs/zabbix/autoregister_test.go
@@ -112,11 +112,7 @@ func (m *mockZabbixSender) Send(packet *zabbix.Packet) (res zabbix.Response, err
return zabbix.Response{}, nil
}

- func (m *mockZabbixSender) SendMetrics(metrics []*zabbix.Metric) (
- resActive zabbix.Response,
- resTrapper zabbix.Response,
- err error,
- ) {
+ func (m *mockZabbixSender) SendMetrics(metrics []*zabbix.Metric) (resActive, resTrapper zabbix.Response, err error) {
m.sendMetrics = append(m.sendMetrics, metrics...)
return zabbix.Response{}, zabbix.Response{}, nil
}
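
The rule covers named result parameters as well as arguments, which is why the SendMetrics results above collapse to (resActive, resTrapper zabbix.Response, err error). A self-contained sketch of the same pattern on return values (minMax is hypothetical, not code from this commit):

package main

import "fmt"

// Two named results of the same type share one type declaration,
// mirroring the collapsed SendMetrics signature above.
func minMax(values []int) (minV, maxV int) {
	for i, v := range values {
		if i == 0 || v < minV {
			minV = v
		}
		if i == 0 || v > maxV {
			maxV = v
		}
	}
	return minV, maxV
}

func main() {
	fmt.Println(minMax([]int{3, 1, 4, 1, 5})) // 1 5
}
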
2 changes: 1 addition & 1 deletion plugins/outputs/zabbix/zabbix_test.go
@@ -760,7 +760,7 @@ func TestAutoRegister(t *testing.T) {
// compareData compares generated data with expected data ignoring slice order if all Clocks are the same.
// This is useful for metrics with several fields that should produce several Zabbix values that
// could not be sorted by clock
- func compareData(t *testing.T, expected []zabbixRequestData, data []zabbixRequestData) {
+ func compareData(t *testing.T, expected, data []zabbixRequestData) {
t.Helper()

var clock int64
2 changes: 1 addition & 1 deletion plugins/parsers/avro/schema_registry.go
@@ -29,7 +29,7 @@ type schemaRegistry struct {

const schemaByID = "%s/schemas/ids/%d"

- func newSchemaRegistry(addr string, caCertPath string) (*schemaRegistry, error) {
+ func newSchemaRegistry(addr, caCertPath string) (*schemaRegistry, error) {
var client *http.Client
var tlsCfg *tls.Config
if caCertPath != "" {
4 changes: 2 additions & 2 deletions plugins/parsers/influx/escape.go
@@ -54,13 +54,13 @@ func stringFieldUnescape(b []byte) string {
}

// parseIntBytes is a zero-alloc wrapper around strconv.ParseInt.
- func parseIntBytes(b []byte, base int, bitSize int) (i int64, err error) {
+ func parseIntBytes(b []byte, base, bitSize int) (i int64, err error) {
s := unsafeBytesToString(b)
return strconv.ParseInt(s, base, bitSize)
}

// parseUintBytes is a zero-alloc wrapper around strconv.ParseUint.
- func parseUintBytes(b []byte, base int, bitSize int) (i uint64, err error) {
+ func parseUintBytes(b []byte, base, bitSize int) (i uint64, err error) {
s := unsafeBytesToString(b)
return strconv.ParseUint(s, base, bitSize)
}
12 changes: 6 additions & 6 deletions plugins/parsers/influx/handler.go
@@ -51,14 +51,14 @@ func (h *MetricHandler) SetMeasurement(name []byte) error {
return nil
}

- func (h *MetricHandler) AddTag(key []byte, value []byte) error {
+ func (h *MetricHandler) AddTag(key, value []byte) error {
tk := unescape(key)
tv := unescape(value)
h.metric.AddTag(tk, tv)
return nil
}

- func (h *MetricHandler) AddInt(key []byte, value []byte) error {
+ func (h *MetricHandler) AddInt(key, value []byte) error {
fk := unescape(key)
fv, err := parseIntBytes(bytes.TrimSuffix(value, []byte("i")), 10, 64)
if err != nil {
@@ -72,7 +72,7 @@ func (h *MetricHandler) AddInt(key []byte, value []byte) error {
return nil
}

- func (h *MetricHandler) AddUint(key []byte, value []byte) error {
+ func (h *MetricHandler) AddUint(key, value []byte) error {
fk := unescape(key)
fv, err := parseUintBytes(bytes.TrimSuffix(value, []byte("u")), 10, 64)
if err != nil {
@@ -86,7 +86,7 @@ func (h *MetricHandler) AddUint(key []byte, value []byte) error {
return nil
}

- func (h *MetricHandler) AddFloat(key []byte, value []byte) error {
+ func (h *MetricHandler) AddFloat(key, value []byte) error {
fk := unescape(key)
fv, err := parseFloatBytes(value, 64)
if err != nil {
@@ -100,14 +100,14 @@ func (h *MetricHandler) AddFloat(key []byte, value []byte) error {
return nil
}

- func (h *MetricHandler) AddString(key []byte, value []byte) error {
+ func (h *MetricHandler) AddString(key, value []byte) error {
fk := unescape(key)
fv := stringFieldUnescape(value)
h.metric.AddField(fk, fv)
return nil
}

- func (h *MetricHandler) AddBool(key []byte, value []byte) error {
+ func (h *MetricHandler) AddBool(key, value []byte) error {
fk := unescape(key)
fv, err := parseBoolBytes(value)
if err != nil {
7 changes: 1 addition & 6 deletions plugins/parsers/json/json_flattener.go
@@ -21,12 +21,7 @@ func (f *JSONFlattener) FlattenJSON(
}

// FullFlattenJSON flattens nested maps/interfaces into a fields map (including bools and string)
- func (f *JSONFlattener) FullFlattenJSON(
- fieldName string,
- v interface{},
- convertString bool,
- convertBool bool,
- ) error {
+ func (f *JSONFlattener) FullFlattenJSON(fieldName string, v interface{}, convertString, convertBool bool) error {
if f.Fields == nil {
f.Fields = make(map[string]interface{})
}
