textfile: set windows_exporter_collector_success to 0, if an error occurs (#1775)

Signed-off-by: Jan-Otto Kröpke <[email protected]>
jkroepke authored Nov 26, 2024
1 parent ca04ad8 commit 40b6f53
Showing 3 changed files with 49 additions and 62 deletions.
3 changes: 1 addition & 2 deletions internal/collector/net/net.go
@@ -304,8 +304,7 @@ func (c *Collector) collect(ch chan<- prometheus.Metric) error {
}

for nicName, nicData := range data {
- if c.config.NicExclude.MatchString(nicName) ||
- !c.config.NicInclude.MatchString(nicName) {
+ if c.config.NicExclude.MatchString(nicName) || !c.config.NicInclude.MatchString(nicName) {
continue
}

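The merged condition above keeps the same semantics: a NIC is skipped when it matches the exclude pattern or fails to match the include pattern. A minimal standalone sketch of that filtering, with simplified struct and field names that are assumptions rather than the exporter's actual config types:

package main

import (
	"fmt"
	"regexp"
)

// nicFilter mirrors the include/exclude check above: a name is kept only if it
// matches the include pattern and does not match the exclude pattern.
// The type and field names here are illustrative, not the exporter's types.
type nicFilter struct {
	include *regexp.Regexp
	exclude *regexp.Regexp
}

func (f nicFilter) keep(name string) bool {
	return f.include.MatchString(name) && !f.exclude.MatchString(name)
}

func main() {
	f := nicFilter{
		include: regexp.MustCompile(".+"),      // default: match everything
		exclude: regexp.MustCompile("^isatap"), // hypothetical exclude pattern
	}

	for _, nic := range []string{"Ethernet0", "isatap.{GUID}"} {
		fmt.Printf("%s keep=%v\n", nic, f.keep(nic))
	}
}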
48 changes: 23 additions & 25 deletions internal/collector/textfile/textfile.go
@@ -54,7 +54,7 @@ type Collector struct {
// Only set for testing to get predictable output.
mTime *float64

- mTimeDesc *prometheus.Desc
+ modTimeDesc *prometheus.Desc
}

func New(config *Config) *Collector {
@@ -105,9 +105,9 @@ func (c *Collector) Close() error {
func (c *Collector) Build(logger *slog.Logger, _ *mi.Session) error {
c.logger = logger.With(slog.String("collector", Name))

- c.logger.Info("textfile Collector directories: " + strings.Join(c.config.TextFileDirectories, ","))
+ c.logger.Info("textfile directories: " + strings.Join(c.config.TextFileDirectories, ","))

- c.mTimeDesc = prometheus.NewDesc(
+ c.modTimeDesc = prometheus.NewDesc(
prometheus.BuildFQName(types.Namespace, "textfile", "mtime_seconds"),
"Unixtime mtime of textfiles successfully read.",
[]string{"file"},
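For reference, prometheus.BuildFQName simply joins namespace, subsystem, and name with underscores, so the renamed descriptor still produces the metric windows_textfile_mtime_seconds. A standalone sketch, with the namespace hard-coded as an assumption in place of the exporter's types.Namespace constant:

package main

import (
	"fmt"

	"github.com/prometheus/client_golang/prometheus"
)

func main() {
	// Assumed namespace; the exporter takes this from types.Namespace.
	const namespace = "windows"

	// BuildFQName yields "windows_textfile_mtime_seconds".
	modTimeDesc := prometheus.NewDesc(
		prometheus.BuildFQName(namespace, "textfile", "mtime_seconds"),
		"Unixtime mtime of textfiles successfully read.",
		[]string{"file"}, // one variable label: the file the mtime belongs to
		nil,              // no constant labels
	)

	fmt.Println(modTimeDesc)
}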
@@ -165,7 +165,7 @@ func (c *Collector) convertMetricFamily(logger *slog.Logger, metricFamily *dto.M

for _, metric := range metricFamily.GetMetric() {
if metric.TimestampMs != nil {
- logger.Warn(fmt.Sprintf("Ignoring unsupported custom timestamp on textfile Collector metric %v", metric))
+ logger.Warn(fmt.Sprintf("Ignoring unsupported custom timestamp on textfile metric %v", metric))
}

labels := metric.GetLabel()
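The warning fires when a metric parsed from a .prom file carries an explicit timestamp, which the collector ignores rather than forwards. A small sketch of that check against the client_model types; the hand-built metric is purely illustrative:

package main

import (
	"fmt"

	dto "github.com/prometheus/client_model/go"
	"google.golang.org/protobuf/proto"
)

func main() {
	// A hand-built gauge sample carrying a custom timestamp, as a parsed
	// .prom line might; real code gets this from the textfile parser.
	metric := &dto.Metric{
		Gauge:       &dto.Gauge{Value: proto.Float64(1)},
		TimestampMs: proto.Int64(1732579200000),
	}

	if metric.TimestampMs != nil {
		fmt.Printf("ignoring unsupported custom timestamp on textfile metric %v\n", metric)
	}
}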
@@ -259,23 +259,24 @@ func (c *Collector) convertMetricFamily(logger *slog.Logger, metricFamily *dto.M
}
}

- func (c *Collector) exportMTimes(mTimes map[string]time.Time, ch chan<- prometheus.Metric) {
+ func (c *Collector) exportMTimes(modTimes map[string]time.Time, ch chan<- prometheus.Metric) {
// Export the mtimes of the successful files.
- if len(mTimes) > 0 {
+ if len(modTimes) > 0 {
// Sorting is needed for predictable output comparison in tests.
- filenames := make([]string, 0, len(mTimes))
- for filename := range mTimes {
+ filenames := make([]string, 0, len(modTimes))
+ for filename := range modTimes {
filenames = append(filenames, filename)
}

sort.Strings(filenames)

for _, filename := range filenames {
- mtime := float64(mTimes[filename].UnixNano() / 1e9)
+ modTime := float64(modTimes[filename].UnixNano() / 1e9)
if c.mTime != nil {
- mtime = *c.mTime
+ modTime = *c.mTime
}
- ch <- prometheus.MustNewConstMetric(c.mTimeDesc, prometheus.GaugeValue, mtime, filename)
+
+ ch <- prometheus.MustNewConstMetric(c.modTimeDesc, prometheus.GaugeValue, modTime, filename)
}
}
}
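The renamed helper keeps the existing behaviour: sort filenames so output is deterministic for the tests, convert each mtime to Unix seconds, and emit one gauge sample per file. A condensed standalone sketch of that pattern; the descriptor below is a simplified stand-in for the collector's modTimeDesc:

package main

import (
	"fmt"
	"sort"
	"time"

	"github.com/prometheus/client_golang/prometheus"
)

// Simplified stand-in for the collector's modTimeDesc.
var modTimeDesc = prometheus.NewDesc(
	"windows_textfile_mtime_seconds",
	"Unixtime mtime of textfiles successfully read.",
	[]string{"file"},
	nil,
)

// exportMTimes emits one gauge per file, sorted by filename for stable output.
func exportMTimes(modTimes map[string]time.Time, ch chan<- prometheus.Metric) {
	filenames := make([]string, 0, len(modTimes))
	for filename := range modTimes {
		filenames = append(filenames, filename)
	}

	sort.Strings(filenames)

	for _, filename := range filenames {
		// Integer division by 1e9 turns nanoseconds into whole Unix seconds.
		modTime := float64(modTimes[filename].UnixNano() / 1e9)
		ch <- prometheus.MustNewConstMetric(modTimeDesc, prometheus.GaugeValue, modTime, filename)
	}
}

func main() {
	ch := make(chan prometheus.Metric, 2)
	exportMTimes(map[string]time.Time{
		"a.prom": time.Unix(1700000000, 0),
		"b.prom": time.Unix(1700000100, 0),
	}, ch)
	close(ch)

	for m := range ch {
		fmt.Println(m.Desc())
	}
}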
@@ -314,6 +315,8 @@ func (c *Collector) Collect(ch chan<- prometheus.Metric) error {
// This will ensure that duplicate metrics are correctly detected between multiple .prom files.
var metricFamilies []*dto.MetricFamily

+ errs := make([]error, 0)
+
// Iterate over files and accumulate their metrics.
for _, directory := range c.config.TextFileDirectories {
err := filepath.WalkDir(directory, func(path string, dirEntry os.DirEntry, err error) error {
@@ -326,24 +329,20 @@ func (c *Collector) Collect(ch chan<- prometheus.Metric) error {

families_array, err := scrapeFile(path, c.logger)
if err != nil {
c.logger.Error(fmt.Sprintf("Error scraping file: %q. Skip File.", path),
slog.Any("err", err),
)
errs = append(errs, fmt.Errorf("error scraping file %q: %w", path, err))

return nil
}

fileInfo, err := os.Stat(path)
if err != nil {
c.logger.Error(fmt.Sprintf("Error reading file info: %q. Skip File.", path),
slog.Any("err", err),
)
errs = append(errs, fmt.Errorf("error reading file info %q: %w", path, err))

return nil
}

if _, hasName := mTimes[fileInfo.Name()]; hasName {
c.logger.Error(fmt.Sprintf("Duplicate filename detected: %q. Skip File.", path))
errs = append(errs, fmt.Errorf("duplicate filename detected: %q", path))

return nil
}
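Inside the WalkDir callback the collector now records each problem and returns nil so the walk continues over the remaining files; only at the end are the recorded problems surfaced. A self-contained sketch of that shape, with a hypothetical directory path and simplified file handling that are assumptions, not the collector's code:

package main

import (
	"errors"
	"fmt"
	"io/fs"
	"path/filepath"
	"strings"
)

// collectPromFiles walks a textfile directory and gathers per-file problems
// into a slice instead of aborting the walk, the same shape the collector
// uses above. It is a standalone sketch, not the exporter's code.
func collectPromFiles(directory string) ([]string, error) {
	errs := make([]error, 0)
	files := make([]string, 0)

	walkErr := filepath.WalkDir(directory, func(path string, dirEntry fs.DirEntry, err error) error {
		if err != nil {
			errs = append(errs, fmt.Errorf("error walking %q: %w", path, err))

			return nil // keep walking; the problem is recorded
		}

		if dirEntry.IsDir() || !strings.HasSuffix(path, ".prom") {
			return nil
		}

		files = append(files, path)

		return nil
	})
	if walkErr != nil {
		errs = append(errs, fmt.Errorf("error reading textfile directory %q: %w", directory, walkErr))
	}

	// errors.Join returns nil when errs is empty, so a clean walk reports success.
	return files, errors.Join(errs...)
}

func main() {
	files, err := collectPromFiles("C:\\custom_metrics") // hypothetical directory
	fmt.Println(files, err)
}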
@@ -355,25 +354,24 @@ func (c *Collector) Collect(ch chan<- prometheus.Metric) error {

return nil
})

if err != nil && directory != "" {
c.logger.Error("Error reading textfile Collector directory: "+directory,
slog.Any("err", err),
)
errs = append(errs, fmt.Errorf("error reading textfile directory %q: %w", directory, err))
}
}

- c.exportMTimes(mTimes, ch)

// If duplicates are detected across *multiple* files, return error.
if duplicateMetricEntry(metricFamilies) {
c.logger.Error("Duplicate metrics detected across multiple files")
c.logger.Warn("duplicate metrics detected across multiple files")
} else {
for _, mf := range metricFamilies {
c.convertMetricFamily(c.logger, mf, ch)
}
}

+ c.exportMTimes(mTimes, ch)

- return nil
+ return errors.Join(errs...)
}

func scrapeFile(path string, logger *slog.Logger) ([]*dto.MetricFamily, error) {
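The net effect of returning errors.Join(errs...) is that a scrape with any per-file problem now reports failure to the caller, and, per the commit title, windows_exporter_collector_success for the textfile collector drops to 0 instead of staying at 1 while errors were only logged. The wrapper below is a hedged sketch of that wiring, not the exporter's actual framework code; the success descriptor, its help text, and the fake collector are illustrative assumptions:

package main

import (
	"errors"
	"fmt"

	"github.com/prometheus/client_golang/prometheus"
)

// Illustrative stand-in for the exporter's per-collector success gauge.
var successDesc = prometheus.NewDesc(
	"windows_exporter_collector_success",
	"Whether the collector was successful.",
	[]string{"collector"},
	nil,
)

// collectFunc stands in for a collector's Collect method.
type collectFunc func(ch chan<- prometheus.Metric) error

// collectWithSuccess runs a collector and emits 1 on success or 0 on error,
// mirroring what the commit title describes.
func collectWithSuccess(name string, collect collectFunc, ch chan<- prometheus.Metric) {
	success := 1.0
	if err := collect(ch); err != nil {
		fmt.Printf("collector %s failed: %v\n", name, err)

		success = 0.0
	}

	ch <- prometheus.MustNewConstMetric(successDesc, prometheus.GaugeValue, success, name)
}

func main() {
	// A fake textfile collector that accumulates per-file errors the way the
	// code above now does; it emits no samples of its own.
	textfile := func(_ chan<- prometheus.Metric) error {
		errs := make([]error, 0)
		errs = append(errs, fmt.Errorf("error scraping file %q: %w", "broken.prom", errors.New("parse error")))

		return errors.Join(errs...) // nil when errs is empty, so success stays 1
	}

	ch := make(chan prometheus.Metric, 1)
	collectWithSuccess("textfile", textfile, ch)
	close(ch)

	fmt.Println("metrics emitted:", len(ch))
}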
60 changes: 25 additions & 35 deletions internal/collector/textfile/textfile_test_test.go
@@ -26,6 +26,7 @@ import (
"github.com/prometheus-community/windows_exporter/pkg/collector"
"github.com/prometheus/client_golang/prometheus"
dto "github.com/prometheus/client_model/go"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)

@@ -48,30 +49,26 @@ func TestMultipleDirectories(t *testing.T) {
metrics := make(chan prometheus.Metric)
got := ""

errCh := make(chan error, 1)
go func() {
for {
var metric dto.Metric
errCh <- textFileCollector.Collect(metrics)

val := <-metrics
close(metrics)
}()

err := val.Write(&metric)
if err != nil {
t.Errorf("Unexpected error %s", err)
}
for val := range metrics {
var metric dto.Metric

got += metric.String()
}
}()
err := val.Write(&metric)
require.NoError(t, err)

err := textFileCollector.Collect(metrics)
if err != nil {
t.Errorf("Unexpected error %s", err)
got += metric.String()
}

require.NoError(t, <-errCh)

for _, f := range []string{"dir1", "dir2", "dir3", "dir3sub"} {
if !strings.Contains(got, f) {
t.Errorf("Unexpected output %s: %q", f, got)
}
assert.Contains(t, got, f)
}
}

@@ -89,31 +86,24 @@ func TestDuplicateFileName(t *testing.T) {
metrics := make(chan prometheus.Metric)
got := ""

errCh := make(chan error, 1)
go func() {
for {
var metric dto.Metric
errCh <- textFileCollector.Collect(metrics)

val := <-metrics
close(metrics)
}()

err := val.Write(&metric)
if err != nil {
t.Errorf("Unexpected error %s", err)
}
for val := range metrics {
var metric dto.Metric

got += metric.String()
}
}()
err := val.Write(&metric)
require.NoError(t, err)

err := textFileCollector.Collect(metrics)
if err != nil {
t.Errorf("Unexpected error %s", err)
got += metric.String()
}

if !strings.Contains(got, "file") {
t.Errorf("Unexpected output %q", got)
}
require.ErrorContains(t, <-errCh, "duplicate filename detected")

if strings.Contains(got, "sub_file") {
t.Errorf("Unexpected output %q", got)
}
assert.Contains(t, got, "file")
assert.NotContains(t, got, "sub_file")
}
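Both tests now share the same shape: Collect runs in a goroutine so it can block on the unbuffered metrics channel, its return value is parked on a buffered error channel, and closing the channel when Collect returns ends the range loop on the test side. A generalized helper capturing that shape might look like the sketch below; the helper name and interface are illustrative and would live in a _test.go file:

package textfile_test

import (
	"testing"

	"github.com/prometheus/client_golang/prometheus"
	dto "github.com/prometheus/client_model/go"
	"github.com/stretchr/testify/require"
)

// errCollector is the minimal surface the helper needs from a collector.
type errCollector interface {
	Collect(ch chan<- prometheus.Metric) error
}

// collectToString runs Collect in a goroutine, drains every emitted metric into
// a textual dump, and returns Collect's error once the channel is closed.
func collectToString(t *testing.T, c errCollector) (string, error) {
	t.Helper()

	metrics := make(chan prometheus.Metric)
	errCh := make(chan error, 1)

	go func() {
		// Collect blocks on the unbuffered channel, so it must run concurrently
		// with the draining loop below; closing the channel ends that loop.
		errCh <- c.Collect(metrics)
		close(metrics)
	}()

	got := ""

	for val := range metrics {
		var metric dto.Metric

		require.NoError(t, val.Write(&metric))

		got += metric.String()
	}

	return got, <-errCh
}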
