fix: consume lag metrics
adityathebe committed Dec 5, 2024
1 parent 6fe18a3 commit 218ff33
Showing 1 changed file with 7 additions and 2 deletions.
scrapers/incremental.go — 9 changes: 7 additions & 2 deletions

@@ -74,6 +74,7 @@ func ConsumeKubernetesWatchJobFunc(sc api.ScrapeContext, config v1.Kubernetes, q
             return fmt.Errorf("unexpected item in the priority queue: %T", val)
         }
         obj := queueItem.Obj
+        queuedTime[string(obj.GetUID())] = queueItem.Timestamp

         if queueItem.Operation == kubernetes.QueueItemOperationDelete {
             deletedObjects = append(deletedObjects, string(obj.GetUID()))
@@ -100,7 +101,6 @@ func ConsumeKubernetesWatchJobFunc(sc api.ScrapeContext, config v1.Kubernetes, q
             seenObjects[string(obj.GetUID())] = struct{}{}
         }

-        queuedTime[string(obj.GetUID())] = queueItem.Timestamp
         objs = append(objs, obj)
     }

@@ -137,7 +137,12 @@ func ConsumeKubernetesWatchJobFunc(sc api.ScrapeContext, config v1.Kubernetes, q
     }

     for _, obj := range objs {
-        lag := time.Since(queuedTime[string(obj.GetUID())])
+        queuedtime, ok := queuedTime[string(obj.GetUID())]
+        if !ok {
+            continue // involved objects have 0 queuedtime as they never enter the queue
+        }
+
+        lag := time.Since(queuedtime)
         ctx.Histogram("informer_consume_lag", consumeLagBuckets, "scraper", sc.ScraperID(), "kind", obj.GetKind()).
             Record(time.Duration(lag.Milliseconds()))
     }
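Why the fix matters: queuedTime only has entries for objects that were actually dequeued, but the old code indexed the map directly, so for involved objects (which, per the added comment, never enter the queue) it computed time.Since on the map's zero value time.Time{}, producing an enormous duration that skewed the informer_consume_lag histogram. The comma-ok guard added above skips those objects, and the first hunk records the dequeue timestamp as soon as the item is read, so it is captured regardless of which branch handles the object.

Below is a minimal, runnable sketch of the same look-up-then-skip pattern. The UIDs and the printed output are hypothetical stand-ins; the real code records the lag into the histogram instead of printing it.

    package main

    import (
        "fmt"
        "time"
    )

    func main() {
        // Timestamps captured when items were taken off the queue.
        queuedTime := map[string]time.Time{
            "pod-a": time.Now().Add(-150 * time.Millisecond),
            "pod-b": time.Now().Add(-40 * time.Millisecond),
        }

        // "event-owner-c" stands in for an involved object that never
        // entered the queue, so it has no entry in queuedTime.
        objects := []string{"pod-a", "pod-b", "event-owner-c"}

        for _, uid := range objects {
            ts, ok := queuedTime[uid]
            if !ok {
                // Without this guard, time.Since(time.Time{}) returns an
                // enormous duration and would skew the lag histogram.
                continue
            }
            lag := time.Since(ts)
            fmt.Printf("consume lag for %s: %dms\n", uid, lag.Milliseconds())
        }
    }

The comma-ok lookup is what distinguishes "never queued" from "queued with zero lag", which a plain map index cannot do.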
