6 changes: 3 additions & 3 deletions .golangci.yml
@@ -8,6 +8,6 @@ run:
 
 issues:
   exclude-rules:
-  - path: _test.go
-    linters:
-    - errcheck
+    - path: _test.go
+      linters:
+        - errcheck
31 changes: 30 additions & 1 deletion collectors/monitoring_collector.go
@@ -18,6 +18,7 @@ import (
"fmt"
"log/slog"
"math"
"reflect"
"strings"
"sync"
"time"
@@ -399,6 +400,28 @@ func (c *MonitoringCollector) reportMonitoringMetrics(ch chan<- prometheus.Metric
 	return <-errChannel
 }
 
+func (c *MonitoringCollector) filterDuplicateTimeSeries(
+	timeSeries []*monitoring.TimeSeries,
+) []*monitoring.TimeSeries {
+	var keptTimeSeries []*monitoring.TimeSeries
+
+	for _, timeSerie := range timeSeries {
+		var skip = false
+
+		for _, keptTimeSerie := range keptTimeSeries {
+			// Did we already find a timeSerie with the exact same key-value labels?
+			if reflect.DeepEqual(timeSerie.Metric.Labels, keptTimeSerie.Metric.Labels) {
+				skip = true
+				break
+			}
+		}
+		if !skip {
+			keptTimeSeries = append(keptTimeSeries, timeSerie)
+		}
+	}
+	return keptTimeSeries
+}
+
 func (c *MonitoringCollector) reportTimeSeriesMetrics(
 	page *monitoring.ListTimeSeriesResponse,
 	metricDescriptor *monitoring.MetricDescriptor,
@@ -408,6 +431,7 @@ func (c *MonitoringCollector) reportTimeSeriesMetrics(
 	var metricValue float64
 	var metricValueType prometheus.ValueType
 	var newestTSPoint *monitoring.Point
+	var uniqueTimeSeries []*monitoring.TimeSeries

 	timeSeriesMetrics, err := newTimeSeriesMetrics(metricDescriptor,
 		ch,
@@ -419,7 +443,12 @@ func (c *MonitoringCollector) reportTimeSeriesMetrics(
 	if err != nil {
 		return fmt.Errorf("error creating the TimeSeriesMetrics %v", err)
 	}
-	for _, timeSeries := range page.TimeSeries {
+
+	// Make sure we don't feed Prometheus duplicate time series if the
+	// metrics page gives us some.
+	uniqueTimeSeries = c.filterDuplicateTimeSeries(page.TimeSeries)
+
+	for _, timeSeries := range uniqueTimeSeries {
 		newestEndTime := time.Unix(0, 0)
 		for _, point := range timeSeries.Points {
 			endTime, err := time.Parse(time.RFC3339Nano, point.Interval.EndTime)
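For context, the deduplication introduced above can be exercised in isolation. Below is a minimal standalone sketch, assuming the google.golang.org/api/monitoring/v3 types used in the diff; the filterDuplicates helper and the sample label values are illustrative stand-ins, not part of the PR.

package main

import (
	"fmt"
	"reflect"

	monitoring "google.golang.org/api/monitoring/v3"
)

// filterDuplicates keeps the first time series seen for each distinct
// metric label set, mirroring filterDuplicateTimeSeries in the diff.
func filterDuplicates(in []*monitoring.TimeSeries) []*monitoring.TimeSeries {
	var kept []*monitoring.TimeSeries
	for _, ts := range in {
		dup := false
		for _, seen := range kept {
			// Same comparison as the PR: deep equality of the metric labels.
			if reflect.DeepEqual(ts.Metric.Labels, seen.Metric.Labels) {
				dup = true
				break
			}
		}
		if !dup {
			kept = append(kept, ts)
		}
	}
	return kept
}

func main() {
	series := []*monitoring.TimeSeries{
		{Metric: &monitoring.Metric{Labels: map[string]string{"zone": "us-east1-b"}}},
		{Metric: &monitoring.Metric{Labels: map[string]string{"zone": "us-east1-b"}}}, // duplicate label set
		{Metric: &monitoring.Metric{Labels: map[string]string{"zone": "us-west1-a"}}},
	}
	fmt.Println(len(filterDuplicates(series))) // prints 2: the duplicate is dropped
}

Note that the first series seen for a given label set wins and that the comparison looks only at Metric.Labels, which matches the PR's stated goal of never handing Prometheus two series with identical label sets.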