Check duplicates across entire textfile set
All textfile metrics will be checked for duplicates. If duplicates
are detected, drop all metrics and log an error.
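In practice this matters because two .prom files can each be free of duplicates on their own yet define the same series, and only a check over the combined set catches it. Below is a minimal sketch of that case, written as a test in the collector package against the duplicateMetricEntry signature this commit introduces (shown in the diff below); the metric name, labels and test name are hypothetical:

```go
package collector

import (
	"testing"

	"github.com/golang/protobuf/proto"
	dto "github.com/prometheus/client_model/go"
)

// Sketch only: fileA and fileB stand in for the parsed contents of two
// separate .prom files, each duplicate-free in isolation.
func TestDuplicateAcrossTextfiles(t *testing.T) {
	series := &dto.MetricFamily{
		Name: proto.String("example_metric"),
		Type: dto.MetricType_GAUGE.Enum(),
		Metric: []*dto.Metric{{
			Label: []*dto.LabelPair{{Name: proto.String("foo"), Value: proto.String("bar")}},
			Gauge: &dto.Gauge{Value: proto.Float64(1)},
		}},
	}
	fileA := []*dto.MetricFamily{series}
	fileB := []*dto.MetricFamily{series} // same name and labels as fileA's series

	// Neither file trips the check on its own...
	if duplicateMetricEntry(fileA) || duplicateMetricEntry(fileB) {
		t.Fatal("individual files should not contain duplicates")
	}

	// ...but the combined set does, so the collector drops everything and logs an error.
	combined := append(append([]*dto.MetricFamily{}, fileA...), fileB...)
	if !duplicateMetricEntry(combined) {
		t.Error("expected duplicate detection across the combined textfile set")
	}
}
```

The trade-off is deliberate: once any duplicate exists anywhere in the set, all textfile metrics are dropped rather than exporting an ambiguous subset.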

Signed-off-by: Ben Reedy <[email protected]>
breed808 committed May 17, 2021
1 parent 4293497 commit 5072879
Showing 2 changed files with 22 additions and 13 deletions.
24 changes: 15 additions & 9 deletions collector/textfile.go
@@ -66,9 +66,9 @@ func NewTextFileCollector() (Collector, error) {
}, nil
}

// Given a metric family, determine if any two entries are duplicates.
// Given a slice of metric families, determine if any two entries are duplicates.
// Duplicates will be detected where the metric name, labels and label values are identical.
func duplicateMetricEntry(metricFamilies map[string]*dto.MetricFamily) bool {
func duplicateMetricEntry(metricFamilies []*dto.MetricFamily) bool {
uniqueMetrics := make(map[string]map[string]string)
for _, metricFamily := range metricFamilies {
metric_name := *metricFamily.Name
@@ -249,6 +249,10 @@ func (c *textFileCollector) Collect(ctx *ScrapeContext, ch chan<- prometheus.Met
error = 1.0
}

// Create empty metricFamily slice here and append parsedFamilies to it inside the loop.
// Once loop is complete, raise error if any duplicates are present.
// This will ensure that duplicate metrics are correctly detected between multiple .prom files.
var metricFamilies = []*dto.MetricFamily{}
fileLoop:
for _, f := range files {
if !strings.HasSuffix(f.Name(), ".prom") {
@@ -297,19 +301,21 @@ fileLoop:
// a failure does not appear fresh.
mtimes[f.Name()] = f.ModTime()

if duplicateMetricEntry(parsedFamilies) {
log.Errorf("Duplicate metrics detected in file: %q", path)
error = 1.0
continue
for _, metricFamily := range parsedFamilies {
metricFamilies = append(metricFamilies, metricFamily)
}
}

for _, mf := range parsedFamilies {
if duplicateMetricEntry(metricFamilies) {
log.Errorf("Duplicate metrics detected in files")
error = 1.0
} else {
for _, mf := range metricFamilies {
convertMetricFamily(mf, ch)
c.exportMTimes(mtimes, ch)
}
}

c.exportMTimes(mtimes, ch)

// Export if there were errors.
ch <- prometheus.MustNewConstMetric(
prometheus.NewDesc(
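Only the first lines of duplicateMetricEntry's body are visible above (it begins by building a uniqueMetrics map keyed by metric name); the rest is collapsed. As an illustration of the technique its doc comment describes, and not the committed body, a standalone equivalent with the same []*dto.MetricFamily signature (hypothetical name hasDuplicateSeries) can build a canonical name-plus-sorted-labels key per metric and report the first repeat:

```go
package collector

import (
	"sort"
	"strings"

	dto "github.com/prometheus/client_model/go"
)

// hasDuplicateSeries is a hypothetical stand-in for the committed
// duplicateMetricEntry: it reports whether any two metrics across the given
// families share the same name, label names and label values.
func hasDuplicateSeries(metricFamilies []*dto.MetricFamily) bool {
	seen := make(map[string]struct{})
	for _, mf := range metricFamilies {
		for _, m := range mf.Metric {
			// Canonical key: metric name plus its sorted label name/value pairs.
			pairs := make([]string, 0, len(m.GetLabel()))
			for _, lp := range m.GetLabel() {
				pairs = append(pairs, lp.GetName()+"="+lp.GetValue())
			}
			sort.Strings(pairs)
			key := mf.GetName() + "{" + strings.Join(pairs, ",") + "}"
			if _, dup := seen[key]; dup {
				return true
			}
			seen[key] = struct{}{}
		}
	}
	return false
}
```

The signature change itself is the important part: taking a slice instead of a map keyed by family name is what lets Collect hand in the families gathered from every .prom file at once.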
11 changes: 7 additions & 4 deletions collector/textfile_test.go
@@ -92,8 +92,8 @@ func TestDuplicateMetricEntry(t *testing.T) {
Metric: []*dto.Metric{&metric1, &metric2},
}

duplicateFamily := make(map[string]*dto.MetricFamily)
duplicateFamily["test"] = &duplicate
duplicateFamily := []*dto.MetricFamily{}
duplicateFamily = append(duplicateFamily, &duplicate)

// Ensure detection for duplicate metrics
if !duplicateMetricEntry(duplicateFamily) {
@@ -118,7 +118,9 @@ func TestDuplicateMetricEntry(t *testing.T) {
Type: &metric_type,
Metric: []*dto.Metric{&metric1, &metric3},
}
duplicateFamily["test"] = &differentLabels

duplicateFamily = []*dto.MetricFamily{}
duplicateFamily = append(duplicateFamily, &differentLabels)

// Additional label on second metric should not be cause for duplicate detection
if duplicateMetricEntry(duplicateFamily) {
@@ -142,7 +144,8 @@ func TestDuplicateMetricEntry(t *testing.T) {
Type: &metric_type,
Metric: []*dto.Metric{&metric3, &metric4},
}
duplicateFamily["test"] = &differentValues
duplicateFamily = []*dto.MetricFamily{}
duplicateFamily = append(duplicateFamily, &differentValues)

// Additional label with different values metric should not be cause for duplicate detection
if duplicateMetricEntry(duplicateFamily) {
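The three expectations exercised above (an exact duplicate is flagged, an additional label is not, and an additional label with differing values is not) could also be written as a single table-driven test. The sketch below uses its own minimal fixtures; the makeGauge helper, the test_metric name and the test function name are hypothetical, not the repository's test code:

```go
package collector

import (
	"testing"

	"github.com/golang/protobuf/proto"
	dto "github.com/prometheus/client_model/go"
)

// makeGauge is a hypothetical fixture helper: one gauge metric with the given labels.
func makeGauge(labels map[string]string) *dto.Metric {
	m := &dto.Metric{Gauge: &dto.Gauge{Value: proto.Float64(1)}}
	for name, value := range labels {
		m.Label = append(m.Label, &dto.LabelPair{Name: proto.String(name), Value: proto.String(value)})
	}
	return m
}

func TestDuplicateMetricEntryTable(t *testing.T) {
	cases := []struct {
		name    string
		metrics []*dto.Metric
		want    bool
	}{
		{"identical name and labels", []*dto.Metric{
			makeGauge(map[string]string{"foo": "bar"}),
			makeGauge(map[string]string{"foo": "bar"}),
		}, true},
		{"additional label on second metric", []*dto.Metric{
			makeGauge(map[string]string{"foo": "bar"}),
			makeGauge(map[string]string{"foo": "bar", "extra": "label"}),
		}, false},
		{"additional label with different values", []*dto.Metric{
			makeGauge(map[string]string{"foo": "bar", "extra": "one"}),
			makeGauge(map[string]string{"foo": "bar", "extra": "two"}),
		}, false},
	}

	for _, tc := range cases {
		family := []*dto.MetricFamily{{
			Name:   proto.String("test_metric"),
			Type:   dto.MetricType_GAUGE.Enum(),
			Metric: tc.metrics,
		}}
		if got := duplicateMetricEntry(family); got != tc.want {
			t.Errorf("%s: duplicateMetricEntry() = %v, want %v", tc.name, got, tc.want)
		}
	}
}
```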
