From 93048ed4781d6a88dc7b8ebc5a9e65fbe598cbcc Mon Sep 17 00:00:00 2001
From: Cristian Greco
Date: Sun, 22 Sep 2024 19:11:15 +0200
Subject: [PATCH 1/4] Reimplement PrometheusMetric to use slices for label pairs
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

This refactoring stems from an attempt to optimise memory usage in
BuildMetrics and createPrometheusLabels, where labels are copied across
various maps.

The new PrometheusMetric uses slices to store label pairs and is
implemented to guarantee that labels are always sorted by key.

The rationale is that slices _might_ be more memory efficient than maps
for large preallocation sizes. Moreover, the fact that label keys are
promptly available (no need to iterate over the map) comes in handy in a
bunch of places where we save additional allocations. Lastly, while we
spend cycles to do explicit sorting in yace now, it should save us some
comparisons when prometheus sorts labels internally.

The refactoring also comes with a reimplementation of the signature
computation for labels, since the prometheus models only work with maps.

I've added a bunch of benchmarks of specific methods. They show that
sometimes the change is noticeable, sometimes it's not (but the overall
impact is hard to judge in synthetic benchmarks due to the variety of
input one can get at runtime from large aws responses).

Benchmark_EnsureLabelConsistencyAndRemoveDuplicates:
```
│ before.txt │ after.txt │
│ sec/op │ sec/op vs base │
_EnsureLabelConsistencyAndRemoveDuplicates-12   14.203µ ± 2%   9.115µ ± 1%   -35.82% (p=0.000 n=10)
│ before.txt │ after.txt │
│ B/op │ B/op vs base │
_EnsureLabelConsistencyAndRemoveDuplicates-12   448.0 ± 0%   256.0 ± 0%   -42.86% (p=0.000 n=10)
│ before.txt │ after.txt │
│ allocs/op │ allocs/op vs base │
_EnsureLabelConsistencyAndRemoveDuplicates-12   17.000 ± 0%   9.000 ± 0%   -47.06% (p=0.000 n=10)
```

Benchmark_createPrometheusLabels:
```
│ before.txt │ after.txt │
│ sec/op │ sec/op vs base │
_createPrometheusLabels-12   41.86m ± 5%   41.40m ± 9%   ~ (p=0.481 n=10)
│ before.txt │ after.txt │
│ B/op │ B/op vs base │
_createPrometheusLabels-12   2.867Mi ± 0%   1.531Mi ± 0%   -46.59% (p=0.000 n=10)
│ before.txt │ after.txt │
│ allocs/op │ allocs/op vs base │
_createPrometheusLabels-12   40.00k ± 0%   40.00k ± 0%   -0.00% (p=0.000 n=10)
```

Benchmark_BuildMetrics:
```
│ before.txt │ after.txt │
│ sec/op │ sec/op vs base │
_BuildMetrics-12   110.4µ ± 1%   114.1µ ± 1%   +3.35% (p=0.000 n=10)
│ before.txt │ after.txt │
│ B/op │ B/op vs base │
_BuildMetrics-12   4.344Ki ± 0%   3.797Ki ± 0%   -12.59% (p=0.000 n=10)
│ before.txt │ after.txt │
│ allocs/op │ allocs/op vs base │
_BuildMetrics-12   95.00 ± 0%   99.00 ± 0%   +4.21% (p=0.000 n=10)
```

Benchmark_NewPrometheusCollector:
```
│ before.txt │ after.txt │
│ sec/op │ sec/op vs base │
_NewPrometheusCollector-12   154.8µ ± 1%   143.5µ ± 1%   -7.26% (p=0.000 n=10)
│ before.txt │ after.txt │
│ B/op │ B/op vs base │
_NewPrometheusCollector-12   4.516Ki ± 0%   4.281Ki ± 0%   -5.19% (p=0.000 n=10)
│ before.txt │ after.txt │
│ allocs/op │ allocs/op vs base │
_NewPrometheusCollector-12   142.0 ± 0%   127.0 ± 0%   -10.56% (p=0.000 n=10)
```
---
 go.mod                          |   3 +-
 go.sum                          |   2 -
 pkg/promutil/migrate.go         | 108 ++---
 pkg/promutil/migrate_test.go    | 737 ++++++++++++++++++--------------
 pkg/promutil/prometheus.go      | 137 +++++-
 pkg/promutil/prometheus_test.go |  91 ++--
 6 files changed, 637 insertions(+), 441 deletions(-)

diff --git a/go.mod b/go.mod
index d618ac3c..d42d16af 100644
--- a/go.mod
+++ b/go.mod
@@ -20,6 +20,7 @@ require (
github.com/aws/aws-sdk-go-v2/service/storagegateway v1.33.1 github.com/aws/aws-sdk-go-v2/service/sts v1.31.1 github.com/aws/smithy-go v1.21.0 + github.com/cespare/xxhash/v2 v2.3.0 github.com/go-kit/log v0.2.1 github.com/grafana/regexp v0.0.0-20221123153739-15dc172cd2db github.com/prometheus/client_golang v1.20.4 @@ -28,7 +29,6 @@ require ( github.com/r3labs/diff/v3 v3.0.1 github.com/stretchr/testify v1.9.0 github.com/urfave/cli/v2 v2.27.4 - golang.org/x/exp v0.0.0-20240823005443-9b4947da3948 golang.org/x/sync v0.8.0 gopkg.in/yaml.v2 v2.4.0 ) @@ -43,7 +43,6 @@ require ( github.com/aws/aws-sdk-go-v2/service/sso v1.23.1 // indirect github.com/aws/aws-sdk-go-v2/service/ssooidc v1.27.1 // indirect github.com/beorn7/perks v1.0.1 // indirect - github.com/cespare/xxhash/v2 v2.3.0 // indirect github.com/cpuguy83/go-md2man/v2 v2.0.4 // indirect github.com/davecgh/go-spew v1.1.1 // indirect github.com/go-logfmt/logfmt v0.5.1 // indirect diff --git a/go.sum b/go.sum index 9d04bc47..d46ae807 100644 --- a/go.sum +++ b/go.sum @@ -108,8 +108,6 @@ github.com/vmihailenco/tagparser/v2 v2.0.0 h1:y09buUbR+b5aycVFQs/g70pqKVZNBmxwAh github.com/vmihailenco/tagparser/v2 v2.0.0/go.mod h1:Wri+At7QHww0WTrCBeu4J6bNtoV6mEfg5OIWRZA9qds= github.com/xrash/smetrics v0.0.0-20240521201337-686a1a2994c1 h1:gEOO8jv9F4OT7lGCjxCBTO/36wtF6j2nSip77qHd4x4= github.com/xrash/smetrics v0.0.0-20240521201337-686a1a2994c1/go.mod h1:Ohn+xnUBiLI6FVj/9LpzZWtj1/D6lUovWYBkxHVV3aM= -golang.org/x/exp v0.0.0-20240823005443-9b4947da3948 h1:kx6Ds3MlpiUHKj7syVnbp57++8WpuKPcR5yjLBjvLEA= -golang.org/x/exp v0.0.0-20240823005443-9b4947da3948/go.mod h1:akd2r19cwCdwSwWeIdzYQGa/EZZyqcOdwWiwj5L5eKQ= golang.org/x/sync v0.8.0 h1:3NFvSEYkUoMifnESzZl15y791HH1qU2xm6eCJU5ZPXQ= golang.org/x/sync v0.8.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= golang.org/x/sys v0.23.0 h1:YfKFowiIMvtgl1UERQoTPPToxltDeZfbj4H7dVUCwmM= diff --git a/pkg/promutil/migrate.go b/pkg/promutil/migrate.go index 0db1dc84..b0a3f283 100644 --- a/pkg/promutil/migrate.go +++ b/pkg/promutil/migrate.go @@ -2,14 +2,13 @@ package promutil import ( "fmt" - "maps" "math" "sort" + "strconv" "strings" "time" "github.com/grafana/regexp" - prom_model "github.com/prometheus/common/model" "github.com/nerdswords/yet-another-cloudwatch-exporter/pkg/logging" "github.com/nerdswords/yet-another-cloudwatch-exporter/pkg/model" @@ -46,13 +45,16 @@ func BuildMetricName(namespace, metricName, statistic string) string { func BuildNamespaceInfoMetrics(tagData []model.TaggedResourceResult, metrics []*PrometheusMetric, observedMetricLabels map[string]model.LabelSet, labelsSnakeCase bool, logger logging.Logger) ([]*PrometheusMetric, map[string]model.LabelSet) { for _, tagResult := range tagData { - contextLabels := contextToLabels(tagResult.Context, labelsSnakeCase, logger) + contextLabelKeys, contextLabelValues := contextToLabels(tagResult.Context, labelsSnakeCase, logger) for _, d := range tagResult.Data { - metricName := BuildMetricName(d.Namespace, "info", "") + size := len(d.Tags) + len(contextLabelKeys) + 1 + promLabelKeys, promLabelValues := make([]string, 0, size), make([]string, 0, size) + + promLabelKeys = append(promLabelKeys, "name") + promLabelKeys = append(promLabelKeys, contextLabelKeys...) + promLabelValues = append(promLabelValues, d.ARN) + promLabelValues = append(promLabelValues, contextLabelValues...) 
- promLabels := make(map[string]string, len(d.Tags)+len(contextLabels)+1) - maps.Copy(promLabels, contextLabels) - promLabels["name"] = d.ARN for _, tag := range d.Tags { ok, promTag := PromStringTag(tag.Key, labelsSnakeCase) if !ok { @@ -60,16 +62,13 @@ func BuildNamespaceInfoMetrics(tagData []model.TaggedResourceResult, metrics []* continue } - labelName := "tag_" + promTag - promLabels[labelName] = tag.Value + promLabelKeys = append(promLabelKeys, "tag_"+promTag) + promLabelValues = append(promLabelValues, tag.Value) } - observedMetricLabels = recordLabelsForMetric(metricName, promLabels, observedMetricLabels) - metrics = append(metrics, &PrometheusMetric{ - Name: metricName, - Labels: promLabels, - Value: 0, - }) + metricName := BuildMetricName(d.Namespace, "info", "") + observedMetricLabels = recordLabelsForMetric(metricName, promLabelKeys, observedMetricLabels) + metrics = append(metrics, NewPrometheusMetric(metricName, promLabelKeys, promLabelValues, 0)) } } @@ -81,7 +80,7 @@ func BuildMetrics(results []model.CloudwatchMetricResult, labelsSnakeCase bool, observedMetricLabels := make(map[string]model.LabelSet) for _, result := range results { - contextLabels := contextToLabels(result.Context, labelsSnakeCase, logger) + contextLabelKeys, contextLabelValues := contextToLabels(result.Context, labelsSnakeCase, logger) for _, metric := range result.Data { // This should not be possible but check just in case if metric.GetMetricStatisticsResult == nil && metric.GetMetricDataResult == nil { @@ -112,17 +111,17 @@ func BuildMetrics(results []model.CloudwatchMetricResult, labelsSnakeCase bool, name := BuildMetricName(metric.Namespace, metric.MetricName, statistic) - promLabels := createPrometheusLabels(metric, labelsSnakeCase, contextLabels, logger) - observedMetricLabels = recordLabelsForMetric(name, promLabels, observedMetricLabels) - - output = append(output, &PrometheusMetric{ - Name: name, - Labels: promLabels, - Value: exportedDatapoint, - Timestamp: ts, - IncludeTimestamp: metric.MetricMigrationParams.AddCloudwatchTimestamp, - }) - + labelKeys, labelValues := createPrometheusLabels(metric, labelsSnakeCase, contextLabelKeys, contextLabelValues, logger) + observedMetricLabels = recordLabelsForMetric(name, labelKeys, observedMetricLabels) + + output = append(output, NewPrometheusMetricWithTimestamp( + name, + labelKeys, + labelValues, + exportedDatapoint, + metric.MetricMigrationParams.AddCloudwatchTimestamp, + ts, + )) } } } @@ -209,9 +208,12 @@ func sortByTimestamp(datapoints []*model.Datapoint) []*model.Datapoint { return datapoints } -func createPrometheusLabels(cwd *model.CloudwatchData, labelsSnakeCase bool, contextLabels map[string]string, logger logging.Logger) map[string]string { - labels := make(map[string]string, len(cwd.Dimensions)+len(cwd.Tags)+len(contextLabels)) - labels["name"] = cwd.ResourceName +func createPrometheusLabels(cwd *model.CloudwatchData, labelsSnakeCase bool, contextLabelsKeys []string, contextLabelsValues []string, logger logging.Logger) ([]string, []string) { + size := len(cwd.Dimensions) + len(cwd.Tags) + len(contextLabelsKeys) + 1 + labelKeys, labelValues := make([]string, 0, size), make([]string, 0, size) + + labelKeys = append(labelKeys, "name") + labelValues = append(labelValues, cwd.ResourceName) // Inject the sfn name back as a label for _, dimension := range cwd.Dimensions { @@ -220,7 +222,8 @@ func createPrometheusLabels(cwd *model.CloudwatchData, labelsSnakeCase bool, con logger.Warn("dimension name is an invalid prometheus label name", 
"dimension", dimension.Name) continue } - labels["dimension_"+promTag] = dimension.Value + labelKeys = append(labelKeys, "dimension_"+promTag) + labelValues = append(labelValues, dimension.Value) } for _, tag := range cwd.Tags { @@ -229,25 +232,31 @@ func createPrometheusLabels(cwd *model.CloudwatchData, labelsSnakeCase bool, con logger.Warn("metric tag name is an invalid prometheus label name", "tag", tag.Key) continue } - labels["tag_"+promTag] = tag.Value + labelKeys = append(labelKeys, "tag_"+promTag) + labelValues = append(labelValues, tag.Value) } - maps.Copy(labels, contextLabels) + labelKeys = append(labelKeys, contextLabelsKeys...) + labelValues = append(labelValues, contextLabelsValues...) - return labels + return labelKeys, labelValues } -func contextToLabels(context *model.ScrapeContext, labelsSnakeCase bool, logger logging.Logger) map[string]string { +func contextToLabels(context *model.ScrapeContext, labelsSnakeCase bool, logger logging.Logger) ([]string, []string) { if context == nil { - return map[string]string{} + return []string{}, []string{} } - labels := make(map[string]string, 2+len(context.CustomTags)) - labels["region"] = context.Region - labels["account_id"] = context.AccountID + size := 3 + len(context.CustomTags) + keys, values := make([]string, 0, size), make([]string, 0, size) + + keys = append(keys, "region", "account_id") + values = append(values, context.Region, context.AccountID) + // If there's no account alias, omit adding an extra label in the series, it will work either way query wise if context.AccountAlias != "" { - labels["account_alias"] = context.AccountAlias + keys = append(keys, "account_alias") + values = append(values, context.AccountAlias) } for _, label := range context.CustomTags { @@ -256,19 +265,20 @@ func contextToLabels(context *model.ScrapeContext, labelsSnakeCase bool, logger logger.Warn("custom tag name is an invalid prometheus label name", "tag", label.Key) continue } - labels["custom_tag_"+promTag] = label.Value + keys = append(keys, "custom_tag_"+promTag) + values = append(values, label.Value) } - return labels + return keys, values } // recordLabelsForMetric adds any missing labels from promLabels in to the LabelSet for the metric name and returns // the updated observedMetricLabels -func recordLabelsForMetric(metricName string, promLabels map[string]string, observedMetricLabels map[string]model.LabelSet) map[string]model.LabelSet { +func recordLabelsForMetric(metricName string, labelKeys []string, observedMetricLabels map[string]model.LabelSet) map[string]model.LabelSet { if _, ok := observedMetricLabels[metricName]; !ok { - observedMetricLabels[metricName] = make(model.LabelSet, len(promLabels)) + observedMetricLabels[metricName] = make(model.LabelSet, len(labelKeys)) } - for label := range promLabels { + for _, label := range labelKeys { if _, ok := observedMetricLabels[metricName][label]; !ok { observedMetricLabels[metricName][label] = struct{}{} } @@ -285,13 +295,11 @@ func EnsureLabelConsistencyAndRemoveDuplicates(metrics []*PrometheusMetric, obse output := make([]*PrometheusMetric, 0, len(metrics)) for _, metric := range metrics { - for observedLabels := range observedMetricLabels[metric.Name] { - if _, ok := metric.Labels[observedLabels]; !ok { - metric.Labels[observedLabels] = "" - } + for observedLabels := range observedMetricLabels[metric.Name()] { + metric.AddIfMissingLabelPair(observedLabels, "") } - metricKey := fmt.Sprintf("%s-%d", metric.Name, prom_model.LabelsToSignature(metric.Labels)) + metricKey := metric.Name() 
+ "-" + strconv.FormatUint(metric.LabelsSignature(), 10) if _, exists := metricKeys[metricKey]; !exists { metricKeys[metricKey] = struct{}{} output = append(output, metric) diff --git a/pkg/promutil/migrate_test.go b/pkg/promutil/migrate_test.go index 57dd3fbb..f4c44846 100644 --- a/pkg/promutil/migrate_test.go +++ b/pkg/promutil/migrate_test.go @@ -1,6 +1,7 @@ package promutil import ( + "fmt" "math" "testing" "time" @@ -47,14 +48,12 @@ func TestBuildNamespaceInfoMetrics(t *testing.T) { observedMetricLabels: map[string]model.LabelSet{}, labelsSnakeCase: false, expectedMetrics: []*PrometheusMetric{ - { - Name: "aws_elasticache_info", - Labels: map[string]string{ - "name": "arn:aws:elasticache:us-east-1:123456789012:cluster:redis-cluster", - "tag_CustomTag": "tag_Value", - }, - Value: 0, - }, + NewPrometheusMetric( + "aws_elasticache_info", + []string{"name", "tag_CustomTag"}, + []string{"arn:aws:elasticache:us-east-1:123456789012:cluster:redis-cluster", "tag_Value"}, + 0, + ), }, expectedLabels: map[string]model.LabelSet{ "aws_elasticache_info": map[string]struct{}{ @@ -87,14 +86,12 @@ func TestBuildNamespaceInfoMetrics(t *testing.T) { observedMetricLabels: map[string]model.LabelSet{}, labelsSnakeCase: true, expectedMetrics: []*PrometheusMetric{ - { - Name: "aws_elasticache_info", - Labels: map[string]string{ - "name": "arn:aws:elasticache:us-east-1:123456789012:cluster:redis-cluster", - "tag_custom_tag": "tag_Value", - }, - Value: 0, - }, + NewPrometheusMetric( + "aws_elasticache_info", + []string{"name", "tag_custom_tag"}, + []string{"arn:aws:elasticache:us-east-1:123456789012:cluster:redis-cluster", "tag_Value"}, + 0, + ), }, expectedLabels: map[string]model.LabelSet{ "aws_elasticache_info": map[string]struct{}{ @@ -124,14 +121,18 @@ func TestBuildNamespaceInfoMetrics(t *testing.T) { }, }, metrics: []*PrometheusMetric{ - { - Name: "aws_ec2_cpuutilization_maximum", - Labels: map[string]string{ - "name": "arn:aws:ec2:us-east-1:123456789012:instance/i-abc123", - "dimension_InstanceId": "i-abc123", + NewPrometheusMetric( + "aws_ec2_cpuutilization_maximum", + []string{ + "name", + "dimension_InstanceId", }, - Value: 0, - }, + []string{ + "arn:aws:ec2:us-east-1:123456789012:instance/i-abc123", + "i-abc123", + }, + 0, + ), }, observedMetricLabels: map[string]model.LabelSet{ "aws_ec2_cpuutilization_maximum": map[string]struct{}{ @@ -141,22 +142,30 @@ func TestBuildNamespaceInfoMetrics(t *testing.T) { }, labelsSnakeCase: true, expectedMetrics: []*PrometheusMetric{ - { - Name: "aws_ec2_cpuutilization_maximum", - Labels: map[string]string{ - "name": "arn:aws:ec2:us-east-1:123456789012:instance/i-abc123", - "dimension_InstanceId": "i-abc123", + NewPrometheusMetric( + "aws_ec2_cpuutilization_maximum", + []string{ + "name", + "dimension_InstanceId", }, - Value: 0, - }, - { - Name: "aws_elasticache_info", - Labels: map[string]string{ - "name": "arn:aws:elasticache:us-east-1:123456789012:cluster:redis-cluster", - "tag_custom_tag": "tag_Value", + []string{ + "arn:aws:ec2:us-east-1:123456789012:instance/i-abc123", + "i-abc123", }, - Value: 0, - }, + 0, + ), + NewPrometheusMetric( + "aws_elasticache_info", + []string{ + "name", + "tag_custom_tag", + }, + []string{ + "arn:aws:elasticache:us-east-1:123456789012:cluster:redis-cluster", + "tag_Value", + }, + 0, + ), }, expectedLabels: map[string]model.LabelSet{ "aws_ec2_cpuutilization_maximum": map[string]struct{}{ @@ -200,17 +209,24 @@ func TestBuildNamespaceInfoMetrics(t *testing.T) { observedMetricLabels: map[string]model.LabelSet{}, labelsSnakeCase: true, 
expectedMetrics: []*PrometheusMetric{ - { - Name: "aws_elasticache_info", - Labels: map[string]string{ - "name": "arn:aws:elasticache:us-east-1:123456789012:cluster:redis-cluster", - "tag_cache_name": "cache_instance_1", - "account_id": "12345", - "region": "us-east-2", - "custom_tag_billable_to": "api", + NewPrometheusMetric( + "aws_elasticache_info", + []string{ + "name", + "tag_cache_name", + "account_id", + "region", + "custom_tag_billable_to", }, - Value: 0, - }, + []string{ + "arn:aws:elasticache:us-east-1:123456789012:cluster:redis-cluster", + "cache_instance_1", + "12345", + "us-east-2", + "api", + }, + 0, + ), }, expectedLabels: map[string]model.LabelSet{ "aws_elasticache_info": map[string]struct{}{ @@ -246,14 +262,18 @@ func TestBuildNamespaceInfoMetrics(t *testing.T) { observedMetricLabels: map[string]model.LabelSet{}, labelsSnakeCase: false, expectedMetrics: []*PrometheusMetric{ - { - Name: "aws_sagemaker_trainingjobs_info", - Labels: map[string]string{ - "name": "arn:aws:sagemaker:us-east-1:123456789012:training-job/sagemaker-xgboost", - "tag_CustomTag": "tag_Value", + NewPrometheusMetric( + "aws_sagemaker_trainingjobs_info", + []string{ + "name", + "tag_CustomTag", }, - Value: 0, - }, + []string{ + "arn:aws:sagemaker:us-east-1:123456789012:training-job/sagemaker-xgboost", + "tag_Value", + }, + 0, + ), }, expectedLabels: map[string]model.LabelSet{ "aws_sagemaker_trainingjobs_info": map[string]struct{}{ @@ -379,51 +399,78 @@ func TestBuildMetrics(t *testing.T) { }}, labelsSnakeCase: false, expectedMetrics: []*PrometheusMetric{ - { - Name: "aws_elasticache_cpuutilization_average", - Value: 1, - Timestamp: ts, - Labels: map[string]string{ - "account_id": "123456789012", - "name": "arn:aws:elasticache:us-east-1:123456789012:cluster:redis-cluster", - "region": "us-east-1", - "dimension_CacheClusterId": "redis-cluster", + NewPrometheusMetricWithTimestamp( + "aws_elasticache_cpuutilization_average", + []string{ + "account_id", + "name", + "region", + "dimension_CacheClusterId", }, - }, - { - Name: "aws_elasticache_freeable_memory_average", - Value: 2, - Timestamp: ts, - Labels: map[string]string{ - "account_id": "123456789012", - "name": "arn:aws:elasticache:us-east-1:123456789012:cluster:redis-cluster", - "region": "us-east-1", - "dimension_CacheClusterId": "redis-cluster", + []string{ + "123456789012", + "arn:aws:elasticache:us-east-1:123456789012:cluster:redis-cluster", + "us-east-1", + "redis-cluster", }, - }, - { - Name: "aws_elasticache_network_bytes_in_average", - Value: 3, - Timestamp: ts, - Labels: map[string]string{ - "account_id": "123456789012", - "name": "arn:aws:elasticache:us-east-1:123456789012:cluster:redis-cluster", - "region": "us-east-1", - "dimension_CacheClusterId": "redis-cluster", + 1, + false, + ts, + ), + NewPrometheusMetricWithTimestamp( + "aws_elasticache_freeable_memory_average", + []string{ + "account_id", + "name", + "region", + "dimension_CacheClusterId", }, - }, - { - Name: "aws_elasticache_network_bytes_out_average", - Value: 4, - Timestamp: ts, - IncludeTimestamp: true, - Labels: map[string]string{ - "account_id": "123456789012", - "name": "arn:aws:elasticache:us-east-1:123456789012:cluster:redis-cluster", - "region": "us-east-1", - "dimension_CacheClusterId": "redis-cluster", + []string{ + "123456789012", + "arn:aws:elasticache:us-east-1:123456789012:cluster:redis-cluster", + "us-east-1", + "redis-cluster", }, - }, + 2, + false, + ts, + ), + NewPrometheusMetricWithTimestamp( + "aws_elasticache_network_bytes_in_average", + []string{ + "account_id", 
+ "name", + "region", + "dimension_CacheClusterId", + }, + []string{ + "123456789012", + "arn:aws:elasticache:us-east-1:123456789012:cluster:redis-cluster", + "us-east-1", + "redis-cluster", + }, + 3, + false, + ts, + ), + NewPrometheusMetricWithTimestamp( + "aws_elasticache_network_bytes_out_average", + []string{ + "account_id", + "name", + "region", + "dimension_CacheClusterId", + }, + []string{ + "123456789012", + "arn:aws:elasticache:us-east-1:123456789012:cluster:redis-cluster", + "us-east-1", + "redis-cluster", + }, + 4, + true, + ts, + ), }, expectedLabels: map[string]model.LabelSet{ "aws_elasticache_cpuutilization_average": { @@ -489,7 +536,6 @@ func TestBuildMetrics(t *testing.T) { AddCloudwatchTimestamp: false, }, Namespace: "AWS/ElastiCache", - Dimensions: []model.Dimension{ { Name: "CacheClusterId", @@ -547,42 +593,61 @@ func TestBuildMetrics(t *testing.T) { }}, labelsSnakeCase: false, expectedMetrics: []*PrometheusMetric{ - { - Name: "aws_elasticache_cpuutilization_average", - Value: 0, - Timestamp: ts, - Labels: map[string]string{ - "account_id": "123456789012", - "name": "arn:aws:elasticache:us-east-1:123456789012:cluster:redis-cluster", - "region": "us-east-1", - "dimension_CacheClusterId": "redis-cluster", + NewPrometheusMetricWithTimestamp( + "aws_elasticache_cpuutilization_average", + []string{ + "account_id", + "name", + "region", + "dimension_CacheClusterId", }, - IncludeTimestamp: false, - }, - { - Name: "aws_elasticache_freeable_memory_average", - Value: math.NaN(), - Timestamp: ts, - Labels: map[string]string{ - "account_id": "123456789012", - "name": "arn:aws:elasticache:us-east-1:123456789012:cluster:redis-cluster", - "region": "us-east-1", - "dimension_CacheClusterId": "redis-cluster", + []string{ + "123456789012", + "arn:aws:elasticache:us-east-1:123456789012:cluster:redis-cluster", + "us-east-1", + "redis-cluster", }, - IncludeTimestamp: false, - }, - { - Name: "aws_elasticache_network_bytes_in_average", - Value: 0, - Timestamp: ts, - Labels: map[string]string{ - "account_id": "123456789012", - "name": "arn:aws:elasticache:us-east-1:123456789012:cluster:redis-cluster", - "region": "us-east-1", - "dimension_CacheClusterId": "redis-cluster", + 0, + false, + ts, + ), + NewPrometheusMetricWithTimestamp( + "aws_elasticache_freeable_memory_average", + + []string{ + "account_id", + "name", + "region", + "dimension_CacheClusterId", }, - IncludeTimestamp: false, - }, + []string{ + "123456789012", + "arn:aws:elasticache:us-east-1:123456789012:cluster:redis-cluster", + "us-east-1", + "redis-cluster", + }, + math.NaN(), + false, + ts, + ), + NewPrometheusMetricWithTimestamp( + "aws_elasticache_network_bytes_in_average", + []string{ + "account_id", + "name", + "region", + "dimension_CacheClusterId", + }, + []string{ + "123456789012", + "arn:aws:elasticache:us-east-1:123456789012:cluster:redis-cluster", + "us-east-1", + "redis-cluster", + }, + 0, + false, + ts, + ), }, expectedLabels: map[string]model.LabelSet{ "aws_elasticache_cpuutilization_average": { @@ -639,17 +704,24 @@ func TestBuildMetrics(t *testing.T) { }}, labelsSnakeCase: true, expectedMetrics: []*PrometheusMetric{ - { - Name: "aws_elasticache_cpuutilization_average", - Value: 1, - Timestamp: ts, - Labels: map[string]string{ - "account_id": "123456789012", - "name": "arn:aws:elasticache:us-east-1:123456789012:cluster:redis-cluster", - "region": "us-east-1", - "dimension_cache_cluster_id": "redis-cluster", + NewPrometheusMetricWithTimestamp( + "aws_elasticache_cpuutilization_average", + []string{ + "account_id", + 
"name", + "region", + "dimension_cache_cluster_id", }, - }, + []string{ + "123456789012", + "arn:aws:elasticache:us-east-1:123456789012:cluster:redis-cluster", + "us-east-1", + "redis-cluster", + }, + 1, + false, + ts, + ), }, expectedLabels: map[string]model.LabelSet{ "aws_elasticache_cpuutilization_average": { @@ -694,17 +766,24 @@ func TestBuildMetrics(t *testing.T) { }}, labelsSnakeCase: true, expectedMetrics: []*PrometheusMetric{ - { - Name: "aws_sagemaker_trainingjobs_cpuutilization_average", - Value: 1, - Timestamp: ts, - Labels: map[string]string{ - "account_id": "123456789012", - "name": "arn:aws:sagemaker:us-east-1:123456789012:training-job/sagemaker-xgboost", - "region": "us-east-1", - "dimension_host": "sagemaker-xgboost", + NewPrometheusMetricWithTimestamp( + "aws_sagemaker_trainingjobs_cpuutilization_average", + []string{ + "account_id", + "name", + "region", + "dimension_host", }, - }, + []string{ + "123456789012", + "arn:aws:sagemaker:us-east-1:123456789012:training-job/sagemaker-xgboost", + "us-east-1", + "sagemaker-xgboost", + }, + 1, + false, + ts, + ), }, expectedLabels: map[string]model.LabelSet{ "aws_sagemaker_trainingjobs_cpuutilization_average": { @@ -749,17 +828,24 @@ func TestBuildMetrics(t *testing.T) { }}, labelsSnakeCase: true, expectedMetrics: []*PrometheusMetric{ - { - Name: "aws_glue_driver_aggregate_bytes_read_average", - Value: 1, - Timestamp: ts, - Labels: map[string]string{ - "account_id": "123456789012", - "name": "arn:aws:glue:us-east-1:123456789012:job/test-job", - "region": "us-east-1", - "dimension_job_name": "test-job", + NewPrometheusMetricWithTimestamp( + "aws_glue_driver_aggregate_bytes_read_average", + []string{ + "account_id", + "name", + "region", + "dimension_job_name", }, - }, + []string{ + "123456789012", + "arn:aws:glue:us-east-1:123456789012:job/test-job", + "us-east-1", + "test-job", + }, + 1, + false, + ts, + ), }, expectedLabels: map[string]model.LabelSet{ "aws_glue_driver_aggregate_bytes_read_average": { @@ -804,17 +890,24 @@ func TestBuildMetrics(t *testing.T) { }}, labelsSnakeCase: true, expectedMetrics: []*PrometheusMetric{ - { - Name: "aws_glue_aggregate_glue_jobs_bytes_read_average", - Value: 1, - Timestamp: ts, - Labels: map[string]string{ - "account_id": "123456789012", - "name": "arn:aws:glue:us-east-1:123456789012:job/test-job", - "region": "us-east-1", - "dimension_job_name": "test-job", + NewPrometheusMetricWithTimestamp( + "aws_glue_aggregate_glue_jobs_bytes_read_average", + []string{ + "account_id", + "name", + "region", + "dimension_job_name", }, - }, + []string{ + "123456789012", + "arn:aws:glue:us-east-1:123456789012:job/test-job", + "us-east-1", + "test-job", + }, + 1, + false, + ts, + ), }, expectedLabels: map[string]model.LabelSet{ "aws_glue_aggregate_glue_jobs_bytes_read_average": { @@ -862,18 +955,26 @@ func TestBuildMetrics(t *testing.T) { }}, labelsSnakeCase: true, expectedMetrics: []*PrometheusMetric{ - { - Name: "aws_elasticache_cpuutilization_average", - Value: 1, - Timestamp: ts, - Labels: map[string]string{ - "account_id": "123456789012", - "name": "arn:aws:elasticache:us-east-1:123456789012:cluster:redis-cluster", - "region": "us-east-1", - "dimension_cache_cluster_id": "redis-cluster", - "custom_tag_billable_to": "api", + NewPrometheusMetricWithTimestamp( + "aws_elasticache_cpuutilization_average", + []string{ + "account_id", + "name", + "region", + "dimension_cache_cluster_id", + "custom_tag_billable_to", }, - }, + []string{ + "123456789012", + 
"arn:aws:elasticache:us-east-1:123456789012:cluster:redis-cluster", + "us-east-1", + "redis-cluster", + "api", + }, + 1, + false, + ts, + ), }, expectedLabels: map[string]model.LabelSet{ "aws_elasticache_cpuutilization_average": { @@ -919,18 +1020,26 @@ func TestBuildMetrics(t *testing.T) { }}, labelsSnakeCase: true, expectedMetrics: []*PrometheusMetric{ - { - Name: "aws_elasticache_cpuutilization_average", - Value: 1, - Timestamp: ts, - Labels: map[string]string{ - "account_id": "123456789012", - "account_alias": "billingacct", - "name": "arn:aws:elasticache:us-east-1:123456789012:cluster:redis-cluster", - "region": "us-east-1", - "dimension_cache_cluster_id": "redis-cluster", + NewPrometheusMetricWithTimestamp( + "aws_elasticache_cpuutilization_average", + []string{ + "account_id", + "account_alias", + "name", + "region", + "dimension_cache_cluster_id", }, - }, + []string{ + "123456789012", + "billingacct", + "arn:aws:elasticache:us-east-1:123456789012:cluster:redis-cluster", + "us-east-1", + "redis-cluster", + }, + 1, + false, + ts, + ), }, expectedLabels: map[string]model.LabelSet{ "aws_elasticache_cpuutilization_average": { @@ -1117,8 +1226,8 @@ func Benchmark_BuildMetrics(b *testing.B) { // struct values are NaN because NaN != NaN func replaceNaNValues(metrics []*PrometheusMetric) []*PrometheusMetric { for _, metric := range metrics { - if math.IsNaN(metric.Value) { - metric.Value = 54321.0 + if math.IsNaN(metric.Value()) { + metric.SetValue(54321.0) } } return metrics @@ -1169,191 +1278,89 @@ func Test_EnsureLabelConsistencyAndRemoveDuplicates(t *testing.T) { { name: "adds missing labels", metrics: []*PrometheusMetric{ - { - Name: "metric1", - Labels: map[string]string{"label1": "value1"}, - Value: 1.0, - }, - { - Name: "metric1", - Labels: map[string]string{"label2": "value2"}, - Value: 2.0, - }, - { - Name: "metric1", - Labels: map[string]string{}, - Value: 3.0, - }, + NewPrometheusMetric("metric1", []string{"label1"}, []string{"value1"}, 1.0), + NewPrometheusMetric("metric1", []string{"label2"}, []string{"value2"}, 2.0), + NewPrometheusMetric("metric1", []string{}, []string{}, 3.0), }, observedLabels: map[string]model.LabelSet{"metric1": {"label1": {}, "label2": {}, "label3": {}}}, output: []*PrometheusMetric{ - { - Name: "metric1", - Labels: map[string]string{"label1": "value1", "label2": "", "label3": ""}, - Value: 1.0, - }, - { - Name: "metric1", - Labels: map[string]string{"label1": "", "label3": "", "label2": "value2"}, - Value: 2.0, - }, - { - Name: "metric1", - Labels: map[string]string{"label1": "", "label2": "", "label3": ""}, - Value: 3.0, - }, + NewPrometheusMetric("metric1", []string{"label1", "label2", "label3"}, []string{"value1", "", ""}, 1.0), + NewPrometheusMetric("metric1", []string{"label1", "label3", "label2"}, []string{"", "", "value2"}, 2.0), + NewPrometheusMetric("metric1", []string{"label1", "label2", "label3"}, []string{"", "", ""}, 3.0), }, }, { name: "duplicate metric", metrics: []*PrometheusMetric{ - { - Name: "metric1", - Labels: map[string]string{"label1": "value1"}, - }, - { - Name: "metric1", - Labels: map[string]string{"label1": "value1"}, - }, + NewPrometheusMetric("metric1", []string{"label1"}, []string{"value1"}, 1.0), + NewPrometheusMetric("metric1", []string{"label1"}, []string{"value1"}, 1.0), }, observedLabels: map[string]model.LabelSet{}, output: []*PrometheusMetric{ - { - Name: "metric1", - Labels: map[string]string{"label1": "value1"}, - }, + NewPrometheusMetric("metric1", []string{"label1"}, []string{"value1"}, 1.0), }, }, { name: 
"duplicate metric, multiple labels", metrics: []*PrometheusMetric{ - { - Name: "metric1", - Labels: map[string]string{"label1": "value1", "label2": "value2"}, - }, - { - Name: "metric1", - Labels: map[string]string{"label2": "value2", "label1": "value1"}, - }, + NewPrometheusMetric("metric1", []string{"label1", "label2"}, []string{"value1", "value2"}, 1.0), + NewPrometheusMetric("metric1", []string{"label2", "label1"}, []string{"value2", "value1"}, 1.0), }, observedLabels: map[string]model.LabelSet{}, output: []*PrometheusMetric{ - { - Name: "metric1", - Labels: map[string]string{"label1": "value1", "label2": "value2"}, - }, + NewPrometheusMetric("metric1", []string{"label1", "label2"}, []string{"value1", "value2"}, 1.0), }, }, { name: "metric with different labels", metrics: []*PrometheusMetric{ - { - Name: "metric1", - Labels: map[string]string{"label1": "value1"}, - }, - { - Name: "metric1", - Labels: map[string]string{"label2": "value2"}, - }, + NewPrometheusMetric("metric1", []string{"label1"}, []string{"value1"}, 1.0), + NewPrometheusMetric("metric1", []string{"label2"}, []string{"value2"}, 1.0), }, observedLabels: map[string]model.LabelSet{}, output: []*PrometheusMetric{ - { - Name: "metric1", - Labels: map[string]string{"label1": "value1"}, - }, - { - Name: "metric1", - Labels: map[string]string{"label2": "value2"}, - }, + NewPrometheusMetric("metric1", []string{"label1"}, []string{"value1"}, 1.0), + NewPrometheusMetric("metric1", []string{"label2"}, []string{"value2"}, 1.0), }, }, { name: "two metrics", metrics: []*PrometheusMetric{ - { - Name: "metric1", - Labels: map[string]string{"label1": "value1"}, - }, - { - Name: "metric2", - Labels: map[string]string{"label1": "value1"}, - }, + NewPrometheusMetric("metric1", []string{"label1"}, []string{"value1"}, 1.0), + NewPrometheusMetric("metric2", []string{"label1"}, []string{"value1"}, 1.0), }, observedLabels: map[string]model.LabelSet{}, output: []*PrometheusMetric{ - { - Name: "metric1", - Labels: map[string]string{"label1": "value1"}, - }, - { - Name: "metric2", - Labels: map[string]string{"label1": "value1"}, - }, + NewPrometheusMetric("metric1", []string{"label1"}, []string{"value1"}, 1.0), + NewPrometheusMetric("metric2", []string{"label1"}, []string{"value1"}, 1.0), }, }, { name: "two metrics with different labels", metrics: []*PrometheusMetric{ - { - Name: "metric1", - Labels: map[string]string{"label1": "value1"}, - }, - { - Name: "metric2", - Labels: map[string]string{"label2": "value2"}, - }, + NewPrometheusMetric("metric1", []string{"label1"}, []string{"value1"}, 1.0), + NewPrometheusMetric("metric2", []string{"label2"}, []string{"value2"}, 1.0), }, observedLabels: map[string]model.LabelSet{}, output: []*PrometheusMetric{ - { - Name: "metric1", - Labels: map[string]string{"label1": "value1"}, - }, - { - Name: "metric2", - Labels: map[string]string{"label2": "value2"}, - }, + NewPrometheusMetric("metric1", []string{"label1"}, []string{"value1"}, 1.0), + NewPrometheusMetric("metric2", []string{"label2"}, []string{"value2"}, 1.0), }, }, { name: "multiple duplicates and non-duplicates", metrics: []*PrometheusMetric{ - { - Name: "metric2", - Labels: map[string]string{"label2": "value2"}, - }, - { - Name: "metric2", - Labels: map[string]string{"label1": "value1"}, - }, - { - Name: "metric1", - Labels: map[string]string{"label1": "value1"}, - }, - { - Name: "metric1", - Labels: map[string]string{"label1": "value1"}, - }, - { - Name: "metric1", - Labels: map[string]string{"label1": "value1"}, - }, + NewPrometheusMetric("metric2", 
[]string{"label2"}, []string{"value2"}, 1.0), + NewPrometheusMetric("metric2", []string{"label1"}, []string{"value1"}, 1.0), + NewPrometheusMetric("metric1", []string{"label1"}, []string{"value1"}, 1.0), + NewPrometheusMetric("metric1", []string{"label1"}, []string{"value1"}, 1.0), + NewPrometheusMetric("metric1", []string{"label1"}, []string{"value1"}, 1.0), }, observedLabels: map[string]model.LabelSet{}, output: []*PrometheusMetric{ - { - Name: "metric2", - Labels: map[string]string{"label2": "value2"}, - }, - { - Name: "metric2", - Labels: map[string]string{"label1": "value1"}, - }, - { - Name: "metric1", - Labels: map[string]string{"label1": "value1"}, - }, + NewPrometheusMetric("metric2", []string{"label2"}, []string{"value2"}, 1.0), + NewPrometheusMetric("metric2", []string{"label1"}, []string{"value1"}, 1.0), + NewPrometheusMetric("metric1", []string{"label1"}, []string{"value1"}, 1.0), }, }, } @@ -1365,3 +1372,79 @@ func Test_EnsureLabelConsistencyAndRemoveDuplicates(t *testing.T) { }) } } + +func Benchmark_EnsureLabelConsistencyAndRemoveDuplicates(b *testing.B) { + metrics := []*PrometheusMetric{ + NewPrometheusMetric("metric1", []string{"label1"}, []string{"value1"}, 1.0), + NewPrometheusMetric("metric1", []string{"label2"}, []string{"value2"}, 2.0), + NewPrometheusMetric("metric1", []string{}, []string{}, 3.0), + NewPrometheusMetric("metric1", []string{"label1"}, []string{"value1"}, 1.0), + } + observedLabels := map[string]model.LabelSet{"metric1": {"label1": {}, "label2": {}, "label3": {}}} + + var output []*PrometheusMetric + + b.ReportAllocs() + b.ResetTimer() + + for i := 0; i < b.N; i++ { + output = EnsureLabelConsistencyAndRemoveDuplicates(metrics, observedLabels) + } + + expectedOutput := []*PrometheusMetric{ + NewPrometheusMetric("metric1", []string{"label1", "label2", "label3"}, []string{"value1", "", ""}, 1.0), + NewPrometheusMetric("metric1", []string{"label1", "label3", "label2"}, []string{"", "", "value2"}, 2.0), + NewPrometheusMetric("metric1", []string{"label1", "label2", "label3"}, []string{"", "", ""}, 3.0), + } + require.Equal(b, expectedOutput, output) +} + +func Benchmark_createPrometheusLabels(b *testing.B) { + ts := time.Date(2024, time.January, 1, 0, 0, 0, 0, time.UTC) + + cwd := &model.CloudwatchData{ + MetricName: "CPUUtilization", + MetricMigrationParams: model.MetricMigrationParams{ + NilToZero: true, + AddCloudwatchTimestamp: false, + }, + Namespace: "AWS/ElastiCache", + GetMetricDataResult: &model.GetMetricDataResult{ + Statistic: "Average", + Datapoint: aws.Float64(1), + Timestamp: ts, + }, + Dimensions: []model.Dimension{}, + ResourceName: "arn:aws:elasticache:us-east-1:123456789012:cluster:redis-cluster", + Tags: []model.Tag{}, + } + + contextLabelKeys := []string{} + contextLabelValues := []string{} + + for i := 0; i < 10000; i++ { + contextLabelKeys = append(contextLabelKeys, fmt.Sprintf("context_label_%d", i)) + contextLabelValues = append(contextLabelValues, fmt.Sprintf("context_value_%d", i)) + + cwd.Dimensions = append(cwd.Dimensions, model.Dimension{ + Name: fmt.Sprintf("dimension_%d", i), + Value: fmt.Sprintf("value_%d", i), + }) + + cwd.Tags = append(cwd.Tags, model.Tag{ + Key: fmt.Sprintf("tag_%d", i), + Value: fmt.Sprintf("value_%d", i), + }) + } + + var labelKeys, labelValues []string + + b.ReportAllocs() + b.ResetTimer() + for i := 0; i < b.N; i++ { + labelKeys, labelValues = createPrometheusLabels(cwd, false, contextLabelKeys, contextLabelValues, logging.NewNopLogger()) + } + + require.Equal(b, 30001, len(labelKeys)) + 
require.Equal(b, 30001, len(labelValues)) +} diff --git a/pkg/promutil/prometheus.go b/pkg/promutil/prometheus.go index c47b8c98..df55265e 100644 --- a/pkg/promutil/prometheus.go +++ b/pkg/promutil/prometheus.go @@ -1,12 +1,16 @@ package promutil import ( + "slices" + "sort" "strings" "time" + "github.com/cespare/xxhash/v2" + prom_model "github.com/prometheus/common/model" + "github.com/prometheus/client_golang/prometheus" "github.com/prometheus/common/model" - "golang.org/x/exp/maps" ) var ( @@ -91,12 +95,106 @@ var replacer = strings.NewReplacer( "%", "_percent", ) +// labelPair joins two slices of keys and values +// and allows simultaneous sorting. +type labelPair struct { + keys []string + vals []string +} + +func (p labelPair) Len() int { + return len(p.keys) +} + +func (p labelPair) Swap(i, j int) { + p.keys[i], p.keys[j] = p.keys[j], p.keys[i] + p.vals[i], p.vals[j] = p.vals[j], p.vals[i] +} + +func (p labelPair) Less(i, j int) bool { + return p.keys[i] < p.keys[j] +} + +// PrometheusMetric is a precursor of prometheus.Metric. +// Labels are kept sorted by key to ensure consistent ordering. type PrometheusMetric struct { - Name string - Labels map[string]string - Value float64 - IncludeTimestamp bool - Timestamp time.Time + name string + labels labelPair + value float64 + includeTimestamp bool + timestamp time.Time +} + +func NewPrometheusMetric(name string, labelKeys, labelValues []string, value float64) *PrometheusMetric { + return NewPrometheusMetricWithTimestamp(name, labelKeys, labelValues, value, false, time.Time{}) +} + +func NewPrometheusMetricWithTimestamp(name string, labelKeys, labelValues []string, value float64, includeTimestamp bool, timestamp time.Time) *PrometheusMetric { + if len(labelKeys) != len(labelValues) { + panic("labelKeys and labelValues have different length") + } + + labels := labelPair{labelKeys, labelValues} + sort.Sort(labels) + + return &PrometheusMetric{ + name: name, + labels: labels, + value: value, + includeTimestamp: includeTimestamp, + timestamp: timestamp, + } +} + +func (p *PrometheusMetric) Name() string { + return p.name +} + +func (p *PrometheusMetric) Labels() ([]string, []string) { + return p.labels.keys, p.labels.vals +} + +func (p *PrometheusMetric) Value() float64 { + return p.value +} + +// SetValue should be used only for testing +func (p *PrometheusMetric) SetValue(v float64) { + p.value = v +} + +func (p *PrometheusMetric) IncludeTimestamp() bool { + return p.includeTimestamp +} + +func (p *PrometheusMetric) Timestamp() time.Time { + return p.timestamp +} + +var separatorByteSlice = []byte{prom_model.SeparatorByte} + +// LabelsSignature returns a hash of the labels. It emulates +// prometheus' LabelsToSignature implementation but works on +// labelPair instead of map[string]string. Assumes that +// the labels are sorted. 
+func (p *PrometheusMetric) LabelsSignature() uint64 { + xxh := xxhash.New() + for i, key := range p.labels.keys { + _, _ = xxh.WriteString(key) + _, _ = xxh.Write(separatorByteSlice) + _, _ = xxh.WriteString(p.labels.vals[i]) + _, _ = xxh.Write(separatorByteSlice) + } + return xxh.Sum64() +} + +func (p *PrometheusMetric) AddIfMissingLabelPair(key, val string) { + // TODO(cristian): might use binary search here + if !slices.Contains(p.labels.keys, key) { + p.labels.keys = append(p.labels.keys, key) + p.labels.vals = append(p.labels.vals, val) + sort.Sort(p.labels) + } } type PrometheusCollector struct { @@ -124,37 +222,30 @@ func (p *PrometheusCollector) Collect(metrics chan<- prometheus.Metric) { } func toConstMetrics(metrics []*PrometheusMetric) []prometheus.Metric { - // We keep two fast lookup maps here one for the prometheus.Desc of a metric which can be reused for each metric with - // the same name and the expected label key order of a particular metric name. + // Keep a fast lookup map for the prometheus.Desc of a metric which can be reused for each metric with + // the same name and the expected label key order of a particular metric name (sorting of keys and values + // is guaranteed by the implementation of PrometheusMetric). // The prometheus.Desc object is expensive to create and being able to reuse it for all metrics with the same name - // results in large performance gain. We use the other map because metrics created using the Desc only provide label - // values and they must be provided in the exact same order as registered in the Desc. + // results in large performance gain. metricToDesc := map[string]*prometheus.Desc{} - metricToExpectedLabelOrder := map[string][]string{} result := make([]prometheus.Metric, 0, len(metrics)) for _, metric := range metrics { - metricName := metric.Name + metricName := metric.Name() + labelKeys, labelValues := metric.Labels() + if _, ok := metricToDesc[metricName]; !ok { - labelKeys := maps.Keys(metric.Labels) metricToDesc[metricName] = prometheus.NewDesc(metricName, "Help is not implemented yet.", labelKeys, nil) - metricToExpectedLabelOrder[metricName] = labelKeys } metricsDesc := metricToDesc[metricName] - // Create the label values using the label order of the Desc - labelValues := make([]string, 0, len(metric.Labels)) - for _, labelKey := range metricToExpectedLabelOrder[metricName] { - labelValues = append(labelValues, metric.Labels[labelKey]) - } - - promMetric, err := prometheus.NewConstMetric(metricsDesc, prometheus.GaugeValue, metric.Value, labelValues...) + promMetric, err := prometheus.NewConstMetric(metricsDesc, prometheus.GaugeValue, metric.Value(), labelValues...) 
if err != nil { // If for whatever reason the metric or metricsDesc is considered invalid this will ensure the error is // reported through the collector promMetric = prometheus.NewInvalidMetric(metricsDesc, err) - } else if metric.IncludeTimestamp { - promMetric = prometheus.NewMetricWithTimestamp(metric.Timestamp, promMetric) + } else if metric.IncludeTimestamp() { + promMetric = prometheus.NewMetricWithTimestamp(metric.Timestamp(), promMetric) } result = append(result, promMetric) diff --git a/pkg/promutil/prometheus_test.go b/pkg/promutil/prometheus_test.go index fd4c33c4..65a05221 100644 --- a/pkg/promutil/prometheus_test.go +++ b/pkg/promutil/prometheus_test.go @@ -123,18 +123,8 @@ func TestPromStringTag(t *testing.T) { func TestNewPrometheusCollector_CanReportMetricsAndErrors(t *testing.T) { metrics := []*PrometheusMetric{ - { - Name: "this*is*not*valid", - Labels: map[string]string{}, - Value: 0, - IncludeTimestamp: false, - }, - { - Name: "this_is_valid", - Labels: map[string]string{"key": "value1"}, - Value: 0, - IncludeTimestamp: false, - }, + NewPrometheusMetric("this*is*not*valid", []string{}, []string{}, 0), + NewPrometheusMetric("this_is_valid", []string{"key"}, []string{"value1"}, 0), } collector := NewPrometheusCollector(metrics) registry := prometheus.NewRegistry() @@ -153,31 +143,32 @@ func TestNewPrometheusCollector_CanReportMetrics(t *testing.T) { labelSet2 := map[string]string{"key2": "out", "key3": "of", "key1": "order"} labelSet3 := map[string]string{"key2": "out", "key1": "of", "key3": "order"} metrics := []*PrometheusMetric{ - { - Name: "metric_with_labels", - Labels: labelSet1, - Value: 1, - IncludeTimestamp: false, - }, - { - Name: "metric_with_labels", - Labels: labelSet2, - Value: 2, - IncludeTimestamp: false, - }, - { - Name: "metric_with_labels", - Labels: labelSet3, - Value: 3, - IncludeTimestamp: false, - }, - { - Name: "metric_with_timestamp", - Labels: map[string]string{}, - Value: 1, - IncludeTimestamp: true, - Timestamp: ts, - }, + NewPrometheusMetric( + "metric_with_labels", + []string{"key1", "key2", "key3"}, + []string{"value", "value", "value"}, + 1, + ), + NewPrometheusMetric( + "metric_with_labels", + []string{"key2", "key3", "key1"}, + []string{"out", "of", "order"}, + 2, + ), + NewPrometheusMetric( + "metric_with_labels", + []string{"key2", "key1", "key3"}, + []string{"out", "of", "order"}, + 3, + ), + NewPrometheusMetricWithTimestamp( + "metric_with_timestamp", + []string{}, + []string{}, + 1, + true, + ts, + ), } collector := NewPrometheusCollector(metrics) @@ -231,3 +222,29 @@ func TestNewPrometheusCollector_CanReportMetrics(t *testing.T) { assert.Equal(t, ts.UnixMilli(), *tsMetric.TimestampMs) assert.Equal(t, 1.0, *tsMetric.Gauge.Value) } + +func Benchmark_NewPrometheusCollector(b *testing.B) { + metrics := []*PrometheusMetric{ + NewPrometheusMetric("metric1", []string{"key1"}, []string{"value11"}, 1.0), + NewPrometheusMetric("metric1", []string{"key1"}, []string{"value12"}, 1.0), + NewPrometheusMetric("metric2", []string{"key2"}, []string{"value21"}, 2.0), + NewPrometheusMetric("metric2", []string{"key2"}, []string{"value22"}, 2.0), + NewPrometheusMetric("metric3", []string{"key3"}, []string{"value31"}, 3.0), + NewPrometheusMetric("metric3", []string{"key3"}, []string{"value32"}, 3.0), + NewPrometheusMetric("metric4", []string{"key4"}, []string{"value41"}, 4.0), + NewPrometheusMetric("metric4", []string{"key4"}, []string{"value42"}, 4.0), + NewPrometheusMetric("metric5", []string{"key5"}, []string{"value51"}, 5.0), + 
NewPrometheusMetric("metric5", []string{"key5"}, []string{"value52"}, 5.0), + } + + var collector *PrometheusCollector + + b.ReportAllocs() + b.ResetTimer() + for i := 0; i < b.N; i++ { + collector = NewPrometheusCollector(metrics) + } + + registry := prometheus.NewRegistry() + require.NoError(b, registry.Register(collector)) +} From 3188d16d47bdf28c3619449a7da24dfb5781cbe6 Mon Sep 17 00:00:00 2001 From: Cristian Greco Date: Tue, 24 Sep 2024 14:47:06 +0200 Subject: [PATCH 2/4] Address review feedback --- pkg/promutil/prometheus.go | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/pkg/promutil/prometheus.go b/pkg/promutil/prometheus.go index df55265e..680c4a9f 100644 --- a/pkg/promutil/prometheus.go +++ b/pkg/promutil/prometheus.go @@ -95,8 +95,8 @@ var replacer = strings.NewReplacer( "%", "_percent", ) -// labelPair joins two slices of keys and values -// and allows simultaneous sorting. +// labelPair joins two slices of keys and values and allows +// simultaneous sorting. It implements sort.Interface. type labelPair struct { keys []string vals []string @@ -175,8 +175,10 @@ var separatorByteSlice = []byte{prom_model.SeparatorByte} // LabelsSignature returns a hash of the labels. It emulates // prometheus' LabelsToSignature implementation but works on -// labelPair instead of map[string]string. Assumes that -// the labels are sorted. +// labelPair instead of map[string]string. +// Assumes that the labels are sorted. Notably, this uses +// a different hash function than prometheus, but it doesn't +// matter for the purpose of computing a unique signature. func (p *PrometheusMetric) LabelsSignature() uint64 { xxh := xxhash.New() for i, key := range p.labels.keys { From 1d2077ab28e6e97b62bd8d88692eeaaf1eb32b93 Mon Sep 17 00:00:00 2001 From: Cristian Greco Date: Wed, 25 Sep 2024 10:33:26 +0200 Subject: [PATCH 3/4] Handle case of duplicate labels in metrics --- pkg/exporter.go | 2 +- pkg/promutil/migrate.go | 12 +++++++--- pkg/promutil/migrate_test.go | 35 ++++++++++++++++++---------- pkg/promutil/prometheus.go | 19 +++++++++++++++ pkg/promutil/prometheus_test.go | 41 +++++++++++++++++++++++++++++++++ 5 files changed, 93 insertions(+), 16 deletions(-) diff --git a/pkg/exporter.go b/pkg/exporter.go index 5ead0ce0..a7c638bc 100644 --- a/pkg/exporter.go +++ b/pkg/exporter.go @@ -196,7 +196,7 @@ func UpdateMetrics( return nil } metrics, observedMetricLabels = promutil.BuildNamespaceInfoMetrics(tagsData, metrics, observedMetricLabels, options.labelsSnakeCase, logger) - metrics = promutil.EnsureLabelConsistencyAndRemoveDuplicates(metrics, observedMetricLabels) + metrics = promutil.EnsureLabelConsistencyAndRemoveDuplicates(metrics, observedMetricLabels, logger) registry.MustRegister(promutil.NewPrometheusCollector(metrics)) return nil diff --git a/pkg/promutil/migrate.go b/pkg/promutil/migrate.go index b0a3f283..cb2e1f01 100644 --- a/pkg/promutil/migrate.go +++ b/pkg/promutil/migrate.go @@ -290,13 +290,19 @@ func recordLabelsForMetric(metricName string, labelKeys []string, observedMetric // EnsureLabelConsistencyAndRemoveDuplicates ensures that every metric has the same set of labels based on the data // in observedMetricLabels and that there are no duplicate metrics. 
// Prometheus requires that all metrics with the same name have the same set of labels and that no duplicates are registered -func EnsureLabelConsistencyAndRemoveDuplicates(metrics []*PrometheusMetric, observedMetricLabels map[string]model.LabelSet) []*PrometheusMetric { +func EnsureLabelConsistencyAndRemoveDuplicates(metrics []*PrometheusMetric, observedMetricLabels map[string]model.LabelSet, logger logging.Logger) []*PrometheusMetric { metricKeys := make(map[string]struct{}, len(metrics)) output := make([]*PrometheusMetric, 0, len(metrics)) for _, metric := range metrics { - for observedLabels := range observedMetricLabels[metric.Name()] { - metric.AddIfMissingLabelPair(observedLabels, "") + observedLabels := observedMetricLabels[metric.Name()] + for label := range observedLabels { + metric.AddIfMissingLabelPair(label, "") + } + + if len(observedLabels) != metric.LabelsLen() { + logger.Warn("metric has duplicate labels", "metric_name", metric.Name(), "observed_labels", len(observedLabels), "labels_len", metric.LabelsLen()) + metric.RemoveDuplicateLabels() } metricKey := metric.Name() + "-" + strconv.FormatUint(metric.LabelsSignature(), 10) diff --git a/pkg/promutil/migrate_test.go b/pkg/promutil/migrate_test.go index f4c44846..ae0336ba 100644 --- a/pkg/promutil/migrate_test.go +++ b/pkg/promutil/migrate_test.go @@ -1289,13 +1289,23 @@ func Test_EnsureLabelConsistencyAndRemoveDuplicates(t *testing.T) { NewPrometheusMetric("metric1", []string{"label1", "label2", "label3"}, []string{"", "", ""}, 3.0), }, }, + { + name: "removes duplicate labels", + metrics: []*PrometheusMetric{ + NewPrometheusMetric("metric1", []string{"label1", "label1", "label2"}, []string{"value1", "value1", "value2"}, 1.0), + }, + observedLabels: map[string]model.LabelSet{"metric1": {"label1": {}, "label2": {}}}, + output: []*PrometheusMetric{ + NewPrometheusMetric("metric1", []string{"label1", "label2"}, []string{"value1", "value2"}, 1.0), + }, + }, { name: "duplicate metric", metrics: []*PrometheusMetric{ NewPrometheusMetric("metric1", []string{"label1"}, []string{"value1"}, 1.0), NewPrometheusMetric("metric1", []string{"label1"}, []string{"value1"}, 1.0), }, - observedLabels: map[string]model.LabelSet{}, + observedLabels: map[string]model.LabelSet{"metric1": {"label1": {}}}, output: []*PrometheusMetric{ NewPrometheusMetric("metric1", []string{"label1"}, []string{"value1"}, 1.0), }, @@ -1306,7 +1316,7 @@ func Test_EnsureLabelConsistencyAndRemoveDuplicates(t *testing.T) { NewPrometheusMetric("metric1", []string{"label1", "label2"}, []string{"value1", "value2"}, 1.0), NewPrometheusMetric("metric1", []string{"label2", "label1"}, []string{"value2", "value1"}, 1.0), }, - observedLabels: map[string]model.LabelSet{}, + observedLabels: map[string]model.LabelSet{"metric1": {"label1": {}, "label2": {}}}, output: []*PrometheusMetric{ NewPrometheusMetric("metric1", []string{"label1", "label2"}, []string{"value1", "value2"}, 1.0), }, @@ -1317,10 +1327,10 @@ func Test_EnsureLabelConsistencyAndRemoveDuplicates(t *testing.T) { NewPrometheusMetric("metric1", []string{"label1"}, []string{"value1"}, 1.0), NewPrometheusMetric("metric1", []string{"label2"}, []string{"value2"}, 1.0), }, - observedLabels: map[string]model.LabelSet{}, + observedLabels: map[string]model.LabelSet{"metric1": {"label1": {}, "label2": {}}}, output: []*PrometheusMetric{ - NewPrometheusMetric("metric1", []string{"label1"}, []string{"value1"}, 1.0), - NewPrometheusMetric("metric1", []string{"label2"}, []string{"value2"}, 1.0), + NewPrometheusMetric("metric1", 
[]string{"label1", "label2"}, []string{"value1", ""}, 1.0), + NewPrometheusMetric("metric1", []string{"label1", "label2"}, []string{"", "value2"}, 1.0), }, }, { @@ -1329,7 +1339,7 @@ func Test_EnsureLabelConsistencyAndRemoveDuplicates(t *testing.T) { NewPrometheusMetric("metric1", []string{"label1"}, []string{"value1"}, 1.0), NewPrometheusMetric("metric2", []string{"label1"}, []string{"value1"}, 1.0), }, - observedLabels: map[string]model.LabelSet{}, + observedLabels: map[string]model.LabelSet{"metric1": {"label1": {}}, "metric2": {"label1": {}}}, output: []*PrometheusMetric{ NewPrometheusMetric("metric1", []string{"label1"}, []string{"value1"}, 1.0), NewPrometheusMetric("metric2", []string{"label1"}, []string{"value1"}, 1.0), @@ -1341,7 +1351,7 @@ func Test_EnsureLabelConsistencyAndRemoveDuplicates(t *testing.T) { NewPrometheusMetric("metric1", []string{"label1"}, []string{"value1"}, 1.0), NewPrometheusMetric("metric2", []string{"label2"}, []string{"value2"}, 1.0), }, - observedLabels: map[string]model.LabelSet{}, + observedLabels: map[string]model.LabelSet{"metric1": {"label1": {}}, "metric2": {"label2": {}}}, output: []*PrometheusMetric{ NewPrometheusMetric("metric1", []string{"label1"}, []string{"value1"}, 1.0), NewPrometheusMetric("metric2", []string{"label2"}, []string{"value2"}, 1.0), @@ -1356,10 +1366,10 @@ func Test_EnsureLabelConsistencyAndRemoveDuplicates(t *testing.T) { NewPrometheusMetric("metric1", []string{"label1"}, []string{"value1"}, 1.0), NewPrometheusMetric("metric1", []string{"label1"}, []string{"value1"}, 1.0), }, - observedLabels: map[string]model.LabelSet{}, + observedLabels: map[string]model.LabelSet{"metric1": {"label1": {}}, "metric2": {"label1": {}, "label2": {}}}, output: []*PrometheusMetric{ - NewPrometheusMetric("metric2", []string{"label2"}, []string{"value2"}, 1.0), - NewPrometheusMetric("metric2", []string{"label1"}, []string{"value1"}, 1.0), + NewPrometheusMetric("metric2", []string{"label1", "label2"}, []string{"", "value2"}, 1.0), + NewPrometheusMetric("metric2", []string{"label1", "label2"}, []string{"value1", ""}, 1.0), NewPrometheusMetric("metric1", []string{"label1"}, []string{"value1"}, 1.0), }, }, @@ -1367,7 +1377,7 @@ func Test_EnsureLabelConsistencyAndRemoveDuplicates(t *testing.T) { for _, tc := range testCases { t.Run(tc.name, func(t *testing.T) { - actual := EnsureLabelConsistencyAndRemoveDuplicates(tc.metrics, tc.observedLabels) + actual := EnsureLabelConsistencyAndRemoveDuplicates(tc.metrics, tc.observedLabels, logging.NewNopLogger()) require.ElementsMatch(t, tc.output, actual) }) } @@ -1381,6 +1391,7 @@ func Benchmark_EnsureLabelConsistencyAndRemoveDuplicates(b *testing.B) { NewPrometheusMetric("metric1", []string{"label1"}, []string{"value1"}, 1.0), } observedLabels := map[string]model.LabelSet{"metric1": {"label1": {}, "label2": {}, "label3": {}}} + logger := logging.NewNopLogger() var output []*PrometheusMetric @@ -1388,7 +1399,7 @@ func Benchmark_EnsureLabelConsistencyAndRemoveDuplicates(b *testing.B) { b.ResetTimer() for i := 0; i < b.N; i++ { - output = EnsureLabelConsistencyAndRemoveDuplicates(metrics, observedLabels) + output = EnsureLabelConsistencyAndRemoveDuplicates(metrics, observedLabels, logger) } expectedOutput := []*PrometheusMetric{ diff --git a/pkg/promutil/prometheus.go b/pkg/promutil/prometheus.go index 680c4a9f..2ad39d95 100644 --- a/pkg/promutil/prometheus.go +++ b/pkg/promutil/prometheus.go @@ -154,6 +154,10 @@ func (p *PrometheusMetric) Labels() ([]string, []string) { return p.labels.keys, p.labels.vals } +func (p 
*PrometheusMetric) LabelsLen() int { + return len(p.labels.keys) +} + func (p *PrometheusMetric) Value() float64 { return p.value } @@ -199,6 +203,21 @@ func (p *PrometheusMetric) AddIfMissingLabelPair(key, val string) { } } +func (p *PrometheusMetric) RemoveDuplicateLabels() { + seen := map[string]struct{}{} + idx := 0 + for i, key := range p.labels.keys { + if _, ok := seen[key]; !ok { + seen[key] = struct{}{} + p.labels.keys[idx] = key + p.labels.vals[idx] = p.labels.vals[i] + idx++ + } + } + p.labels.keys = p.labels.keys[:idx] + p.labels.vals = p.labels.vals[:idx] +} + type PrometheusCollector struct { metrics []prometheus.Metric } diff --git a/pkg/promutil/prometheus_test.go b/pkg/promutil/prometheus_test.go index 65a05221..251e7e8d 100644 --- a/pkg/promutil/prometheus_test.go +++ b/pkg/promutil/prometheus_test.go @@ -121,6 +121,47 @@ func TestPromStringTag(t *testing.T) { } } +func TestPrometheusMetric(t *testing.T) { + t.Run("NewPrometheusMetric panics with wrong label size", func(t *testing.T) { + require.Panics(t, func() { + NewPrometheusMetric("metric1", []string{"key1"}, []string{}, 1.0) + }) + require.Panics(t, func() { + NewPrometheusMetric("metric1", []string{}, []string{"label1"}, 1.0) + }) + require.Panics(t, func() { + NewPrometheusMetric("metric1", []string{"key1", "key2"}, []string{"label1"}, 1.0) + }) + require.Panics(t, func() { + NewPrometheusMetric("metric1", []string{"key1"}, []string{"label1", "label2"}, 1.0) + }) + }) + + t.Run("NewPrometheusMetric sorts labels", func(t *testing.T) { + metric := NewPrometheusMetric("metric", []string{"key2", "key1"}, []string{"value2", "value1"}, 1.0) + keys, vals := metric.Labels() + require.Equal(t, []string{"key1", "key2"}, keys) + require.Equal(t, []string{"value1", "value2"}, vals) + }) + + t.Run("AddIfMissingLabelPair keeps labels sorted", func(t *testing.T) { + metric := NewPrometheusMetric("metric", []string{"key2"}, []string{"value2"}, 1.0) + metric.AddIfMissingLabelPair("key1", "value1") + keys, vals := metric.Labels() + require.Equal(t, []string{"key1", "key2"}, keys) + require.Equal(t, []string{"value1", "value2"}, vals) + }) + + t.Run("RemoveDuplicateLabels", func(t *testing.T) { + metric := NewPrometheusMetric("metric", []string{"key1", "key1"}, []string{"value1", "value2"}, 1.0) + require.Equal(t, 2, metric.LabelsLen()) + metric.RemoveDuplicateLabels() + keys, vals := metric.Labels() + require.Equal(t, []string{"key1"}, keys) + require.Equal(t, []string{"value1"}, vals) + }) +} + func TestNewPrometheusCollector_CanReportMetricsAndErrors(t *testing.T) { metrics := []*PrometheusMetric{ NewPrometheusMetric("this*is*not*valid", []string{}, []string{}, 0), From ec1739a2f3b4306089316234c5d1850e08a754dc Mon Sep 17 00:00:00 2001 From: Cristian Greco Date: Thu, 26 Sep 2024 09:56:25 +0200 Subject: [PATCH 4/4] extend logging for detected duplicated labels --- pkg/promutil/migrate.go | 4 ++-- pkg/promutil/prometheus.go | 6 +++++- pkg/promutil/prometheus_test.go | 11 ++++++----- 3 files changed, 13 insertions(+), 8 deletions(-) diff --git a/pkg/promutil/migrate.go b/pkg/promutil/migrate.go index cb2e1f01..86e90455 100644 --- a/pkg/promutil/migrate.go +++ b/pkg/promutil/migrate.go @@ -301,8 +301,8 @@ func EnsureLabelConsistencyAndRemoveDuplicates(metrics []*PrometheusMetric, obse } if len(observedLabels) != metric.LabelsLen() { - logger.Warn("metric has duplicate labels", "metric_name", metric.Name(), "observed_labels", len(observedLabels), "labels_len", metric.LabelsLen()) - metric.RemoveDuplicateLabels() + duplicates := 
metric.RemoveDuplicateLabels() + logger.Warn("metric has duplicate labels", "metric_name", metric.Name(), "observed_labels", len(observedLabels), "labels_len", metric.LabelsLen(), "duplicated_labels", duplicates) } metricKey := metric.Name() + "-" + strconv.FormatUint(metric.LabelsSignature(), 10) diff --git a/pkg/promutil/prometheus.go b/pkg/promutil/prometheus.go index 2ad39d95..b8421ba5 100644 --- a/pkg/promutil/prometheus.go +++ b/pkg/promutil/prometheus.go @@ -203,8 +203,9 @@ func (p *PrometheusMetric) AddIfMissingLabelPair(key, val string) { } } -func (p *PrometheusMetric) RemoveDuplicateLabels() { +func (p *PrometheusMetric) RemoveDuplicateLabels() []string { seen := map[string]struct{}{} + duplicates := []string{} idx := 0 for i, key := range p.labels.keys { if _, ok := seen[key]; !ok { @@ -212,10 +213,13 @@ func (p *PrometheusMetric) RemoveDuplicateLabels() { p.labels.keys[idx] = key p.labels.vals[idx] = p.labels.vals[i] idx++ + } else { + duplicates = append(duplicates, key) } } p.labels.keys = p.labels.keys[:idx] p.labels.vals = p.labels.vals[:idx] + return duplicates } type PrometheusCollector struct { diff --git a/pkg/promutil/prometheus_test.go b/pkg/promutil/prometheus_test.go index 251e7e8d..aaa483ff 100644 --- a/pkg/promutil/prometheus_test.go +++ b/pkg/promutil/prometheus_test.go @@ -153,12 +153,13 @@ func TestPrometheusMetric(t *testing.T) { }) t.Run("RemoveDuplicateLabels", func(t *testing.T) { - metric := NewPrometheusMetric("metric", []string{"key1", "key1"}, []string{"value1", "value2"}, 1.0) - require.Equal(t, 2, metric.LabelsLen()) - metric.RemoveDuplicateLabels() + metric := NewPrometheusMetric("metric", []string{"key1", "key2", "key1", "key3"}, []string{"value-key1", "value-key2", "value-dup-key1", "value-key3"}, 1.0) + require.Equal(t, 4, metric.LabelsLen()) + duplicates := metric.RemoveDuplicateLabels() keys, vals := metric.Labels() - require.Equal(t, []string{"key1"}, keys) - require.Equal(t, []string{"value1"}, vals) + require.Equal(t, []string{"key1", "key2", "key3"}, keys) + require.Equal(t, []string{"value-key1", "value-key2", "value-key3"}, vals) + require.Equal(t, []string{"key1"}, duplicates) }) }
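
For reference, below is a minimal usage sketch of the slice-based API introduced by this series; it is not part of any patch. It assumes the patched pkg/promutil package at the import path used throughout the diffs; the metric name and label values are made up for illustration.

```go
package main

import (
	"fmt"

	"github.com/nerdswords/yet-another-cloudwatch-exporter/pkg/promutil"
)

func main() {
	// Label keys are passed in different orders; the constructor sorts the
	// pairs by key, so both metrics end up with the same label layout.
	m1 := promutil.NewPrometheusMetric("aws_demo_metric",
		[]string{"region", "account_id"}, []string{"us-east-1", "123456789012"}, 1)
	m2 := promutil.NewPrometheusMetric("aws_demo_metric",
		[]string{"account_id", "region"}, []string{"123456789012", "us-east-1"}, 1)

	keys, vals := m1.Labels()
	fmt.Println(keys, vals) // [account_id region] [123456789012 us-east-1]

	// Identical label sets hash to identical signatures, which is what
	// EnsureLabelConsistencyAndRemoveDuplicates relies on to drop duplicates.
	fmt.Println(m1.LabelsSignature() == m2.LabelsSignature()) // true
}
```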