Skip to content

Commit

Permalink
Exclude tags and metrics from Datadog sink by prefix (#772)
Browse files Browse the repository at this point in the history
  • Loading branch information
kaplanelad authored and ChimeraCoder committed Jan 7, 2020
1 parent 4332512 commit 00be4dd
Show file tree
Hide file tree
Showing 6 changed files with 105 additions and 33 deletions.
4 changes: 4 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -1,5 +1,9 @@
# 14.0.0, in progress

## Added
* The Datadog sink can now filter metric names by prefix with `datadog_metric_name_prefix_drops`. Thanks, [kaplanelad](https://github.com/kaplanelad)!
* The Datadog sink can now exclude tag prefixes from metrics whose names match a given prefix with `datadog_exclude_tags_prefix_by_prefix_metric`. Thanks, [kaplanelad](https://github.com/kaplanelad)!

# 13.0.0, 2020-01-03

## Added
Expand Down
22 changes: 13 additions & 9 deletions config.go
Original file line number Diff line number Diff line change
@@ -1,15 +1,19 @@
package veneur

type Config struct {
Aggregates []string `yaml:"aggregates"`
AwsAccessKeyID string `yaml:"aws_access_key_id"`
AwsRegion string `yaml:"aws_region"`
AwsS3Bucket string `yaml:"aws_s3_bucket"`
AwsSecretAccessKey string `yaml:"aws_secret_access_key"`
BlockProfileRate int `yaml:"block_profile_rate"`
CountUniqueTimeseries bool `yaml:"count_unique_timeseries"`
DatadogAPIHostname string `yaml:"datadog_api_hostname"`
DatadogAPIKey string `yaml:"datadog_api_key"`
Aggregates []string `yaml:"aggregates"`
AwsAccessKeyID string `yaml:"aws_access_key_id"`
AwsRegion string `yaml:"aws_region"`
AwsS3Bucket string `yaml:"aws_s3_bucket"`
AwsSecretAccessKey string `yaml:"aws_secret_access_key"`
BlockProfileRate int `yaml:"block_profile_rate"`
CountUniqueTimeseries bool `yaml:"count_unique_timeseries"`
DatadogAPIHostname string `yaml:"datadog_api_hostname"`
DatadogAPIKey string `yaml:"datadog_api_key"`
DatadogExcludeTagsPrefixByPrefixMetric []struct {
MetricPrefix string `yaml:"metric_prefix"`
Tags []string `yaml:"tags"`
} `yaml:"datadog_exclude_tags_prefix_by_prefix_metric"`
DatadogFlushMaxPerBody int `yaml:"datadog_flush_max_per_body"`
DatadogMetricNamePrefixDrops []string `yaml:"datadog_metric_name_prefix_drops"`
DatadogSpanBufferSize int `yaml:"datadog_span_buffer_size"`
Expand Down
7 changes: 7 additions & 0 deletions example.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -285,6 +285,13 @@ datadog_trace_api_address: ""
datadog_metric_name_prefix_drops:
- "an_ignorable_metric."

# Exclude tag *prefixes* from metrics whose names match specific *prefixes*.
# Tags matching these prefixes will be dropped from the matching metrics before sending to Datadog.
datadog_exclude_tags_prefix_by_prefix_metric:
- metric_prefix: "metric_prefix"
tags:
- "an_ignorable_tag_prefix"

# The size of the ring buffer used for retaining spans during a flush interval.
datadog_span_buffer_size: 16384

Expand Down
7 changes: 7 additions & 0 deletions server.go
Original file line number Diff line number Diff line change
Expand Up @@ -495,9 +495,16 @@ func NewFromConfig(logger *logrus.Logger, conf Config) (*Server, error) {
ret.metricSinks = append(ret.metricSinks, sfxSink)
}
if conf.DatadogAPIKey != "" && conf.DatadogAPIHostname != "" {

excludeTagsPrefixByPrefixMetric := map[string][]string{}
for _, m := range conf.DatadogExcludeTagsPrefixByPrefixMetric {
excludeTagsPrefixByPrefixMetric[m.MetricPrefix] = m.Tags
}

ddSink, err := datadog.NewDatadogMetricSink(
ret.interval.Seconds(), conf.DatadogFlushMaxPerBody, conf.Hostname, ret.Tags,
conf.DatadogAPIHostname, conf.DatadogAPIKey, ret.HTTPClient, log, conf.DatadogMetricNamePrefixDrops,
excludeTagsPrefixByPrefixMetric,
)
if err != nil {
return ret, err
Expand Down
63 changes: 42 additions & 21 deletions sinks/datadog/datadog.go
Original file line number Diff line number Diff line change
Expand Up @@ -32,17 +32,18 @@ const datadogSpanType = "web"
const datadogSpanBufferSize = 1 << 14

// DatadogMetricSink sends metrics, events, and service checks to the
// Datadog API.
type DatadogMetricSink struct {
	HTTPClient      *http.Client
	APIKey          string
	DDHostname      string
	hostname        string
	flushMaxPerBody int
	tags            []string
	interval        float64
	traceClient     *trace.Client
	log             *logrus.Logger
	// metricNamePrefixDrops lists metric-name prefixes; matching metrics
	// are dropped entirely before flushing.
	metricNamePrefixDrops []string
	// excludedTags lists tag prefixes stripped from every metric.
	excludedTags []string
	// excludeTagsPrefixByPrefixMetric maps a metric-name prefix to the tag
	// prefixes that are stripped from metrics whose name matches it.
	excludeTagsPrefixByPrefixMetric map[string][]string
}

// DDEvent represents the structure of datadog's undocumented /intake endpoint
Expand Down Expand Up @@ -81,17 +82,18 @@ type DDServiceCheck struct {
}

// NewDatadogMetricSink creates a new Datadog sink for trace spans.
func NewDatadogMetricSink(interval float64, flushMaxPerBody int, hostname string, tags []string, ddHostname string, apiKey string, httpClient *http.Client, log *logrus.Logger, metricNamePrefixDrops []string) (*DatadogMetricSink, error) {
func NewDatadogMetricSink(interval float64, flushMaxPerBody int, hostname string, tags []string, ddHostname string, apiKey string, httpClient *http.Client, log *logrus.Logger, metricNamePrefixDrops []string, excludeTagsPrefixByPrefixMetric map[string][]string) (*DatadogMetricSink, error) {
return &DatadogMetricSink{
HTTPClient: httpClient,
APIKey: apiKey,
DDHostname: ddHostname,
interval: interval,
flushMaxPerBody: flushMaxPerBody,
hostname: hostname,
tags: tags,
metricNamePrefixDrops: metricNamePrefixDrops,
log: log,
HTTPClient: httpClient,
APIKey: apiKey,
DDHostname: ddHostname,
interval: interval,
flushMaxPerBody: flushMaxPerBody,
hostname: hostname,
tags: tags,
metricNamePrefixDrops: metricNamePrefixDrops,
excludeTagsPrefixByPrefixMetric: excludeTagsPrefixByPrefixMetric,
log: log,
}, nil
}

Expand Down Expand Up @@ -270,6 +272,17 @@ METRICLOOP:
// Defensively copy tags since we're gonna mutate it
tags := make([]string, 0, len(dd.tags))

// Prepare exclude tags by specific prefix metric
var excludeTagsPrefixByPrefixMetric []string
if len(dd.excludeTagsPrefixByPrefixMetric) > 0 {
for prefixMetric, tags := range dd.excludeTagsPrefixByPrefixMetric {
if strings.HasPrefix(m.Name, prefixMetric) {
excludeTagsPrefixByPrefixMetric = tags
break
}
}
}

for i := range dd.tags {
exclude := false
for j := range dd.excludedTags {
Expand Down Expand Up @@ -301,6 +314,14 @@ METRICLOOP:
exclude = true
break
}

}

for i := range excludeTagsPrefixByPrefixMetric {
if strings.HasPrefix(tag, excludeTagsPrefixByPrefixMetric[i]) {
exclude = true
break
}
}
if !exclude {
tags = append(tags, tag)
Expand Down
35 changes: 32 additions & 3 deletions sinks/datadog/datadog_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -294,7 +294,7 @@ func TestDatadogMetricRouting(t *testing.T) {

func TestDatadogFlushEvents(t *testing.T) {
transport := &DatadogRoundTripper{Endpoint: "/intake", Contains: ""}
ddSink, err := NewDatadogMetricSink(10, 2500, "example.com", []string{"gloobles:toots"}, "http://example.com", "secret", &http.Client{Transport: transport}, logrus.New(), nil)
ddSink, err := NewDatadogMetricSink(10, 2500, "example.com", []string{"gloobles:toots"}, "http://example.com", "secret", &http.Client{Transport: transport}, logrus.New(), nil, nil)
assert.NoError(t, err)

testEvent := ssf.SSFSample{
Expand Down Expand Up @@ -350,7 +350,7 @@ func TestDatadogFlushEvents(t *testing.T) {

func TestDatadogFlushOtherMetricsForServiceChecks(t *testing.T) {
transport := &DatadogRoundTripper{Endpoint: "/api/v1/check_run", Contains: ""}
ddSink, err := NewDatadogMetricSink(10, 2500, "example.com", []string{"gloobles:toots"}, "http://example.com", "secret", &http.Client{Transport: transport}, logrus.New(), nil)
ddSink, err := NewDatadogMetricSink(10, 2500, "example.com", []string{"gloobles:toots"}, "http://example.com", "secret", &http.Client{Transport: transport}, logrus.New(), nil, nil)
assert.NoError(t, err)

testCheck := ssf.SSFSample{
Expand All @@ -373,7 +373,7 @@ func TestDatadogFlushOtherMetricsForServiceChecks(t *testing.T) {

func TestDatadogFlushServiceCheck(t *testing.T) {
transport := &DatadogRoundTripper{Endpoint: "/api/v1/check_run", Contains: ""}
ddSink, err := NewDatadogMetricSink(10, 2500, "example.com", []string{"gloobles:toots"}, "http://example.com", "secret", &http.Client{Transport: transport}, logrus.New(), nil)
ddSink, err := NewDatadogMetricSink(10, 2500, "example.com", []string{"gloobles:toots"}, "http://example.com", "secret", &http.Client{Transport: transport}, logrus.New(), nil, nil)
assert.NoError(t, err)

testCheck := samplers.InterMetric{
Expand Down Expand Up @@ -468,3 +468,32 @@ func TestDataDogDropMetric(t *testing.T) {
assert.Empty(t, serviceChecks, "No service check metrics are reported")
assert.Equal(t, 2, len(ddMetrics))
}

// TestDataDogDropTagsByMetricPrefix verifies that tags whose prefixes are
// configured under a matching metric-name prefix are stripped, while
// metrics that do not match any configured prefix keep all of their tags.
func TestDataDogDropTagsByMetricPrefix(t *testing.T) {
	ddSink := DatadogMetricSink{
		excludeTagsPrefixByPrefixMetric: map[string][]string{
			// Any metric whose name starts with "remove.a" loses every
			// tag that starts with "tag-ab".
			"remove.a": {"tag-ab"},
		},
	}

	testsMetricCount := []struct {
		Name             string
		Metric           samplers.InterMetric
		expectedTagCount int
	}{
		{"keeps tags when metric prefix does not match", samplers.InterMetric{Name: "foo.a.b", Tags: []string{"tag-a", "tag-ab", "tag-abc"}}, 3},
		{"drops tag prefixes on longer metric name", samplers.InterMetric{Name: "remove.a.b", Tags: []string{"tag-a", "tag-ab", "tag-abc"}}, 1},
		{"drops tag prefixes on exact metric name", samplers.InterMetric{Name: "remove.a", Tags: []string{"tag-a", "tag-ab"}}, 1},
	}

	for _, test := range testsMetricCount {
		t.Run(test.Name, func(t *testing.T) {
			metrics := []samplers.InterMetric{test.Metric}
			ddMetrics, serviceChecks := ddSink.finalizeMetrics(metrics)
			assert.Empty(t, serviceChecks, "No service check metrics are reported")
			assert.Equal(t, test.expectedTagCount, len(ddMetrics[0].Tags))
		})
	}
}

0 comments on commit 00be4dd

Please sign in to comment.