Allow configuration of the percentile metrics to submit in Datadog API reporter #1360

Merged: 1 commit, Oct 11, 2024
reporters/kamon-datadog/src/main/resources/reference.conf: 5 additions, 0 deletions
@@ -70,6 +70,11 @@ kamon {
# The log level in which to log failures to submit metrics.
failure-log-level = "error"

# For histograms, which percentiles to submit.
# Each value configured here will correspond to a different custom metric submitted to Datadog.
# Currently only applicable to the API reporter.
percentiles = [95.0]

# All time values are collected in nanoseconds,
# to scale before sending to datadog set "time-units" to "s" or "ms" or "µs".
# Value "n" is equivalent to omitting the setting
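For reference, overriding the new setting from application code might look like the sketch below. This is illustrative only and not part of the change set: the object name is made up, the chosen percentile values are arbitrary, and the same values could just as well be placed in an application.conf.

import com.typesafe.config.ConfigFactory
import kamon.Kamon

object ConfigurePercentilesExample extends App {
  // Illustrative override of kamon.datadog.percentiles; the values are arbitrary.
  val withPercentiles = ConfigFactory.parseString(
    """
      |kamon.datadog {
      |  percentiles = [50.0, 95.0, 99.0, 99.9]
      |}
    """.stripMargin
  )

  // Layer the override on top of whatever configuration Kamon has already loaded.
  Kamon.reconfigure(withPercentiles.withFallback(Kamon.config()))
}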
@@ -21,18 +21,17 @@ import java.nio.charset.StandardCharsets
import java.text.{DecimalFormat, DecimalFormatSymbols}
import java.time.Duration
import java.util.Locale

import com.typesafe.config.Config
import kamon.metric.MeasurementUnit.Dimension.{Information, Time}
import kamon.metric.{MeasurementUnit, MetricSnapshot, PeriodSnapshot}
import kamon.tag.{Tag, TagSet}
import kamon.util.{EnvironmentTags, Filter}
import kamon.{module, Kamon}
import kamon.{Kamon, module}
import kamon.datadog.DatadogAPIReporter.Configuration
import kamon.module.{MetricReporter, ModuleFactory}
import org.slf4j.LoggerFactory
import org.slf4j.event.Level

import scala.collection.JavaConverters._
import scala.util.{Failure, Success}

class DatadogAPIReporterFactory extends ModuleFactory {
@@ -82,6 +81,12 @@ class DatadogAPIReporter(
val interval = Math.round(Duration.between(snapshot.from, snapshot.to).toMillis() / 1000d)
val seriesBuilder = new StringBuilder()

// Format the percentile for the metric name: whole-number percentiles drop the
// trailing ".0" (95.0 -> "95"); fractional percentiles keep it (94.5 -> "94.5").
@inline
def doubleToPercentileString(double: Double) = {
if (double == double.toLong) f"${double.toLong}%d"
else f"$double%s"
}

def addDistribution(metric: MetricSnapshot.Distributions): Unit = {
val unit = metric.settings.unit
metric.instruments.foreach { d =>
@@ -91,12 +96,14 @@
addMetric(metric.name + ".avg", valueFormat.format(scale(average, unit)), gauge, d.tags)
addMetric(metric.name + ".count", valueFormat.format(dist.count), count, d.tags)
addMetric(metric.name + ".median", valueFormat.format(scale(dist.percentile(50d).value, unit)), gauge, d.tags)
addMetric(
metric.name + ".95percentile",
valueFormat.format(scale(dist.percentile(95d).value, unit)),
gauge,
d.tags
)
configuration.percentiles.foreach { p =>
addMetric(
metric.name + s".${doubleToPercentileString(p)}percentile",
valueFormat.format(scale(dist.percentile(p).value, unit)),
gauge,
d.tags
)
}
addMetric(metric.name + ".max", valueFormat.format(scale(dist.max, unit)), gauge, d.tags)
addMetric(metric.name + ".min", valueFormat.format(scale(dist.min, unit)), gauge, d.tags)
}
@@ -164,6 +171,7 @@ private object DatadogAPIReporter {

case class Configuration(
httpConfig: Config,
percentiles: Set[Double],
timeUnit: MeasurementUnit,
informationUnit: MeasurementUnit,
extraTags: Seq[(String, String)],
@@ -187,6 +195,7 @@

Configuration(
datadogConfig.getConfig("api"),
percentiles = datadogConfig.getDoubleList("percentiles").asScala.toList.map(_.toDouble).toSet,
timeUnit = readTimeUnit(datadogConfig.getString("time-unit")),
informationUnit = readInformationUnit(datadogConfig.getString("information-unit")),
// Remove the "host" tag since it gets added to the datadog payload separately
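A side note on naming: doubleToPercentileString keeps the previously hard-coded ".95percentile" suffix intact for the default configuration, since whole-number percentiles are rendered without the trailing ".0". Below is a small standalone sketch of the resulting metric names; it is illustrative only, and suffixFor is a hypothetical stand-in that mirrors the helper rather than the reporter's actual code path.

object PercentileNamingExample extends App {
  // Mirrors doubleToPercentileString: whole numbers drop the ".0", fractional values keep it.
  def suffixFor(p: Double): String =
    if (p == p.toLong) s"${p.toLong}percentile" else s"${p}percentile"

  assert(suffixFor(95.0) == "95percentile")   // default; same name as before this change
  assert(suffixFor(99.0) == "99percentile")
  assert(suffixFor(94.5) == "94.5percentile") // fractional percentiles keep the decimal part

  // Example metric names as they would be submitted for a "test.timer" distribution.
  println(Seq(95.0, 99.0, 94.5).map(p => s"test.timer.${suffixFor(p)}").mkString(", "))
}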
@@ -22,6 +22,43 @@ class DatadogAPIReporterSpec extends AbstractHttpReporter with Matchers with Rec
new DatadogAPIReporterFactory().create(ModuleFactory.Settings(Kamon.config(), ExecutionContext.global))
val now = Instant.ofEpochMilli(1523395554)

val examplePeriodWithDistributions: PeriodSnapshot = {
val distributionExample = new Distribution {
override def dynamicRange: DynamicRange = ???
override def min: Long = 0
override def max: Long = 10
override def sum: Long = 100
override def count: Long = 5
override def percentile(rank: Double): Distribution.Percentile = new Percentile {
override def rank: Double = 0
override def value: Long = 0
override def countAtRank: Long = 0
}
override def percentiles: Seq[Distribution.Percentile] = ???
override def percentilesIterator: Iterator[Distribution.Percentile] = ???
override def buckets: Seq[Distribution.Bucket] = ???
override def bucketsIterator: Iterator[Distribution.Bucket] = ???
}
PeriodSnapshot.apply(
now.minusMillis(1000),
now,
Nil,
Nil,
Nil,
MetricSnapshot.ofDistributions(
"test.timer",
"test",
Metric.Settings.ForDistributionInstrument(
MeasurementUnit.none,
java.time.Duration.ZERO,
DynamicRange.Default
),
Instrument.Snapshot.apply(TagSet.Empty, distributionExample) :: Nil
) :: Nil,
Nil
)
}

"sends metrics - compressed" in {
val baseUrl = mockResponse("/test", new MockResponse().setStatus("HTTP/1.1 200 OK"))
applyConfig("kamon.datadog.api.api-url = \"" + baseUrl + "\"")
@@ -90,50 +127,38 @@ class DatadogAPIReporterSpec extends AbstractHttpReporter with Matchers with Rec

}

"send timer metrics" in {
"send timer metrics with the p95 percentile by default" in {
val baseUrl = mockResponse("/test", new MockResponse().setStatus("HTTP/1.1 200 OK"))
applyConfig("kamon.datadog.api.api-url = \"" + baseUrl + "\"")
applyConfig("kamon.datadog.api.api-key = \"dummy\"")
applyConfig("kamon.datadog.api.compression = false")
reporter.reconfigure(Kamon.config())

val distribution = new Distribution {
override def dynamicRange: DynamicRange = ???
override def min: Long = 0
override def max: Long = 10
override def sum: Long = 100
override def count: Long = 5
override def percentile(rank: Double): Distribution.Percentile = new Percentile {
override def rank: Double = 0
override def value: Long = 0
override def countAtRank: Long = 0
}
override def percentiles: Seq[Distribution.Percentile] = ???
override def percentilesIterator: Iterator[Distribution.Percentile] = ???
override def buckets: Seq[Distribution.Bucket] = ???
override def bucketsIterator: Iterator[Distribution.Bucket] = ???
}

reporter.reportPeriodSnapshot(
PeriodSnapshot.apply(
now.minusMillis(1000),
now,
Nil,
Nil,
Nil,
MetricSnapshot.ofDistributions(
"test.timer",
"test",
Metric.Settings.ForDistributionInstrument(
MeasurementUnit.none,
java.time.Duration.ZERO,
DynamicRange.Default
),
Instrument.Snapshot.apply(TagSet.Empty, distribution) :: Nil
) :: Nil,
Nil
reporter.reportPeriodSnapshot(examplePeriodWithDistributions)
val request = server.takeRequest()
request.getRequestUrl.toString shouldEqual baseUrl + "?api_key=dummy"
request.getMethod shouldEqual "POST"
Json.parse(request.getBody.readUtf8()) shouldEqual Json
.parse(
"""{"series":[
|{"metric":"test.timer.avg","interval":1,"points":[[1523394,20]],"type":"gauge","host":"test","tags":["env:staging","service:kamon-application"]},
|{"metric":"test.timer.count","interval":1,"points":[[1523394,5]],"type":"count","host":"test","tags":["env:staging","service:kamon-application"]},
|{"metric":"test.timer.median","interval":1,"points":[[1523394,0]],"type":"gauge","host":"test","tags":["env:staging","service:kamon-application"]},
|{"metric":"test.timer.95percentile","interval":1,"points":[[1523394,0]],"type":"gauge","host":"test","tags":["env:staging","service:kamon-application"]},
|{"metric":"test.timer.max","interval":1,"points":[[1523394,10]],"type":"gauge","host":"test","tags":["env:staging","service:kamon-application"]},
|{"metric":"test.timer.min","interval":1,"points":[[1523394,0]],"type":"gauge","host":"test","tags":["env:staging","service:kamon-application"]}]}""".stripMargin
)
)
}

"send timer metrics allowing configuration of percentiles to submit" in {
val baseUrl = mockResponse("/test", new MockResponse().setStatus("HTTP/1.1 200 OK"))
applyConfig("kamon.datadog.api.api-url = \"" + baseUrl + "\"")
applyConfig("kamon.datadog.api.api-key = \"dummy\"")
applyConfig("kamon.datadog.api.compression = false")
applyConfig("kamon.datadog.percentiles = [95.0, 99, 94.5]")
reporter.reconfigure(Kamon.config())

reporter.reportPeriodSnapshot(examplePeriodWithDistributions)
val request = server.takeRequest()
request.getRequestUrl.toString shouldEqual baseUrl + "?api_key=dummy"
request.getMethod shouldEqual "POST"
@@ -144,6 +169,31 @@ class DatadogAPIReporterSpec extends AbstractHttpReporter with Matchers with Rec
|{"metric":"test.timer.count","interval":1,"points":[[1523394,5]],"type":"count","host":"test","tags":["env:staging","service:kamon-application"]},
|{"metric":"test.timer.median","interval":1,"points":[[1523394,0]],"type":"gauge","host":"test","tags":["env:staging","service:kamon-application"]},
|{"metric":"test.timer.95percentile","interval":1,"points":[[1523394,0]],"type":"gauge","host":"test","tags":["env:staging","service:kamon-application"]},
|{"metric":"test.timer.99percentile","interval":1,"points":[[1523394,0]],"type":"gauge","host":"test","tags":["env:staging","service:kamon-application"]},
|{"metric":"test.timer.94.5percentile","interval":1,"points":[[1523394,0]],"type":"gauge","host":"test","tags":["env:staging","service:kamon-application"]},
|{"metric":"test.timer.max","interval":1,"points":[[1523394,10]],"type":"gauge","host":"test","tags":["env:staging","service:kamon-application"]},
|{"metric":"test.timer.min","interval":1,"points":[[1523394,0]],"type":"gauge","host":"test","tags":["env:staging","service:kamon-application"]}]}""".stripMargin
)
}

"send timer metrics without percentiles" in {
val baseUrl = mockResponse("/test", new MockResponse().setStatus("HTTP/1.1 200 OK"))
applyConfig("kamon.datadog.api.api-url = \"" + baseUrl + "\"")
applyConfig("kamon.datadog.api.api-key = \"dummy\"")
applyConfig("kamon.datadog.api.compression = false")
applyConfig("kamon.datadog.percentiles = []")
reporter.reconfigure(Kamon.config())

reporter.reportPeriodSnapshot(examplePeriodWithDistributions)
val request = server.takeRequest()
request.getRequestUrl.toString shouldEqual baseUrl + "?api_key=dummy"
request.getMethod shouldEqual "POST"
Json.parse(request.getBody.readUtf8()) shouldEqual Json
.parse(
"""{"series":[
|{"metric":"test.timer.avg","interval":1,"points":[[1523394,20]],"type":"gauge","host":"test","tags":["env:staging","service:kamon-application"]},
|{"metric":"test.timer.count","interval":1,"points":[[1523394,5]],"type":"count","host":"test","tags":["env:staging","service:kamon-application"]},
|{"metric":"test.timer.median","interval":1,"points":[[1523394,0]],"type":"gauge","host":"test","tags":["env:staging","service:kamon-application"]},
|{"metric":"test.timer.max","interval":1,"points":[[1523394,10]],"type":"gauge","host":"test","tags":["env:staging","service:kamon-application"]},
|{"metric":"test.timer.min","interval":1,"points":[[1523394,0]],"type":"gauge","host":"test","tags":["env:staging","service:kamon-application"]}]}""".stripMargin
)
Expand Down