Skip to content

Commit

Permalink
wire up scrape_classic_histogram for the prometheus converter (#5336)
Browse files Browse the repository at this point in the history
* wire up scrape_classic_histogram for the prometheus converter

Signed-off-by: erikbaranowski <39704712+erikbaranowski@users.noreply.github.com>

---------

Signed-off-by: erikbaranowski <39704712+erikbaranowski@users.noreply.github.com>
  • Loading branch information
erikbaranowski authored Sep 29, 2023
1 parent e3b5856 commit 9404e2f
Show file tree
Hide file tree
Showing 3 changed files with 24 additions and 26 deletions.
44 changes: 21 additions & 23 deletions converter/internal/prometheusconvert/scrape.go
Original file line number Diff line number Diff line change
Expand Up @@ -28,10 +28,6 @@ func appendPrometheusScrape(pb *prometheusBlocks, scrapeConfig *prom_config.Scra
func validatePrometheusScrape(scrapeConfig *prom_config.ScrapeConfig) diag.Diagnostics {
var diags diag.Diagnostics

if scrapeConfig.ScrapeClassicHistograms {
diags.Add(diag.SeverityLevelError, "unsupported scrape_classic_histograms for scrape_configs")
}

if scrapeConfig.NativeHistogramBucketLimit != 0 {
diags.Add(diag.SeverityLevelError, "unsupported native_histogram_bucket_limit for scrape_configs")
}
Expand All @@ -47,25 +43,27 @@ func toScrapeArguments(scrapeConfig *prom_config.ScrapeConfig, forwardTo []stora
}

return &scrape.Arguments{
Targets: targets,
ForwardTo: forwardTo,
JobName: scrapeConfig.JobName,
HonorLabels: scrapeConfig.HonorLabels,
HonorTimestamps: scrapeConfig.HonorTimestamps,
Params: scrapeConfig.Params,
ScrapeInterval: time.Duration(scrapeConfig.ScrapeInterval),
ScrapeTimeout: time.Duration(scrapeConfig.ScrapeTimeout),
MetricsPath: scrapeConfig.MetricsPath,
Scheme: scrapeConfig.Scheme,
BodySizeLimit: scrapeConfig.BodySizeLimit,
SampleLimit: scrapeConfig.SampleLimit,
TargetLimit: scrapeConfig.TargetLimit,
LabelLimit: scrapeConfig.LabelLimit,
LabelNameLengthLimit: scrapeConfig.LabelNameLengthLimit,
LabelValueLengthLimit: scrapeConfig.LabelValueLengthLimit,
HTTPClientConfig: *ToHttpClientConfig(&scrapeConfig.HTTPClientConfig),
ExtraMetrics: false,
Clustering: scrape.Clustering{Enabled: false},
Targets: targets,
ForwardTo: forwardTo,
JobName: scrapeConfig.JobName,
HonorLabels: scrapeConfig.HonorLabels,
HonorTimestamps: scrapeConfig.HonorTimestamps,
Params: scrapeConfig.Params,
ScrapeClassicHistograms: scrapeConfig.ScrapeClassicHistograms,
ScrapeInterval: time.Duration(scrapeConfig.ScrapeInterval),
ScrapeTimeout: time.Duration(scrapeConfig.ScrapeTimeout),
MetricsPath: scrapeConfig.MetricsPath,
Scheme: scrapeConfig.Scheme,
BodySizeLimit: scrapeConfig.BodySizeLimit,
SampleLimit: scrapeConfig.SampleLimit,
TargetLimit: scrapeConfig.TargetLimit,
LabelLimit: scrapeConfig.LabelLimit,
LabelNameLengthLimit: scrapeConfig.LabelNameLengthLimit,
LabelValueLengthLimit: scrapeConfig.LabelValueLengthLimit,
HTTPClientConfig: *ToHttpClientConfig(&scrapeConfig.HTTPClientConfig),
ExtraMetrics: false,
EnableProtobufNegotiation: false,
Clustering: scrape.Clustering{Enabled: false},
}
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,6 @@
(Error) unsupported rule_files config was provided
(Error) unsupported HTTP Client config no_proxy was provided
(Error) unsupported service discovery nomad was provided
(Error) unsupported scrape_classic_histograms for scrape_configs
(Error) unsupported native_histogram_bucket_limit for scrape_configs
(Error) unsupported storage config was provided
(Error) unsupported tracing config was provided
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -19,8 +19,9 @@ prometheus.scrape "prometheus2" {
targets = [{
__address__ = "localhost:9091",
}]
forward_to = [prometheus.remote_write.default.receiver]
job_name = "prometheus2"
forward_to = [prometheus.remote_write.default.receiver]
job_name = "prometheus2"
	scrape_classic_histograms = true
}

prometheus.remote_write "default" {
Expand Down

0 comments on commit 9404e2f

Please sign in to comment.