Skip to content

Commit

Permalink
vmscrapeconfig: support scrape_protocols option
Browse files Browse the repository at this point in the history
  • Loading branch information
AndrewChubatiuk committed Jul 2, 2024
1 parent 7533384 commit 0a41a70
Show file tree
Hide file tree
Showing 13 changed files with 35 additions and 14 deletions.
2 changes: 1 addition & 1 deletion api/operator/v1beta1/vmnodescrape_types.go
Original file line number Diff line number Diff line change
Expand Up @@ -38,7 +38,7 @@ type VMNodeScrapeSpec struct {
// one of scrape_interval or interval can be used
// +optional
ScrapeInterval string `json:"scrape_interval,omitempty"`
// Timeout after which the scrape is ended
// ScrapeTimeout after which the scrape is ended
// +optional
ScrapeTimeout string `json:"scrapeTimeout,omitempty"`
// OAuth2 defines auth configuration
Expand Down
2 changes: 1 addition & 1 deletion api/operator/v1beta1/vmpodscrape_types.go
Original file line number Diff line number Diff line change
Expand Up @@ -91,7 +91,7 @@ type PodMetricsEndpoint struct {
// one of scrape_interval or interval can be used
// +optional
ScrapeInterval string `json:"scrape_interval,omitempty"`
// Timeout after which the scrape is ended
// ScrapeTimeout after which the scrape is ended
// +optional
ScrapeTimeout string `json:"scrapeTimeout,omitempty"`
// SampleLimit defines per-podEndpoint limit on number of scraped samples that will be accepted.
Expand Down
3 changes: 3 additions & 0 deletions api/operator/v1beta1/vmscrapeconfig_types.go
Original file line number Diff line number Diff line change
Expand Up @@ -63,6 +63,9 @@ type VMScrapeConfigSpec struct {
// ScrapeInterval is the interval between consecutive scrapes.
// +optional
ScrapeInterval string `json:"scrapeInterval,omitempty"`
// ScrapeProtocols defines Prometheus scrape protocols.
// +optional
ScrapeProtocols []string `json:"scrape_protocols,omitempty"`
// ScrapeTimeout is the number of seconds to wait until a scrape request times out.
// +optional
ScrapeTimeout string `json:"scrapeTimeout,omitempty"`
Expand Down
2 changes: 1 addition & 1 deletion api/operator/v1beta1/vmservicescrape_types.go
Original file line number Diff line number Diff line change
Expand Up @@ -137,7 +137,7 @@ type Endpoint struct {
// one of scrape_interval or interval can be used
// +optional
ScrapeInterval string `json:"scrape_interval,omitempty"`
// Timeout after which the scrape is ended
// ScrapeTimeout after which the scrape is ended
// +optional
ScrapeTimeout string `json:"scrapeTimeout,omitempty"`
// SampleLimit defines per-endpoint limit on number of scraped samples that will be accepted.
Expand Down
2 changes: 1 addition & 1 deletion api/operator/v1beta1/vmstaticscrape_types.go
Original file line number Diff line number Diff line change
Expand Up @@ -60,7 +60,7 @@ type TargetEndpoint struct {
// one of scrape_interval or interval can be used
// +optional
ScrapeInterval string `json:"scrape_interval,omitempty"`
// Timeout after which the scrape is ended
// ScrapeTimeout after which the scrape is ended
// +optional
ScrapeTimeout string `json:"scrapeTimeout,omitempty"`
// OAuth2 defines auth configuration
Expand Down
5 changes: 5 additions & 0 deletions api/operator/v1beta1/zz_generated.deepcopy.go

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

13 changes: 9 additions & 4 deletions config/crd/overlay/crd.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -16602,7 +16602,7 @@ spec:
one of scrape_interval or interval can be used
type: string
scrapeTimeout:
description: Timeout after which the scrape is ended
description: ScrapeTimeout after which the scrape is ended
type: string
selector:
description: Selector to select kubernetes Nodes.
Expand Down Expand Up @@ -17676,7 +17676,7 @@ spec:
one of scrape_interval or interval can be used
type: string
scrapeTimeout:
description: Timeout after which the scrape is ended
description: ScrapeTimeout after which the scrape is ended
type: string
seriesLimit:
description: |-
Expand Down Expand Up @@ -23134,6 +23134,11 @@ spec:
- HTTP
- HTTPS
type: string
scrape_protocols:
description: ScrapeProtocols defines Prometheus scrape protocols.
items:
type: string
type: array
scrapeInterval:
description: ScrapeInterval is the interval between consecutive scrapes.
type: string
Expand Down Expand Up @@ -24171,7 +24176,7 @@ spec:
one of scrape_interval or interval can be used
type: string
scrapeTimeout:
description: Timeout after which the scrape is ended
description: ScrapeTimeout after which the scrape is ended
type: string
seriesLimit:
description: |-
Expand Down Expand Up @@ -26924,7 +26929,7 @@ spec:
one of scrape_interval or interval can be used
type: string
scrapeTimeout:
description: Timeout after which the scrape is ended
description: ScrapeTimeout after which the scrape is ended
type: string
seriesLimit:
description: |-
Expand Down
2 changes: 2 additions & 0 deletions docs/CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -32,6 +32,8 @@ aliases:
- [operator](./README.md): fix VM CRs' `xxNamespaceSelector` and `xxSelector` options, previously they are inverted. See this [issue](https://github.com/VictoriaMetrics/operator/issues/980) for details.
- [vmnodescrape](./api.md#vmnodescrape): remove duplicated `series_limit` and `sample_limit` fields in generated scrape_config. See [this issue](https://github.com/VictoriaMetrics/operator/issues/986).

- [vmscrapeconfig](./api.md#vmscrapeconfig): added `scrape_protocols` parameter for scrape protocols configuration

<a name="v0.45.0"></a>

## [v0.45.0](https://github.com/VictoriaMetrics/operator/releases/tag/v0.45.0) - 10 Jun 2024
Expand Down
9 changes: 5 additions & 4 deletions docs/api.md
Original file line number Diff line number Diff line change
Expand Up @@ -652,7 +652,7 @@ _Appears in:_
| `relabelConfigs` | RelabelConfigs to apply to samples during service discovery. | _[RelabelConfig](#relabelconfig) array_ | false |
| `sampleLimit` | SampleLimit defines per-endpoint limit on number of scraped samples that will be accepted. | _integer_ | false |
| `scheme` | HTTP scheme to use for scraping. | _string_ | false |
| `scrapeTimeout` | Timeout after which the scrape is ended | _string_ | false |
| `scrapeTimeout` | ScrapeTimeout after which the scrape is ended | _string_ | false |
| `scrape_interval` | ScrapeInterval is the same as Interval and has priority over it.<br />one of scrape_interval or interval can be used | _string_ | false |
| `seriesLimit` | SeriesLimit defines per-scrape limit on number of unique time series<br />a single target can expose during all the scrapes on the time window of 24h. | _integer_ | false |
| `targetPort` | Name or number of the pod port this endpoint refers to. Mutually exclusive with port. | _[IntOrString](#intorstring)_ | false |
Expand Down Expand Up @@ -1218,7 +1218,7 @@ _Appears in:_
| `relabelConfigs` | RelabelConfigs to apply to samples during service discovery. | _[RelabelConfig](#relabelconfig) array_ | false |
| `sampleLimit` | SampleLimit defines per-podEndpoint limit on number of scraped samples that will be accepted. | _integer_ | false |
| `scheme` | HTTP scheme to use for scraping. | _string_ | false |
| `scrapeTimeout` | Timeout after which the scrape is ended | _string_ | false |
| `scrapeTimeout` | ScrapeTimeout after which the scrape is ended | _string_ | false |
| `scrape_interval` | ScrapeInterval is the same as Interval and has priority over it.<br />one of scrape_interval or interval can be used | _string_ | false |
| `seriesLimit` | SeriesLimit defines per-scrape limit on number of unique time series<br />a single target can expose during all the scrapes on the time window of 24h. | _integer_ | false |
| `tlsConfig` | TLSConfig configuration to use when scraping the endpoint | _[TLSConfig](#tlsconfig)_ | false |
Expand Down Expand Up @@ -1829,7 +1829,7 @@ _Appears in:_
| `relabelConfigs` | RelabelConfigs to apply to samples during service discovery. | _[RelabelConfig](#relabelconfig) array_ | false |
| `sampleLimit` | SampleLimit defines per-scrape limit on number of scraped samples that will be accepted. | _integer_ | false |
| `scheme` | HTTP scheme to use for scraping. | _string_ | false |
| `scrapeTimeout` | Timeout after which the scrape is ended | _string_ | false |
| `scrapeTimeout` | ScrapeTimeout after which the scrape is ended | _string_ | false |
| `scrape_interval` | ScrapeInterval is the same as Interval and has priority over it.<br />one of scrape_interval or interval can be used | _string_ | false |
| `seriesLimit` | SeriesLimit defines per-scrape limit on number of unique time series<br />a single target can expose during all the scrapes on the time window of 24h. | _integer_ | false |
| `targets` | Targets static targets addresses in form of ["192.122.55.55:9100","some-name:9100"]. | _string array_ | true |
Expand Down Expand Up @@ -2782,7 +2782,7 @@ _Appears in:_
| `relabelConfigs` | RelabelConfigs to apply to samples during service discovery. | _[RelabelConfig](#relabelconfig) array_ | false |
| `sampleLimit` | SampleLimit defines per-scrape limit on number of scraped samples that will be accepted. | _integer_ | false |
| `scheme` | HTTP scheme to use for scraping. | _string_ | false |
| `scrapeTimeout` | Timeout after which the scrape is ended | _string_ | false |
| `scrapeTimeout` | ScrapeTimeout after which the scrape is ended | _string_ | false |
| `scrape_interval` | ScrapeInterval is the same as Interval and has priority over it.<br />one of scrape_interval or interval can be used | _string_ | false |
| `selector` | Selector to select kubernetes Nodes. | _[LabelSelector](https://kubernetes.io/docs/reference/generated/kubernetes-api/v/#labelselector-v1-meta)_ | false |
| `seriesLimit` | SeriesLimit defines per-scrape limit on number of unique time series<br />a single target can expose during all the scrapes on the time window of 24h. | _integer_ | false |
Expand Down Expand Up @@ -3071,6 +3071,7 @@ _Appears in:_
| `scheme` | Configures the protocol scheme used for requests.<br />If empty, use HTTP by default. | _string_ | false |
| `scrapeInterval` | ScrapeInterval is the interval between consecutive scrapes. | _string_ | false |
| `scrapeTimeout` | ScrapeTimeout is the number of seconds to wait until a scrape request times out. | _string_ | false |
| `scrape_protocols` | ScrapeProtocols defines Prometheus scrape protocols. | _string array_ | false |
| `seriesLimit` | SeriesLimit defines per-scrape limit on number of unique time series<br />a single target can expose during all the scrapes on the time window of 24h. | _integer_ | false |
| `staticConfigs` | StaticConfigs defines a list of static targets with a common label set. | _[StaticConfig](#staticconfig) array_ | false |
| `tlsConfig` | TLS configuration to use on every scrape request | _[TLSConfig](#tlsconfig)_ | false |
Expand Down
2 changes: 1 addition & 1 deletion docs/vars.md
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@ menu:

<!-- this doc autogenerated - don't edit it manually -->
# Auto Generated vars for package config
updated at Tue Jul 2 15:08:03 UTC 2024
updated at Tue Jul 2 17:12:36 UTC 2024


| variable name | variable default value | variable required | variable description |
Expand Down
1 change: 0 additions & 1 deletion internal/controller/operator/factory/vmagent/nodescrape.go
Original file line number Diff line number Diff line change
Expand Up @@ -48,7 +48,6 @@ func generateNodeScrapeConfig(
if scrapeInterval != "" {
cfg = append(cfg, yaml.MapItem{Key: "scrape_interval", Value: scrapeInterval})
}

if nodeSpec.ScrapeTimeout != "" {
cfg = append(cfg, yaml.MapItem{Key: "scrape_timeout", Value: nodeSpec.ScrapeTimeout})
}
Expand Down
3 changes: 3 additions & 0 deletions internal/controller/operator/factory/vmagent/scrapeconfig.go
Original file line number Diff line number Diff line change
Expand Up @@ -38,6 +38,9 @@ func generateScrapeConfig(
if sc.Spec.ScrapeInterval != "" {
cfg = append(cfg, yaml.MapItem{Key: "scrape_interval", Value: scrapeInterval})
}
if len(sc.Spec.ScrapeProtocols) != 0 {
cfg = append(cfg, yaml.MapItem{Key: "scrape_protocols", Value: sc.Spec.ScrapeProtocols})
}
if sc.Spec.ScrapeTimeout != "" {
cfg = append(cfg, yaml.MapItem{Key: "scrape_timeout", Value: sc.Spec.ScrapeTimeout})
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -51,6 +51,7 @@ func TestGenerateScrapeConfig(t *testing.T) {
Username: corev1.SecretKeySelector{Key: "username"},
Password: corev1.SecretKeySelector{Key: "password"},
},
ScrapeProtocols: []string{"PrometheusProto"},
},
},
ssCache: &scrapesSecretsCache{
Expand All @@ -65,6 +66,8 @@ func TestGenerateScrapeConfig(t *testing.T) {
want: `job_name: scrapeConfig/default/static-1
honor_labels: false
scrape_interval: 30s
scrape_protocols:
- PrometheusProto
basic_auth:
username: admin
password: dangerous
Expand Down

0 comments on commit 0a41a70

Please sign in to comment.