feat: ENV expansion, source tenants scoped cache, structured logging #90

Merged 1 commit on Oct 26, 2024
4 changes: 4 additions & 0 deletions CHANGELOG.md
@@ -7,6 +7,10 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0

## [Unreleased]
- Fixed: Ignore whitespace around rule names in the `disabled_validation_rules` annotation CSV format (Thanks @jmichalek13!)
- Added: Support for ENV expansion in the config file in the format `$ENV_VAR` or `${ENV_VAR}`
- Changed: The cache format is now scoped by source tenants (internal change, no action required)
- Changed: Logging now uses structured logs
- Added: New `httpHeaders` field in the `prometheus` section of the config to set custom HTTP headers for Prometheus requests

## [3.3.0] - 2024-10-03
- Changed: Upgrade to go 1.23
7 changes: 7 additions & 0 deletions README.md
@@ -135,6 +135,7 @@ promruval validate --config-file ./rules/validation.yaml --config-file ./rules/p
### Configuration

Promruval uses a yaml configuration file to define the validation rules.

Basic structure is:

```yaml
@@ -159,6 +160,9 @@ prometheus:
queryOffset: 1m
# OPTIONAL how long into the past to look in queries supporting time range (just metadata queries for now).
queryLookback: 20m
# OPTIONAL HTTP headers to be added to the request
httpHeaders:
foo: bar

validationRules:
# Name of the validation rule.
@@ -186,6 +190,9 @@ For a complete list of supported validations see the [docs/validations.md](docs/

For an example configuration, see [`examples/validation.yaml`](examples/validation.yaml).

#### ENV expansion
You can use ENV variable expansion in the configuration file. Just use the `$ENV_VAR` or `${ENV_VAR}` syntax and it will be replaced with the value of the `ENV_VAR` environment variable.
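
For example, a minimal sketch (the `PROM_URL` and `PROM_TOKEN_FILE` variable names are only illustrative and assumed to be set in your environment):

```yaml
prometheus:
  # Expanded from the PROM_URL environment variable.
  url: $PROM_URL
  # The braced form works the same way.
  bearerTokenFile: ${PROM_TOKEN_FILE}
```

Variables that are not set expand to an empty string, since the expansion relies on Go's standard `os.ExpandEnv`.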

### How to run it

If you downloaded the [prebuilt binary](https://github.com/FUSAKLA/promruval/releases/latest) or built it on your own:
23 changes: 13 additions & 10 deletions pkg/config/config.go
@@ -4,6 +4,7 @@ import (
"fmt"
"os"
"path"
"strings"
"sync"
"time"

@@ -48,7 +49,7 @@ type Loader struct {
}

func (l *Loader) Load() (*Config, error) {
configFile, err := os.Open(l.ConfigPath)
configFileData, err := os.ReadFile(l.ConfigPath)
if err != nil {
return nil, fmt.Errorf("open config file: %w", err)
}
@@ -58,7 +59,8 @@ func (l *Loader) Load() (*Config, error) {
configDirMtx.Unlock()
}()
validationConfig := Config{}
decoder := yaml.NewDecoder(configFile)
expandedConfigFileData := os.ExpandEnv(string(configFileData))
decoder := yaml.NewDecoder(strings.NewReader(expandedConfigFileData))
decoder.KnownFields(true)
if err := decoder.Decode(&validationConfig); err != nil {
return nil, fmt.Errorf("loading config file: %w", err)
@@ -74,14 +76,15 @@ type Config struct {
}

type PrometheusConfig struct {
URL string `yaml:"url"`
Timeout time.Duration `yaml:"timeout" default:"30s"`
InsecureSkipTLSVerify bool `yaml:"insecureSkipTlsVerify"`
CacheFile string `yaml:"cacheFile,omitempty" default:".promruval_cache.json"`
MaxCacheAge time.Duration `yaml:"maxCacheAge,omitempty" default:"1h"`
BearerTokenFile string `yaml:"bearerTokenFile,omitempty"`
QueryOffset time.Duration `yaml:"queryOffset,omitempty" default:"1m"`
QueryLookback time.Duration `yaml:"queryLookback,omitempty" default:"20m"`
URL string `yaml:"url"`
Timeout time.Duration `yaml:"timeout" default:"30s"`
InsecureSkipTLSVerify bool `yaml:"insecureSkipTlsVerify"`
CacheFile string `yaml:"cacheFile,omitempty" default:".promruval_cache.json"`
MaxCacheAge time.Duration `yaml:"maxCacheAge,omitempty" default:"1h"`
BearerTokenFile string `yaml:"bearerTokenFile,omitempty"`
QueryOffset time.Duration `yaml:"queryOffset,omitempty" default:"1m"`
QueryLookback time.Duration `yaml:"queryLookback,omitempty" default:"20m"`
HTTPHeaders map[string]string `yaml:"httpHeaders,omitempty"`
}

func (c *PrometheusConfig) UnmarshalYAML(unmarshal func(interface{}) error) error {
64 changes: 44 additions & 20 deletions pkg/prometheus/cache.go
@@ -10,43 +10,49 @@ import (

func newCache(file, prometheusURL string, maxAge time.Duration) *cache {
emptyCache := cache{
file: file,
PrometheusURL: prometheusURL,
Created: time.Now(),
QueriesStats: make(map[string]queryStats),
KnownLabels: []string{},
SelectorMatchingSeries: make(map[string]int),
file: file,
PrometheusURL: prometheusURL,
Created: time.Now(),
SourceTenants: make(map[string]*cacheData),
}
previousCache := emptyCache
f, err := os.Open(file)
if err != nil {
if !os.IsNotExist(err) {
f, err = os.Create(file)
if err != nil {
log.Warnf("error creating cache file %s: %s", file, err)
log.WithError(err).WithField("file", file).Warn("error creating cache file")
return &emptyCache
}
} else {
log.Warnf("error opening cache file %s, skipping: %s", file, err)
log.WithError(err).WithField("file", file).Warn("error opening cache file, skipping")
return &emptyCache
}
}
if err := json.NewDecoder(f).Decode(&previousCache); err != nil {
log.Warnf("invalid cache file `%s` format: %s", file, err)
log.WithError(err).WithField("file", file).Warn("invalid cache file format")
return &emptyCache
}
pruneCache := false
cacheAge := time.Since(previousCache.Created)
if maxAge != 0 && cacheAge > maxAge {
log.Infof("%s old cache %s is outdated, limit is %s", cacheAge, file, maxAge)
log.WithFields(log.Fields{
"cacheAge": cacheAge,
"maxCacheAge": maxAge,
"file_name": file,
}).Info("cache is outdated")
pruneCache = true
}
if previousCache.PrometheusURL != prometheusURL {
log.Infof("data in cache file %s are from different Prometheus on URL %s, cannot be used for the instance on %s URL", file, previousCache.PrometheusURL, prometheusURL)
log.WithFields(log.Fields{
"previousPrometheusURL": previousCache.PrometheusURL,
"newPrometheusURL": prometheusURL,
"file_name": file,
}).Info("data in cache file are from different Prometheus, cannot be used")
pruneCache = true
}
if pruneCache {
log.Warnf("Pruning cache file %s", file)
log.WithField("file", file).Warn("Pruning cache file")
return &emptyCache
}
return &previousCache
@@ -57,30 +63,48 @@ type queryStats struct {
Series int `json:"series"`
Duration time.Duration `json:"duration"`
}
type cache struct {
file string
PrometheusURL string `json:"prometheus_url"`
Created time.Time `json:"created"`

type cacheData struct {
QueriesStats map[string]queryStats `json:"queries_stats"`
KnownLabels []string `json:"known_labels"`
SelectorMatchingSeries map[string]int `json:"selector_matching_series"`
}
type cache struct {
file string
PrometheusURL string `json:"prometheus_url"`
Created time.Time `json:"created"`
SourceTenants map[string]*cacheData `json:"source_tenants"`
}

func (c *cache) SourceTenantsData(sourceTenants []string) *cacheData {
key := sourceTenantsToHeader(sourceTenants)
data, found := c.SourceTenants[key]
if !found {
data = &cacheData{
QueriesStats: make(map[string]queryStats),
SelectorMatchingSeries: make(map[string]int),
KnownLabels: []string{},
}
c.SourceTenants[key] = data
}
return data
}

func (c *cache) Dump() {
f, err := os.Create(c.file)
if err != nil {
log.Warnf("failed to create cache file %s: %s", c.file, err)
log.WithError(err).WithField("file", c.file).Warn("failed to create cache file")
return
}
defer func(f *os.File) {
_ = f.Close()
}(f)
e := json.NewEncoder(f)
e.SetIndent("", " ")
e.SetIndent("", "")
err = e.Encode(c)
if err != nil {
log.Warnf("failed to write cache data: %s", err)
log.WithError(err).Warn("failed to write cache data")
return
}
log.Infof("successfully dumped cache to file %s", c.file)
log.WithField("file_name", c.file).Info("successfully dumped cache to file")
}