From 03279b132abbcc28f452fb41a603a1010a007d41 Mon Sep 17 00:00:00 2001
From: Cyril Tovena
Date: Tue, 15 Sep 2020 21:30:42 +0200
Subject: [PATCH 01/45] Adds logfmt, regexp and json logql parsers

Signed-off-by: Cyril Tovena
---
 pkg/logql/expr.y                |   2 +-
 pkg/logql/labels_parser.go      | 147 +++++++++++++++++
 pkg/logql/labels_parser_test.go | 264 ++++++++++++++++++++++++++++++
 pkg/logql/logfmt/decode.go      | 214 ++++++++++++++++++++++++
 pkg/logql/logfmt/jsonstring.go  | 277 ++++++++++++++++++++++++++++++++
 pkg/storage/batch.go            |   5 +-
 6 files changed, 907 insertions(+), 2 deletions(-)
 create mode 100644 pkg/logql/labels_parser.go
 create mode 100644 pkg/logql/labels_parser_test.go
 create mode 100644 pkg/logql/logfmt/decode.go
 create mode 100644 pkg/logql/logfmt/jsonstring.go

diff --git a/pkg/logql/expr.y b/pkg/logql/expr.y
index ae6de033009d2..f1bf6452a07e7 100644
--- a/pkg/logql/expr.y
+++ b/pkg/logql/expr.y
@@ -54,7 +54,7 @@ import (
 %token <duration> DURATION
 %token MATCHERS LABELS EQ RE NRE OPEN_BRACE CLOSE_BRACE OPEN_BRACKET CLOSE_BRACKET COMMA DOT PIPE_MATCH PIPE_EXACT
                  OPEN_PARENTHESIS CLOSE_PARENTHESIS BY WITHOUT COUNT_OVER_TIME RATE SUM AVG MAX MIN COUNT STDDEV STDVAR BOTTOMK TOPK
-                 BYTES_OVER_TIME BYTES_RATE BOOL
+                 BYTES_OVER_TIME BYTES_RATE BOOL JSON REGEXP LOGFMT

 // Operators are listed with increasing precedence.
 %left OR
diff --git a/pkg/logql/labels_parser.go b/pkg/logql/labels_parser.go
new file mode 100644
index 0000000000000..02abe93d44dc1
--- /dev/null
+++ b/pkg/logql/labels_parser.go
@@ -0,0 +1,147 @@
+package logql
+
+import (
+	"errors"
+	"fmt"
+	"regexp"
+	"strconv"
+
+	"github.com/grafana/loki/pkg/logql/logfmt"
+	jsoniter "github.com/json-iterator/go"
+	"github.com/prometheus/prometheus/pkg/labels"
+)
+
+const (
+	jsonSpacer = "_"
+
+	errJson    = "JSONParserError"
+	errLogfmt  = "LogfmtParserError"
+	errorLabel = "__error__"
+
+	duplicateSuffix = "_extracted"
+)
+
+var (
+	errMissingCapture = errors.New("at least one named capture must be supplied")
+)
+
+type LabelParser interface {
+	Parse(line []byte, lbs labels.Labels) labels.Labels
+}
+
+type jsonParser struct {
+	builder *labels.Builder
+}
+
+func NewJSONParser() *jsonParser {
+	return &jsonParser{
+		builder: labels.NewBuilder(nil),
+	}
+}
+
+func (j *jsonParser) Parse(line []byte, lbs labels.Labels) labels.Labels {
+	data := map[string]interface{}{}
+	j.builder.Reset(lbs)
+	err := jsoniter.ConfigFastest.Unmarshal(line, &data)
+	if err != nil {
+		j.builder.Set(errorLabel, errJson)
+		return j.builder.Labels()
+	}
+	parseMap("", data, addLabel(j.builder, lbs))
+	return j.builder.Labels()
+}
+
+func addLabel(builder *labels.Builder, lbs labels.Labels) func(key, value string) {
+	return func(key, value string) {
+		if lbs.Has(key) {
+			key = fmt.Sprintf("%s%s", key, duplicateSuffix)
+		}
+		builder.Set(key, value)
+	}
+}
+
+func parseMap(prefix string, data map[string]interface{}, add func(key, value string)) {
+	for key, val := range data {
+		switch concrete := val.(type) {
+		case map[string]interface{}:
+			parseMap(jsonKey(prefix, key), concrete, add)
+		case string:
+			add(jsonKey(prefix, key), concrete)
+		case float64:
+			f := strconv.FormatFloat(concrete, 'f', -1, 64)
+			add(jsonKey(prefix, key), f)
+		}
+	}
+}
+
+func jsonKey(prefix, key string) string {
+	if prefix == "" {
+		return key
+	}
+	return fmt.Sprintf("%s%s%s", prefix, jsonSpacer, key)
+}
+
+type regexpParser struct {
+	regex     *regexp.Regexp
+	builder   *labels.Builder
+	nameIndex map[int]string
+}
+
+func NewRegexpParser(re string) (*regexpParser, error) {
+	regex, err := regexp.Compile(re)
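+	// A pattern that fails to compile, or that carries no *named* capture
+	// group, is rejected below; unnamed groups are accepted alongside named
+	// ones but can never produce a label.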
+ if err != nil { + return nil, err + } + if regex.NumSubexp() == 0 { + return nil, errMissingCapture + } + nameIndex := map[int]string{} + for i, n := range regex.SubexpNames() { + if n != "" { + nameIndex[i] = n + } + } + if len(nameIndex) == 0 { + return nil, errMissingCapture + } + return ®expParser{ + regex: regex, + builder: labels.NewBuilder(nil), + nameIndex: nameIndex, + }, nil +} + +func (r *regexpParser) Parse(line []byte, lbs labels.Labels) labels.Labels { + r.builder.Reset(lbs) + for i, value := range r.regex.FindSubmatch(line) { + if name, ok := r.nameIndex[i]; ok { + addLabel(r.builder, lbs)(name, string(value)) + } + } + return r.builder.Labels() +} + +type logfmtParser struct { + builder *labels.Builder + dec *logfmt.Decoder +} + +func NewLogfmtParser() *logfmtParser { + return &logfmtParser{ + builder: labels.NewBuilder(nil), + dec: logfmt.NewDecoder(), + } +} + +func (l *logfmtParser) Parse(line []byte, lbs labels.Labels) labels.Labels { + l.builder.Reset(lbs) + l.dec.Reset(line) + + for l.dec.ScanKeyval() { + addLabel(l.builder, lbs)(string(l.dec.Key()), string(l.dec.Value())) + } + if l.dec.Err() != nil { + l.builder.Set(errorLabel, errLogfmt) + } + return l.builder.Labels() +} diff --git a/pkg/logql/labels_parser_test.go b/pkg/logql/labels_parser_test.go new file mode 100644 index 0000000000000..f71679b2787af --- /dev/null +++ b/pkg/logql/labels_parser_test.go @@ -0,0 +1,264 @@ +package logql + +import ( + "sort" + "testing" + + "github.com/prometheus/prometheus/pkg/labels" + "github.com/stretchr/testify/require" +) + +func Test_jsonParser_Parse(t *testing.T) { + + tests := []struct { + name string + j *jsonParser + line []byte + lbs labels.Labels + want labels.Labels + }{ + { + "multi depth", + NewJSONParser(), + []byte(`{"app":"foo","namespace":"prod","pod":{"uuid":"foo","deployment":{"ref":"foobar"}}}`), + labels.Labels{}, + labels.Labels{ + labels.Label{Name: "app", Value: "foo"}, + labels.Label{Name: "namespace", Value: "prod"}, + labels.Label{Name: "pod_uuid", Value: "foo"}, + labels.Label{Name: "pod_deployment_ref", Value: "foobar"}, + }, + }, + { + "numeric", + NewJSONParser(), + []byte(`{"counter":1, "price": {"_net_":5.56909}}`), + labels.Labels{}, + labels.Labels{ + labels.Label{Name: "counter", Value: "1"}, + labels.Label{Name: "price__net_", Value: "5.56909"}, + }, + }, + { + "skip arrays", + NewJSONParser(), + []byte(`{"counter":1, "price": {"net_":["10","20"]}}`), + labels.Labels{}, + labels.Labels{ + labels.Label{Name: "counter", Value: "1"}, + }, + }, + { + "errors", + NewJSONParser(), + []byte(`{n}`), + labels.Labels{}, + labels.Labels{ + labels.Label{Name: errorLabel, Value: errJson}, + }, + }, + { + "duplicate extraction", + NewJSONParser(), + []byte(`{"app":"foo","namespace":"prod","pod":{"uuid":"foo","deployment":{"ref":"foobar"}}}`), + labels.Labels{ + labels.Label{Name: "app", Value: "bar"}, + }, + labels.Labels{ + labels.Label{Name: "app", Value: "bar"}, + labels.Label{Name: "app_extracted", Value: "foo"}, + labels.Label{Name: "namespace", Value: "prod"}, + labels.Label{Name: "pod_uuid", Value: "foo"}, + labels.Label{Name: "pod_deployment_ref", Value: "foobar"}}, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + sort.Sort(tt.want) + got := tt.j.Parse(tt.line, tt.lbs) + require.Equal(t, tt.want, got) + }) + } +} + +func TestNewRegexpParser(t *testing.T) { + tests := []struct { + name string + re string + wantErr bool + }{ + {"no sub", "w.*", true}, + {"sub but not named", "f(.*) (foo|bar|buzz)", true}, + {"named and 
unnamed", "blah (.*) (?P<foo>)", false},
+		{"named", "blah (.*) (?P<foo>foo)(?P<bar>barr)", false},
+	}
+	for _, tt := range tests {
+		t.Run(tt.name, func(t *testing.T) {
+			_, err := NewRegexpParser(tt.re)
+			if (err != nil) != tt.wantErr {
+				t.Errorf("NewRegexpParser() error = %v, wantErr %v", err, tt.wantErr)
+				return
+			}
+		})
+	}
+}
+
+func Test_regexpParser_Parse(t *testing.T) {
+	tests := []struct {
+		name   string
+		parser *regexpParser
+		line   []byte
+		lbs    labels.Labels
+		want   labels.Labels
+	}{
+		{
+			"no matches",
+			mustNewRegexParser("(?P<foo>foo|bar)buzz"),
+			[]byte("blah"),
+			labels.Labels{
+				labels.Label{Name: "app", Value: "foo"},
+			},
+			labels.Labels{
+				labels.Label{Name: "app", Value: "foo"},
+			},
+		},
+		{
+			"double matches",
+			mustNewRegexParser("(?P<foo>.*)buzz"),
+			[]byte("matchebuzz barbuzz"),
+			labels.Labels{
+				labels.Label{Name: "app", Value: "bar"},
+			},
+			labels.Labels{
+				labels.Label{Name: "app", Value: "bar"},
+				labels.Label{Name: "foo", Value: "matchebuzz bar"},
+			},
+		},
+		{
+			"duplicate labels",
+			mustNewRegexParser("(?P<bar>bar)buzz"),
+			[]byte("barbuzz"),
+			labels.Labels{
+				labels.Label{Name: "bar", Value: "foo"},
+			},
+			labels.Labels{
+				labels.Label{Name: "bar", Value: "foo"},
+				labels.Label{Name: "bar_extracted", Value: "bar"},
+			},
+		},
+		{
+			"multiple labels extracted",
+			mustNewRegexParser("status=(?P<status>\\w+),latency=(?P<latency>\\w+)(ms|ns)"),
+			[]byte("status=200,latency=500ms"),
+			labels.Labels{
+				labels.Label{Name: "app", Value: "foo"},
+			},
+			labels.Labels{
+				labels.Label{Name: "app", Value: "foo"},
+				labels.Label{Name: "status", Value: "200"},
+				labels.Label{Name: "latency", Value: "500"},
+			},
+		},
+	}
+	for _, tt := range tests {
+		t.Run(tt.name, func(t *testing.T) {
+			sort.Sort(tt.want)
+			got := tt.parser.Parse(tt.line, tt.lbs)
+			require.Equal(t, tt.want, got)
+		})
+	}
+}
+
+func mustNewRegexParser(re string) *regexpParser {
+	r, err := NewRegexpParser(re)
+	if err != nil {
+		panic(err)
+	}
+	return r
+}
+
+func Test_logfmtParser_Parse(t *testing.T) {
+	tests := []struct {
+		name string
+		line []byte
+		lbs  labels.Labels
+		want labels.Labels
+	}{
+		{
+			"not logfmt",
+			[]byte("foobar====wqe=sdad1r"),
+			labels.Labels{
+				labels.Label{Name: "foo", Value: "bar"},
+			},
+			labels.Labels{
+				labels.Label{Name: "foo", Value: "bar"},
+				labels.Label{Name: errorLabel, Value: errLogfmt},
+			},
+		},
+		{
+			"key alone logfmt",
+			[]byte("buzz bar=foo"),
+			labels.Labels{
+				labels.Label{Name: "foo", Value: "bar"},
+			},
+			labels.Labels{
+				labels.Label{Name: "foo", Value: "bar"},
+				labels.Label{Name: "bar", Value: "foo"},
+			},
+		},
+		{
+			"quoted logfmt",
+			[]byte(`foobar="foo bar"`),
+			labels.Labels{
+				labels.Label{Name: "foo", Value: "bar"},
+			},
+			labels.Labels{
+				labels.Label{Name: "foo", Value: "bar"},
+				labels.Label{Name: "foobar", Value: "foo bar"},
+			},
+		},
+		{
+			"double property logfmt",
+			[]byte(`foobar="foo bar" latency=10ms`),
+			labels.Labels{
+				labels.Label{Name: "foo", Value: "bar"},
+			},
+			labels.Labels{
+				labels.Label{Name: "foo", Value: "bar"},
+				labels.Label{Name: "foobar", Value: "foo bar"},
+				labels.Label{Name: "latency", Value: "10ms"},
+			},
+		},
+		{
+			"duplicate from line property",
+			[]byte(`foobar="foo bar" foobar=10ms`),
+			labels.Labels{
+				labels.Label{Name: "foo", Value: "bar"},
+			},
+			labels.Labels{
+				labels.Label{Name: "foo", Value: "bar"},
+				labels.Label{Name: "foobar", Value: "10ms"},
+			},
+		},
+		{
+			"duplicate property",
+			[]byte(`foo="foo bar" foobar=10ms`),
+			labels.Labels{
+				labels.Label{Name: "foo", Value: "bar"},
+			},
+			labels.Labels{
+				labels.Label{Name: "foo", Value: "bar"},
+
labels.Label{Name: "foo_extracted", Value: "foo bar"}, + labels.Label{Name: "foobar", Value: "10ms"}, + }, + }, + } + p := NewLogfmtParser() + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + sort.Sort(tt.want) + got := p.Parse(tt.line, tt.lbs) + require.Equal(t, tt.want, got) + }) + } +} diff --git a/pkg/logql/logfmt/decode.go b/pkg/logql/logfmt/decode.go new file mode 100644 index 0000000000000..65cff00d1e8d0 --- /dev/null +++ b/pkg/logql/logfmt/decode.go @@ -0,0 +1,214 @@ +// Adapted from https://github.com/go-logfmt/logfmt/ but []byte as parameter instead +package logfmt + +import ( + "bytes" + "fmt" + "unicode/utf8" +) + +// A Decoder reads and decodes logfmt records from an input stream. +type Decoder struct { + pos int + key []byte + value []byte + line []byte + err error +} + +// NewDecoder returns a new decoder that reads from r. +// +// The decoder introduces its own buffering and may read data from r beyond +// the logfmt records requested. +func NewDecoder() *Decoder { + dec := &Decoder{} + return dec +} + +func (dec *Decoder) Reset(line []byte) { + dec.pos = 0 + dec.line = line + dec.err = nil +} + +// ScanKeyval advances the Decoder to the next key/value pair of the current +// record, which can then be retrieved with the Key and Value methods. It +// returns false when decoding stops, either by reaching the end of the +// current record or an error. +func (dec *Decoder) ScanKeyval() bool { + dec.key, dec.value = nil, nil + + line := dec.line + // garbage + for p, c := range line[dec.pos:] { + if c > ' ' { + dec.pos += p + goto key + } + } + dec.pos = len(line) + return false + +key: + const invalidKeyError = "invalid key" + + start, multibyte := dec.pos, false + for p, c := range line[dec.pos:] { + switch { + case c == '=': + dec.pos += p + if dec.pos > start { + dec.key = line[start:dec.pos] + if multibyte && bytes.ContainsRune(dec.key, utf8.RuneError) { + dec.syntaxError(invalidKeyError) + return false + } + } + if dec.key == nil { + dec.unexpectedByte(c) + return false + } + goto equal + case c == '"': + dec.pos += p + dec.unexpectedByte(c) + return false + case c <= ' ': + dec.pos += p + if dec.pos > start { + dec.key = line[start:dec.pos] + if multibyte && bytes.ContainsRune(dec.key, utf8.RuneError) { + dec.syntaxError(invalidKeyError) + return false + } + } + return true + case c >= utf8.RuneSelf: + multibyte = true + } + } + dec.pos = len(line) + if dec.pos > start { + dec.key = line[start:dec.pos] + if multibyte && bytes.ContainsRune(dec.key, utf8.RuneError) { + dec.syntaxError(invalidKeyError) + return false + } + } + return true + +equal: + dec.pos++ + if dec.pos >= len(line) { + return true + } + switch c := line[dec.pos]; { + case c <= ' ': + return true + case c == '"': + goto qvalue + } + + // value + start = dec.pos + for p, c := range line[dec.pos:] { + switch { + case c == '=' || c == '"': + dec.pos += p + dec.unexpectedByte(c) + return false + case c <= ' ': + dec.pos += p + if dec.pos > start { + dec.value = line[start:dec.pos] + } + return true + } + } + dec.pos = len(line) + if dec.pos > start { + dec.value = line[start:dec.pos] + } + return true + +qvalue: + const ( + untermQuote = "unterminated quoted value" + invalidQuote = "invalid quoted value" + ) + + hasEsc, esc := false, false + start = dec.pos + for p, c := range line[dec.pos+1:] { + switch { + case esc: + esc = false + case c == '\\': + hasEsc, esc = true, true + case c == '"': + dec.pos += p + 2 + if hasEsc { + v, ok := unquoteBytes(line[start:dec.pos]) + if !ok { + 
dec.syntaxError(invalidQuote) + return false + } + dec.value = v + } else { + start++ + end := dec.pos - 1 + if end > start { + dec.value = line[start:end] + } + } + return true + } + } + dec.pos = len(line) + dec.syntaxError(untermQuote) + return false +} + +// Key returns the most recent key found by a call to ScanKeyval. The returned +// slice may point to internal buffers and is only valid until the next call +// to ScanRecord. It does no allocation. +func (dec *Decoder) Key() []byte { + return dec.key +} + +// Value returns the most recent value found by a call to ScanKeyval. The +// returned slice may point to internal buffers and is only valid until the +// next call to ScanRecord. It does no allocation when the value has no +// escape sequences. +func (dec *Decoder) Value() []byte { + return dec.value +} + +// Err returns the first non-EOF error that was encountered by the Scanner. +func (dec *Decoder) Err() error { + return dec.err +} + +func (dec *Decoder) syntaxError(msg string) { + dec.err = &SyntaxError{ + Msg: msg, + Pos: dec.pos + 1, + } +} + +func (dec *Decoder) unexpectedByte(c byte) { + dec.err = &SyntaxError{ + Msg: fmt.Sprintf("unexpected %q", c), + Pos: dec.pos + 1, + } +} + +// A SyntaxError represents a syntax error in the logfmt input stream. +type SyntaxError struct { + Msg string + Pos int +} + +func (e *SyntaxError) Error() string { + return fmt.Sprintf("logfmt syntax error at pos %d : %s", e.Pos, e.Msg) +} diff --git a/pkg/logql/logfmt/jsonstring.go b/pkg/logql/logfmt/jsonstring.go new file mode 100644 index 0000000000000..030ac85fcc2e9 --- /dev/null +++ b/pkg/logql/logfmt/jsonstring.go @@ -0,0 +1,277 @@ +package logfmt + +import ( + "bytes" + "io" + "strconv" + "sync" + "unicode" + "unicode/utf16" + "unicode/utf8" +) + +// Taken from Go's encoding/json and modified for use here. + +// Copyright 2010 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +var hex = "0123456789abcdef" + +var bufferPool = sync.Pool{ + New: func() interface{} { + return &bytes.Buffer{} + }, +} + +func getBuffer() *bytes.Buffer { + return bufferPool.Get().(*bytes.Buffer) +} + +func poolBuffer(buf *bytes.Buffer) { + buf.Reset() + bufferPool.Put(buf) +} + +// NOTE: keep in sync with writeQuotedBytes below. +func writeQuotedString(w io.Writer, s string) (int, error) { + buf := getBuffer() + buf.WriteByte('"') + start := 0 + for i := 0; i < len(s); { + if b := s[i]; b < utf8.RuneSelf { + if 0x20 <= b && b != '\\' && b != '"' { + i++ + continue + } + if start < i { + buf.WriteString(s[start:i]) + } + switch b { + case '\\', '"': + buf.WriteByte('\\') + buf.WriteByte(b) + case '\n': + buf.WriteByte('\\') + buf.WriteByte('n') + case '\r': + buf.WriteByte('\\') + buf.WriteByte('r') + case '\t': + buf.WriteByte('\\') + buf.WriteByte('t') + default: + // This encodes bytes < 0x20 except for \n, \r, and \t. + buf.WriteString(`\u00`) + buf.WriteByte(hex[b>>4]) + buf.WriteByte(hex[b&0xF]) + } + i++ + start = i + continue + } + c, size := utf8.DecodeRuneInString(s[i:]) + if c == utf8.RuneError { + if start < i { + buf.WriteString(s[start:i]) + } + buf.WriteString(`\ufffd`) + i += size + start = i + continue + } + i += size + } + if start < len(s) { + buf.WriteString(s[start:]) + } + buf.WriteByte('"') + n, err := w.Write(buf.Bytes()) + poolBuffer(buf) + return n, err +} + +// NOTE: keep in sync with writeQuoteString above. 
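+// writeQuotedBytes is the []byte counterpart of writeQuotedString: it emits a
+// double-quoted string, JSON-escaping control characters and invalid UTF-8.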
+func writeQuotedBytes(w io.Writer, s []byte) (int, error) { + buf := getBuffer() + buf.WriteByte('"') + start := 0 + for i := 0; i < len(s); { + if b := s[i]; b < utf8.RuneSelf { + if 0x20 <= b && b != '\\' && b != '"' { + i++ + continue + } + if start < i { + buf.Write(s[start:i]) + } + switch b { + case '\\', '"': + buf.WriteByte('\\') + buf.WriteByte(b) + case '\n': + buf.WriteByte('\\') + buf.WriteByte('n') + case '\r': + buf.WriteByte('\\') + buf.WriteByte('r') + case '\t': + buf.WriteByte('\\') + buf.WriteByte('t') + default: + // This encodes bytes < 0x20 except for \n, \r, and \t. + buf.WriteString(`\u00`) + buf.WriteByte(hex[b>>4]) + buf.WriteByte(hex[b&0xF]) + } + i++ + start = i + continue + } + c, size := utf8.DecodeRune(s[i:]) + if c == utf8.RuneError { + if start < i { + buf.Write(s[start:i]) + } + buf.WriteString(`\ufffd`) + i += size + start = i + continue + } + i += size + } + if start < len(s) { + buf.Write(s[start:]) + } + buf.WriteByte('"') + n, err := w.Write(buf.Bytes()) + poolBuffer(buf) + return n, err +} + +// getu4 decodes \uXXXX from the beginning of s, returning the hex value, +// or it returns -1. +func getu4(s []byte) rune { + if len(s) < 6 || s[0] != '\\' || s[1] != 'u' { + return -1 + } + r, err := strconv.ParseUint(string(s[2:6]), 16, 64) + if err != nil { + return -1 + } + return rune(r) +} + +func unquoteBytes(s []byte) (t []byte, ok bool) { + if len(s) < 2 || s[0] != '"' || s[len(s)-1] != '"' { + return + } + s = s[1 : len(s)-1] + + // Check for unusual characters. If there are none, + // then no unquoting is needed, so return a slice of the + // original bytes. + r := 0 + for r < len(s) { + c := s[r] + if c == '\\' || c == '"' || c < ' ' { + break + } + if c < utf8.RuneSelf { + r++ + continue + } + rr, size := utf8.DecodeRune(s[r:]) + if rr == utf8.RuneError { + break + } + r += size + } + if r == len(s) { + return s, true + } + + b := make([]byte, len(s)+2*utf8.UTFMax) + w := copy(b, s[0:r]) + for r < len(s) { + // Out of room? Can only happen if s is full of + // malformed UTF-8 and we're replacing each + // byte with RuneError. + if w >= len(b)-2*utf8.UTFMax { + nb := make([]byte, (len(b)+utf8.UTFMax)*2) + copy(nb, b[0:w]) + b = nb + } + switch c := s[r]; { + case c == '\\': + r++ + if r >= len(s) { + return + } + switch s[r] { + default: + return + case '"', '\\', '/', '\'': + b[w] = s[r] + r++ + w++ + case 'b': + b[w] = '\b' + r++ + w++ + case 'f': + b[w] = '\f' + r++ + w++ + case 'n': + b[w] = '\n' + r++ + w++ + case 'r': + b[w] = '\r' + r++ + w++ + case 't': + b[w] = '\t' + r++ + w++ + case 'u': + r-- + rr := getu4(s[r:]) + if rr < 0 { + return + } + r += 6 + if utf16.IsSurrogate(rr) { + rr1 := getu4(s[r:]) + if dec := utf16.DecodeRune(rr, rr1); dec != unicode.ReplacementChar { + // A valid pair; consume. + r += 6 + w += utf8.EncodeRune(b[w:], dec) + break + } + // Invalid surrogate; fall back to replacement rune. + rr = unicode.ReplacementChar + } + w += utf8.EncodeRune(b[w:], rr) + } + + // Quote, control characters are invalid. + case c == '"', c < ' ': + return + + // ASCII + case c < utf8.RuneSelf: + b[w] = c + r++ + w++ + + // Coerce to well-formed UTF-8. 
+ default: + rr, size := utf8.DecodeRune(s[r:]) + r += size + w += utf8.EncodeRune(b[w:], rr) + } + } + return b[0:w], true +} diff --git a/pkg/storage/batch.go b/pkg/storage/batch.go index f0d9387bcd1bc..fdc8464cab231 100644 --- a/pkg/storage/batch.go +++ b/pkg/storage/batch.go @@ -440,6 +440,8 @@ func (it *logBatchIterator) buildHeapIterator(chks [][]*LazyChunk, from, through iterators[i], iterators[j] = iterators[j], iterators[i] } } + // TODO(cyriltovena): Setting labels here is wrong now as labels can be different within the same chunk due to + // label extraction feature. result = append(result, iter.NewNonOverlappingIterator(iterators, labels)) } @@ -537,7 +539,8 @@ func (it *sampleBatchIterator) buildHeapIterator(chks [][]*LazyChunk, from, thro } iterators = append(iterators, iterator) } - + // TODO(cyriltovena): Setting labels here is wrong now as labels can be different within the same chunk due to + // label extraction feature. result = append(result, iter.NewNonOverlappingSampleIterator(iterators, labels)) } From 31c2f3f5988943eaf39bcc9fd193722a093090a2 Mon Sep 17 00:00:00 2001 From: Cyril Tovena Date: Wed, 16 Sep 2020 15:08:51 +0200 Subject: [PATCH 02/45] hook the ast with parsers. Signed-off-by: Cyril Tovena --- pkg/logql/ast.go | 63 +++++ pkg/logql/ast_test.go | 42 ++- pkg/logql/expr.y | 11 +- pkg/logql/expr.y.go | 436 ++++++++++++++++++++------------ pkg/logql/labels_parser.go | 15 ++ pkg/logql/labels_parser_test.go | 8 - pkg/logql/lex.go | 6 + 7 files changed, 408 insertions(+), 173 deletions(-) diff --git a/pkg/logql/ast.go b/pkg/logql/ast.go index 9d6cf2261c4ff..4a779b9b9feb0 100644 --- a/pkg/logql/ast.go +++ b/pkg/logql/ast.go @@ -75,6 +75,7 @@ type Querier interface { type LogSelectorExpr interface { Filter() (LineFilter, error) Matchers() []*labels.Matcher + Parser() (LabelParser, error) Expr } @@ -108,6 +109,10 @@ func (e *matchersExpr) Filter() (LineFilter, error) { return nil, nil } +func (e *matchersExpr) Parser() (LabelParser, error) { + return NoopLabelParser, nil +} + type filterExpr struct { left LogSelectorExpr ty labels.MatchType @@ -167,6 +172,58 @@ func (e *filterExpr) Filter() (LineFilter, error) { return f, nil } +func (e *filterExpr) Parser() (LabelParser, error) { + return NoopLabelParser, nil +} + +type parserExpr struct { + left LogSelectorExpr + op string + param string + implicit +} + +func newParserExpr(left LogSelectorExpr, op, param string) LogSelectorExpr { + // todo(cyriltovena): we might want to pre-validate param here to fail fast. 
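+	// Until then, an invalid param (e.g. a malformed regexp) only surfaces
+	// when Parser() is called on the resulting expression.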
+	return &parserExpr{
+		left:  left,
+		op:    op,
+		param: param,
+	}
+}
+
+func (e *parserExpr) Matchers() []*labels.Matcher {
+	return e.left.Matchers()
+}
+
+func (e *parserExpr) Filter() (LineFilter, error) {
+	return e.left.Filter()
+}
+
+func (e *parserExpr) Parser() (LabelParser, error) {
+	switch e.op {
+	case OpParserTypeJSON:
+		return NewJSONParser(), nil
+	case OpParserTypeLogfmt:
+		return NewLogfmtParser(), nil
+	case OpParserTypeRegexp:
+		return NewRegexpParser(e.param)
+	default:
+		return nil, fmt.Errorf("unknown parser operator: %s", e.op)
+	}
+}
+
+func (e *parserExpr) String() string {
+	var sb strings.Builder
+	sb.WriteString(e.left.String())
+	sb.WriteString("|")
+	sb.WriteString(e.op)
+	if e.param != "" {
+		sb.WriteString(strconv.Quote(e.param))
+	}
+	return sb.String()
+}
+
 func mustNewMatcher(t labels.MatchType, n, v string) *labels.Matcher {
 	m, err := labels.NewMatcher(t, n, v)
 	if err != nil {
@@ -242,6 +299,11 @@ const (
 	OpTypeGTE = ">="
 	OpTypeLT  = "<"
 	OpTypeLTE = "<="
+
+	// parsers
+	OpParserTypeJSON   = "json"
+	OpParserTypeLogfmt = "logfmt"
+	OpParserTypeRegexp = "regexp"
 )
 
 func IsComparisonOperator(op string) bool {
@@ -505,6 +567,7 @@ func (e *literalExpr) Selector() LogSelectorExpr { return e }
 func (e *literalExpr) Operations() []string { return nil }
 func (e *literalExpr) Filter() (LineFilter, error) { return nil, nil }
 func (e *literalExpr) Matchers() []*labels.Matcher { return nil }
+func (e *literalExpr) Parser() (LabelParser, error) { return NoopLabelParser, nil }
 func (e *literalExpr) Extractor() (SampleExtractor, error) { return nil, nil }
 
 // helper used to impl Stringer for vector and range aggregations
diff --git a/pkg/logql/ast_test.go b/pkg/logql/ast_test.go
index 79b79053a44bf..5d4de102b22e3 100644
--- a/pkg/logql/ast_test.go
+++ b/pkg/logql/ast_test.go
@@ -4,10 +4,9 @@ import (
 	"strings"
 	"testing"
 
+	"github.com/prometheus/prometheus/pkg/labels"
 	"github.com/stretchr/testify/assert"
 	"github.com/stretchr/testify/require"
-
-	"github.com/prometheus/prometheus/pkg/labels"
 )
 
 func Test_logSelectorExpr_String(t *testing.T) {
@@ -25,6 +24,9 @@ func Test_logSelectorExpr_String(t *testing.T) {
 		{`{foo="bar", bar!="baz"} |~ ".*"`, false},
 		{`{foo="bar", bar!="baz"} |= "" |= ""`, false},
 		{`{foo="bar", bar!="baz"} |~ "" |= "" |~ ".*"`, false},
+		{`{foo="bar", bar!="baz"} != "bip" !~ ".+bop" | json`, true},
+		{`{foo="bar"} |= "baz" |~ "blip" != "flip" !~ "flap" | logfmt`, true},
+		{`{foo="bar"} |= "baz" |~ "blip" != "flip" !~ "flap" | regexp "(?P<foo>foo|bar)"`, true},
 	}
 
 	for _, tt := range tests {
@@ -54,6 +56,9 @@ func Test_SampleExpr_String(t *testing.T) {
 		`sum without(a) ( rate ( ( {job="mysql"} |="error" !="timeout" ) [10s] ) )`,
 		`sum by(a) (rate( ( {job="mysql"} |="error" !="timeout" ) [10s] ) )`,
 		`sum(count_over_time({job="mysql"}[5m]))`,
+		`sum(count_over_time({job="mysql"} | json [5m]))`,
+		`sum(count_over_time({job="mysql"} | logfmt [5m]))`,
+		`sum(count_over_time({job="mysql"} | regexp "(?P<foo>foo|bar)" [5m]))`,
 		`topk(10,sum(rate({region="us-east1"}[5m])) by (name))`,
 		`avg( rate( ( {job="nginx"} |= "GET" ) [10s] ) ) by (region)`,
 		`sum by (cluster) (count_over_time({job="mysql"}[5m]))`,
@@ -249,3 +254,36 @@ func BenchmarkContainsFilter(b *testing.B) {
 		}
 	}
 }
+
+func Test_parserExpr_Parser(t *testing.T) {
+	tests := []struct {
+		name    string
+		op      string
+		param   string
+		want    LabelParser
+		wantErr bool
+	}{
+		{"json", OpParserTypeJSON, "", NewJSONParser(), false},
+		{"logfmt", OpParserTypeLogfmt, "", NewLogfmtParser(), false},
+		{"regexp", OpParserTypeRegexp, "(?P<foo>foo)", mustNewRegexParser("(?P<foo>foo)"), false},
+		{"regexp err ", OpParserTypeRegexp, "foo", nil, true},
+	}
+	for _, tt := range tests {
+		t.Run(tt.name, func(t *testing.T) {
+			e := &parserExpr{
+				op:    tt.op,
+				param: tt.param,
+			}
+			got, err := e.Parser()
+			if (err != nil) != tt.wantErr {
+				t.Errorf("parserExpr.Parser() error = %v, wantErr %v", err, tt.wantErr)
+				return
+			}
+			if tt.wantErr {
+				require.Nil(t, got)
+			} else {
+				require.Equal(t, tt.want, got)
+			}
+		})
+	}
+}
diff --git a/pkg/logql/expr.y b/pkg/logql/expr.y
index f1bf6452a07e7..9223fe8fd02b2 100644
--- a/pkg/logql/expr.y
+++ b/pkg/logql/expr.y
@@ -28,6 +28,7 @@ import (
 	duration        time.Duration
 	LiteralExpr     *literalExpr
 	BinOpModifier   BinOpOptions
+	LabelParser     struct{ op, param string}
 }
 
 %start root
@@ -49,12 +50,13 @@ import (
 %type <BinOpExpr>      binOpExpr
 %type <LiteralExpr>    literalExpr
 %type <BinOpModifier>  binOpModifier
+%type <LabelParser>    labelparser
 
 %token <str>      IDENTIFIER STRING NUMBER
 %token <duration> DURATION
 %token MATCHERS LABELS EQ RE NRE OPEN_BRACE CLOSE_BRACE OPEN_BRACKET CLOSE_BRACKET COMMA DOT PIPE_MATCH PIPE_EXACT
                  OPEN_PARENTHESIS CLOSE_PARENTHESIS BY WITHOUT COUNT_OVER_TIME RATE SUM AVG MAX MIN COUNT STDDEV STDVAR BOTTOMK TOPK
-                 BYTES_OVER_TIME BYTES_RATE BOOL
+                 BYTES_OVER_TIME BYTES_RATE BOOL JSON REGEXP LOGFMT PIPE
 
 // Operators are listed with increasing precedence.
 %left OR
@@ -84,6 +86,7 @@ metricExpr:
 logExpr:
       selector { $$ = newMatcherExpr($1)}
     | logExpr filter STRING { $$ = NewFilterExpr( $1, $2, $3 ) }
+    | logExpr labelparser { $$ = newParserExpr($1, $2.op, $2.param) }
    | OPEN_PARENTHESIS logExpr CLOSE_PARENTHESIS { $$ = $2 }
    | logExpr filter error
    | logExpr error
@@ -194,4 +197,10 @@ grouping:
      BY OPEN_PARENTHESIS labels CLOSE_PARENTHESIS { $$ = &grouping{ without: false , groups: $3 } }
    | WITHOUT OPEN_PARENTHESIS labels CLOSE_PARENTHESIS { $$ = &grouping{ without: true , groups: $3 } }
    ;
+
+labelparser:
+      PIPE JSON { $$ = struct{ op, param string}{ op: OpParserTypeJSON} }
+    | PIPE LOGFMT { $$ = struct{ op, param string}{ op: OpParserTypeLogfmt} }
+    | PIPE REGEXP STRING { $$ = struct{ op, param string}{ op: OpParserTypeRegexp, param: $3} }
+    ;
 %%
diff --git a/pkg/logql/expr.y.go b/pkg/logql/expr.y.go
index d8264e82f1619..364aa6b9097b0 100644
--- a/pkg/logql/expr.y.go
+++ b/pkg/logql/expr.y.go
@@ -1,15 +1,18 @@
 // Code generated by goyacc -p expr -o pkg/logql/expr.y.go pkg/logql/expr.y. DO NOT EDIT.
+//line pkg/logql/expr.y:2 package logql import __yyfmt__ "fmt" +//line pkg/logql/expr.y:2 import ( "github.com/prometheus/prometheus/pkg/labels" "time" ) +//line pkg/logql/expr.y:10 type exprSymType struct { yys int Expr Expr @@ -32,6 +35,7 @@ type exprSymType struct { duration time.Duration LiteralExpr *literalExpr BinOpModifier BinOpOptions + LabelParser struct{ op, param string } } const IDENTIFIER = 57346 @@ -69,21 +73,25 @@ const TOPK = 57377 const BYTES_OVER_TIME = 57378 const BYTES_RATE = 57379 const BOOL = 57380 -const OR = 57381 -const AND = 57382 -const UNLESS = 57383 -const CMP_EQ = 57384 -const NEQ = 57385 -const LT = 57386 -const LTE = 57387 -const GT = 57388 -const GTE = 57389 -const ADD = 57390 -const SUB = 57391 -const MUL = 57392 -const DIV = 57393 -const MOD = 57394 -const POW = 57395 +const JSON = 57381 +const REGEXP = 57382 +const LOGFMT = 57383 +const PIPE = 57384 +const OR = 57385 +const AND = 57386 +const UNLESS = 57387 +const CMP_EQ = 57388 +const NEQ = 57389 +const LT = 57390 +const LTE = 57391 +const GT = 57392 +const GTE = 57393 +const ADD = 57394 +const SUB = 57395 +const MUL = 57396 +const DIV = 57397 +const MOD = 57398 +const POW = 57399 var exprToknames = [...]string{ "$end", @@ -124,6 +132,10 @@ var exprToknames = [...]string{ "BYTES_OVER_TIME", "BYTES_RATE", "BOOL", + "JSON", + "REGEXP", + "LOGFMT", + "PIPE", "OR", "AND", "UNLESS", @@ -146,7 +158,9 @@ const exprEofCode = 1 const exprErrCode = 2 const exprInitialStackSize = 16 +//line pkg/logql/expr.y:206 +//line yacctab:1 var exprExca = [...]int{ -1, 1, 1, -1, @@ -154,154 +168,157 @@ var exprExca = [...]int{ -1, 3, 1, 2, 22, 2, - 39, 2, - 40, 2, - 41, 2, - 42, 2, + 43, 2, 44, 2, 45, 2, 46, 2, - 47, 2, 48, 2, 49, 2, 50, 2, 51, 2, 52, 2, 53, 2, + 54, 2, + 55, 2, + 56, 2, + 57, 2, -2, 0, - -1, 52, - 39, 2, - 40, 2, - 41, 2, - 42, 2, + -1, 54, + 43, 2, 44, 2, 45, 2, 46, 2, - 47, 2, 48, 2, 49, 2, 50, 2, 51, 2, 52, 2, 53, 2, + 54, 2, + 55, 2, + 56, 2, + 57, 2, -2, 0, } const exprPrivate = 57344 -const exprLast = 279 +const exprLast = 277 var exprAct = [...]int{ - 60, 4, 45, 130, 56, 3, 93, 38, 51, 53, - 2, 66, 52, 30, 31, 32, 39, 40, 43, 44, + 62, 4, 45, 136, 58, 3, 98, 38, 53, 55, + 2, 68, 54, 30, 31, 32, 39, 40, 43, 44, 41, 42, 33, 34, 35, 36, 37, 38, 14, 33, 34, 35, 36, 37, 38, 11, 35, 36, 37, 38, - 89, 91, 92, 6, 61, 62, 142, 17, 18, 21, + 85, 87, 86, 6, 63, 64, 148, 17, 18, 21, 22, 24, 25, 23, 26, 27, 28, 29, 19, 20, - 59, 96, 61, 62, 94, 138, 127, 139, 11, 139, - 15, 16, 141, 90, 140, 101, 95, 102, 103, 104, - 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, - 115, 83, 116, 100, 99, 98, 121, 58, 128, 88, - 129, 125, 126, 64, 132, 31, 32, 39, 40, 43, - 44, 41, 42, 33, 34, 35, 36, 37, 38, 134, - 63, 120, 133, 86, 46, 97, 10, 143, 136, 121, - 137, 119, 11, 82, 49, 85, 81, 118, 87, 117, - 6, 47, 48, 144, 17, 18, 21, 22, 24, 25, - 23, 26, 27, 28, 29, 19, 20, 65, 55, 131, - 57, 57, 9, 13, 8, 50, 5, 15, 16, 39, - 40, 43, 44, 41, 42, 33, 34, 35, 36, 37, - 38, 12, 7, 54, 1, 0, 0, 0, 0, 67, - 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, - 78, 79, 80, 46, 0, 0, 0, 122, 124, 0, - 0, 0, 122, 49, 0, 0, 0, 49, 0, 0, - 47, 48, 49, 84, 47, 48, 0, 135, 46, 47, - 48, 0, 123, 124, 0, 0, 0, 46, 49, 0, - 0, 0, 0, 0, 50, 47, 48, 49, 50, 0, - 0, 0, 0, 50, 47, 48, 0, 84, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 50, - 0, 0, 0, 0, 0, 0, 0, 0, 50, + 144, 133, 61, 101, 63, 64, 99, 94, 96, 97, + 88, 145, 105, 145, 15, 16, 147, 106, 146, 107, + 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, + 118, 119, 120, 104, 11, 103, 60, 122, 93, 134, + 
140, 127, 100, 139, 95, 135, 131, 132, 66, 138, + 31, 32, 39, 40, 43, 44, 41, 42, 33, 34, + 35, 36, 37, 38, 84, 91, 65, 83, 57, 126, + 59, 102, 125, 124, 142, 127, 143, 90, 11, 123, + 92, 121, 149, 137, 59, 46, 6, 67, 10, 150, + 17, 18, 21, 22, 24, 25, 23, 26, 27, 28, + 29, 19, 20, 39, 40, 43, 44, 41, 42, 33, + 34, 35, 36, 37, 38, 9, 13, 15, 16, 69, + 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, + 80, 81, 82, 47, 8, 5, 12, 47, 130, 47, + 7, 56, 130, 50, 1, 0, 0, 50, 128, 50, + 48, 49, 128, 89, 48, 49, 48, 49, 50, 89, + 0, 0, 50, 0, 0, 48, 49, 0, 141, 48, + 49, 47, 129, 52, 0, 0, 0, 52, 51, 52, + 0, 50, 51, 0, 51, 0, 0, 0, 48, 49, + 0, 0, 0, 51, 0, 0, 0, 51, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 52, 0, 0, 0, 0, 51, } var exprPact = [...]int{ - 22, -1000, -26, 122, -1000, -1000, 22, -1000, -1000, -1000, - -1000, 156, 76, 39, -1000, 114, 97, -1000, -1000, -1000, + 22, -1000, -30, 229, -1000, -1000, 22, -1000, -1000, -1000, + -1000, 126, 75, 41, -1000, 120, 102, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -27, -27, -27, -27, -27, -27, -27, -27, -27, -27, - -27, -27, -27, -27, -27, 131, -1000, -1000, -1000, -1000, - -1000, 69, 235, -26, 121, 85, -1000, 30, 55, 119, - 74, 73, 72, -1000, -1000, 22, -1000, 22, 22, 22, + -27, -27, -27, -27, -27, 122, -1000, -1000, -1000, -1000, + -1000, -1000, 1, 48, 197, -30, 123, 84, -1000, 57, + 81, 125, 74, 72, 51, -1000, -1000, 22, -1000, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, - 22, -1000, -1000, -1000, -1000, -1000, -1000, 157, -1000, 134, - 132, 126, 116, 210, 226, 55, 44, 81, 22, 155, - 155, 65, 127, 127, -14, -14, -46, -46, -46, -46, - -19, -19, -19, -19, -19, -19, -1000, -1000, -1000, -1000, - -1000, 117, -1000, -1000, -1000, 201, 205, 21, 22, 43, - 52, -1000, 50, -1000, -1000, -1000, -1000, 24, -1000, 123, - -1000, -1000, 21, -1000, -1000, + 22, 22, 22, -1000, -1000, -1000, -1000, 136, -1000, -1000, + -1000, -1000, 140, -1000, 134, 128, 127, 124, 210, 195, + 81, 39, 82, 22, 139, 139, 66, 117, 117, -18, + -18, -50, -50, -50, -50, -23, -23, -23, -23, -23, + -23, -1000, -1000, -1000, -1000, -1000, -1000, 98, -1000, -1000, + -1000, 191, 206, 21, 22, 38, 56, -1000, 54, -1000, + -1000, -1000, -1000, 24, -1000, 138, -1000, -1000, 21, -1000, + -1000, } var exprPgo = [...]int{ - 0, 184, 9, 2, 0, 3, 5, 1, 6, 4, - 183, 182, 181, 166, 164, 163, 162, 126, 157, + 0, 204, 9, 2, 0, 3, 5, 1, 6, 4, + 201, 200, 196, 195, 194, 176, 175, 148, 147, 145, } var exprR1 = [...]int{ 0, 1, 2, 2, 7, 7, 7, 7, 7, 6, - 6, 6, 6, 6, 8, 8, 8, 8, 8, 11, - 14, 14, 14, 14, 14, 3, 3, 3, 3, 13, - 13, 13, 10, 10, 9, 9, 9, 9, 16, 16, + 6, 6, 6, 6, 6, 8, 8, 8, 8, 8, + 11, 14, 14, 14, 14, 14, 3, 3, 3, 3, + 13, 13, 13, 10, 10, 9, 9, 9, 9, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, - 16, 16, 16, 18, 18, 17, 17, 17, 15, 15, - 15, 15, 15, 15, 15, 15, 15, 12, 12, 12, - 12, 5, 5, 4, 4, + 16, 16, 16, 16, 18, 18, 17, 17, 17, 15, + 15, 15, 15, 15, 15, 15, 15, 15, 12, 12, + 12, 12, 5, 5, 4, 4, 19, 19, 19, } var exprR2 = [...]int{ 0, 1, 1, 1, 1, 1, 1, 1, 3, 1, - 3, 3, 3, 2, 2, 3, 3, 3, 2, 4, - 4, 5, 5, 6, 7, 1, 1, 1, 1, 3, - 3, 3, 1, 3, 3, 3, 3, 3, 4, 4, + 3, 2, 3, 3, 2, 2, 3, 3, 3, 2, + 4, 4, 5, 5, 6, 7, 1, 1, 1, 1, + 3, 3, 3, 1, 3, 3, 3, 3, 3, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, - 4, 4, 4, 0, 1, 1, 2, 2, 1, 1, + 4, 4, 4, 4, 0, 1, 1, 2, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, - 1, 1, 3, 4, 4, + 1, 1, 1, 3, 4, 4, 2, 2, 3, } var exprChk = [...]int{ -1000, -1, -2, -6, -7, -13, 21, -11, -14, -16, - -17, 13, -12, -15, 6, 48, 49, 25, 26, 36, + -17, 13, -12, -15, 6, 
52, 53, 25, 26, 36, 37, 27, 28, 31, 29, 30, 32, 33, 34, 35, - 39, 40, 41, 48, 49, 50, 51, 52, 53, 42, - 43, 46, 47, 44, 45, -3, 2, 19, 20, 12, - 43, -7, -6, -2, -10, 2, -9, 4, 21, 21, - -4, 23, 24, 6, 6, -18, 38, -18, -18, -18, + 43, 44, 45, 52, 53, 54, 55, 56, 57, 46, + 47, 50, 51, 48, 49, -3, -19, 2, 19, 20, + 12, 47, 42, -7, -6, -2, -10, 2, -9, 4, + 21, 21, -4, 23, 24, 6, 6, -18, 38, -18, -18, -18, -18, -18, -18, -18, -18, -18, -18, -18, - -18, 5, 2, 22, 22, 14, 2, 17, 14, 10, - 43, 11, 12, -8, -6, 21, -7, 6, 21, 21, - 21, -2, -2, -2, -2, -2, -2, -2, -2, -2, - -2, -2, -2, -2, -2, -2, -9, 5, 5, 5, - 5, -3, 2, 22, 7, -6, -8, 22, 17, -7, - -5, 4, -5, 5, 2, 22, -4, -7, 22, 17, - 22, 22, 22, 4, -4, + -18, -18, -18, 5, 2, 39, 41, 40, 22, 22, + 14, 2, 17, 14, 10, 47, 11, 12, -8, -6, + 21, -7, 6, 21, 21, 21, -2, -2, -2, -2, + -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, + -2, 5, -9, 5, 5, 5, 5, -3, 2, 22, + 7, -6, -8, 22, 17, -7, -5, 4, -5, 5, + 2, 22, -4, -7, 22, 17, 22, 22, 22, 4, + -4, } var exprDef = [...]int{ 0, -2, 1, -2, 3, 9, 0, 4, 5, 6, - 7, 0, 0, 0, 55, 0, 0, 67, 68, 69, - 70, 58, 59, 60, 61, 62, 63, 64, 65, 66, - 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, - 53, 53, 53, 53, 53, 0, 13, 25, 26, 27, - 28, 3, -2, 0, 0, 0, 32, 0, 0, 0, - 0, 0, 0, 56, 57, 0, 54, 0, 0, 0, + 7, 0, 0, 0, 56, 0, 0, 68, 69, 70, + 71, 59, 60, 61, 62, 63, 64, 65, 66, 67, + 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, + 54, 54, 54, 54, 54, 0, 11, 14, 26, 27, + 28, 29, 0, 3, -2, 0, 0, 0, 33, 0, + 0, 0, 0, 0, 0, 57, 58, 0, 55, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 10, 12, 8, 11, 29, 30, 0, 31, 0, - 0, 0, 0, 0, 0, 0, 3, 55, 0, 0, - 0, 38, 39, 40, 41, 42, 43, 44, 45, 46, - 47, 48, 49, 50, 51, 52, 33, 34, 35, 36, - 37, 0, 18, 19, 14, 0, 0, 20, 0, 3, - 0, 71, 0, 15, 17, 16, 22, 3, 21, 0, - 73, 74, 23, 72, 24, + 0, 0, 0, 10, 13, 76, 77, 0, 8, 12, + 30, 31, 0, 32, 0, 0, 0, 0, 0, 0, + 0, 3, 56, 0, 0, 0, 39, 40, 41, 42, + 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, + 53, 78, 34, 35, 36, 37, 38, 0, 19, 20, + 15, 0, 0, 21, 0, 3, 0, 72, 0, 16, + 18, 17, 23, 3, 22, 0, 74, 75, 24, 73, + 25, } var exprTok1 = [...]int{ @@ -314,7 +331,7 @@ var exprTok2 = [...]int{ 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, - 52, 53, + 52, 53, 54, 55, 56, 57, } var exprTok3 = [...]int{ 0, @@ -326,6 +343,7 @@ var exprErrorMessages = [...]struct { msg string }{} +//line yaccpar:1 /* parser for yacc output */ @@ -658,353 +676,447 @@ exprdefault: case 1: exprDollar = exprS[exprpt-1 : exprpt+1] +//line pkg/logql/expr.y:71 { exprlex.(*lexer).expr = exprDollar[1].Expr } case 2: exprDollar = exprS[exprpt-1 : exprpt+1] +//line pkg/logql/expr.y:74 { exprVAL.Expr = exprDollar[1].LogExpr } case 3: exprDollar = exprS[exprpt-1 : exprpt+1] +//line pkg/logql/expr.y:75 { exprVAL.Expr = exprDollar[1].MetricExpr } case 4: exprDollar = exprS[exprpt-1 : exprpt+1] +//line pkg/logql/expr.y:79 { exprVAL.MetricExpr = exprDollar[1].RangeAggregationExpr } case 5: exprDollar = exprS[exprpt-1 : exprpt+1] +//line pkg/logql/expr.y:80 { exprVAL.MetricExpr = exprDollar[1].VectorAggregationExpr } case 6: exprDollar = exprS[exprpt-1 : exprpt+1] +//line pkg/logql/expr.y:81 { exprVAL.MetricExpr = exprDollar[1].BinOpExpr } case 7: exprDollar = exprS[exprpt-1 : exprpt+1] +//line pkg/logql/expr.y:82 { exprVAL.MetricExpr = exprDollar[1].LiteralExpr } case 8: exprDollar = exprS[exprpt-3 : exprpt+1] +//line pkg/logql/expr.y:83 { exprVAL.MetricExpr = exprDollar[2].MetricExpr } case 9: exprDollar = exprS[exprpt-1 : exprpt+1] +//line 
pkg/logql/expr.y:87 { exprVAL.LogExpr = newMatcherExpr(exprDollar[1].Selector) } case 10: exprDollar = exprS[exprpt-3 : exprpt+1] +//line pkg/logql/expr.y:88 { exprVAL.LogExpr = NewFilterExpr(exprDollar[1].LogExpr, exprDollar[2].Filter, exprDollar[3].str) } case 11: + exprDollar = exprS[exprpt-2 : exprpt+1] +//line pkg/logql/expr.y:89 + { + exprVAL.LogExpr = newParserExpr(exprDollar[1].LogExpr, exprDollar[2].LabelParser.op, exprDollar[2].LabelParser.param) + } + case 12: exprDollar = exprS[exprpt-3 : exprpt+1] +//line pkg/logql/expr.y:90 { exprVAL.LogExpr = exprDollar[2].LogExpr } - case 14: + case 15: exprDollar = exprS[exprpt-2 : exprpt+1] +//line pkg/logql/expr.y:96 { exprVAL.LogRangeExpr = newLogRange(exprDollar[1].LogExpr, exprDollar[2].duration) } - case 15: + case 16: exprDollar = exprS[exprpt-3 : exprpt+1] +//line pkg/logql/expr.y:97 { exprVAL.LogRangeExpr = addFilterToLogRangeExpr(exprDollar[1].LogRangeExpr, exprDollar[2].Filter, exprDollar[3].str) } - case 16: + case 17: exprDollar = exprS[exprpt-3 : exprpt+1] +//line pkg/logql/expr.y:98 { exprVAL.LogRangeExpr = exprDollar[2].LogRangeExpr } - case 19: + case 20: exprDollar = exprS[exprpt-4 : exprpt+1] +//line pkg/logql/expr.y:103 { exprVAL.RangeAggregationExpr = newRangeAggregationExpr(exprDollar[3].LogRangeExpr, exprDollar[1].RangeOp) } - case 20: + case 21: exprDollar = exprS[exprpt-4 : exprpt+1] +//line pkg/logql/expr.y:107 { exprVAL.VectorAggregationExpr = mustNewVectorAggregationExpr(exprDollar[3].MetricExpr, exprDollar[1].VectorOp, nil, nil) } - case 21: + case 22: exprDollar = exprS[exprpt-5 : exprpt+1] +//line pkg/logql/expr.y:108 { exprVAL.VectorAggregationExpr = mustNewVectorAggregationExpr(exprDollar[4].MetricExpr, exprDollar[1].VectorOp, exprDollar[2].Grouping, nil) } - case 22: + case 23: exprDollar = exprS[exprpt-5 : exprpt+1] +//line pkg/logql/expr.y:109 { exprVAL.VectorAggregationExpr = mustNewVectorAggregationExpr(exprDollar[3].MetricExpr, exprDollar[1].VectorOp, exprDollar[5].Grouping, nil) } - case 23: + case 24: exprDollar = exprS[exprpt-6 : exprpt+1] +//line pkg/logql/expr.y:111 { exprVAL.VectorAggregationExpr = mustNewVectorAggregationExpr(exprDollar[5].MetricExpr, exprDollar[1].VectorOp, nil, &exprDollar[3].str) } - case 24: + case 25: exprDollar = exprS[exprpt-7 : exprpt+1] +//line pkg/logql/expr.y:112 { exprVAL.VectorAggregationExpr = mustNewVectorAggregationExpr(exprDollar[5].MetricExpr, exprDollar[1].VectorOp, exprDollar[7].Grouping, &exprDollar[3].str) } - case 25: + case 26: exprDollar = exprS[exprpt-1 : exprpt+1] +//line pkg/logql/expr.y:116 { exprVAL.Filter = labels.MatchRegexp } - case 26: + case 27: exprDollar = exprS[exprpt-1 : exprpt+1] +//line pkg/logql/expr.y:117 { exprVAL.Filter = labels.MatchEqual } - case 27: + case 28: exprDollar = exprS[exprpt-1 : exprpt+1] +//line pkg/logql/expr.y:118 { exprVAL.Filter = labels.MatchNotRegexp } - case 28: + case 29: exprDollar = exprS[exprpt-1 : exprpt+1] +//line pkg/logql/expr.y:119 { exprVAL.Filter = labels.MatchNotEqual } - case 29: + case 30: exprDollar = exprS[exprpt-3 : exprpt+1] +//line pkg/logql/expr.y:123 { exprVAL.Selector = exprDollar[2].Matchers } - case 30: + case 31: exprDollar = exprS[exprpt-3 : exprpt+1] +//line pkg/logql/expr.y:124 { exprVAL.Selector = exprDollar[2].Matchers } - case 31: + case 32: exprDollar = exprS[exprpt-3 : exprpt+1] +//line pkg/logql/expr.y:125 { } - case 32: + case 33: exprDollar = exprS[exprpt-1 : exprpt+1] +//line pkg/logql/expr.y:129 { exprVAL.Matchers = []*labels.Matcher{exprDollar[1].Matcher} } - case 33: + 
case 34: exprDollar = exprS[exprpt-3 : exprpt+1] +//line pkg/logql/expr.y:130 { exprVAL.Matchers = append(exprDollar[1].Matchers, exprDollar[3].Matcher) } - case 34: + case 35: exprDollar = exprS[exprpt-3 : exprpt+1] +//line pkg/logql/expr.y:134 { exprVAL.Matcher = mustNewMatcher(labels.MatchEqual, exprDollar[1].str, exprDollar[3].str) } - case 35: + case 36: exprDollar = exprS[exprpt-3 : exprpt+1] +//line pkg/logql/expr.y:135 { exprVAL.Matcher = mustNewMatcher(labels.MatchNotEqual, exprDollar[1].str, exprDollar[3].str) } - case 36: + case 37: exprDollar = exprS[exprpt-3 : exprpt+1] +//line pkg/logql/expr.y:136 { exprVAL.Matcher = mustNewMatcher(labels.MatchRegexp, exprDollar[1].str, exprDollar[3].str) } - case 37: + case 38: exprDollar = exprS[exprpt-3 : exprpt+1] +//line pkg/logql/expr.y:137 { exprVAL.Matcher = mustNewMatcher(labels.MatchNotRegexp, exprDollar[1].str, exprDollar[3].str) } - case 38: + case 39: exprDollar = exprS[exprpt-4 : exprpt+1] +//line pkg/logql/expr.y:143 { exprVAL.BinOpExpr = mustNewBinOpExpr("or", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } - case 39: + case 40: exprDollar = exprS[exprpt-4 : exprpt+1] +//line pkg/logql/expr.y:144 { exprVAL.BinOpExpr = mustNewBinOpExpr("and", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } - case 40: + case 41: exprDollar = exprS[exprpt-4 : exprpt+1] +//line pkg/logql/expr.y:145 { exprVAL.BinOpExpr = mustNewBinOpExpr("unless", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } - case 41: + case 42: exprDollar = exprS[exprpt-4 : exprpt+1] +//line pkg/logql/expr.y:146 { exprVAL.BinOpExpr = mustNewBinOpExpr("+", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } - case 42: + case 43: exprDollar = exprS[exprpt-4 : exprpt+1] +//line pkg/logql/expr.y:147 { exprVAL.BinOpExpr = mustNewBinOpExpr("-", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } - case 43: + case 44: exprDollar = exprS[exprpt-4 : exprpt+1] +//line pkg/logql/expr.y:148 { exprVAL.BinOpExpr = mustNewBinOpExpr("*", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } - case 44: + case 45: exprDollar = exprS[exprpt-4 : exprpt+1] +//line pkg/logql/expr.y:149 { exprVAL.BinOpExpr = mustNewBinOpExpr("/", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } - case 45: + case 46: exprDollar = exprS[exprpt-4 : exprpt+1] +//line pkg/logql/expr.y:150 { exprVAL.BinOpExpr = mustNewBinOpExpr("%", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } - case 46: + case 47: exprDollar = exprS[exprpt-4 : exprpt+1] +//line pkg/logql/expr.y:151 { exprVAL.BinOpExpr = mustNewBinOpExpr("^", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } - case 47: + case 48: exprDollar = exprS[exprpt-4 : exprpt+1] +//line pkg/logql/expr.y:152 { exprVAL.BinOpExpr = mustNewBinOpExpr("==", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } - case 48: + case 49: exprDollar = exprS[exprpt-4 : exprpt+1] +//line pkg/logql/expr.y:153 { exprVAL.BinOpExpr = mustNewBinOpExpr("!=", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } - case 49: + case 50: exprDollar = exprS[exprpt-4 : exprpt+1] +//line pkg/logql/expr.y:154 { exprVAL.BinOpExpr = mustNewBinOpExpr(">", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } - case 50: + case 51: exprDollar = exprS[exprpt-4 : exprpt+1] +//line pkg/logql/expr.y:155 { exprVAL.BinOpExpr = mustNewBinOpExpr(">=", exprDollar[3].BinOpModifier, 
exprDollar[1].Expr, exprDollar[4].Expr) } - case 51: + case 52: exprDollar = exprS[exprpt-4 : exprpt+1] +//line pkg/logql/expr.y:156 { exprVAL.BinOpExpr = mustNewBinOpExpr("<", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } - case 52: + case 53: exprDollar = exprS[exprpt-4 : exprpt+1] +//line pkg/logql/expr.y:157 { exprVAL.BinOpExpr = mustNewBinOpExpr("<=", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } - case 53: + case 54: exprDollar = exprS[exprpt-0 : exprpt+1] +//line pkg/logql/expr.y:161 { exprVAL.BinOpModifier = BinOpOptions{} } - case 54: + case 55: exprDollar = exprS[exprpt-1 : exprpt+1] +//line pkg/logql/expr.y:162 { exprVAL.BinOpModifier = BinOpOptions{ReturnBool: true} } - case 55: + case 56: exprDollar = exprS[exprpt-1 : exprpt+1] +//line pkg/logql/expr.y:166 { exprVAL.LiteralExpr = mustNewLiteralExpr(exprDollar[1].str, false) } - case 56: + case 57: exprDollar = exprS[exprpt-2 : exprpt+1] +//line pkg/logql/expr.y:167 { exprVAL.LiteralExpr = mustNewLiteralExpr(exprDollar[2].str, false) } - case 57: + case 58: exprDollar = exprS[exprpt-2 : exprpt+1] +//line pkg/logql/expr.y:168 { exprVAL.LiteralExpr = mustNewLiteralExpr(exprDollar[2].str, true) } - case 58: + case 59: exprDollar = exprS[exprpt-1 : exprpt+1] +//line pkg/logql/expr.y:172 { exprVAL.VectorOp = OpTypeSum } - case 59: + case 60: exprDollar = exprS[exprpt-1 : exprpt+1] +//line pkg/logql/expr.y:173 { exprVAL.VectorOp = OpTypeAvg } - case 60: + case 61: exprDollar = exprS[exprpt-1 : exprpt+1] +//line pkg/logql/expr.y:174 { exprVAL.VectorOp = OpTypeCount } - case 61: + case 62: exprDollar = exprS[exprpt-1 : exprpt+1] +//line pkg/logql/expr.y:175 { exprVAL.VectorOp = OpTypeMax } - case 62: + case 63: exprDollar = exprS[exprpt-1 : exprpt+1] +//line pkg/logql/expr.y:176 { exprVAL.VectorOp = OpTypeMin } - case 63: + case 64: exprDollar = exprS[exprpt-1 : exprpt+1] +//line pkg/logql/expr.y:177 { exprVAL.VectorOp = OpTypeStddev } - case 64: + case 65: exprDollar = exprS[exprpt-1 : exprpt+1] +//line pkg/logql/expr.y:178 { exprVAL.VectorOp = OpTypeStdvar } - case 65: + case 66: exprDollar = exprS[exprpt-1 : exprpt+1] +//line pkg/logql/expr.y:179 { exprVAL.VectorOp = OpTypeBottomK } - case 66: + case 67: exprDollar = exprS[exprpt-1 : exprpt+1] +//line pkg/logql/expr.y:180 { exprVAL.VectorOp = OpTypeTopK } - case 67: + case 68: exprDollar = exprS[exprpt-1 : exprpt+1] +//line pkg/logql/expr.y:184 { exprVAL.RangeOp = OpRangeTypeCount } - case 68: + case 69: exprDollar = exprS[exprpt-1 : exprpt+1] +//line pkg/logql/expr.y:185 { exprVAL.RangeOp = OpRangeTypeRate } - case 69: + case 70: exprDollar = exprS[exprpt-1 : exprpt+1] +//line pkg/logql/expr.y:186 { exprVAL.RangeOp = OpRangeTypeBytes } - case 70: + case 71: exprDollar = exprS[exprpt-1 : exprpt+1] +//line pkg/logql/expr.y:187 { exprVAL.RangeOp = OpRangeTypeBytesRate } - case 71: + case 72: exprDollar = exprS[exprpt-1 : exprpt+1] +//line pkg/logql/expr.y:192 { exprVAL.Labels = []string{exprDollar[1].str} } - case 72: + case 73: exprDollar = exprS[exprpt-3 : exprpt+1] +//line pkg/logql/expr.y:193 { exprVAL.Labels = append(exprDollar[1].Labels, exprDollar[3].str) } - case 73: + case 74: exprDollar = exprS[exprpt-4 : exprpt+1] +//line pkg/logql/expr.y:197 { exprVAL.Grouping = &grouping{without: false, groups: exprDollar[3].Labels} } - case 74: + case 75: exprDollar = exprS[exprpt-4 : exprpt+1] +//line pkg/logql/expr.y:198 { exprVAL.Grouping = &grouping{without: true, groups: exprDollar[3].Labels} } + case 76: + exprDollar = 
exprS[exprpt-2 : exprpt+1] +//line pkg/logql/expr.y:202 + { + exprVAL.LabelParser = struct{ op, param string }{op: OpParserTypeJSON} + } + case 77: + exprDollar = exprS[exprpt-2 : exprpt+1] +//line pkg/logql/expr.y:203 + { + exprVAL.LabelParser = struct{ op, param string }{op: OpParserTypeLogfmt} + } + case 78: + exprDollar = exprS[exprpt-3 : exprpt+1] +//line pkg/logql/expr.y:204 + { + exprVAL.LabelParser = struct{ op, param string }{op: OpParserTypeRegexp, param: exprDollar[3].str} + } } goto exprstack /* stack new state and value */ } diff --git a/pkg/logql/labels_parser.go b/pkg/logql/labels_parser.go index 02abe93d44dc1..ae79a742e2232 100644 --- a/pkg/logql/labels_parser.go +++ b/pkg/logql/labels_parser.go @@ -23,12 +23,19 @@ const ( var ( errMissingCapture = errors.New("at least one named capture must be supplied") + NoopLabelParser = noopParser{} ) type LabelParser interface { Parse(line []byte, lbs labels.Labels) labels.Labels } +type noopParser struct{} + +func (noopParser) Parse(_ []byte, lbs labels.Labels) labels.Labels { + return lbs +} + type jsonParser struct { builder *labels.Builder } @@ -111,6 +118,14 @@ func NewRegexpParser(re string) (*regexpParser, error) { }, nil } +func mustNewRegexParser(re string) *regexpParser { + r, err := NewRegexpParser(re) + if err != nil { + panic(err) + } + return r +} + func (r *regexpParser) Parse(line []byte, lbs labels.Labels) labels.Labels { r.builder.Reset(lbs) for i, value := range r.regex.FindSubmatch(line) { diff --git a/pkg/logql/labels_parser_test.go b/pkg/logql/labels_parser_test.go index f71679b2787af..595cdebfc498f 100644 --- a/pkg/logql/labels_parser_test.go +++ b/pkg/logql/labels_parser_test.go @@ -169,14 +169,6 @@ func Test_regexpParser_Parse(t *testing.T) { } } -func mustNewRegexParser(re string) *regexpParser { - r, err := NewRegexpParser(re) - if err != nil { - panic(err) - } - return r -} - func Test_logfmtParser_Parse(t *testing.T) { tests := []struct { name string diff --git a/pkg/logql/lex.go b/pkg/logql/lex.go index 4b3bfc88210ed..768a14d173778 100644 --- a/pkg/logql/lex.go +++ b/pkg/logql/lex.go @@ -19,6 +19,7 @@ var tokens = map[string]int{ "!~": NRE, "|=": PIPE_EXACT, "|~": PIPE_MATCH, + "|": PIPE, "(": OPEN_PARENTHESIS, ")": CLOSE_PARENTHESIS, "by": BY, @@ -56,6 +57,11 @@ var tokens = map[string]int{ OpTypeGTE: GTE, OpTypeLT: LT, OpTypeLTE: LTE, + + // parsers + OpParserTypeJSON: JSON, + OpParserTypeRegexp: REGEXP, + OpParserTypeLogfmt: LOGFMT, } type lexer struct { From eaf72bd57782ed277ef6516e957ddf3d2b5060f6 Mon Sep 17 00:00:00 2001 From: Cyril Tovena Date: Thu, 17 Sep 2020 06:32:05 +0200 Subject: [PATCH 03/45] hook parser with memchunk. Signed-off-by: Cyril Tovena --- pkg/chunkenc/dumb_chunk.go | 5 +- pkg/chunkenc/interface.go | 9 ++- pkg/chunkenc/memchunk.go | 148 ++++++++++++++++++---------------- pkg/chunkenc/memchunk_test.go | 38 ++++----- 4 files changed, 104 insertions(+), 96 deletions(-) diff --git a/pkg/chunkenc/dumb_chunk.go b/pkg/chunkenc/dumb_chunk.go index a831ccbc34c19..eb811d0b2956d 100644 --- a/pkg/chunkenc/dumb_chunk.go +++ b/pkg/chunkenc/dumb_chunk.go @@ -8,6 +8,7 @@ import ( "github.com/grafana/loki/pkg/iter" "github.com/grafana/loki/pkg/logproto" "github.com/grafana/loki/pkg/logql" + "github.com/prometheus/prometheus/pkg/labels" ) const ( @@ -68,7 +69,7 @@ func (c *dumbChunk) Utilization() float64 { // Returns an iterator that goes from _most_ recent to _least_ recent (ie, // backwards). 
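+// The new labels and parser arguments are accepted but ignored by this
+// simple in-memory implementation.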
-func (c *dumbChunk) Iterator(_ context.Context, from, through time.Time, direction logproto.Direction, _ logql.LineFilter) (iter.EntryIterator, error) { +func (c *dumbChunk) Iterator(_ context.Context, from, through time.Time, direction logproto.Direction, _ labels.Labels, _ logql.LineFilter, _ logql.LabelParser) (iter.EntryIterator, error) { i := sort.Search(len(c.entries), func(i int) bool { return !from.After(c.entries[i].Timestamp) }) @@ -93,7 +94,7 @@ func (c *dumbChunk) Iterator(_ context.Context, from, through time.Time, directi }, nil } -func (c *dumbChunk) SampleIterator(_ context.Context, from, through time.Time, _ logql.LineFilter, _ logql.SampleExtractor) iter.SampleIterator { +func (c *dumbChunk) SampleIterator(_ context.Context, from, through time.Time, _ labels.Labels, _ logql.LineFilter, _ logql.SampleExtractor, _ logql.LabelParser) iter.SampleIterator { return nil } diff --git a/pkg/chunkenc/interface.go b/pkg/chunkenc/interface.go index f989ee296b651..7722065e0ee7f 100644 --- a/pkg/chunkenc/interface.go +++ b/pkg/chunkenc/interface.go @@ -10,6 +10,7 @@ import ( "github.com/grafana/loki/pkg/iter" "github.com/grafana/loki/pkg/logproto" "github.com/grafana/loki/pkg/logql" + "github.com/prometheus/prometheus/pkg/labels" ) // Errors returned by the chunk interface. @@ -98,8 +99,8 @@ type Chunk interface { Bounds() (time.Time, time.Time) SpaceFor(*logproto.Entry) bool Append(*logproto.Entry) error - Iterator(ctx context.Context, from, through time.Time, direction logproto.Direction, filter logql.LineFilter) (iter.EntryIterator, error) - SampleIterator(ctx context.Context, from, through time.Time, filter logql.LineFilter, extractor logql.SampleExtractor) iter.SampleIterator + Iterator(ctx context.Context, mintT, maxtT time.Time, direction logproto.Direction, lbs labels.Labels, filter logql.LineFilter, parser logql.LabelParser) (iter.EntryIterator, error) + SampleIterator(ctx context.Context, from, through time.Time, lbs labels.Labels, filter logql.LineFilter, extractor logql.SampleExtractor, parser logql.LabelParser) iter.SampleIterator // Returns the list of blocks in the chunks. Blocks(mintT, maxtT time.Time) []Block Size() int @@ -122,7 +123,7 @@ type Block interface { // Entries is the amount of entries in the block. Entries() int // Iterator returns an entry iterator for the block. - Iterator(context.Context, logql.LineFilter) iter.EntryIterator + Iterator(ctx context.Context, lbs labels.Labels, filter logql.LineFilter, parser logql.LabelParser) iter.EntryIterator // SampleIterator returns a sample iterator for the block. - SampleIterator(context.Context, logql.LineFilter, logql.SampleExtractor) iter.SampleIterator + SampleIterator(ctx context.Context, lbs labels.Labels, filter logql.LineFilter, extractor logql.SampleExtractor, parser logql.LabelParser) iter.SampleIterator } diff --git a/pkg/chunkenc/memchunk.go b/pkg/chunkenc/memchunk.go index 6a96dc5f530ff..917171bfe2402 100644 --- a/pkg/chunkenc/memchunk.go +++ b/pkg/chunkenc/memchunk.go @@ -15,6 +15,7 @@ import ( "github.com/cortexproject/cortex/pkg/util" "github.com/go-kit/kit/log/level" "github.com/pkg/errors" + "github.com/prometheus/prometheus/pkg/labels" "github.com/grafana/loki/pkg/iter" "github.com/grafana/loki/pkg/logproto" @@ -475,7 +476,7 @@ func (c *MemChunk) Bounds() (fromT, toT time.Time) { } // Iterator implements Chunk. 
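+// Because the parser can extract new labels per line, a single chunk may now
+// yield entries under several distinct label sets.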
-func (c *MemChunk) Iterator(ctx context.Context, mintT, maxtT time.Time, direction logproto.Direction, filter logql.LineFilter) (iter.EntryIterator, error) { +func (c *MemChunk) Iterator(ctx context.Context, mintT, maxtT time.Time, direction logproto.Direction, lbs labels.Labels, filter logql.LineFilter, parser logql.LabelParser) (iter.EntryIterator, error) { mint, maxt := mintT.UnixNano(), maxtT.UnixNano() its := make([]iter.EntryIterator, 0, len(c.blocks)+1) @@ -483,11 +484,11 @@ func (c *MemChunk) Iterator(ctx context.Context, mintT, maxtT time.Time, directi if maxt < b.mint || b.maxt < mint { continue } - its = append(its, b.Iterator(ctx, filter)) + its = append(its, b.Iterator(ctx, lbs, filter, parser)) } if !c.head.isEmpty() { - its = append(its, c.head.iterator(ctx, mint, maxt, filter)) + its = append(its, c.head.iterator(ctx, direction, mint, maxt, lbs, filter, parser)) } iterForward := iter.NewTimeRangedIterator( @@ -504,19 +505,19 @@ func (c *MemChunk) Iterator(ctx context.Context, mintT, maxtT time.Time, directi } // Iterator implements Chunk. -func (c *MemChunk) SampleIterator(ctx context.Context, mintT, maxtT time.Time, filter logql.LineFilter, extractor logql.SampleExtractor) iter.SampleIterator { - mint, maxt := mintT.UnixNano(), maxtT.UnixNano() +func (c *MemChunk) SampleIterator(ctx context.Context, from, through time.Time, lbs labels.Labels, filter logql.LineFilter, extractor logql.SampleExtractor, parser logql.LabelParser) iter.SampleIterator { + mint, maxt := from.UnixNano(), through.UnixNano() its := make([]iter.SampleIterator, 0, len(c.blocks)+1) for _, b := range c.blocks { if maxt < b.mint || b.maxt < mint { continue } - its = append(its, b.SampleIterator(ctx, filter, extractor)) + its = append(its, b.SampleIterator(ctx, lbs, filter, extractor, parser)) } if !c.head.isEmpty() { - its = append(its, c.head.sampleIterator(ctx, mint, maxt, filter, extractor)) + its = append(its, c.head.sampleIterator(ctx, mint, maxt, lbs, filter, extractor, parser)) } return iter.NewTimeRangedSampleIterator( @@ -539,18 +540,18 @@ func (c *MemChunk) Blocks(mintT, maxtT time.Time) []Block { return blocks } -func (b block) Iterator(ctx context.Context, filter logql.LineFilter) iter.EntryIterator { +func (b block) Iterator(ctx context.Context, lbs labels.Labels, filter logql.LineFilter, parser logql.LabelParser) iter.EntryIterator { if len(b.b) == 0 { - return emptyIterator + return iter.NoopIterator } - return newEntryIterator(ctx, b.readers, b.b, filter) + return newEntryIterator(ctx, b.readers, b.b, lbs, filter, parser) } -func (b block) SampleIterator(ctx context.Context, filter logql.LineFilter, extractor logql.SampleExtractor) iter.SampleIterator { +func (b block) SampleIterator(ctx context.Context, lbs labels.Labels, filter logql.LineFilter, extractor logql.SampleExtractor, parser logql.LabelParser) iter.SampleIterator { if len(b.b) == 0 { return iter.NoopIterator } - return newSampleIterator(ctx, b.readers, b.b, filter, extractor) + return newSampleIterator(ctx, b.readers, b.b, lbs, filter, extractor, parser) } func (b block) Offset() int { @@ -567,9 +568,9 @@ func (b block) MaxTime() int64 { return b.maxt } -func (hb *headBlock) iterator(ctx context.Context, mint, maxt int64, filter logql.LineFilter) iter.EntryIterator { +func (hb *headBlock) iterator(ctx context.Context, direction logproto.Direction, mint, maxt int64, lbs labels.Labels, filter logql.LineFilter, parser logql.LabelParser) iter.EntryIterator { if hb.isEmpty() || (maxt < hb.mint || hb.maxt < mint) { - return 
emptyIterator + return iter.NoopIterator } chunkStats := stats.GetChunkData(ctx) @@ -579,36 +580,61 @@ func (hb *headBlock) iterator(ctx context.Context, mint, maxt int64, filter logq // but the tradeoff is that queries to near-realtime data would be much lower than // cutting of blocks. chunkStats.HeadChunkLines += int64(len(hb.entries)) - entries := make([]entry, 0, len(hb.entries)) + streams := map[uint64]*logproto.Stream{} for _, e := range hb.entries { chunkStats.HeadChunkBytes += int64(len(e.s)) - if filter == nil || filter.Filter([]byte(e.s)) { - entries = append(entries, e) + line := []byte(e.s) + if filter == nil || filter.Filter(line) { + parsedLbs := parser.Parse(line, lbs) + var ok bool + var stream *logproto.Stream + lhash := parsedLbs.Hash() + if stream, ok = streams[lhash]; !ok { + stream = &logproto.Stream{ + Labels: parsedLbs.String(), + } + streams[lhash] = stream + } + stream.Entries = append(stream.Entries, logproto.Entry{ + Timestamp: time.Unix(0, e.t), + Line: e.s, + }) } } - if len(entries) == 0 { - return emptyIterator + if len(streams) == 0 { + return iter.NoopIterator } - - return &listIterator{ - entries: entries, - cur: -1, + streamsResult := make([]logproto.Stream, 0, len(streams)) + for _, stream := range streams { + streamsResult = append(streamsResult, *stream) } + return iter.NewStreamsIterator(ctx, streamsResult, direction) } -func (hb *headBlock) sampleIterator(ctx context.Context, mint, maxt int64, filter logql.LineFilter, extractor logql.SampleExtractor) iter.SampleIterator { +func (hb *headBlock) sampleIterator(ctx context.Context, mint, maxt int64, lbs labels.Labels, filter logql.LineFilter, extractor logql.SampleExtractor, parser logql.LabelParser) iter.SampleIterator { if hb.isEmpty() || (maxt < hb.mint || hb.maxt < mint) { return iter.NoopIterator } chunkStats := stats.GetChunkData(ctx) chunkStats.HeadChunkLines += int64(len(hb.entries)) - samples := make([]logproto.Sample, 0, len(hb.entries)) + series := map[uint64]*logproto.Series{} for _, e := range hb.entries { chunkStats.HeadChunkBytes += int64(len(e.s)) - if filter == nil || filter.Filter([]byte(e.s)) { + line := []byte(e.s) + if filter == nil || filter.Filter(line) { + parsedLbs := parser.Parse(line, lbs) if value, ok := extractor.Extract([]byte(e.s)); ok { - samples = append(samples, logproto.Sample{ + var ok bool + var s *logproto.Series + lhash := parsedLbs.Hash() + if s, ok = series[lhash]; !ok { + s = &logproto.Series{ + Labels: parsedLbs.String(), + } + series[lhash] = s + } + s.Samples = append(s.Samples, logproto.Sample{ Timestamp: e.t, Value: value, Hash: xxhash.Sum64([]byte(e.s)), @@ -618,43 +644,16 @@ func (hb *headBlock) sampleIterator(ctx context.Context, mint, maxt int64, filte } } - if len(samples) == 0 { + if len(series) == 0 { return iter.NoopIterator } - - return iter.NewSeriesIterator(logproto.Series{Samples: samples}) -} - -var emptyIterator = &listIterator{} - -type listIterator struct { - entries []entry - cur int -} - -func (li *listIterator) Next() bool { - li.cur++ - - return li.cur < len(li.entries) -} - -func (li *listIterator) Entry() logproto.Entry { - if li.cur < 0 || li.cur >= len(li.entries) { - return logproto.Entry{} - } - - cur := li.entries[li.cur] - - return logproto.Entry{ - Timestamp: time.Unix(0, cur.t), - Line: cur.s, + seriesRes := make([]logproto.Series, 0, len(series)) + for _, s := range series { + seriesRes = append(seriesRes, *s) } + return iter.NewMultiSeriesIterator(ctx, seriesRes) } -func (li *listIterator) Error() error { return nil } -func 
(li *listIterator) Close() error { return nil } -func (li *listIterator) Labels() string { return "" } - type bufferedIterator struct { origBytes []byte stats *stats.ChunkData @@ -665,18 +664,21 @@ type bufferedIterator struct { err error - decBuf []byte // The buffer for decoding the lengths. - buf []byte // The buffer for a single entry. - currLine []byte // the current line, this is the same as the buffer but sliced the the line size. - currTs int64 - consumed bool + decBuf []byte // The buffer for decoding the lengths. + buf []byte // The buffer for a single entry. + currLine []byte // the current line, this is the same as the buffer but sliced the the line size. + currTs int64 + currLabels string + consumed bool closed bool - filter logql.LineFilter + baseLbs labels.Labels + parser logql.LabelParser + filter logql.LineFilter } -func newBufferedIterator(ctx context.Context, pool ReaderPool, b []byte, filter logql.LineFilter) *bufferedIterator { +func newBufferedIterator(ctx context.Context, pool ReaderPool, b []byte, lbs labels.Labels, filter logql.LineFilter, parser logql.LabelParser) *bufferedIterator { chunkStats := stats.GetChunkData(ctx) chunkStats.CompressedBytes += int64(len(b)) return &bufferedIterator{ @@ -688,6 +690,8 @@ func newBufferedIterator(ctx context.Context, pool ReaderPool, b []byte, filter filter: filter, decBuf: make([]byte, binary.MaxVarintLen64), consumed: true, + baseLbs: lbs, + parser: parser, } } @@ -713,6 +717,8 @@ func (si *bufferedIterator) Next() bool { si.currTs = ts si.currLine = line si.consumed = false + // todo(cyriltovena) add cache for building the string of labels via some sort of decode context. + si.currLabels = si.parser.Parse(line, si.baseLbs).String() return true } } @@ -797,11 +803,11 @@ func (si *bufferedIterator) close() { si.decBuf = nil } -func (si *bufferedIterator) Labels() string { return "" } +func (si *bufferedIterator) Labels() string { return si.currLabels } -func newEntryIterator(ctx context.Context, pool ReaderPool, b []byte, filter logql.LineFilter) iter.EntryIterator { +func newEntryIterator(ctx context.Context, pool ReaderPool, b []byte, lbs labels.Labels, filter logql.LineFilter, parser logql.LabelParser) iter.EntryIterator { return &entryBufferedIterator{ - bufferedIterator: newBufferedIterator(ctx, pool, b, filter), + bufferedIterator: newBufferedIterator(ctx, pool, b, lbs, filter, parser), } } @@ -819,9 +825,9 @@ func (e *entryBufferedIterator) Entry() logproto.Entry { return e.cur } -func newSampleIterator(ctx context.Context, pool ReaderPool, b []byte, filter logql.LineFilter, extractor logql.SampleExtractor) iter.SampleIterator { +func newSampleIterator(ctx context.Context, pool ReaderPool, b []byte, lbs labels.Labels, filter logql.LineFilter, extractor logql.SampleExtractor, parser logql.LabelParser) iter.SampleIterator { it := &sampleBufferedIterator{ - bufferedIterator: newBufferedIterator(ctx, pool, b, filter), + bufferedIterator: newBufferedIterator(ctx, pool, b, lbs, filter, parser), extractor: extractor, } return it diff --git a/pkg/chunkenc/memchunk_test.go b/pkg/chunkenc/memchunk_test.go index 18b94f29d30c7..ddb71cabf0f5d 100644 --- a/pkg/chunkenc/memchunk_test.go +++ b/pkg/chunkenc/memchunk_test.go @@ -113,7 +113,7 @@ func TestBlock(t *testing.T) { } } - it, err := chk.Iterator(context.Background(), time.Unix(0, 0), time.Unix(0, math.MaxInt64), logproto.FORWARD, nil) + it, err := chk.Iterator(context.Background(), time.Unix(0, 0), time.Unix(0, math.MaxInt64), logproto.FORWARD, nil, nil, logql.NoopLabelParser) 
require.NoError(t, err) idx := 0 @@ -128,7 +128,7 @@ func TestBlock(t *testing.T) { require.NoError(t, it.Close()) require.Equal(t, len(cases), idx) - sampleIt := chk.SampleIterator(context.Background(), time.Unix(0, 0), time.Unix(0, math.MaxInt64), nil, logql.ExtractCount) + sampleIt := chk.SampleIterator(context.Background(), time.Unix(0, 0), time.Unix(0, math.MaxInt64), nil, nil, logql.ExtractCount, logql.NoopLabelParser) idx = 0 for sampleIt.Next() { s := sampleIt.Sample() @@ -143,7 +143,7 @@ func TestBlock(t *testing.T) { require.Equal(t, len(cases), idx) t.Run("bounded-iteration", func(t *testing.T) { - it, err := chk.Iterator(context.Background(), time.Unix(0, 3), time.Unix(0, 7), logproto.FORWARD, nil) + it, err := chk.Iterator(context.Background(), time.Unix(0, 3), time.Unix(0, 7), logproto.FORWARD, nil, nil, logql.NoopLabelParser) require.NoError(t, err) idx := 2 @@ -176,7 +176,7 @@ func TestReadFormatV1(t *testing.T) { t.Fatal(err) } - it, err := r.Iterator(context.Background(), time.Unix(0, 0), time.Unix(0, math.MaxInt64), logproto.FORWARD, nil) + it, err := r.Iterator(context.Background(), time.Unix(0, 0), time.Unix(0, math.MaxInt64), logproto.FORWARD, nil, nil, logql.NoopLabelParser) if err != nil { t.Fatal(err) } @@ -203,7 +203,7 @@ func TestRoundtripV2(t *testing.T) { assertLines := func(c *MemChunk) { require.Equal(t, enc, c.Encoding()) - it, err := c.Iterator(context.Background(), time.Unix(0, 0), time.Unix(0, math.MaxInt64), logproto.FORWARD, nil) + it, err := c.Iterator(context.Background(), time.Unix(0, 0), time.Unix(0, math.MaxInt64), logproto.FORWARD, nil, nil, logql.NoopLabelParser) if err != nil { t.Fatal(err) } @@ -265,7 +265,7 @@ func TestSerialization(t *testing.T) { bc, err := NewByteChunk(byt, testBlockSize, testTargetSize) require.NoError(t, err) - it, err := bc.Iterator(context.Background(), time.Unix(0, 0), time.Unix(0, math.MaxInt64), logproto.FORWARD, nil) + it, err := bc.Iterator(context.Background(), time.Unix(0, 0), time.Unix(0, math.MaxInt64), logproto.FORWARD, nil, nil, logql.NoopLabelParser) require.NoError(t, err) for i := 0; i < numSamples; i++ { require.True(t, it.Next()) @@ -276,7 +276,7 @@ func TestSerialization(t *testing.T) { } require.NoError(t, it.Error()) - sampleIt := bc.SampleIterator(context.Background(), time.Unix(0, 0), time.Unix(0, math.MaxInt64), nil, logql.ExtractCount) + sampleIt := bc.SampleIterator(context.Background(), time.Unix(0, 0), time.Unix(0, math.MaxInt64), nil, nil, logql.ExtractCount, logql.NoopLabelParser) for i := 0; i < numSamples; i++ { require.True(t, sampleIt.Next(), i) @@ -319,7 +319,7 @@ func TestChunkFilling(t *testing.T) { require.Equal(t, int64(lines), i) - it, err := chk.Iterator(context.Background(), time.Unix(0, 0), time.Unix(0, 100), logproto.FORWARD, nil) + it, err := chk.Iterator(context.Background(), time.Unix(0, 0), time.Unix(0, 100), logproto.FORWARD, nil, nil, logql.NoopLabelParser) require.NoError(t, err) i = 0 for it.Next() { @@ -462,7 +462,7 @@ func TestChunkStats(t *testing.T) { expectedSize := (inserted * len(entry.Line)) + (inserted * 2 * binary.MaxVarintLen64) ctx := stats.NewContext(context.Background()) - it, err := c.Iterator(ctx, first.Add(-time.Hour), entry.Timestamp.Add(time.Hour), logproto.BACKWARD, logql.LineFilterFunc(func(line []byte) bool { return false })) + it, err := c.Iterator(ctx, first.Add(-time.Hour), entry.Timestamp.Add(time.Hour), logproto.BACKWARD, nil, logql.LineFilterFunc(func(line []byte) bool { return false }), logql.NoopLabelParser) if err != nil { t.Fatal(err) } @@ 
-491,7 +491,7 @@ func TestChunkStats(t *testing.T) { t.Fatal(err) } ctx = stats.NewContext(context.Background()) - it, err = cb.Iterator(ctx, first.Add(-time.Hour), entry.Timestamp.Add(time.Hour), logproto.BACKWARD, logql.LineFilterFunc(func(line []byte) bool { return false })) + it, err = cb.Iterator(ctx, first.Add(-time.Hour), entry.Timestamp.Add(time.Hour), logproto.BACKWARD, nil, logql.LineFilterFunc(func(line []byte) bool { return false }), logql.NoopLabelParser) if err != nil { t.Fatal(err) } @@ -539,7 +539,7 @@ func TestIteratorClose(t *testing.T) { } { c := NewMemChunk(enc, testBlockSize, testTargetSize) inserted := fillChunk(c) - iter, err := c.Iterator(context.Background(), time.Unix(0, 0), time.Unix(0, inserted), logproto.BACKWARD, nil) + iter, err := c.Iterator(context.Background(), time.Unix(0, 0), time.Unix(0, inserted), logproto.BACKWARD, nil, nil, logql.NoopLabelParser) if err != nil { t.Fatal(err) } @@ -590,7 +590,7 @@ func BenchmarkRead(b *testing.B) { for n := 0; n < b.N; n++ { for _, c := range chunks { // use forward iterator for benchmark -- backward iterator does extra allocations by keeping entries in memory - iterator, err := c.Iterator(context.Background(), time.Unix(0, 0), time.Now(), logproto.FORWARD, nil) + iterator, err := c.Iterator(context.Background(), time.Unix(0, 0), time.Now(), logproto.FORWARD, nil, nil, logql.NoopLabelParser) if err != nil { panic(err) } @@ -615,7 +615,7 @@ func BenchmarkBackwardIterator(b *testing.B) { _ = fillChunk(c) b.ResetTimer() for n := 0; n < b.N; n++ { - iterator, err := c.Iterator(context.Background(), time.Unix(0, 0), time.Now(), logproto.BACKWARD, nil) + iterator, err := c.Iterator(context.Background(), time.Unix(0, 0), time.Now(), logproto.BACKWARD, nil, nil, logql.NoopLabelParser) if err != nil { panic(err) } @@ -636,9 +636,9 @@ func TestGenerateDataSize(t *testing.T) { bytesRead := uint64(0) for _, c := range chunks { // use forward iterator for benchmark -- backward iterator does extra allocations by keeping entries in memory - iterator, err := c.Iterator(context.TODO(), time.Unix(0, 0), time.Now(), logproto.FORWARD, logql.LineFilterFunc(func(line []byte) bool { + iterator, err := c.Iterator(context.TODO(), time.Unix(0, 0), time.Now(), logproto.FORWARD, nil, logql.LineFilterFunc(func(line []byte) bool { return true // return all - })) + }), logql.NoopLabelParser) if err != nil { panic(err) } @@ -672,7 +672,7 @@ func BenchmarkHeadBlockIterator(b *testing.B) { b.ResetTimer() for n := 0; n < b.N; n++ { - iter := h.iterator(context.Background(), 0, math.MaxInt64, nil) + iter := h.iterator(context.Background(), logproto.BACKWARD, 0, math.MaxInt64, nil, nil, logql.NoopLabelParser) for iter.Next() { _ = iter.Entry() @@ -731,7 +731,7 @@ func TestMemChunk_IteratorBounds(t *testing.T) { c := createChunk() // testing headchunk - it, err := c.Iterator(context.Background(), tt.mint, tt.maxt, tt.direction, nil) + it, err := c.Iterator(context.Background(), tt.mint, tt.maxt, tt.direction, nil, nil, logql.NoopLabelParser) require.NoError(t, err) for i := range tt.expect { require.Equal(t, tt.expect[i], it.Next()) @@ -740,7 +740,7 @@ func TestMemChunk_IteratorBounds(t *testing.T) { // testing chunk blocks require.NoError(t, c.cut()) - it, err = c.Iterator(context.Background(), tt.mint, tt.maxt, tt.direction, nil) + it, err = c.Iterator(context.Background(), tt.mint, tt.maxt, tt.direction, nil, nil, logql.NoopLabelParser) require.NoError(t, err) for i := range tt.expect { require.Equal(t, tt.expect[i], it.Next()) @@ -759,7 +759,7 @@ func 
TestMemchunkLongLine(t *testing.T) { for i := 1; i <= 10; i++ { require.NoError(t, c.Append(&logproto.Entry{Timestamp: time.Unix(0, int64(i)), Line: strings.Repeat("e", 200000)})) } - it, err := c.Iterator(context.Background(), time.Unix(0, 0), time.Unix(0, 100), logproto.FORWARD, nil) + it, err := c.Iterator(context.Background(), time.Unix(0, 0), time.Unix(0, 100), logproto.FORWARD, nil, nil, logql.NoopLabelParser) require.NoError(t, err) for i := 1; i <= 10; i++ { require.True(t, it.Next()) From 7d1dc7bf74cb8948613f6c0d1ecb11ad200d4b92 Mon Sep 17 00:00:00 2001 From: Cyril Tovena Date: Thu, 17 Sep 2020 07:18:09 +0200 Subject: [PATCH 04/45] hook parser with the storage. Signed-off-by: Cyril Tovena --- pkg/chunkenc/dumb_chunk.go | 2 +- pkg/storage/batch.go | 26 ++++++++++---------------- pkg/storage/batch_test.go | 8 +++----- pkg/storage/lazy_chunk.go | 13 +++++++++---- pkg/storage/lazy_chunk_test.go | 17 ++++++++++------- pkg/storage/store.go | 29 +++++++++++++++++------------ pkg/storage/store_test.go | 2 +- 7 files changed, 51 insertions(+), 46 deletions(-) diff --git a/pkg/chunkenc/dumb_chunk.go b/pkg/chunkenc/dumb_chunk.go index eb811d0b2956d..2651dc347ddd9 100644 --- a/pkg/chunkenc/dumb_chunk.go +++ b/pkg/chunkenc/dumb_chunk.go @@ -138,7 +138,7 @@ func (i *dumbChunkIterator) Entry() logproto.Entry { } func (i *dumbChunkIterator) Labels() string { - panic("Labels() called on chunk iterator") + return "" } func (i *dumbChunkIterator) Error() error { diff --git a/pkg/storage/batch.go b/pkg/storage/batch.go index fdc8464cab231..5b3a242ef1560 100644 --- a/pkg/storage/batch.go +++ b/pkg/storage/batch.go @@ -351,7 +351,7 @@ type logBatchIterator struct { metrics *ChunkMetrics matchers []*labels.Matcher filter logql.LineFilter - labels labelCache + parser logql.LabelParser } func newLogBatchIterator( @@ -361,6 +361,7 @@ func newLogBatchIterator( batchSize int, matchers []*labels.Matcher, filter logql.LineFilter, + parser logql.LabelParser, direction logproto.Direction, start, end time.Time, ) (iter.EntryIterator, error) { @@ -369,9 +370,9 @@ func newLogBatchIterator( // The same applies to the sharding label which is injected by the cortex storage code. matchers = removeMatchersByName(matchers, labels.MetricName, astmapper.ShardLabel) logbatch := &logBatchIterator{ - labels: map[model.Fingerprint]string{}, matchers: matchers, filter: filter, + parser: parser, metrics: metrics, ctx: ctx, } @@ -421,15 +422,13 @@ func (it *logBatchIterator) buildIterators(chks map[model.Fingerprint][][]*LazyC func (it *logBatchIterator) buildHeapIterator(chks [][]*LazyChunk, from, through time.Time, nextChunk *LazyChunk) (iter.EntryIterator, error) { result := make([]iter.EntryIterator, 0, len(chks)) - // __name__ is only used for upstream compatibility and is hardcoded within loki. Strip it from the return label set. 
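// Editor's note: the per-chunk label computation removed here moves into the
// iterators themselves; lazy_chunk.go below strips the hardcoded __name__ via
// a dropLabels helper whose body falls outside this excerpt. A plausible
// sketch of such a helper (hypothetical, not the patch's actual code):

package main

import (
	"fmt"

	"github.com/prometheus/prometheus/pkg/labels"
)

// dropLabels returns lbs without the given label names.
func dropLabels(lbs labels.Labels, names ...string) labels.Labels {
	b := labels.NewBuilder(lbs)
	for _, n := range names {
		b.Del(n)
	}
	return b.Labels()
}

func main() {
	m := labels.FromStrings(labels.MetricName, "logs", "app", "loki")
	fmt.Println(dropLabels(m, labels.MetricName)) // {app="loki"}
}
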
- labels := it.labels.computeLabels(chks[0][0]) for i := range chks { iterators := make([]iter.EntryIterator, 0, len(chks[i])) for j := range chks[i] { if !chks[i][j].IsValid { continue } - iterator, err := chks[i][j].Iterator(it.ctx, from, through, it.direction, it.filter, nextChunk) + iterator, err := chks[i][j].Iterator(it.ctx, from, through, it.direction, it.filter, it.parser, nextChunk) if err != nil { return nil, err } @@ -440,9 +439,7 @@ func (it *logBatchIterator) buildHeapIterator(chks [][]*LazyChunk, from, through iterators[i], iterators[j] = iterators[j], iterators[i] } } - // TODO(cyriltovena): Setting labels here is wrong now as labels can be different within the same chunk due to - // label extraction feature. - result = append(result, iter.NewNonOverlappingIterator(iterators, labels)) + result = append(result, iter.NewNonOverlappingIterator(iterators, "")) } return iter.NewHeapIterator(it.ctx, result, it.direction), nil @@ -455,8 +452,8 @@ type sampleBatchIterator struct { metrics *ChunkMetrics matchers []*labels.Matcher filter logql.LineFilter + parser logql.LabelParser extractor logql.SampleExtractor - labels labelCache } func newSampleBatchIterator( @@ -466,6 +463,7 @@ func newSampleBatchIterator( batchSize int, matchers []*labels.Matcher, filter logql.LineFilter, + parser logql.LabelParser, extractor logql.SampleExtractor, start, end time.Time, ) (iter.SampleIterator, error) { @@ -475,9 +473,9 @@ func newSampleBatchIterator( matchers = removeMatchersByName(matchers, labels.MetricName, astmapper.ShardLabel) samplebatch := &sampleBatchIterator{ - labels: map[model.Fingerprint]string{}, matchers: matchers, filter: filter, + parser: parser, extractor: extractor, metrics: metrics, ctx: ctx, @@ -525,23 +523,19 @@ func (it *sampleBatchIterator) buildIterators(chks map[model.Fingerprint][][]*La func (it *sampleBatchIterator) buildHeapIterator(chks [][]*LazyChunk, from, through time.Time, nextChunk *LazyChunk) (iter.SampleIterator, error) { result := make([]iter.SampleIterator, 0, len(chks)) - // __name__ is only used for upstream compatibility and is hardcoded within loki. Strip it from the return label set. - labels := it.labels.computeLabels(chks[0][0]) for i := range chks { iterators := make([]iter.SampleIterator, 0, len(chks[i])) for j := range chks[i] { if !chks[i][j].IsValid { continue } - iterator, err := chks[i][j].SampleIterator(it.ctx, from, through, it.filter, it.extractor, nextChunk) + iterator, err := chks[i][j].SampleIterator(it.ctx, from, through, it.filter, it.extractor, it.parser, nextChunk) if err != nil { return nil, err } iterators = append(iterators, iterator) } - // TODO(cyriltovena): Setting labels here is wrong now as labels can be different within the same chunk due to - // label extraction feature. 
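// Editor's note: sample iterators thread a SampleExtractor alongside the new
// parser. ExtractCount is the extractor used for count_over_time; as a sketch
// (its behaviour is assumed here, its body is outside this excerpt) it maps
// every line to the value 1:

package main

import (
	"fmt"

	"github.com/grafana/loki/pkg/logql"
)

func main() {
	v, ok := logql.ExtractCount.Extract([]byte("any log line"))
	fmt.Println(v, ok) // assumed: 1 true
}
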
- result = append(result, iter.NewNonOverlappingSampleIterator(iterators, labels)) + result = append(result, iter.NewNonOverlappingSampleIterator(iterators, "")) } return iter.NewHeapSampleIterator(it.ctx, result), nil diff --git a/pkg/storage/batch_test.go b/pkg/storage/batch_test.go index 6cf0e4b0c2186..798b21be9e641 100644 --- a/pkg/storage/batch_test.go +++ b/pkg/storage/batch_test.go @@ -9,7 +9,6 @@ import ( "github.com/cespare/xxhash/v2" "github.com/cortexproject/cortex/pkg/chunk" "github.com/pkg/errors" - "github.com/prometheus/common/model" "github.com/prometheus/prometheus/pkg/labels" "github.com/prometheus/prometheus/promql" "github.com/stretchr/testify/require" @@ -957,7 +956,7 @@ func Test_newLogBatchChunkIterator(t *testing.T) { for name, tt := range tests { tt := tt t.Run(name, func(t *testing.T) { - it, err := newLogBatchIterator(context.Background(), NilMetrics, tt.chunks, tt.batchSize, newMatchers(tt.matchers), nil, tt.direction, tt.start, tt.end) + it, err := newLogBatchIterator(context.Background(), NilMetrics, tt.chunks, tt.batchSize, newMatchers(tt.matchers), nil, logql.NoopLabelParser, tt.direction, tt.start, tt.end) require.NoError(t, err) streams, _, err := iter.ReadBatch(it, 1000) _ = it.Close() @@ -1242,7 +1241,7 @@ func Test_newSampleBatchChunkIterator(t *testing.T) { for name, tt := range tests { tt := tt t.Run(name, func(t *testing.T) { - it, err := newSampleBatchIterator(context.Background(), NilMetrics, tt.chunks, tt.batchSize, newMatchers(tt.matchers), nil, logql.ExtractCount, tt.start, tt.end) + it, err := newSampleBatchIterator(context.Background(), NilMetrics, tt.chunks, tt.batchSize, newMatchers(tt.matchers), nil, logql.NoopLabelParser, logql.ExtractCount, tt.start, tt.end) require.NoError(t, err) series, _, err := iter.ReadSampleBatch(it, 1000) _ = it.Close() @@ -1449,8 +1448,7 @@ func TestBuildHeapIterator(t *testing.T) { batchChunkIterator: &batchChunkIterator{ direction: logproto.FORWARD, }, - ctx: ctx, - labels: map[model.Fingerprint]string{}, + ctx: ctx, } it, err := b.buildHeapIterator(tc.input, from, from.Add(6*time.Millisecond), nil) if err != nil { diff --git a/pkg/storage/lazy_chunk.go b/pkg/storage/lazy_chunk.go index 9dee210c44fbe..2a96f6063b241 100644 --- a/pkg/storage/lazy_chunk.go +++ b/pkg/storage/lazy_chunk.go @@ -6,6 +6,7 @@ import ( "time" "github.com/cortexproject/cortex/pkg/chunk" + "github.com/prometheus/prometheus/pkg/labels" "github.com/grafana/loki/pkg/chunkenc" "github.com/grafana/loki/pkg/iter" @@ -33,6 +34,7 @@ func (c *LazyChunk) Iterator( from, through time.Time, direction logproto.Direction, filter logql.LineFilter, + parser logql.LabelParser, nextChunk *LazyChunk, ) (iter.EntryIterator, error) { @@ -58,7 +60,8 @@ func (c *LazyChunk) Iterator( } // if the block is overlapping cache it with the next chunk boundaries. if nextChunk != nil && IsBlockOverlapping(b, nextChunk, direction) { - it := newCachedIterator(b.Iterator(ctx, filter), b.Entries()) + // todo(cyriltovena) we can avoid to drop the metric name for each chunks since many chunks have the same metric/labelset. + it := newCachedIterator(b.Iterator(ctx, dropLabels(c.Chunk.Metric, labels.MetricName), filter, parser), b.Entries()) its = append(its, it) if c.overlappingBlocks == nil { c.overlappingBlocks = make(map[int]*cachedIterator) @@ -70,7 +73,7 @@ func (c *LazyChunk) Iterator( delete(c.overlappingBlocks, b.Offset()) } // non-overlapping block with the next chunk are not cached. 
- its = append(its, b.Iterator(ctx, filter)) + its = append(its, b.Iterator(ctx, dropLabels(c.Chunk.Metric, labels.MetricName), filter, parser)) } // build the final iterator bound to the requested time range. @@ -95,6 +98,7 @@ func (c *LazyChunk) SampleIterator( from, through time.Time, filter logql.LineFilter, extractor logql.SampleExtractor, + parser logql.LabelParser, nextChunk *LazyChunk, ) (iter.SampleIterator, error) { @@ -120,7 +124,8 @@ func (c *LazyChunk) SampleIterator( } // if the block is overlapping cache it with the next chunk boundaries. if nextChunk != nil && IsBlockOverlapping(b, nextChunk, logproto.FORWARD) { - it := newCachedSampleIterator(b.SampleIterator(ctx, filter, extractor), b.Entries()) + // todo(cyriltovena) we can avoid to drop the metric name for each chunks since many chunks have the same metric/labelset. + it := newCachedSampleIterator(b.SampleIterator(ctx, dropLabels(c.Chunk.Metric, labels.MetricName), filter, extractor, parser), b.Entries()) its = append(its, it) if c.overlappingSampleBlocks == nil { c.overlappingSampleBlocks = make(map[int]*cachedSampleIterator) @@ -132,7 +137,7 @@ func (c *LazyChunk) SampleIterator( delete(c.overlappingSampleBlocks, b.Offset()) } // non-overlapping block with the next chunk are not cached. - its = append(its, b.SampleIterator(ctx, filter, extractor)) + its = append(its, b.SampleIterator(ctx, dropLabels(c.Chunk.Metric, labels.MetricName), filter, extractor, parser)) } // build the final iterator bound to the requested time range. diff --git a/pkg/storage/lazy_chunk_test.go b/pkg/storage/lazy_chunk_test.go index 9a1aabc3eb1d2..464ea2c0c9189 100644 --- a/pkg/storage/lazy_chunk_test.go +++ b/pkg/storage/lazy_chunk_test.go @@ -7,6 +7,7 @@ import ( "time" "github.com/cortexproject/cortex/pkg/chunk" + "github.com/prometheus/prometheus/pkg/labels" "github.com/stretchr/testify/require" "github.com/grafana/loki/pkg/chunkenc" @@ -44,7 +45,7 @@ func TestLazyChunkIterator(t *testing.T) { }, } { t.Run(fmt.Sprintf("%d", i), func(t *testing.T) { - it, err := tc.chunk.Iterator(context.Background(), time.Unix(0, 0), time.Unix(1000, 0), logproto.FORWARD, logql.TrueFilter, nil) + it, err := tc.chunk.Iterator(context.Background(), time.Unix(0, 0), time.Unix(1000, 0), logproto.FORWARD, logql.TrueFilter, logql.NoopLabelParser, nil) require.Nil(t, err) streams, _, err := iter.ReadBatch(it, 1000) require.Nil(t, err) @@ -168,12 +169,14 @@ type fakeBlock struct { mint, maxt int64 } -func (fakeBlock) Entries() int { return 0 } -func (fakeBlock) Offset() int { return 0 } -func (f fakeBlock) MinTime() int64 { return f.mint } -func (f fakeBlock) MaxTime() int64 { return f.maxt } -func (fakeBlock) Iterator(context.Context, logql.LineFilter) iter.EntryIterator { return nil } -func (fakeBlock) SampleIterator(context.Context, logql.LineFilter, logql.SampleExtractor) iter.SampleIterator { +func (fakeBlock) Entries() int { return 0 } +func (fakeBlock) Offset() int { return 0 } +func (f fakeBlock) MinTime() int64 { return f.mint } +func (f fakeBlock) MaxTime() int64 { return f.maxt } +func (fakeBlock) Iterator(context.Context, labels.Labels, logql.LineFilter, logql.LabelParser) iter.EntryIterator { + return nil +} +func (fakeBlock) SampleIterator(context.Context, labels.Labels, logql.LineFilter, logql.SampleExtractor, logql.LabelParser) iter.SampleIterator { return nil } diff --git a/pkg/storage/store.go b/pkg/storage/store.go index 9fa7e99373fa4..91760147650e9 100644 --- a/pkg/storage/store.go +++ b/pkg/storage/store.go @@ -107,28 +107,29 @@ func 
NewTableClient(name string, cfg Config) (chunk.TableClient, error) { // decodeReq sanitizes an incoming request, rounds bounds, appends the __name__ matcher, // and adds the "__cortex_shard__" label if this is a sharded query. -func decodeReq(req logql.QueryParams) ([]*labels.Matcher, logql.LineFilter, model.Time, model.Time, error) { +// todo(cyriltovena) refactor this. +func decodeReq(req logql.QueryParams) ([]*labels.Matcher, logql.LineFilter, logql.LabelParser, model.Time, model.Time, error) { expr, err := req.LogSelector() if err != nil { - return nil, nil, 0, 0, err + return nil, nil, nil, 0, 0, err } filter, err := expr.Filter() if err != nil { - return nil, nil, 0, 0, err + return nil, nil, nil, 0, 0, err } matchers := expr.Matchers() nameLabelMatcher, err := labels.NewMatcher(labels.MatchEqual, labels.MetricName, "logs") if err != nil { - return nil, nil, 0, 0, err + return nil, nil, nil, 0, 0, err } matchers = append(matchers, nameLabelMatcher) if shards := req.GetShards(); shards != nil { parsed, err := logql.ParseShards(shards) if err != nil { - return nil, nil, 0, 0, err + return nil, nil, nil, 0, 0, err } for _, s := range parsed { shardMatcher, err := labels.NewMatcher( @@ -137,7 +138,7 @@ func decodeReq(req logql.QueryParams) ([]*labels.Matcher, logql.LineFilter, mode s.String(), ) if err != nil { - return nil, nil, 0, 0, err + return nil, nil, nil, 0, 0, err } matchers = append(matchers, shardMatcher) @@ -147,9 +148,13 @@ func decodeReq(req logql.QueryParams) ([]*labels.Matcher, logql.LineFilter, mode break // nolint:staticcheck } } + p, err := expr.Parser() + if err != nil { + return nil, nil, nil, 0, 0, err + } from, through := util.RoundToMilliseconds(req.GetStart(), req.GetEnd()) - return matchers, filter, from, through, nil + return matchers, filter, p, from, through, nil } // lazyChunks is an internal function used to resolve a set of lazy chunks from the store without actually loading them. It's used internally by `LazyQuery` and `GetSeries` @@ -203,7 +208,7 @@ func (s *store) GetSeries(ctx context.Context, req logql.SelectLogParams) ([]log matchers = []*labels.Matcher{nameLabelMatcher} } else { var err error - matchers, _, from, through, err = decodeReq(req) + matchers, _, _, from, through, err = decodeReq(req) if err != nil { return nil, err } @@ -271,7 +276,7 @@ func (s *store) GetSeries(ctx context.Context, req logql.SelectLogParams) ([]log // SelectLogs returns an iterator that will query the store for more chunks while iterating instead of fetching all chunks upfront // for that request. 
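// Editor's sketch (not part of this patch): decodeReq above now derives a
// LabelParser from the query in addition to the matchers and line filter, via
// the same Filter/Parser pair exposed on LogSelectorExpr. The query string is
// made up and assumes the grammar from the earlier patches in this series;
// note that patch 07 below folds Filter and Parser into a single Pipeline.

package main

import (
	"fmt"

	"github.com/grafana/loki/pkg/logql"
)

func main() {
	expr, err := logql.ParseLogSelector(`{app="loki"} |= "error" | json`)
	if err != nil {
		panic(err)
	}
	filter, err := expr.Filter()
	if err != nil {
		panic(err)
	}
	parser, err := expr.Parser()
	if err != nil {
		panic(err)
	}
	fmt.Println(expr.Matchers(), filter != nil, parser != nil)
}
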
func (s *store) SelectLogs(ctx context.Context, req logql.SelectLogParams) (iter.EntryIterator, error) { - matchers, filter, from, through, err := decodeReq(req) + matchers, filter, parser, from, through, err := decodeReq(req) if err != nil { return nil, err } @@ -285,12 +290,12 @@ func (s *store) SelectLogs(ctx context.Context, req logql.SelectLogParams) (iter return iter.NoopIterator, nil } - return newLogBatchIterator(ctx, s.chunkMetrics, lazyChunks, s.cfg.MaxChunkBatchSize, matchers, filter, req.Direction, req.Start, req.End) + return newLogBatchIterator(ctx, s.chunkMetrics, lazyChunks, s.cfg.MaxChunkBatchSize, matchers, filter, parser, req.Direction, req.Start, req.End) } func (s *store) SelectSamples(ctx context.Context, req logql.SelectSampleParams) (iter.SampleIterator, error) { - matchers, filter, from, through, err := decodeReq(req) + matchers, filter, parser, from, through, err := decodeReq(req) if err != nil { return nil, err } @@ -313,7 +318,7 @@ func (s *store) SelectSamples(ctx context.Context, req logql.SelectSampleParams) if len(lazyChunks) == 0 { return iter.NoopIterator, nil } - return newSampleBatchIterator(ctx, s.chunkMetrics, lazyChunks, s.cfg.MaxChunkBatchSize, matchers, filter, extractor, req.Start, req.End) + return newSampleBatchIterator(ctx, s.chunkMetrics, lazyChunks, s.cfg.MaxChunkBatchSize, matchers, filter, parser, extractor, req.Start, req.End) } func (s *store) GetSchemaConfigs() []chunk.PeriodConfig { diff --git a/pkg/storage/store_test.go b/pkg/storage/store_test.go index 5016b4db40977..615490279ad84 100644 --- a/pkg/storage/store_test.go +++ b/pkg/storage/store_test.go @@ -728,7 +728,7 @@ func Test_store_decodeReq_Matchers(t *testing.T) { } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - ms, _, _, _, err := decodeReq(logql.SelectLogParams{QueryRequest: tt.req}) + ms, _, _, _, _, err := decodeReq(logql.SelectLogParams{QueryRequest: tt.req}) if err != nil { t.Errorf("store.GetSeries() error = %v", err) return From 484afc126b78ad5fad3d1f472958848dac39665e Mon Sep 17 00:00:00 2001 From: Cyril Tovena Date: Thu, 17 Sep 2020 07:30:56 +0200 Subject: [PATCH 05/45] hook parser with ingesters Signed-off-by: Cyril Tovena --- pkg/ingester/chunk_test.go | 6 ++++-- pkg/ingester/flush_test.go | 6 +++--- pkg/ingester/instance.go | 16 +++++++++++++--- pkg/ingester/stream.go | 12 ++++++------ pkg/ingester/stream_test.go | 5 +++-- pkg/ingester/transfer_test.go | 1 + 6 files changed, 30 insertions(+), 16 deletions(-) diff --git a/pkg/ingester/chunk_test.go b/pkg/ingester/chunk_test.go index 8edd8b0ac404c..e4c1da942e4c9 100644 --- a/pkg/ingester/chunk_test.go +++ b/pkg/ingester/chunk_test.go @@ -7,12 +7,14 @@ import ( "testing" "time" + "github.com/prometheus/prometheus/pkg/labels" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" "github.com/grafana/loki/pkg/chunkenc" "github.com/grafana/loki/pkg/iter" "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/pkg/logql" ) func testIteratorForward(t *testing.T, iter iter.EntryIterator, from, through int64) { @@ -62,7 +64,7 @@ func TestIterator(t *testing.T) { for i := 0; i < entries; i++ { from := rand.Intn(entries - 1) len := rand.Intn(entries-from) + 1 - iter, err := chunk.Iterator(context.TODO(), time.Unix(int64(from), 0), time.Unix(int64(from+len), 0), logproto.FORWARD, nil) + iter, err := chunk.Iterator(context.TODO(), time.Unix(int64(from), 0), time.Unix(int64(from+len), 0), logproto.FORWARD, labels.Labels{}, nil, logql.NoopLabelParser) require.NoError(t, err) 
testIteratorForward(t, iter, int64(from), int64(from+len)) _ = iter.Close() @@ -71,7 +73,7 @@ func TestIterator(t *testing.T) { for i := 0; i < entries; i++ { from := rand.Intn(entries - 1) len := rand.Intn(entries-from) + 1 - iter, err := chunk.Iterator(context.TODO(), time.Unix(int64(from), 0), time.Unix(int64(from+len), 0), logproto.BACKWARD, nil) + iter, err := chunk.Iterator(context.TODO(), time.Unix(int64(from), 0), time.Unix(int64(from+len), 0), logproto.BACKWARD, labels.Labels{}, nil, logql.NoopLabelParser) require.NoError(t, err) testIteratorBackward(t, iter, int64(from), int64(from+len)) _ = iter.Close() diff --git a/pkg/ingester/flush_test.go b/pkg/ingester/flush_test.go index 7033d17b3ec30..6563ec60cabc5 100644 --- a/pkg/ingester/flush_test.go +++ b/pkg/ingester/flush_test.go @@ -319,12 +319,12 @@ func (s *testStore) getChunksForUser(userID string) []chunk.Chunk { return s.chunks[userID] } -func buildStreamsFromChunk(t *testing.T, labels string, chk chunkenc.Chunk) logproto.Stream { - it, err := chk.Iterator(context.TODO(), time.Unix(0, 0), time.Unix(1000, 0), logproto.FORWARD, nil) +func buildStreamsFromChunk(t *testing.T, lbs string, chk chunkenc.Chunk) logproto.Stream { + it, err := chk.Iterator(context.TODO(), time.Unix(0, 0), time.Unix(1000, 0), logproto.FORWARD, labels.Labels{}, nil, logql.NoopLabelParser) require.NoError(t, err) stream := logproto.Stream{ - Labels: labels, + Labels: lbs, } for it.Next() { stream.Entries = append(stream.Entries, it.Entry()) diff --git a/pkg/ingester/instance.go b/pkg/ingester/instance.go index 2903c2b7a9291..9022b05d61471 100644 --- a/pkg/ingester/instance.go +++ b/pkg/ingester/instance.go @@ -204,6 +204,10 @@ func (i *instance) Query(ctx context.Context, req logql.SelectLogParams) ([]iter if err != nil { return nil, err } + parser, err := expr.Parser() + if err != nil { + return nil, err + } ingStats := stats.GetIngesterData(ctx) var iters []iter.EntryIterator @@ -211,7 +215,7 @@ func (i *instance) Query(ctx context.Context, req logql.SelectLogParams) ([]iter expr.Matchers(), func(stream *stream) error { ingStats.TotalChunksMatched += int64(len(stream.chunks)) - iter, err := stream.Iterator(ctx, req.Start, req.End, req.Direction, filter) + iter, err := stream.Iterator(ctx, req.Start, req.End, req.Direction, filter, parser) if err != nil { return err } @@ -239,13 +243,19 @@ func (i *instance) QuerySample(ctx context.Context, req logql.SelectSampleParams if err != nil { return nil, err } + + parser, err := expr.Selector().Parser() + if err != nil { + return nil, err + } + ingStats := stats.GetIngesterData(ctx) var iters []iter.SampleIterator err = i.forMatchingStreams( expr.Selector().Matchers(), func(stream *stream) error { ingStats.TotalChunksMatched += int64(len(stream.chunks)) - iter, err := stream.SampleIterator(ctx, req.Start, req.End, filter, extractor) + iter, err := stream.SampleIterator(ctx, req.Start, req.End, filter, extractor, parser) if err != nil { return err } @@ -531,4 +541,4 @@ func shouldConsiderStream(stream *stream, req *logproto.SeriesRequest) bool { return true } return false -} \ No newline at end of file +} diff --git a/pkg/ingester/stream.go b/pkg/ingester/stream.go index 8551d81915c73..151ccbbc79080 100644 --- a/pkg/ingester/stream.go +++ b/pkg/ingester/stream.go @@ -256,10 +256,10 @@ func (s *stream) cutChunkForSynchronization(entryTimestamp, prevEntryTimestamp t } // Returns an iterator. 
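// Editor's sketch (not part of this patch): with per-entry label sets, the
// stream code below stops passing its own label string to the non-overlapping
// wrapper and instead hands its base labels down to each chunk. A standalone
// illustration of that wrapper shape (stream contents made up):

package main

import (
	"context"
	"fmt"
	"time"

	"github.com/grafana/loki/pkg/iter"
	"github.com/grafana/loki/pkg/logproto"
)

func main() {
	s := logproto.Stream{
		Labels:  `{app="loki", level="error"}`,
		Entries: []logproto.Entry{{Timestamp: time.Unix(0, 1), Line: "boom"}},
	}
	it := iter.NewStreamsIterator(context.Background(), []logproto.Stream{s}, logproto.FORWARD)
	// The empty string mirrors the change below: labels now come from the
	// wrapped iterators, entry by entry, rather than from the stream.
	no := iter.NewNonOverlappingIterator([]iter.EntryIterator{it}, "")
	for no.Next() {
		fmt.Println(no.Labels(), no.Entry().Line)
	}
}
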
-func (s *stream) Iterator(ctx context.Context, from, through time.Time, direction logproto.Direction, filter logql.LineFilter) (iter.EntryIterator, error) { +func (s *stream) Iterator(ctx context.Context, from, through time.Time, direction logproto.Direction, filter logql.LineFilter, parser logql.LabelParser) (iter.EntryIterator, error) { iterators := make([]iter.EntryIterator, 0, len(s.chunks)) for _, c := range s.chunks { - itr, err := c.chunk.Iterator(ctx, from, through, direction, filter) + itr, err := c.chunk.Iterator(ctx, from, through, direction, s.labels, filter, parser) if err != nil { return nil, err } @@ -274,19 +274,19 @@ func (s *stream) Iterator(ctx context.Context, from, through time.Time, directio } } - return iter.NewNonOverlappingIterator(iterators, s.labelsString), nil + return iter.NewNonOverlappingIterator(iterators, ""), nil } // Returns an SampleIterator. -func (s *stream) SampleIterator(ctx context.Context, from, through time.Time, filter logql.LineFilter, extractor logql.SampleExtractor) (iter.SampleIterator, error) { +func (s *stream) SampleIterator(ctx context.Context, from, through time.Time, filter logql.LineFilter, extractor logql.SampleExtractor, parser logql.LabelParser) (iter.SampleIterator, error) { iterators := make([]iter.SampleIterator, 0, len(s.chunks)) for _, c := range s.chunks { - if itr := c.chunk.SampleIterator(ctx, from, through, filter, extractor); itr != nil { + if itr := c.chunk.SampleIterator(ctx, from, through, s.labels, filter, extractor, parser); itr != nil { iterators = append(iterators, itr) } } - return iter.NewNonOverlappingSampleIterator(iterators, s.labelsString), nil + return iter.NewNonOverlappingSampleIterator(iterators, ""), nil } func (s *stream) addTailer(t *tailer) { diff --git a/pkg/ingester/stream_test.go b/pkg/ingester/stream_test.go index 9c4f84914bfc6..bb3e83a4b65e6 100644 --- a/pkg/ingester/stream_test.go +++ b/pkg/ingester/stream_test.go @@ -16,6 +16,7 @@ import ( "github.com/grafana/loki/pkg/chunkenc" "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/pkg/logql" ) func TestMaxReturnedStreamsErrors(t *testing.T) { @@ -119,7 +120,7 @@ func TestStreamIterator(t *testing.T) { for i := 0; i < 100; i++ { from := rand.Intn(chunks*entries - 1) len := rand.Intn(chunks*entries-from) + 1 - iter, err := s.Iterator(context.TODO(), time.Unix(int64(from), 0), time.Unix(int64(from+len), 0), logproto.FORWARD, nil) + iter, err := s.Iterator(context.TODO(), time.Unix(int64(from), 0), time.Unix(int64(from+len), 0), logproto.FORWARD, nil, logql.NoopLabelParser) require.NotNil(t, iter) require.NoError(t, err) testIteratorForward(t, iter, int64(from), int64(from+len)) @@ -129,7 +130,7 @@ func TestStreamIterator(t *testing.T) { for i := 0; i < 100; i++ { from := rand.Intn(entries - 1) len := rand.Intn(chunks*entries-from) + 1 - iter, err := s.Iterator(context.TODO(), time.Unix(int64(from), 0), time.Unix(int64(from+len), 0), logproto.BACKWARD, nil) + iter, err := s.Iterator(context.TODO(), time.Unix(int64(from), 0), time.Unix(int64(from+len), 0), logproto.BACKWARD, nil, logql.NoopLabelParser) require.NotNil(t, iter) require.NoError(t, err) testIteratorBackward(t, iter, int64(from), int64(from+len)) diff --git a/pkg/ingester/transfer_test.go b/pkg/ingester/transfer_test.go index c9f9119a1ddee..a00350fde7f7a 100644 --- a/pkg/ingester/transfer_test.go +++ b/pkg/ingester/transfer_test.go @@ -96,6 +96,7 @@ func TestTransferOut(t *testing.T) { time.Unix(10, 0), logproto.FORWARD, logql.LineFilterFunc(func([]byte) bool { return true 
}), + logql.NoopLabelParser, ) if !assert.NoError(t, err) { continue From 0121a3ca4dd77b305b7d76753c03da77ef2f0fcb Mon Sep 17 00:00:00 2001 From: Cyril Tovena Date: Thu, 17 Sep 2020 10:37:46 +0200 Subject: [PATCH 06/45] fixes all tests Signed-off-by: Cyril Tovena --- pkg/iter/entry_iterator.go | 12 ++++++------ pkg/iter/entry_iterator_test.go | 2 +- pkg/storage/batch_test.go | 3 ++- pkg/storage/lazy_chunk_test.go | 1 + 4 files changed, 10 insertions(+), 8 deletions(-) diff --git a/pkg/iter/entry_iterator.go b/pkg/iter/entry_iterator.go index 0357fbb339aa9..7b7445483e27c 100644 --- a/pkg/iter/entry_iterator.go +++ b/pkg/iter/entry_iterator.go @@ -554,18 +554,18 @@ func (i *reverseIterator) Close() error { var entryBufferPool = sync.Pool{ New: func() interface{} { return &entryBuffer{ - entries: make([]logproto.Entry, 0, 1024), + entries: make([]entryWithLabels, 0, 1024), } }, } type entryBuffer struct { - entries []logproto.Entry + entries []entryWithLabels } type reverseEntryIterator struct { iter EntryIterator - cur logproto.Entry + cur entryWithLabels buf *entryBuffer loaded bool @@ -590,7 +590,7 @@ func (i *reverseEntryIterator) load() { if !i.loaded { i.loaded = true for i.iter.Next() { - i.buf.entries = append(i.buf.entries, i.iter.Entry()) + i.buf.entries = append(i.buf.entries, entryWithLabels{i.iter.Entry(), i.iter.Labels()}) } i.iter.Close() } @@ -608,11 +608,11 @@ func (i *reverseEntryIterator) Next() bool { } func (i *reverseEntryIterator) Entry() logproto.Entry { - return i.cur + return i.cur.entry } func (i *reverseEntryIterator) Labels() string { - return "" + return i.cur.labels } func (i *reverseEntryIterator) Error() error { return nil } diff --git a/pkg/iter/entry_iterator_test.go b/pkg/iter/entry_iterator_test.go index 4212918f425ca..e983a7e57e2bc 100644 --- a/pkg/iter/entry_iterator_test.go +++ b/pkg/iter/entry_iterator_test.go @@ -331,7 +331,7 @@ func TestReverseEntryIterator(t *testing.T) { for i := int64(testSize - 1); i >= 0; i-- { assert.Equal(t, true, reversedIter.Next()) assert.Equal(t, identity(i), reversedIter.Entry(), fmt.Sprintln("iteration", i)) - assert.Equal(t, reversedIter.Labels(), "") + assert.Equal(t, reversedIter.Labels(), defaultLabels) } assert.Equal(t, false, reversedIter.Next()) diff --git a/pkg/storage/batch_test.go b/pkg/storage/batch_test.go index 798b21be9e641..095c60a95a803 100644 --- a/pkg/storage/batch_test.go +++ b/pkg/storage/batch_test.go @@ -1448,7 +1448,8 @@ func TestBuildHeapIterator(t *testing.T) { batchChunkIterator: &batchChunkIterator{ direction: logproto.FORWARD, }, - ctx: ctx, + ctx: ctx, + parser: logql.NoopLabelParser, } it, err := b.buildHeapIterator(tc.input, from, from.Add(6*time.Millisecond), nil) if err != nil { diff --git a/pkg/storage/lazy_chunk_test.go b/pkg/storage/lazy_chunk_test.go index 464ea2c0c9189..0ee570e49b85b 100644 --- a/pkg/storage/lazy_chunk_test.go +++ b/pkg/storage/lazy_chunk_test.go @@ -34,6 +34,7 @@ func TestLazyChunkIterator(t *testing.T) { }), []logproto.Stream{ { + Labels: fooLabels, Entries: []logproto.Entry{ { Timestamp: from, From 62f282999b86aa5611c9ce40f00b101a6250d263 Mon Sep 17 00:00:00 2001 From: Cyril Tovena Date: Mon, 21 Sep 2020 21:47:37 +0200 Subject: [PATCH 07/45] Refactor to pipeline and implement ast parsing. 
Signed-off-by: Cyril Tovena --- pkg/logql/ast.go | 223 +++- pkg/logql/ast_test.go | 24 +- pkg/logql/expr.y | 98 +- pkg/logql/expr.y.go | 720 +++++++----- pkg/logql/labelfilter/duration.go | 93 ++ pkg/logql/labelfilter/filter.go | 64 + pkg/logql/labelfilter/number.go | 54 + pkg/logql/labelfilter/string.go | 24 + pkg/logql/metrics.go | 2 +- pkg/logql/parser_test.go | 1818 +++++++++++++++-------------- pkg/logql/shardmapper.go | 5 +- pkg/logql/shardmapper_test.go | 77 +- pkg/logql/test_utils.go | 86 +- 13 files changed, 1955 insertions(+), 1333 deletions(-) create mode 100644 pkg/logql/labelfilter/duration.go create mode 100644 pkg/logql/labelfilter/filter.go create mode 100644 pkg/logql/labelfilter/number.go create mode 100644 pkg/logql/labelfilter/string.go diff --git a/pkg/logql/ast.go b/pkg/logql/ast.go index 4a779b9b9feb0..43831b7e6da25 100644 --- a/pkg/logql/ast.go +++ b/pkg/logql/ast.go @@ -13,6 +13,7 @@ import ( "github.com/grafana/loki/pkg/iter" "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/pkg/logql/labelfilter" ) // Expr is the root expression which can be a SampleExpr or LogSelectorExpr @@ -73,18 +74,93 @@ type Querier interface { // LogSelectorExpr is a LogQL expression filtering and returning logs. type LogSelectorExpr interface { - Filter() (LineFilter, error) Matchers() []*labels.Matcher - Parser() (LabelParser, error) + PipelineExpr Expr } +type PipelineExpr interface { + Pipeline() (Pipeline, error) + Expr +} + +type Pipeline interface { + Process(line []byte, lbs labels.Labels) ([]byte, labels.Labels, bool) +} + +var NoopPipeline = &noopPipeline{} + +type noopPipeline struct{} + +func (noopPipeline) Process(line []byte, lbs labels.Labels) ([]byte, labels.Labels, bool) { + return line, lbs, true +} + +type PipelineFunc func(line []byte, lbs labels.Labels) ([]byte, labels.Labels, bool) + +func (fn PipelineFunc) Process(line []byte, lbs labels.Labels) ([]byte, labels.Labels, bool) { + return fn(line, lbs) +} + +type MultiPipeline []Pipeline + +func (m MultiPipeline) Process(line []byte, lbs labels.Labels) ([]byte, labels.Labels, bool) { + var ok bool + for _, p := range m { + line, lbs, ok = p.Process(line, lbs) + if !ok { + return line, lbs, ok + } + } + return line, lbs, ok +} + +type MultiPipelineExpr []PipelineExpr + +func (m MultiPipelineExpr) Pipeline() (Pipeline, error) { + c := make(MultiPipeline, 0, len(m)) + for _, e := range m { + p, err := e.Pipeline() + if err != nil { + return nil, err + } + c = append(c, p) + } + return c, nil +} + +func (m MultiPipelineExpr) String() string { + var sb strings.Builder + for _, e := range m { + sb.WriteString(e.String()) + } + return sb.String() +} + +func (MultiPipelineExpr) logQLExpr() {} + +func FilterToPipeline(f LineFilter) Pipeline { + if f == nil || f == TrueFilter { + return NoopPipeline + } + return PipelineFunc(func(line []byte, lbs labels.Labels) ([]byte, labels.Labels, bool) { + return line, lbs, f.Filter(line) + }) +} + +func ParserToPipeline(p LabelParser) Pipeline { + return PipelineFunc(func(line []byte, lbs labels.Labels) ([]byte, labels.Labels, bool) { + lbs = p.Parse(line, lbs) + return line, lbs, true + }) +} + type matchersExpr struct { matchers []*labels.Matcher implicit } -func newMatcherExpr(matchers []*labels.Matcher) LogSelectorExpr { +func newMatcherExpr(matchers []*labels.Matcher) *matchersExpr { return &matchersExpr{matchers: matchers} } @@ -105,35 +181,54 @@ func (e *matchersExpr) String() string { return sb.String() } -func (e *matchersExpr) Filter() (LineFilter, error) { - 
return nil, nil +func (e *matchersExpr) Pipeline() (Pipeline, error) { + return NoopPipeline, nil +} + +type pipelineExpr struct { + pipeline MultiPipelineExpr + left *matchersExpr + implicit } -func (e *matchersExpr) Parser() (LabelParser, error) { - return NoopLabelParser, nil +func newPipelineExpr(left *matchersExpr, pipeline MultiPipelineExpr) LogSelectorExpr { + return &pipelineExpr{ + left: left, + pipeline: pipeline, + } } -type filterExpr struct { - left LogSelectorExpr +func (e *pipelineExpr) Matchers() []*labels.Matcher { + return e.left.Matchers() +} + +func (e *pipelineExpr) String() string { + var sb strings.Builder + sb.WriteString(e.left.String()) + sb.WriteString(e.pipeline.String()) + return sb.String() +} + +func (e *pipelineExpr) Pipeline() (Pipeline, error) { + return e.pipeline.Pipeline() +} + +type lineFilterExpr struct { + left *lineFilterExpr ty labels.MatchType match string implicit } -// NewFilterExpr wraps an existing Expr with a next filter expression. -func NewFilterExpr(left LogSelectorExpr, ty labels.MatchType, match string) LogSelectorExpr { - return &filterExpr{ +func newLineFilterExpr(left *lineFilterExpr, ty labels.MatchType, match string) *lineFilterExpr { + return &lineFilterExpr{ left: left, ty: ty, match: match, } } -func (e *filterExpr) Matchers() []*labels.Matcher { - return e.left.Matchers() -} - -func (e *filterExpr) String() string { +func (e *lineFilterExpr) String() string { var sb strings.Builder sb.WriteString(e.left.String()) switch e.ty { @@ -150,13 +245,13 @@ func (e *filterExpr) String() string { return sb.String() } -func (e *filterExpr) Filter() (LineFilter, error) { +func (e *lineFilterExpr) Filter() (LineFilter, error) { f, err := newFilter(e.match, e.ty) if err != nil { return nil, err } - if nextExpr, ok := e.left.(*filterExpr); ok { - nextFilter, err := nextExpr.Filter() + if e.left != nil { + nextFilter, err := e.left.Filter() if err != nil { return nil, err } @@ -172,35 +267,29 @@ func (e *filterExpr) Filter() (LineFilter, error) { return f, nil } -func (e *filterExpr) Parser() (LabelParser, error) { - return NoopLabelParser, nil +func (e *lineFilterExpr) Pipeline() (Pipeline, error) { + f, err := e.Filter() + if err != nil { + return nil, err + } + return FilterToPipeline(f), nil } -type parserExpr struct { - left LogSelectorExpr +type labelParserExpr struct { op string param string implicit } -func newParserExpr(left LogSelectorExpr, op, param string) LogSelectorExpr { +func newLabelParserExpr(op, param string) *labelParserExpr { // todo(cyriltovena): we might want to pre-validate param here to fail fast. 
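// Editor's sketch (not part of this patch): composing the new Pipeline stages
// by hand, mirroring what MultiPipelineExpr.Pipeline builds from the AST
// above. The sample line and label values are made up, and the numeric label
// filter's comparison behaviour is an assumption (it is expected to parse the
// label value before comparing).

package main

import (
	"bytes"
	"fmt"

	"github.com/grafana/loki/pkg/logql"
	"github.com/grafana/loki/pkg/logql/labelfilter"
	"github.com/prometheus/prometheus/pkg/labels"
)

func main() {
	p := logql.MultiPipeline{
		// Stage 1: a line filter, as built by lineFilterExpr.Pipeline.
		logql.FilterToPipeline(logql.LineFilterFunc(func(line []byte) bool {
			return bytes.Contains(line, []byte("status"))
		})),
		// Stage 2: a JSON parser, as built by labelParserExpr.Pipeline.
		logql.ParserToPipeline(logql.NewJSONParser()),
		// Stage 3: a label filter, as built by labelFilterExpr.Pipeline.
		logql.PipelineFunc(func(line []byte, lbs labels.Labels) ([]byte, labels.Labels, bool) {
			ok, _ := labelfilter.NewNumeric(labelfilter.FilterEqual, "status", 500).Filter(lbs)
			return line, lbs, ok
		}),
	}

	line, lbs, ok := p.Process([]byte(`{"status":500}`), labels.FromStrings("app", "loki"))
	fmt.Println(string(line), lbs, ok)
}
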
- return &parserExpr{ - left: left, + return &labelParserExpr{ op: op, param: param, } } -func (e *parserExpr) Matchers() []*labels.Matcher { - return e.left.Matchers() -} - -func (e *parserExpr) Filter() (LineFilter, error) { - return e.left.Filter() -} - -func (e *parserExpr) Parser() (LabelParser, error) { +func (e *labelParserExpr) parser() (LabelParser, error) { switch e.op { case OpParserTypeJSON: return NewJSONParser(), nil @@ -213,9 +302,16 @@ func (e *parserExpr) Parser() (LabelParser, error) { } } -func (e *parserExpr) String() string { +func (e *labelParserExpr) Pipeline() (Pipeline, error) { + p, err := e.parser() + if err != nil { + return nil, err + } + return ParserToPipeline(p), nil +} + +func (e *labelParserExpr) String() string { var sb strings.Builder - sb.WriteString(e.left.String()) sb.WriteString("|") sb.WriteString(e.op) if e.param != "" { @@ -224,6 +320,30 @@ func (e *parserExpr) String() string { return sb.String() } +type labelFilterExpr struct { + labelfilter.Filterer + implicit +} + +func (e *labelFilterExpr) Pipeline() (Pipeline, error) { + return PipelineFunc(func(line []byte, lbs labels.Labels) ([]byte, labels.Labels, bool) { + //todo (cyriltovena): handle error + ok, _ := e.Filterer.Filter(lbs) + return line, lbs, ok + }), nil +} + +// func (e *parserExpr) String() string { +// var sb strings.Builder +// sb.WriteString(e.left.String()) +// sb.WriteString("|") +// sb.WriteString(e.op) +// if e.param != "" { +// sb.WriteString(strconv.Quote(e.param)) +// } +// return sb.String() +// } + func mustNewMatcher(t labels.MatchType, n, v string) *labels.Matcher { m, err := labels.NewMatcher(t, n, v) if err != nil { @@ -232,6 +352,14 @@ func mustNewMatcher(t labels.MatchType, n, v string) *labels.Matcher { return m } +func mustNewFloat(s string) float64 { + n, err := strconv.ParseFloat(s, 64) + if err != nil { + panic(newParseError(fmt.Sprintf("unable to parse float: %s", err.Error()), 0, 0)) + } + return n +} + type logRange struct { left LogSelectorExpr interval time.Duration @@ -252,14 +380,14 @@ func newLogRange(left LogSelectorExpr, interval time.Duration) *logRange { } } -func addFilterToLogRangeExpr(left *logRange, ty labels.MatchType, match string) *logRange { - left.left = &filterExpr{ - left: left.left, - ty: ty, - match: match, - } - return left -} +// func addFilterToLogRangeExpr(left *logRange, ty labels.MatchType, match string) *logRange { +// left.left = &filterExpr{ +// left: left.left, +// ty: ty, +// match: match, +// } +// return left +// } const ( // vector ops @@ -565,9 +693,8 @@ func (e *literalExpr) String() string { // and they will only be present in binary operation legs. 
func (e *literalExpr) Selector() LogSelectorExpr { return e } func (e *literalExpr) Operations() []string { return nil } -func (e *literalExpr) Filter() (LineFilter, error) { return nil, nil } +func (e *literalExpr) Pipeline() (Pipeline, error) { return NoopPipeline, nil } func (e *literalExpr) Matchers() []*labels.Matcher { return nil } -func (e *literalExpr) Parser() (LabelParser, error) { return NoopLabelParser, nil } func (e *literalExpr) Extractor() (SampleExtractor, error) { return nil, nil } // helper used to impl Stringer for vector and range aggregations diff --git a/pkg/logql/ast_test.go b/pkg/logql/ast_test.go index 5d4de102b22e3..c14c0c9acdf95 100644 --- a/pkg/logql/ast_test.go +++ b/pkg/logql/ast_test.go @@ -37,11 +37,11 @@ func Test_logSelectorExpr_String(t *testing.T) { if err != nil { t.Fatalf("failed to parse log selector: %s", err) } - f, err := expr.Filter() + p, err := expr.Pipeline() if err != nil { t.Fatalf("failed to get filter: %s", err) } - require.Equal(t, tt.expectFilter, f != nil) + require.Equal(t, tt.expectFilter, p != NoopPipeline) if expr.String() != strings.Replace(tt.selector, " ", "", -1) { t.Fatalf("error expected: %s got: %s", tt.selector, expr.String()) } @@ -99,10 +99,11 @@ func Test_NilFilterDoesntPanic(t *testing.T) { expr, err := ParseLogSelector(tc) require.Nil(t, err) - filter, err := expr.Filter() + p, err := expr.Pipeline() require.Nil(t, err) + _, _, ok := p.Process([]byte("bleepbloop"), labelBar) - require.True(t, filter.Filter([]byte("bleepbloop"))) + require.True(t, ok) }) } @@ -181,13 +182,14 @@ func Test_FilterMatcher(t *testing.T) { expr, err := ParseLogSelector(tt.q) assert.Nil(t, err) assert.Equal(t, tt.expectedMatchers, expr.Matchers()) - f, err := expr.Filter() + p, err := expr.Pipeline() assert.Nil(t, err) if tt.lines == nil { - assert.Nil(t, f) + assert.Equal(t, p, NoopPipeline) } else { for _, lc := range tt.lines { - assert.Equal(t, lc.e, f.Filter([]byte(lc.l))) + _, _, ok := p.Process([]byte(lc.l), labelBar) + assert.Equal(t, lc.e, ok) } } }) @@ -239,7 +241,7 @@ func BenchmarkContainsFilter(b *testing.B) { b.Fatal(err) } - f, err := expr.Filter() + p, err := expr.Pipeline() if err != nil { b.Fatal(err) } @@ -249,7 +251,7 @@ func BenchmarkContainsFilter(b *testing.B) { b.ResetTimer() for i := 0; i < b.N; i++ { - if !f.Filter(line) { + if _, _, ok := p.Process(line, labelBar); !ok { b.Fatal("doesn't match") } } @@ -270,11 +272,11 @@ func Test_parserExpr_Parser(t *testing.T) { } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - e := &parserExpr{ + e := &labelParserExpr{ op: tt.op, param: tt.param, } - got, err := e.Parser() + got, err := e.parser() if (err != nil) != tt.wantErr { t.Errorf("parserExpr.Parser() error = %v, wantErr %v", err, tt.wantErr) return diff --git a/pkg/logql/expr.y b/pkg/logql/expr.y index 9223fe8fd02b2..0be94af99f16c 100644 --- a/pkg/logql/expr.y +++ b/pkg/logql/expr.y @@ -4,6 +4,7 @@ package logql import ( "time" "github.com/prometheus/prometheus/pkg/labels" + "github.com/grafana/loki/pkg/logql/labelfilter" ) %} @@ -28,7 +29,13 @@ import ( duration time.Duration LiteralExpr *literalExpr BinOpModifier BinOpOptions - LabelParser struct{ op, param string} + LabelParser *labelParserExpr + LineFilters *lineFilterExpr + PipelineExpr MultiPipelineExpr + PipelineStage PipelineExpr + NumberFilter labelfilter.Filterer + DurationFilter labelfilter.Filterer + LabelFilter labelfilter.Filterer } %start root @@ -50,13 +57,20 @@ import ( %type binOpExpr %type literalExpr %type binOpModifier -%type labelparser 
+%type labelParser +%type pipelineExpr +%type pipelineStage +%type numberFilter +%type durationFilter +%type labelFilter +%type lineFilters + %token IDENTIFIER STRING NUMBER %token DURATION %token MATCHERS LABELS EQ RE NRE OPEN_BRACE CLOSE_BRACE OPEN_BRACKET CLOSE_BRACKET COMMA DOT PIPE_MATCH PIPE_EXACT OPEN_PARENTHESIS CLOSE_PARENTHESIS BY WITHOUT COUNT_OVER_TIME RATE SUM AVG MAX MIN COUNT STDDEV STDVAR BOTTOMK TOPK - BYTES_OVER_TIME BYTES_RATE BOOL JSON REGEXP LOGFMT PIPE + BYTES_OVER_TIME BYTES_RATE BOOL JSON REGEXP LOGFMT PIPE LINE_FMT LABEL_FMT // Operators are listed with increasing precedence. %left OR @@ -85,18 +99,17 @@ metricExpr: logExpr: selector { $$ = newMatcherExpr($1)} - | logExpr filter STRING { $$ = NewFilterExpr( $1, $2, $3 ) } - | logExpr labelparser { $$ = newParserExpr($1, $2.op, $2.param) } + | selector pipelineExpr { $$ = newPipelineExpr(newMatcherExpr($1), $2)} | OPEN_PARENTHESIS logExpr CLOSE_PARENTHESIS { $$ = $2 } - | logExpr filter error | logExpr error ; + logRangeExpr: - logExpr DURATION { $$ = newLogRange($1, $2) } // - | logRangeExpr filter STRING { $$ = addFilterToLogRangeExpr( $1, $2, $3 ) } + logExpr DURATION { $$ = newLogRange($1, $2) } + | selector DURATION pipelineExpr { $$ = newLogRange(newPipelineExpr(newMatcherExpr($1), $3), $2 ) } + | selector DURATION { $$ = newLogRange(newMatcherExpr($1), $2 ) } | OPEN_PARENTHESIS logRangeExpr CLOSE_PARENTHESIS { $$ = $2 } - | logRangeExpr filter error | logRangeExpr error ; @@ -137,6 +150,67 @@ matcher: | IDENTIFIER NRE STRING { $$ = mustNewMatcher(labels.MatchNotRegexp, $1, $3) } ; +pipelineExpr: + pipelineStage { $$ = MultiPipelineExpr{ $1 } } + | pipelineExpr pipelineStage { $$ = append($1, $2)} + ; + +pipelineStage: + lineFilters { $$ = $1 } + | PIPE labelParser { $$ = $2 } + | PIPE labelFilter { $$ = &labelFilterExpr{Filterer: $2 }} +// | PIPE lineFormat +// | PIPE labelFormat + +lineFilters: + filter STRING { $$ = newLineFilterExpr(nil, $1, $2 ) } + | lineFilters filter STRING { $$ = newLineFilterExpr($1, $2, $3 ) } + +labelParser: + JSON { $$ = newLabelParserExpr(OpParserTypeJSON, "") } + | LOGFMT { $$ = newLabelParserExpr(OpParserTypeLogfmt, "") } + | REGEXP STRING { $$ = newLabelParserExpr(OpParserTypeRegexp, $2) } + ; + +// lineFormat: +// LINE_FMT IDENTIFIER +// | LINE_FMT STRING +// ; + +// labelFormat: +// LABEL_FMT IDENTIFIER EQ IDENTIFIER +// | LABEL_FMT IDENTIFIER EQ STRING +// ; + +labelFilter: + matcher { $$ = labelfilter.NewString($1) } + | durationFilter { $$ = $1 } + | numberFilter { $$ = $1 } + | OPEN_PARENTHESIS labelFilter CLOSE_PARENTHESIS { $$ = $2 } + | labelFilter labelFilter { $$ = labelfilter.NewAnd($1, $2 ) } + | labelFilter AND labelFilter { $$ = labelfilter.NewAnd($1, $3 ) } + | labelFilter COMMA labelFilter { $$ = labelfilter.NewAnd($1, $3 ) } + | labelFilter OR labelFilter { $$ = labelfilter.NewOr($1, $3 ) } + ; + +durationFilter: + IDENTIFIER GT DURATION { $$ = labelfilter.NewDuration(labelfilter.FilterGreaterThan, $1, $3) } + | IDENTIFIER GTE DURATION { $$ = labelfilter.NewDuration(labelfilter.FilterGreaterThanOrEqual, $1, $3) } + | IDENTIFIER LT DURATION { $$ = labelfilter.NewDuration(labelfilter.FilterLesserThan, $1, $3) } + | IDENTIFIER LTE DURATION { $$ = labelfilter.NewDuration(labelfilter.FilterLesserThanOrEqual, $1, $3) } + | IDENTIFIER NEQ DURATION { $$ = labelfilter.NewDuration(labelfilter.FilterNotEqual, $1, $3) } + | IDENTIFIER EQ DURATION { $$ = labelfilter.NewDuration(labelfilter.FilterEqual, $1, $3) } + ; + +numberFilter: + IDENTIFIER GT NUMBER { $$ = 
labelfilter.NewNumeric(labelfilter.FilterGreaterThan, $1, mustNewFloat($3))} + | IDENTIFIER GTE NUMBER { $$ = labelfilter.NewNumeric(labelfilter.FilterGreaterThanOrEqual, $1, mustNewFloat($3))} + | IDENTIFIER LT NUMBER { $$ = labelfilter.NewNumeric(labelfilter.FilterLesserThan, $1, mustNewFloat($3))} + | IDENTIFIER LTE NUMBER { $$ = labelfilter.NewNumeric(labelfilter.FilterLesserThanOrEqual, $1, mustNewFloat($3))} + | IDENTIFIER NEQ NUMBER { $$ = labelfilter.NewNumeric(labelfilter.FilterNotEqual, $1, mustNewFloat($3))} + | IDENTIFIER EQ NUMBER { $$ = labelfilter.NewNumeric(labelfilter.FilterEqual, $1, mustNewFloat($3))} + ; + // TODO(owen-d): add (on,ignoring) clauses to binOpExpr // Operator precedence only works if each of these is listed separately. binOpExpr: @@ -197,10 +271,4 @@ grouping: BY OPEN_PARENTHESIS labels CLOSE_PARENTHESIS { $$ = &grouping{ without: false , groups: $3 } } | WITHOUT OPEN_PARENTHESIS labels CLOSE_PARENTHESIS { $$ = &grouping{ without: true , groups: $3 } } ; - -labelparser: - PIPE JSON { $$ = struct{ op, param string}{ op: OpParserTypeJSON} } - | PIPE LOGFMT { $$ = struct{ op, param string}{ op: OpParserTypeLogfmt} } - | PIPE REGEXP STRING { $$ = struct{ op, param string}{ op: OpParserTypeRegexp, param: $3} } - ; %% diff --git a/pkg/logql/expr.y.go b/pkg/logql/expr.y.go index 364aa6b9097b0..61d1115b67fa0 100644 --- a/pkg/logql/expr.y.go +++ b/pkg/logql/expr.y.go @@ -8,11 +8,12 @@ import __yyfmt__ "fmt" //line pkg/logql/expr.y:2 import ( + "github.com/grafana/loki/pkg/logql/labelfilter" "github.com/prometheus/prometheus/pkg/labels" "time" ) -//line pkg/logql/expr.y:10 +//line pkg/logql/expr.y:11 type exprSymType struct { yys int Expr Expr @@ -35,7 +36,13 @@ type exprSymType struct { duration time.Duration LiteralExpr *literalExpr BinOpModifier BinOpOptions - LabelParser struct{ op, param string } + LabelParser *labelParserExpr + LineFilters *lineFilterExpr + PipelineExpr MultiPipelineExpr + PipelineStage PipelineExpr + NumberFilter labelfilter.Filterer + DurationFilter labelfilter.Filterer + LabelFilter labelfilter.Filterer } const IDENTIFIER = 57346 @@ -77,21 +84,23 @@ const JSON = 57381 const REGEXP = 57382 const LOGFMT = 57383 const PIPE = 57384 -const OR = 57385 -const AND = 57386 -const UNLESS = 57387 -const CMP_EQ = 57388 -const NEQ = 57389 -const LT = 57390 -const LTE = 57391 -const GT = 57392 -const GTE = 57393 -const ADD = 57394 -const SUB = 57395 -const MUL = 57396 -const DIV = 57397 -const MOD = 57398 -const POW = 57399 +const LINE_FMT = 57385 +const LABEL_FMT = 57386 +const OR = 57387 +const AND = 57388 +const UNLESS = 57389 +const CMP_EQ = 57390 +const NEQ = 57391 +const LT = 57392 +const LTE = 57393 +const GT = 57394 +const GTE = 57395 +const ADD = 57396 +const SUB = 57397 +const MUL = 57398 +const DIV = 57399 +const MOD = 57400 +const POW = 57401 var exprToknames = [...]string{ "$end", @@ -136,6 +145,8 @@ var exprToknames = [...]string{ "REGEXP", "LOGFMT", "PIPE", + "LINE_FMT", + "LABEL_FMT", "OR", "AND", "UNLESS", @@ -158,7 +169,7 @@ const exprEofCode = 1 const exprErrCode = 2 const exprInitialStackSize = 16 -//line pkg/logql/expr.y:206 +//line pkg/logql/expr.y:274 //line yacctab:1 var exprExca = [...]int{ @@ -168,10 +179,9 @@ var exprExca = [...]int{ -1, 3, 1, 2, 22, 2, - 43, 2, - 44, 2, 45, 2, 46, 2, + 47, 2, 48, 2, 49, 2, 50, 2, @@ -182,12 +192,13 @@ var exprExca = [...]int{ 55, 2, 56, 2, 57, 2, + 58, 2, + 59, 2, -2, 0, - -1, 54, - 43, 2, - 44, 2, + -1, 56, 45, 2, 46, 2, + 47, 2, 48, 2, 49, 2, 50, 2, @@ -198,127 +209,146 @@ var exprExca = 
[...]int{ 55, 2, 56, 2, 57, 2, + 58, 2, + 59, 2, -2, 0, } const exprPrivate = 57344 -const exprLast = 277 +const exprLast = 284 var exprAct = [...]int{ - 62, 4, 45, 136, 58, 3, 98, 38, 53, 55, - 2, 68, 54, 30, 31, 32, 39, 40, 43, 44, - 41, 42, 33, 34, 35, 36, 37, 38, 14, 33, - 34, 35, 36, 37, 38, 11, 35, 36, 37, 38, - 85, 87, 86, 6, 63, 64, 148, 17, 18, 21, - 22, 24, 25, 23, 26, 27, 28, 29, 19, 20, - 144, 133, 61, 101, 63, 64, 99, 94, 96, 97, - 88, 145, 105, 145, 15, 16, 147, 106, 146, 107, - 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, - 118, 119, 120, 104, 11, 103, 60, 122, 93, 134, - 140, 127, 100, 139, 95, 135, 131, 132, 66, 138, - 31, 32, 39, 40, 43, 44, 41, 42, 33, 34, - 35, 36, 37, 38, 84, 91, 65, 83, 57, 126, - 59, 102, 125, 124, 142, 127, 143, 90, 11, 123, - 92, 121, 149, 137, 59, 46, 6, 67, 10, 150, - 17, 18, 21, 22, 24, 25, 23, 26, 27, 28, - 29, 19, 20, 39, 40, 43, 44, 41, 42, 33, - 34, 35, 36, 37, 38, 9, 13, 15, 16, 69, - 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, - 80, 81, 82, 47, 8, 5, 12, 47, 130, 47, - 7, 56, 130, 50, 1, 0, 0, 50, 128, 50, - 48, 49, 128, 89, 48, 49, 48, 49, 50, 89, - 0, 0, 50, 0, 0, 48, 49, 0, 141, 48, - 49, 47, 129, 52, 0, 0, 0, 52, 51, 52, - 0, 50, 51, 0, 51, 0, 0, 0, 48, 49, - 0, 0, 0, 51, 0, 0, 0, 51, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 52, 0, 0, 0, 0, 51, + 64, 47, 4, 133, 46, 159, 3, 5, 108, 55, + 92, 57, 2, 56, 33, 34, 35, 36, 37, 38, + 38, 70, 60, 30, 31, 32, 39, 40, 43, 44, + 41, 42, 33, 34, 35, 36, 37, 38, 35, 36, + 37, 38, 186, 104, 106, 107, 182, 63, 85, 65, + 66, 50, 156, 88, 39, 40, 43, 44, 41, 42, + 33, 34, 35, 36, 37, 38, 112, 153, 53, 109, + 110, 98, 53, 65, 66, 51, 52, 103, 116, 51, + 52, 117, 105, 118, 119, 120, 121, 122, 123, 124, + 125, 126, 127, 128, 129, 130, 131, 96, 49, 138, + 86, 115, 49, 114, 53, 54, 139, 106, 107, 54, + 135, 51, 52, 145, 95, 62, 183, 158, 154, 110, + 155, 185, 161, 31, 32, 39, 40, 43, 44, 41, + 42, 33, 34, 35, 36, 37, 38, 96, 162, 163, + 164, 54, 150, 157, 14, 140, 143, 144, 141, 142, + 135, 11, 177, 176, 95, 165, 183, 180, 178, 6, + 181, 184, 179, 17, 18, 21, 22, 24, 25, 23, + 26, 27, 28, 29, 19, 20, 113, 69, 136, 134, + 85, 11, 68, 11, 147, 169, 168, 188, 150, 111, + 67, 6, 15, 16, 149, 17, 18, 21, 22, 24, + 25, 23, 26, 27, 28, 29, 19, 20, 151, 71, + 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, + 82, 83, 84, 96, 15, 16, 96, 45, 45, 101, + 175, 174, 152, 96, 96, 45, 135, 173, 172, 135, + 95, 100, 148, 95, 102, 171, 170, 99, 99, 147, + 95, 95, 146, 167, 166, 45, 146, 137, 48, 132, + 152, 97, 187, 160, 136, 134, 61, 93, 134, 89, + 91, 90, 59, 94, 61, 87, 10, 9, 13, 8, + 12, 7, 58, 1, } var exprPact = [...]int{ - 22, -1000, -30, 229, -1000, -1000, 22, -1000, -1000, -1000, - -1000, 126, 75, 41, -1000, 120, 102, -1000, -1000, -1000, + 138, -1000, -22, 233, -1000, 56, 138, -1000, -1000, -1000, + -1000, 270, 94, 26, -1000, 184, 176, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, - -27, -27, -27, -27, -27, -27, -27, -27, -27, -27, - -27, -27, -27, -27, -27, 122, -1000, -1000, -1000, -1000, - -1000, -1000, 1, 48, 197, -30, 123, 84, -1000, 57, - 81, 125, 74, 72, 51, -1000, -1000, 22, -1000, 22, - 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, - 22, 22, 22, -1000, -1000, -1000, -1000, 136, -1000, -1000, - -1000, -1000, 140, -1000, 134, 128, 127, 124, 210, 195, - 81, 39, 82, 22, 139, 139, 66, 117, 117, -18, - -18, -50, -50, -50, -50, -23, -23, -23, -23, -23, - -23, -1000, -1000, -1000, -1000, -1000, -1000, 98, -1000, -1000, - -1000, 191, 206, 21, 22, 38, 56, -1000, 54, -1000, - 
-1000, -1000, -1000, 24, -1000, 138, -1000, -1000, 21, -1000, - -1000, + -17, -17, -17, -17, -17, -17, -17, -17, -17, -17, + -17, -17, -17, -17, -17, -1000, 56, -1000, 92, 230, + 256, -1000, -1000, -1000, -1000, 49, 226, -22, 227, 63, + -1000, 33, 168, 170, 82, 80, 57, -1000, -1000, 138, + -1000, 138, 138, 138, 138, 138, 138, 138, 138, 138, + 138, 138, 138, 138, 138, -1000, 254, -1000, 219, -1000, + -1000, 252, -1000, -1000, -1000, 229, 96, -1000, -1000, -1000, + -1000, -1000, 262, -1000, 251, 244, 237, 189, 186, 253, + 60, 168, 30, 126, 138, 259, 259, 77, 6, 6, + -18, -18, -39, -39, -39, -39, -40, -40, -40, -40, + -40, -40, -1000, 219, 229, 229, 229, -1000, 133, 247, + 179, 239, 231, 224, 146, -1000, -1000, -1000, -1000, -1000, + -1000, -1000, -1000, 56, 225, 140, 50, 138, 24, 139, + -1000, 99, 93, 219, 222, -1000, -1000, -1000, -1000, -1000, + -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, 56, -1000, + -1000, 20, -1000, 258, -1000, -1000, 50, -1000, -1000, } var exprPgo = [...]int{ - 0, 204, 9, 2, 0, 3, 5, 1, 6, 4, - 201, 200, 196, 195, 194, 176, 175, 148, 147, 145, + 0, 283, 11, 51, 0, 5, 6, 2, 8, 10, + 282, 281, 280, 7, 279, 278, 277, 276, 177, 275, + 4, 1, 273, 267, 3, 258, } var exprR1 = [...]int{ 0, 1, 2, 2, 7, 7, 7, 7, 7, 6, - 6, 6, 6, 6, 6, 8, 8, 8, 8, 8, - 11, 14, 14, 14, 14, 14, 3, 3, 3, 3, - 13, 13, 13, 10, 10, 9, 9, 9, 9, 16, + 6, 6, 6, 8, 8, 8, 8, 8, 11, 14, + 14, 14, 14, 14, 3, 3, 3, 3, 13, 13, + 13, 10, 10, 9, 9, 9, 9, 20, 20, 21, + 21, 21, 25, 25, 19, 19, 19, 24, 24, 24, + 24, 24, 24, 24, 24, 23, 23, 23, 23, 23, + 23, 22, 22, 22, 22, 22, 22, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, - 16, 16, 16, 16, 18, 18, 17, 17, 17, 15, - 15, 15, 15, 15, 15, 15, 15, 15, 12, 12, - 12, 12, 5, 5, 4, 4, 19, 19, 19, + 16, 16, 18, 18, 17, 17, 17, 15, 15, 15, + 15, 15, 15, 15, 15, 15, 12, 12, 12, 12, + 5, 5, 4, 4, } var exprR2 = [...]int{ 0, 1, 1, 1, 1, 1, 1, 1, 3, 1, - 3, 2, 3, 3, 2, 2, 3, 3, 3, 2, - 4, 4, 5, 5, 6, 7, 1, 1, 1, 1, - 3, 3, 3, 1, 3, 3, 3, 3, 3, 4, + 2, 3, 2, 2, 3, 2, 3, 2, 4, 4, + 5, 5, 6, 7, 1, 1, 1, 1, 3, 3, + 3, 1, 3, 3, 3, 3, 3, 1, 2, 1, + 2, 2, 2, 3, 1, 1, 2, 1, 1, 1, + 3, 2, 3, 3, 3, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 3, 3, 3, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, - 4, 4, 4, 4, 0, 1, 1, 2, 2, 1, + 4, 4, 0, 1, 1, 2, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, - 1, 1, 1, 3, 4, 4, 2, 2, 3, + 1, 3, 4, 4, } var exprChk = [...]int{ -1000, -1, -2, -6, -7, -13, 21, -11, -14, -16, - -17, 13, -12, -15, 6, 52, 53, 25, 26, 36, + -17, 13, -12, -15, 6, 54, 55, 25, 26, 36, 37, 27, 28, 31, 29, 30, 32, 33, 34, 35, - 43, 44, 45, 52, 53, 54, 55, 56, 57, 46, - 47, 50, 51, 48, 49, -3, -19, 2, 19, 20, - 12, 47, 42, -7, -6, -2, -10, 2, -9, 4, - 21, 21, -4, 23, 24, 6, 6, -18, 38, -18, - -18, -18, -18, -18, -18, -18, -18, -18, -18, -18, - -18, -18, -18, 5, 2, 39, 41, 40, 22, 22, - 14, 2, 17, 14, 10, 47, 11, 12, -8, -6, - 21, -7, 6, 21, 21, 21, -2, -2, -2, -2, + 45, 46, 47, 54, 55, 56, 57, 58, 59, 48, + 49, 52, 53, 50, 51, 2, -20, -21, -25, 42, + -3, 19, 20, 12, 49, -7, -6, -2, -10, 2, + -9, 4, 21, 21, -4, 23, 24, 6, 6, -18, + 38, -18, -18, -18, -18, -18, -18, -18, -18, -18, + -18, -18, -18, -18, -18, -21, -3, -19, -24, 39, + 41, 40, -9, -23, -22, 21, 4, 5, 22, 22, + 14, 2, 17, 14, 10, 49, 11, 12, -8, -6, + -13, 21, -7, 6, 21, 21, 21, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, - -2, 5, -9, 5, 5, 5, 5, -3, 2, 22, - 7, -6, -8, 22, 17, -7, -5, 4, -5, 5, - 2, 22, -4, -7, 22, 17, 22, 22, 22, 4, - -4, + -2, -2, 5, -24, 46, 17, 45, 5, -24, 10, + 49, 52, 53, 50, 51, -9, 5, 5, 5, 5, + 
2, 22, 7, 7, -6, -8, 22, 17, -7, -5, + 4, -5, -24, -24, -24, 22, 7, 6, 7, 6, + 7, 6, 7, 6, 7, 6, 7, 6, -20, 22, + -4, -7, 22, 17, 22, 22, 22, 4, -4, } var exprDef = [...]int{ 0, -2, 1, -2, 3, 9, 0, 4, 5, 6, - 7, 0, 0, 0, 56, 0, 0, 68, 69, 70, - 71, 59, 60, 61, 62, 63, 64, 65, 66, 67, - 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, - 54, 54, 54, 54, 54, 0, 11, 14, 26, 27, - 28, 29, 0, 3, -2, 0, 0, 0, 33, 0, - 0, 0, 0, 0, 0, 57, 58, 0, 55, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 10, 13, 76, 77, 0, 8, 12, - 30, 31, 0, 32, 0, 0, 0, 0, 0, 0, - 0, 3, 56, 0, 0, 0, 39, 40, 41, 42, - 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, - 53, 78, 34, 35, 36, 37, 38, 0, 19, 20, - 15, 0, 0, 21, 0, 3, 0, 72, 0, 16, - 18, 17, 23, 3, 22, 0, 74, 75, 24, 73, - 25, + 7, 0, 0, 0, 84, 0, 0, 96, 97, 98, + 99, 87, 88, 89, 90, 91, 92, 93, 94, 95, + 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, + 82, 82, 82, 82, 82, 12, 10, 37, 39, 0, + 0, 24, 25, 26, 27, 3, -2, 0, 0, 0, + 31, 0, 0, 0, 0, 0, 0, 85, 86, 0, + 83, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 38, 0, 40, 41, 44, + 45, 0, 47, 48, 49, 0, 0, 42, 8, 11, + 28, 29, 0, 30, 0, 0, 0, 0, 0, 0, + 9, 0, 3, 84, 0, 0, 0, 67, 68, 69, + 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, + 80, 81, 43, 51, 0, 0, 0, 46, 0, 0, + 0, 0, 0, 0, 0, 32, 33, 34, 35, 36, + 17, 18, 13, 15, 0, 0, 19, 0, 3, 0, + 100, 0, 52, 53, 54, 50, 60, 66, 59, 65, + 55, 61, 56, 62, 57, 63, 58, 64, 14, 16, + 21, 3, 20, 0, 102, 103, 22, 101, 23, } var exprTok1 = [...]int{ @@ -331,7 +361,7 @@ var exprTok2 = [...]int{ 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, - 52, 53, 54, 55, 56, 57, + 52, 53, 54, 55, 56, 57, 58, 59, } var exprTok3 = [...]int{ 0, @@ -676,447 +706,609 @@ exprdefault: case 1: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:71 +//line pkg/logql/expr.y:85 { exprlex.(*lexer).expr = exprDollar[1].Expr } case 2: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:74 +//line pkg/logql/expr.y:88 { exprVAL.Expr = exprDollar[1].LogExpr } case 3: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:75 +//line pkg/logql/expr.y:89 { exprVAL.Expr = exprDollar[1].MetricExpr } case 4: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:79 +//line pkg/logql/expr.y:93 { exprVAL.MetricExpr = exprDollar[1].RangeAggregationExpr } case 5: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:80 +//line pkg/logql/expr.y:94 { exprVAL.MetricExpr = exprDollar[1].VectorAggregationExpr } case 6: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:81 +//line pkg/logql/expr.y:95 { exprVAL.MetricExpr = exprDollar[1].BinOpExpr } case 7: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:82 +//line pkg/logql/expr.y:96 { exprVAL.MetricExpr = exprDollar[1].LiteralExpr } case 8: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:83 +//line pkg/logql/expr.y:97 { exprVAL.MetricExpr = exprDollar[2].MetricExpr } case 9: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:87 +//line pkg/logql/expr.y:101 { exprVAL.LogExpr = newMatcherExpr(exprDollar[1].Selector) } case 10: - exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:88 - { - exprVAL.LogExpr = NewFilterExpr(exprDollar[1].LogExpr, exprDollar[2].Filter, exprDollar[3].str) - } - case 11: exprDollar = exprS[exprpt-2 : exprpt+1] -//line pkg/logql/expr.y:89 +//line pkg/logql/expr.y:102 { - exprVAL.LogExpr = newParserExpr(exprDollar[1].LogExpr, exprDollar[2].LabelParser.op, 
exprDollar[2].LabelParser.param) + exprVAL.LogExpr = newPipelineExpr(newMatcherExpr(exprDollar[1].Selector), exprDollar[2].PipelineExpr) } - case 12: + case 11: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:90 +//line pkg/logql/expr.y:103 { exprVAL.LogExpr = exprDollar[2].LogExpr } - case 15: + case 13: exprDollar = exprS[exprpt-2 : exprpt+1] -//line pkg/logql/expr.y:96 +//line pkg/logql/expr.y:109 { exprVAL.LogRangeExpr = newLogRange(exprDollar[1].LogExpr, exprDollar[2].duration) } - case 16: + case 14: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:97 +//line pkg/logql/expr.y:110 + { + exprVAL.LogRangeExpr = newLogRange(newPipelineExpr(newMatcherExpr(exprDollar[1].Selector), exprDollar[3].PipelineExpr), exprDollar[2].duration) + } + case 15: + exprDollar = exprS[exprpt-2 : exprpt+1] +//line pkg/logql/expr.y:111 { - exprVAL.LogRangeExpr = addFilterToLogRangeExpr(exprDollar[1].LogRangeExpr, exprDollar[2].Filter, exprDollar[3].str) + exprVAL.LogRangeExpr = newLogRange(newMatcherExpr(exprDollar[1].Selector), exprDollar[2].duration) } - case 17: + case 16: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:98 +//line pkg/logql/expr.y:112 { exprVAL.LogRangeExpr = exprDollar[2].LogRangeExpr } - case 20: + case 18: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:103 +//line pkg/logql/expr.y:116 { exprVAL.RangeAggregationExpr = newRangeAggregationExpr(exprDollar[3].LogRangeExpr, exprDollar[1].RangeOp) } - case 21: + case 19: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:107 +//line pkg/logql/expr.y:120 { exprVAL.VectorAggregationExpr = mustNewVectorAggregationExpr(exprDollar[3].MetricExpr, exprDollar[1].VectorOp, nil, nil) } - case 22: + case 20: exprDollar = exprS[exprpt-5 : exprpt+1] -//line pkg/logql/expr.y:108 +//line pkg/logql/expr.y:121 { exprVAL.VectorAggregationExpr = mustNewVectorAggregationExpr(exprDollar[4].MetricExpr, exprDollar[1].VectorOp, exprDollar[2].Grouping, nil) } - case 23: + case 21: exprDollar = exprS[exprpt-5 : exprpt+1] -//line pkg/logql/expr.y:109 +//line pkg/logql/expr.y:122 { exprVAL.VectorAggregationExpr = mustNewVectorAggregationExpr(exprDollar[3].MetricExpr, exprDollar[1].VectorOp, exprDollar[5].Grouping, nil) } - case 24: + case 22: exprDollar = exprS[exprpt-6 : exprpt+1] -//line pkg/logql/expr.y:111 +//line pkg/logql/expr.y:124 { exprVAL.VectorAggregationExpr = mustNewVectorAggregationExpr(exprDollar[5].MetricExpr, exprDollar[1].VectorOp, nil, &exprDollar[3].str) } - case 25: + case 23: exprDollar = exprS[exprpt-7 : exprpt+1] -//line pkg/logql/expr.y:112 +//line pkg/logql/expr.y:125 { exprVAL.VectorAggregationExpr = mustNewVectorAggregationExpr(exprDollar[5].MetricExpr, exprDollar[1].VectorOp, exprDollar[7].Grouping, &exprDollar[3].str) } - case 26: + case 24: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:116 +//line pkg/logql/expr.y:129 { exprVAL.Filter = labels.MatchRegexp } - case 27: + case 25: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:117 +//line pkg/logql/expr.y:130 { exprVAL.Filter = labels.MatchEqual } - case 28: + case 26: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:118 +//line pkg/logql/expr.y:131 { exprVAL.Filter = labels.MatchNotRegexp } - case 29: + case 27: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:119 +//line pkg/logql/expr.y:132 { exprVAL.Filter = labels.MatchNotEqual } - case 30: + case 28: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:123 +//line 
pkg/logql/expr.y:136 { exprVAL.Selector = exprDollar[2].Matchers } - case 31: + case 29: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:124 +//line pkg/logql/expr.y:137 { exprVAL.Selector = exprDollar[2].Matchers } - case 32: + case 30: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:125 +//line pkg/logql/expr.y:138 { } - case 33: + case 31: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:129 +//line pkg/logql/expr.y:142 { exprVAL.Matchers = []*labels.Matcher{exprDollar[1].Matcher} } - case 34: + case 32: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:130 +//line pkg/logql/expr.y:143 { exprVAL.Matchers = append(exprDollar[1].Matchers, exprDollar[3].Matcher) } - case 35: + case 33: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:134 +//line pkg/logql/expr.y:147 { exprVAL.Matcher = mustNewMatcher(labels.MatchEqual, exprDollar[1].str, exprDollar[3].str) } - case 36: + case 34: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:135 +//line pkg/logql/expr.y:148 { exprVAL.Matcher = mustNewMatcher(labels.MatchNotEqual, exprDollar[1].str, exprDollar[3].str) } - case 37: + case 35: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:136 +//line pkg/logql/expr.y:149 { exprVAL.Matcher = mustNewMatcher(labels.MatchRegexp, exprDollar[1].str, exprDollar[3].str) } - case 38: + case 36: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:137 +//line pkg/logql/expr.y:150 { exprVAL.Matcher = mustNewMatcher(labels.MatchNotRegexp, exprDollar[1].str, exprDollar[3].str) } + case 37: + exprDollar = exprS[exprpt-1 : exprpt+1] +//line pkg/logql/expr.y:154 + { + exprVAL.PipelineExpr = MultiPipelineExpr{exprDollar[1].PipelineStage} + } + case 38: + exprDollar = exprS[exprpt-2 : exprpt+1] +//line pkg/logql/expr.y:155 + { + exprVAL.PipelineExpr = append(exprDollar[1].PipelineExpr, exprDollar[2].PipelineStage) + } case 39: + exprDollar = exprS[exprpt-1 : exprpt+1] +//line pkg/logql/expr.y:159 + { + exprVAL.PipelineStage = exprDollar[1].LineFilters + } + case 40: + exprDollar = exprS[exprpt-2 : exprpt+1] +//line pkg/logql/expr.y:160 + { + exprVAL.PipelineStage = exprDollar[2].LabelParser + } + case 41: + exprDollar = exprS[exprpt-2 : exprpt+1] +//line pkg/logql/expr.y:161 + { + exprVAL.PipelineStage = &labelFilterExpr{Filterer: exprDollar[2].LabelFilter} + } + case 42: + exprDollar = exprS[exprpt-2 : exprpt+1] +//line pkg/logql/expr.y:166 + { + exprVAL.LineFilters = newLineFilterExpr(nil, exprDollar[1].Filter, exprDollar[2].str) + } + case 43: + exprDollar = exprS[exprpt-3 : exprpt+1] +//line pkg/logql/expr.y:167 + { + exprVAL.LineFilters = newLineFilterExpr(exprDollar[1].LineFilters, exprDollar[2].Filter, exprDollar[3].str) + } + case 44: + exprDollar = exprS[exprpt-1 : exprpt+1] +//line pkg/logql/expr.y:170 + { + exprVAL.LabelParser = newLabelParserExpr(OpParserTypeJSON, "") + } + case 45: + exprDollar = exprS[exprpt-1 : exprpt+1] +//line pkg/logql/expr.y:171 + { + exprVAL.LabelParser = newLabelParserExpr(OpParserTypeLogfmt, "") + } + case 46: + exprDollar = exprS[exprpt-2 : exprpt+1] +//line pkg/logql/expr.y:172 + { + exprVAL.LabelParser = newLabelParserExpr(OpParserTypeRegexp, exprDollar[2].str) + } + case 47: + exprDollar = exprS[exprpt-1 : exprpt+1] +//line pkg/logql/expr.y:186 + { + exprVAL.LabelFilter = labelfilter.NewString(exprDollar[1].Matcher) + } + case 48: + exprDollar = exprS[exprpt-1 : exprpt+1] +//line pkg/logql/expr.y:187 + { + exprVAL.LabelFilter = exprDollar[1].DurationFilter 
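Note: taken together, the new productions chain stages left to right: one or more line filters, then parsers and label filters in any order. A few hypothetical query shapes the refactored grammar should accept (label names and values are invented; at this stage of the patch `,` between label filters maps to AND, `or` to OR, and parentheses group):

{cluster="ops", job="mysql"} |= "error" != "timeout" | logfmt | duration > 1s or status = 500
{app="foo"} | json | latency >= 250ms, method = "GET"

For range queries, logRangeExpr now also accepts a pipeline after the duration, e.g. count_over_time({app="foo"}[5m] |= "error" | logfmt | duration > 1s).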
+ } + case 49: + exprDollar = exprS[exprpt-1 : exprpt+1] +//line pkg/logql/expr.y:188 + { + exprVAL.LabelFilter = exprDollar[1].NumberFilter + } + case 50: + exprDollar = exprS[exprpt-3 : exprpt+1] +//line pkg/logql/expr.y:189 + { + exprVAL.LabelFilter = exprDollar[2].LabelFilter + } + case 51: + exprDollar = exprS[exprpt-2 : exprpt+1] +//line pkg/logql/expr.y:190 + { + exprVAL.LabelFilter = labelfilter.NewAnd(exprDollar[1].LabelFilter, exprDollar[2].LabelFilter) + } + case 52: + exprDollar = exprS[exprpt-3 : exprpt+1] +//line pkg/logql/expr.y:191 + { + exprVAL.LabelFilter = labelfilter.NewAnd(exprDollar[1].LabelFilter, exprDollar[3].LabelFilter) + } + case 53: + exprDollar = exprS[exprpt-3 : exprpt+1] +//line pkg/logql/expr.y:192 + { + exprVAL.LabelFilter = labelfilter.NewAnd(exprDollar[1].LabelFilter, exprDollar[3].LabelFilter) + } + case 54: + exprDollar = exprS[exprpt-3 : exprpt+1] +//line pkg/logql/expr.y:193 + { + exprVAL.LabelFilter = labelfilter.NewOr(exprDollar[1].LabelFilter, exprDollar[3].LabelFilter) + } + case 55: + exprDollar = exprS[exprpt-3 : exprpt+1] +//line pkg/logql/expr.y:197 + { + exprVAL.DurationFilter = labelfilter.NewDuration(labelfilter.FilterGreaterThan, exprDollar[1].str, exprDollar[3].duration) + } + case 56: + exprDollar = exprS[exprpt-3 : exprpt+1] +//line pkg/logql/expr.y:198 + { + exprVAL.DurationFilter = labelfilter.NewDuration(labelfilter.FilterGreaterThanOrEqual, exprDollar[1].str, exprDollar[3].duration) + } + case 57: + exprDollar = exprS[exprpt-3 : exprpt+1] +//line pkg/logql/expr.y:199 + { + exprVAL.DurationFilter = labelfilter.NewDuration(labelfilter.FilterLesserThan, exprDollar[1].str, exprDollar[3].duration) + } + case 58: + exprDollar = exprS[exprpt-3 : exprpt+1] +//line pkg/logql/expr.y:200 + { + exprVAL.DurationFilter = labelfilter.NewDuration(labelfilter.FilterLesserThanOrEqual, exprDollar[1].str, exprDollar[3].duration) + } + case 59: + exprDollar = exprS[exprpt-3 : exprpt+1] +//line pkg/logql/expr.y:201 + { + exprVAL.DurationFilter = labelfilter.NewDuration(labelfilter.FilterNotEqual, exprDollar[1].str, exprDollar[3].duration) + } + case 60: + exprDollar = exprS[exprpt-3 : exprpt+1] +//line pkg/logql/expr.y:202 + { + exprVAL.DurationFilter = labelfilter.NewDuration(labelfilter.FilterEqual, exprDollar[1].str, exprDollar[3].duration) + } + case 61: + exprDollar = exprS[exprpt-3 : exprpt+1] +//line pkg/logql/expr.y:206 + { + exprVAL.NumberFilter = labelfilter.NewNumeric(labelfilter.FilterGreaterThan, exprDollar[1].str, mustNewFloat(exprDollar[3].str)) + } + case 62: + exprDollar = exprS[exprpt-3 : exprpt+1] +//line pkg/logql/expr.y:207 + { + exprVAL.NumberFilter = labelfilter.NewNumeric(labelfilter.FilterGreaterThanOrEqual, exprDollar[1].str, mustNewFloat(exprDollar[3].str)) + } + case 63: + exprDollar = exprS[exprpt-3 : exprpt+1] +//line pkg/logql/expr.y:208 + { + exprVAL.NumberFilter = labelfilter.NewNumeric(labelfilter.FilterLesserThan, exprDollar[1].str, mustNewFloat(exprDollar[3].str)) + } + case 64: + exprDollar = exprS[exprpt-3 : exprpt+1] +//line pkg/logql/expr.y:209 + { + exprVAL.NumberFilter = labelfilter.NewNumeric(labelfilter.FilterLesserThanOrEqual, exprDollar[1].str, mustNewFloat(exprDollar[3].str)) + } + case 65: + exprDollar = exprS[exprpt-3 : exprpt+1] +//line pkg/logql/expr.y:210 + { + exprVAL.NumberFilter = labelfilter.NewNumeric(labelfilter.FilterNotEqual, exprDollar[1].str, mustNewFloat(exprDollar[3].str)) + } + case 66: + exprDollar = exprS[exprpt-3 : exprpt+1] +//line pkg/logql/expr.y:211 + { + exprVAL.NumberFilter = 
labelfilter.NewNumeric(labelfilter.FilterEqual, exprDollar[1].str, mustNewFloat(exprDollar[3].str)) + } + case 67: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:143 +//line pkg/logql/expr.y:217 { exprVAL.BinOpExpr = mustNewBinOpExpr("or", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } - case 40: + case 68: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:144 +//line pkg/logql/expr.y:218 { exprVAL.BinOpExpr = mustNewBinOpExpr("and", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } - case 41: + case 69: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:145 +//line pkg/logql/expr.y:219 { exprVAL.BinOpExpr = mustNewBinOpExpr("unless", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } - case 42: + case 70: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:146 +//line pkg/logql/expr.y:220 { exprVAL.BinOpExpr = mustNewBinOpExpr("+", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } - case 43: + case 71: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:147 +//line pkg/logql/expr.y:221 { exprVAL.BinOpExpr = mustNewBinOpExpr("-", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } - case 44: + case 72: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:148 +//line pkg/logql/expr.y:222 { exprVAL.BinOpExpr = mustNewBinOpExpr("*", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } - case 45: + case 73: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:149 +//line pkg/logql/expr.y:223 { exprVAL.BinOpExpr = mustNewBinOpExpr("/", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } - case 46: + case 74: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:150 +//line pkg/logql/expr.y:224 { exprVAL.BinOpExpr = mustNewBinOpExpr("%", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } - case 47: + case 75: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:151 +//line pkg/logql/expr.y:225 { exprVAL.BinOpExpr = mustNewBinOpExpr("^", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } - case 48: + case 76: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:152 +//line pkg/logql/expr.y:226 { exprVAL.BinOpExpr = mustNewBinOpExpr("==", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } - case 49: + case 77: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:153 +//line pkg/logql/expr.y:227 { exprVAL.BinOpExpr = mustNewBinOpExpr("!=", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } - case 50: + case 78: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:154 +//line pkg/logql/expr.y:228 { exprVAL.BinOpExpr = mustNewBinOpExpr(">", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } - case 51: + case 79: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:155 +//line pkg/logql/expr.y:229 { exprVAL.BinOpExpr = mustNewBinOpExpr(">=", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } - case 52: + case 80: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:156 +//line pkg/logql/expr.y:230 { exprVAL.BinOpExpr = mustNewBinOpExpr("<", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } - case 53: + case 81: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:157 +//line pkg/logql/expr.y:231 { exprVAL.BinOpExpr = 
mustNewBinOpExpr("<=", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } - case 54: + case 82: exprDollar = exprS[exprpt-0 : exprpt+1] -//line pkg/logql/expr.y:161 +//line pkg/logql/expr.y:235 { exprVAL.BinOpModifier = BinOpOptions{} } - case 55: + case 83: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:162 +//line pkg/logql/expr.y:236 { exprVAL.BinOpModifier = BinOpOptions{ReturnBool: true} } - case 56: + case 84: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:166 +//line pkg/logql/expr.y:240 { exprVAL.LiteralExpr = mustNewLiteralExpr(exprDollar[1].str, false) } - case 57: + case 85: exprDollar = exprS[exprpt-2 : exprpt+1] -//line pkg/logql/expr.y:167 +//line pkg/logql/expr.y:241 { exprVAL.LiteralExpr = mustNewLiteralExpr(exprDollar[2].str, false) } - case 58: + case 86: exprDollar = exprS[exprpt-2 : exprpt+1] -//line pkg/logql/expr.y:168 +//line pkg/logql/expr.y:242 { exprVAL.LiteralExpr = mustNewLiteralExpr(exprDollar[2].str, true) } - case 59: + case 87: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:172 +//line pkg/logql/expr.y:246 { exprVAL.VectorOp = OpTypeSum } - case 60: + case 88: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:173 +//line pkg/logql/expr.y:247 { exprVAL.VectorOp = OpTypeAvg } - case 61: + case 89: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:174 +//line pkg/logql/expr.y:248 { exprVAL.VectorOp = OpTypeCount } - case 62: + case 90: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:175 +//line pkg/logql/expr.y:249 { exprVAL.VectorOp = OpTypeMax } - case 63: + case 91: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:176 +//line pkg/logql/expr.y:250 { exprVAL.VectorOp = OpTypeMin } - case 64: + case 92: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:177 +//line pkg/logql/expr.y:251 { exprVAL.VectorOp = OpTypeStddev } - case 65: + case 93: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:178 +//line pkg/logql/expr.y:252 { exprVAL.VectorOp = OpTypeStdvar } - case 66: + case 94: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:179 +//line pkg/logql/expr.y:253 { exprVAL.VectorOp = OpTypeBottomK } - case 67: + case 95: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:180 +//line pkg/logql/expr.y:254 { exprVAL.VectorOp = OpTypeTopK } - case 68: + case 96: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:184 +//line pkg/logql/expr.y:258 { exprVAL.RangeOp = OpRangeTypeCount } - case 69: + case 97: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:185 +//line pkg/logql/expr.y:259 { exprVAL.RangeOp = OpRangeTypeRate } - case 70: + case 98: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:186 +//line pkg/logql/expr.y:260 { exprVAL.RangeOp = OpRangeTypeBytes } - case 71: + case 99: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:187 +//line pkg/logql/expr.y:261 { exprVAL.RangeOp = OpRangeTypeBytesRate } - case 72: + case 100: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:192 +//line pkg/logql/expr.y:266 { exprVAL.Labels = []string{exprDollar[1].str} } - case 73: + case 101: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:193 +//line pkg/logql/expr.y:267 { exprVAL.Labels = append(exprDollar[1].Labels, exprDollar[3].str) } - case 74: + case 102: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:197 +//line pkg/logql/expr.y:271 { exprVAL.Grouping = 
&grouping{without: false, groups: exprDollar[3].Labels} } - case 75: + case 103: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:198 +//line pkg/logql/expr.y:272 { exprVAL.Grouping = &grouping{without: true, groups: exprDollar[3].Labels} } - case 76: - exprDollar = exprS[exprpt-2 : exprpt+1] -//line pkg/logql/expr.y:202 - { - exprVAL.LabelParser = struct{ op, param string }{op: OpParserTypeJSON} - } - case 77: - exprDollar = exprS[exprpt-2 : exprpt+1] -//line pkg/logql/expr.y:203 - { - exprVAL.LabelParser = struct{ op, param string }{op: OpParserTypeLogfmt} - } - case 78: - exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:204 - { - exprVAL.LabelParser = struct{ op, param string }{op: OpParserTypeRegexp, param: exprDollar[3].str} - } } goto exprstack /* stack new state and value */ } diff --git a/pkg/logql/labelfilter/duration.go b/pkg/logql/labelfilter/duration.go new file mode 100644 index 0000000000000..122945932896a --- /dev/null +++ b/pkg/logql/labelfilter/duration.go @@ -0,0 +1,93 @@ +package labelfilter + +import ( + "errors" + "fmt" + "time" + + "github.com/prometheus/prometheus/pkg/labels" +) + +// FilterType is an enum for label filtering types. +type FilterType int + +func (f FilterType) String() string { + switch f { + case FilterEqual: + return "==" + case FilterNotEqual: + return "!=" + case FilterGreaterThan: + return ">" + case FilterGreaterThanOrEqual: + return ">=" + case FilterLesserThan: + return "<" + case FilterLesserThanOrEqual: + return "<=" + default: + return "" + } +} + +// Possible FilterTypes. +const ( + FilterEqual FilterType = iota + FilterNotEqual + FilterGreaterThan + FilterGreaterThanOrEqual + FilterLesserThan + FilterLesserThanOrEqual +) + +var ( + errConversion = errors.New("converting label value failed") + errUnsupportedType = errors.New("unsupported filter type") +) + +type Duration struct { + Name string + Value time.Duration + Type FilterType +} + +func NewDuration(t FilterType, name string, d time.Duration) *Duration { + return &Duration{ + Name: name, + Type: t, + Value: d, + } +} + +func (d *Duration) Filter(lbs labels.Labels) (bool, error) { + for _, l := range lbs { + if l.Name == d.Name { + value, err := time.ParseDuration(l.Value) + if err != nil { + return false, errConversion + } + switch d.Type { + case FilterEqual: + return value == d.Value, nil + case FilterNotEqual: + return value != d.Value, nil + case FilterGreaterThan: + return value > d.Value, nil + case FilterGreaterThanOrEqual: + return value >= d.Value, nil + case FilterLesserThan: + return value < d.Value, nil + case FilterLesserThanOrEqual: + return value <= d.Value, nil + default: + return false, errUnsupportedType + } + } + } + // we have not found this label. 
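Note: the grammar actions above construct these labelfilter values directly; here is a small sketch of composing the primitives by hand, using only the constructors and the Filter signature added by this patch (NewAnd and NewNumeric follow below; the label values are invented):

package main

import (
	"fmt"
	"time"

	"github.com/grafana/loki/pkg/logql/labelfilter"
	"github.com/prometheus/prometheus/pkg/labels"
)

func main() {
	// Equivalent of the label filter `duration > 1s, status = 500`
	// (the grammar maps both `,` and `and` to NewAnd).
	f := labelfilter.NewAnd(
		labelfilter.NewDuration(labelfilter.FilterGreaterThan, "duration", time.Second),
		labelfilter.NewNumeric(labelfilter.FilterEqual, "status", 500),
	)
	lbs := labels.Labels{
		{Name: "duration", Value: "2.5s"}, // parsed with time.ParseDuration
		{Name: "status", Value: "500"},    // parsed with strconv.ParseFloat
	}
	ok, err := f.Filter(lbs)
	fmt.Println(ok, err) // true <nil>
}

Filter surfaces errConversion when a label value fails to parse, and returns (false, nil) when the named label is absent, so a filter on a missing label silently drops the line.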
+ return false, nil +} + +func (d *Duration) String() string { + return fmt.Sprintf("%s%s%s", d.Name, d.Type, d.Value) +} diff --git a/pkg/logql/labelfilter/filter.go b/pkg/logql/labelfilter/filter.go new file mode 100644 index 0000000000000..3a71355ca58c5 --- /dev/null +++ b/pkg/logql/labelfilter/filter.go @@ -0,0 +1,64 @@ +package labelfilter + +import ( + "fmt" + "strings" + + "github.com/prometheus/prometheus/pkg/labels" +) + +type Filterer interface { + Filter(lbs labels.Labels) (bool, error) + fmt.Stringer +} + +type Binary struct { + Left Filterer + Right Filterer + and bool +} + +func NewAnd(left Filterer, right Filterer) *Binary { + return &Binary{ + Left: left, + Right: right, + and: true, + } +} + +func NewOr(left Filterer, right Filterer) *Binary { + return &Binary{ + Left: left, + Right: right, + } +} + +func (b *Binary) Filter(lbs labels.Labels) (bool, error) { + l, err := b.Left.Filter(lbs) + if err != nil { + return false, err + } + if !b.and && l { + return true, nil + } + r, err := b.Right.Filter(lbs) + if err != nil { + return false, err + } + if !b.and { + return l || r, nil + } + return l && r, nil +} + +func (b *Binary) String() string { + var sb strings.Builder + sb.WriteString(b.Left.String()) + if b.and { + sb.WriteString(" , ") + } else { + sb.WriteString(" or ") + } + sb.WriteString(b.Right.String()) + return sb.String() +} diff --git a/pkg/logql/labelfilter/number.go b/pkg/logql/labelfilter/number.go new file mode 100644 index 0000000000000..52510c93c6272 --- /dev/null +++ b/pkg/logql/labelfilter/number.go @@ -0,0 +1,54 @@ +package labelfilter + +import ( + "fmt" + "strconv" + + "github.com/prometheus/prometheus/pkg/labels" +) + +type Numeric struct { + Name string + Value float64 + Type FilterType +} + +func NewNumeric(t FilterType, name string, v float64) *Numeric { + return &Numeric{ + Name: name, + Type: t, + Value: v, + } +} + +func (n *Numeric) Filter(lbs labels.Labels) (bool, error) { + for _, l := range lbs { + if l.Name == n.Name { + value, err := strconv.ParseFloat(l.Value, 64) + if err != nil { + return false, errConversion + } + switch n.Type { + case FilterEqual: + return value == n.Value, nil + case FilterNotEqual: + return value != n.Value, nil + case FilterGreaterThan: + return value > n.Value, nil + case FilterGreaterThanOrEqual: + return value >= n.Value, nil + case FilterLesserThan: + return value < n.Value, nil + case FilterLesserThanOrEqual: + return value <= n.Value, nil + default: + return false, errUnsupportedType + } + } + } + return false, nil +} + +func (n *Numeric) String() string { + return fmt.Sprintf("%s%s%s", n.Name, n.Type, strconv.FormatFloat(n.Value, 'f', -1, 64)) +} diff --git a/pkg/logql/labelfilter/string.go b/pkg/logql/labelfilter/string.go new file mode 100644 index 0000000000000..bcd9ac94dd226 --- /dev/null +++ b/pkg/logql/labelfilter/string.go @@ -0,0 +1,24 @@ +package labelfilter + +import ( + "github.com/prometheus/prometheus/pkg/labels" +) + +type String struct { + *labels.Matcher +} + +func NewString(m *labels.Matcher) *String { + return &String{ + Matcher: m, + } +} + +func (s *String) Filter(lbs labels.Labels) (bool, error) { + for _, l := range lbs { + if l.Name == s.Name { + return s.Matches(l.Value), nil + } + } + return false, nil +} diff --git a/pkg/logql/metrics.go b/pkg/logql/metrics.go index 4627e05622e15..4930d2ec3a52e 100644 --- a/pkg/logql/metrics.go +++ b/pkg/logql/metrics.go @@ -113,7 +113,7 @@ func QueryType(query string) (string, error) { return QueryTypeMetric, nil case *matchersExpr: return 
QueryTypeLimited, nil - case *filterExpr: + case *pipelineExpr: return QueryTypeFilter, nil default: return "", nil diff --git a/pkg/logql/parser_test.go b/pkg/logql/parser_test.go index 3ce2d0bb45dd0..107beabde4107 100644 --- a/pkg/logql/parser_test.go +++ b/pkg/logql/parser_test.go @@ -26,50 +26,13 @@ func TestParse(t *testing.T) { exp: &rangeAggregationExpr{ operation: "count_over_time", left: &logRange{ - left: &filterExpr{ - ty: labels.MatchRegexp, - match: "error\\", - left: &matchersExpr{ - matchers: []*labels.Matcher{ - mustNewMatcher(labels.MatchRegexp, "foo", "bar\\w+"), - }, - }, - }, - interval: 12 * time.Hour, - }, - }, - }, - { - // test [12h] before filter expr - in: `count_over_time({foo="bar"}[12h] |= "error")`, - exp: &rangeAggregationExpr{ - operation: "count_over_time", - left: &logRange{ - left: &filterExpr{ - ty: labels.MatchEqual, - match: "error", - left: &matchersExpr{ - matchers: []*labels.Matcher{ - mustNewMatcher(labels.MatchEqual, "foo", "bar"), - }, + left: &pipelineExpr{ + pipeline: MultiPipelineExpr{ + newLineFilterExpr(nil, labels.MatchRegexp, "error\\"), }, - }, - interval: 12 * time.Hour, - }, - }, - }, - { - // test [12h] after filter expr - in: `count_over_time({foo="bar"} |= "error" [12h])`, - exp: &rangeAggregationExpr{ - operation: "count_over_time", - left: &logRange{ - left: &filterExpr{ - ty: labels.MatchEqual, - match: "error", left: &matchersExpr{ matchers: []*labels.Matcher{ - mustNewMatcher(labels.MatchEqual, "foo", "bar"), + mustNewMatcher(labels.MatchRegexp, "foo", "bar\\w+"), }, }, }, @@ -77,873 +40,911 @@ func TestParse(t *testing.T) { }, }, }, - { - in: `{foo="bar"}`, - exp: &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}}, - }, - { - in: `{ foo = "bar" }`, - exp: &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}}, - }, - { - in: `{ foo != "bar" }`, - exp: &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchNotEqual, "foo", "bar")}}, - }, - { - in: `{ foo =~ "bar" }`, - exp: &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchRegexp, "foo", "bar")}}, - }, - { - in: `{ foo !~ "bar" }`, - exp: &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchNotRegexp, "foo", "bar")}}, - }, - { - in: `count_over_time({ foo !~ "bar" }[12m])`, - exp: &rangeAggregationExpr{ - left: &logRange{ - left: &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchNotRegexp, "foo", "bar")}}, - interval: 12 * time.Minute, - }, - operation: "count_over_time", - }, - }, - { - in: `bytes_over_time({ foo !~ "bar" }[12m])`, - exp: &rangeAggregationExpr{ - left: &logRange{ - left: &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchNotRegexp, "foo", "bar")}}, - interval: 12 * time.Minute, - }, - operation: OpRangeTypeBytes, - }, - }, - { - in: `bytes_rate({ foo !~ "bar" }[12m])`, - exp: &rangeAggregationExpr{ - left: &logRange{ - left: &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchNotRegexp, "foo", "bar")}}, - interval: 12 * time.Minute, - }, - operation: OpRangeTypeBytesRate, - }, - }, - { - in: `rate({ foo !~ "bar" }[5h])`, - exp: &rangeAggregationExpr{ - left: &logRange{ - left: &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchNotRegexp, "foo", "bar")}}, - interval: 5 * time.Hour, - }, - operation: "rate", - }, - }, - { - in: `rate({ foo !~ "bar" }[5d])`, - exp: &rangeAggregationExpr{ - left: &logRange{ - left: &matchersExpr{matchers: 
[]*labels.Matcher{mustNewMatcher(labels.MatchNotRegexp, "foo", "bar")}}, - interval: 5 * 24 * time.Hour, - }, - operation: "rate", - }, - }, - { - in: `count_over_time({ foo !~ "bar" }[1w])`, - exp: &rangeAggregationExpr{ - left: &logRange{ - left: &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchNotRegexp, "foo", "bar")}}, - interval: 7 * 24 * time.Hour, - }, - operation: "count_over_time", - }, - }, - { - in: `sum(rate({ foo !~ "bar" }[5h]))`, - exp: mustNewVectorAggregationExpr(&rangeAggregationExpr{ - left: &logRange{ - left: &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchNotRegexp, "foo", "bar")}}, - interval: 5 * time.Hour, - }, - operation: "rate", - }, "sum", nil, nil), - }, - { - in: `sum(rate({ foo !~ "bar" }[1y]))`, - exp: mustNewVectorAggregationExpr(&rangeAggregationExpr{ - left: &logRange{ - left: &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchNotRegexp, "foo", "bar")}}, - interval: 365 * 24 * time.Hour, - }, - operation: "rate", - }, "sum", nil, nil), - }, - { - in: `avg(count_over_time({ foo !~ "bar" }[5h])) by (bar,foo)`, - exp: mustNewVectorAggregationExpr(&rangeAggregationExpr{ - left: &logRange{ - left: &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchNotRegexp, "foo", "bar")}}, - interval: 5 * time.Hour, - }, - operation: "count_over_time", - }, "avg", &grouping{ - without: false, - groups: []string{"bar", "foo"}, - }, nil), - }, - { - in: `max without (bar) (count_over_time({ foo !~ "bar" }[5h]))`, - exp: mustNewVectorAggregationExpr(&rangeAggregationExpr{ - left: &logRange{ - left: &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchNotRegexp, "foo", "bar")}}, - interval: 5 * time.Hour, - }, - operation: "count_over_time", - }, "max", &grouping{ - without: true, - groups: []string{"bar"}, - }, nil), - }, - { - in: `topk(10,count_over_time({ foo !~ "bar" }[5h])) without (bar)`, - exp: mustNewVectorAggregationExpr(&rangeAggregationExpr{ - left: &logRange{ - left: &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchNotRegexp, "foo", "bar")}}, - interval: 5 * time.Hour, - }, - operation: "count_over_time", - }, "topk", &grouping{ - without: true, - groups: []string{"bar"}, - }, newString("10")), - }, - { - in: `bottomk(30 ,sum(rate({ foo !~ "bar" }[5h])) by (foo))`, - exp: mustNewVectorAggregationExpr(mustNewVectorAggregationExpr(&rangeAggregationExpr{ - left: &logRange{ - left: &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchNotRegexp, "foo", "bar")}}, - interval: 5 * time.Hour, - }, - operation: "rate", - }, "sum", &grouping{ - groups: []string{"foo"}, - without: false, - }, nil), "bottomk", nil, - newString("30")), - }, - { - in: `max( sum(count_over_time({ foo !~ "bar" }[5h])) without (foo,bar) ) by (foo)`, - exp: mustNewVectorAggregationExpr(mustNewVectorAggregationExpr(&rangeAggregationExpr{ - left: &logRange{ - left: &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchNotRegexp, "foo", "bar")}}, - interval: 5 * time.Hour, - }, - operation: "count_over_time", - }, "sum", &grouping{ - groups: []string{"foo", "bar"}, - without: true, - }, nil), "max", &grouping{ - groups: []string{"foo"}, - without: false, - }, nil), - }, - { - in: `unk({ foo !~ "bar" }[5m])`, - err: ParseError{ - msg: "syntax error: unexpected IDENTIFIER", - line: 1, - col: 1, - }, - }, - { - in: `rate({ foo !~ "bar" }[5minutes])`, - err: ParseError{ - msg: `not a valid duration string: "5minutes"`, - line: 0, - col: 22, - }, - }, - { - in: 
`rate({ foo !~ "bar" }[5)`, - err: ParseError{ - msg: "missing closing ']' in duration", - line: 0, - col: 22, - }, - }, - { - in: `min({ foo !~ "bar" }[5m])`, - err: ParseError{ - msg: "syntax error: unexpected DURATION", - line: 0, - col: 21, - }, - }, - { - in: `sum(3 ,count_over_time({ foo !~ "bar" }[5h]))`, - err: ParseError{ - msg: "unsupported parameter for operation sum(3,", - line: 0, - col: 0, - }, - }, - { - in: `topk(count_over_time({ foo !~ "bar" }[5h]))`, - err: ParseError{ - msg: "parameter required for operation topk", - line: 0, - col: 0, - }, - }, - { - in: `bottomk(he,count_over_time({ foo !~ "bar" }[5h]))`, - err: ParseError{ - msg: "syntax error: unexpected IDENTIFIER", - line: 1, - col: 9, - }, - }, - { - in: `bottomk(1.2,count_over_time({ foo !~ "bar" }[5h]))`, - err: ParseError{ - msg: "invalid parameter bottomk(1.2,", - line: 0, - col: 0, - }, - }, - { - in: `stddev({ foo !~ "bar" })`, - err: ParseError{ - msg: "syntax error: unexpected )", - line: 1, - col: 24, - }, - }, - { - in: `{ foo = "bar", bar != "baz" }`, - exp: &matchersExpr{matchers: []*labels.Matcher{ - mustNewMatcher(labels.MatchEqual, "foo", "bar"), - mustNewMatcher(labels.MatchNotEqual, "bar", "baz"), - }}, - }, - { - in: `{foo="bar"} |= "baz"`, - exp: &filterExpr{ - left: &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}}, - ty: labels.MatchEqual, - match: "baz", - }, - }, - { - in: `{foo="bar"} |= "baz" |~ "blip" != "flip" !~ "flap"`, - exp: &filterExpr{ - left: &filterExpr{ - left: &filterExpr{ - left: &filterExpr{ - left: &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}}, - ty: labels.MatchEqual, - match: "baz", - }, - ty: labels.MatchRegexp, - match: "blip", - }, - ty: labels.MatchNotEqual, - match: "flip", - }, - ty: labels.MatchNotRegexp, - match: "flap", - }, - }, - { - in: `count_over_time(({foo="bar"} |= "baz" |~ "blip" != "flip" !~ "flap")[5m])`, - exp: newRangeAggregationExpr( - &logRange{ - left: &filterExpr{ - left: &filterExpr{ - left: &filterExpr{ - left: &filterExpr{ - left: &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}}, - ty: labels.MatchEqual, - match: "baz", - }, - ty: labels.MatchRegexp, - match: "blip", - }, - ty: labels.MatchNotEqual, - match: "flip", - }, - ty: labels.MatchNotRegexp, - match: "flap", - }, - interval: 5 * time.Minute, - }, OpRangeTypeCount), - }, - { - in: `bytes_over_time(({foo="bar"} |= "baz" |~ "blip" != "flip" !~ "flap")[5m])`, - exp: newRangeAggregationExpr( - &logRange{ - left: &filterExpr{ - left: &filterExpr{ - left: &filterExpr{ - left: &filterExpr{ - left: &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}}, - ty: labels.MatchEqual, - match: "baz", - }, - ty: labels.MatchRegexp, - match: "blip", - }, - ty: labels.MatchNotEqual, - match: "flip", - }, - ty: labels.MatchNotRegexp, - match: "flap", - }, - interval: 5 * time.Minute, - }, OpRangeTypeBytes), - }, - { - in: `sum(count_over_time(({foo="bar"} |= "baz" |~ "blip" != "flip" !~ "flap")[5m])) by (foo)`, - exp: mustNewVectorAggregationExpr(newRangeAggregationExpr( - &logRange{ - left: &filterExpr{ - left: &filterExpr{ - left: &filterExpr{ - left: &filterExpr{ - left: &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}}, - ty: labels.MatchEqual, - match: "baz", - }, - ty: labels.MatchRegexp, - match: "blip", - }, - ty: labels.MatchNotEqual, - match: "flip", - }, - ty: labels.MatchNotRegexp, - 
match: "flap", - }, - interval: 5 * time.Minute, - }, OpRangeTypeCount), - "sum", - &grouping{ - without: false, - groups: []string{"foo"}, - }, - nil), - }, - { - in: `sum(bytes_rate(({foo="bar"} |= "baz" |~ "blip" != "flip" !~ "flap")[5m])) by (foo)`, - exp: mustNewVectorAggregationExpr(newRangeAggregationExpr( - &logRange{ - left: &filterExpr{ - left: &filterExpr{ - left: &filterExpr{ - left: &filterExpr{ - left: &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}}, - ty: labels.MatchEqual, - match: "baz", - }, - ty: labels.MatchRegexp, - match: "blip", - }, - ty: labels.MatchNotEqual, - match: "flip", - }, - ty: labels.MatchNotRegexp, - match: "flap", - }, - interval: 5 * time.Minute, - }, OpRangeTypeBytesRate), - "sum", - &grouping{ - without: false, - groups: []string{"foo"}, - }, - nil), - }, - { - in: `topk(5,count_over_time(({foo="bar"} |= "baz" |~ "blip" != "flip" !~ "flap")[5m])) without (foo)`, - exp: mustNewVectorAggregationExpr(newRangeAggregationExpr( - &logRange{ - left: &filterExpr{ - left: &filterExpr{ - left: &filterExpr{ - left: &filterExpr{ - left: &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}}, - ty: labels.MatchEqual, - match: "baz", - }, - ty: labels.MatchRegexp, - match: "blip", - }, - ty: labels.MatchNotEqual, - match: "flip", - }, - ty: labels.MatchNotRegexp, - match: "flap", - }, - interval: 5 * time.Minute, - }, OpRangeTypeCount), - "topk", - &grouping{ - without: true, - groups: []string{"foo"}, - }, - newString("5")), - }, - { - in: `topk(5,sum(rate(({foo="bar"} |= "baz" |~ "blip" != "flip" !~ "flap")[5m])) by (app))`, - exp: mustNewVectorAggregationExpr( - mustNewVectorAggregationExpr( - newRangeAggregationExpr( - &logRange{ - left: &filterExpr{ - left: &filterExpr{ - left: &filterExpr{ - left: &filterExpr{ - left: &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}}, - ty: labels.MatchEqual, - match: "baz", - }, - ty: labels.MatchRegexp, - match: "blip", - }, - ty: labels.MatchNotEqual, - match: "flip", - }, - ty: labels.MatchNotRegexp, - match: "flap", - }, - interval: 5 * time.Minute, - }, OpRangeTypeRate), - "sum", - &grouping{ - without: false, - groups: []string{"app"}, - }, - nil), - "topk", - nil, - newString("5")), - }, - { - in: `count_over_time({foo="bar"}[5m] |= "baz" |~ "blip" != "flip" !~ "flap")`, - exp: newRangeAggregationExpr( - &logRange{ - left: &filterExpr{ - left: &filterExpr{ - left: &filterExpr{ - left: &filterExpr{ - left: &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}}, - ty: labels.MatchEqual, - match: "baz", - }, - ty: labels.MatchRegexp, - match: "blip", - }, - ty: labels.MatchNotEqual, - match: "flip", - }, - ty: labels.MatchNotRegexp, - match: "flap", - }, - interval: 5 * time.Minute, - }, OpRangeTypeCount), - }, - { - in: `sum(count_over_time({foo="bar"}[5m] |= "baz" |~ "blip" != "flip" !~ "flap")) by (foo)`, - exp: mustNewVectorAggregationExpr(newRangeAggregationExpr( - &logRange{ - left: &filterExpr{ - left: &filterExpr{ - left: &filterExpr{ - left: &filterExpr{ - left: &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}}, - ty: labels.MatchEqual, - match: "baz", - }, - ty: labels.MatchRegexp, - match: "blip", - }, - ty: labels.MatchNotEqual, - match: "flip", - }, - ty: labels.MatchNotRegexp, - match: "flap", - }, - interval: 5 * time.Minute, - }, OpRangeTypeCount), - "sum", - &grouping{ - without: false, - groups: 
[]string{"foo"}, - }, - nil), - }, - { - in: `topk(5,count_over_time({foo="bar"}[5m] |= "baz" |~ "blip" != "flip" !~ "flap")) without (foo)`, - exp: mustNewVectorAggregationExpr(newRangeAggregationExpr( - &logRange{ - left: &filterExpr{ - left: &filterExpr{ - left: &filterExpr{ - left: &filterExpr{ - left: &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}}, - ty: labels.MatchEqual, - match: "baz", - }, - ty: labels.MatchRegexp, - match: "blip", - }, - ty: labels.MatchNotEqual, - match: "flip", - }, - ty: labels.MatchNotRegexp, - match: "flap", - }, - interval: 5 * time.Minute, - }, OpRangeTypeCount), - "topk", - &grouping{ - without: true, - groups: []string{"foo"}, - }, - newString("5")), - }, - { - in: `topk(5,sum(rate({foo="bar"}[5m] |= "baz" |~ "blip" != "flip" !~ "flap")) by (app))`, - exp: mustNewVectorAggregationExpr( - mustNewVectorAggregationExpr( - newRangeAggregationExpr( - &logRange{ - left: &filterExpr{ - left: &filterExpr{ - left: &filterExpr{ - left: &filterExpr{ - left: &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}}, - ty: labels.MatchEqual, - match: "baz", - }, - ty: labels.MatchRegexp, - match: "blip", - }, - ty: labels.MatchNotEqual, - match: "flip", - }, - ty: labels.MatchNotRegexp, - match: "flap", - }, - interval: 5 * time.Minute, - }, OpRangeTypeRate), - "sum", - &grouping{ - without: false, - groups: []string{"app"}, - }, - nil), - "topk", - nil, - newString("5")), - }, - { - in: `{foo="bar}`, - err: ParseError{ - msg: "literal not terminated", - line: 1, - col: 6, - }, - }, - { - in: `{foo="bar"`, - err: ParseError{ - msg: "syntax error: unexpected $end, expecting } or ,", - line: 1, - col: 11, - }, - }, + // { + // // test [12h] before filter expr + // in: `count_over_time({foo="bar"}[12h] |= "error")`, + // exp: &rangeAggregationExpr{ + // operation: "count_over_time", + // left: &logRange{ + // left: &filterExpr{ + // ty: labels.MatchEqual, + // match: "error", + // left: &matchersExpr{ + // matchers: []*labels.Matcher{ + // mustNewMatcher(labels.MatchEqual, "foo", "bar"), + // }, + // }, + // }, + // interval: 12 * time.Hour, + // }, + // }, + // }, + // { + // // test [12h] after filter expr + // in: `count_over_time({foo="bar"} |= "error" [12h])`, + // exp: &rangeAggregationExpr{ + // operation: "count_over_time", + // left: &logRange{ + // left: &filterExpr{ + // ty: labels.MatchEqual, + // match: "error", + // left: &matchersExpr{ + // matchers: []*labels.Matcher{ + // mustNewMatcher(labels.MatchEqual, "foo", "bar"), + // }, + // }, + // }, + // interval: 12 * time.Hour, + // }, + // }, + // }, + // { + // in: `{foo="bar"}`, + // exp: &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}}, + // }, + // { + // in: `{ foo = "bar" }`, + // exp: &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}}, + // }, + // { + // in: `{ foo != "bar" }`, + // exp: &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchNotEqual, "foo", "bar")}}, + // }, + // { + // in: `{ foo =~ "bar" }`, + // exp: &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchRegexp, "foo", "bar")}}, + // }, + // { + // in: `{ foo !~ "bar" }`, + // exp: &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchNotRegexp, "foo", "bar")}}, + // }, + // { + // in: `count_over_time({ foo !~ "bar" }[12m])`, + // exp: &rangeAggregationExpr{ + // left: &logRange{ + // left: &matchersExpr{matchers: 
[]*labels.Matcher{mustNewMatcher(labels.MatchNotRegexp, "foo", "bar")}}, + // interval: 12 * time.Minute, + // }, + // operation: "count_over_time", + // }, + // }, + // { + // in: `bytes_over_time({ foo !~ "bar" }[12m])`, + // exp: &rangeAggregationExpr{ + // left: &logRange{ + // left: &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchNotRegexp, "foo", "bar")}}, + // interval: 12 * time.Minute, + // }, + // operation: OpRangeTypeBytes, + // }, + // }, + // { + // in: `bytes_rate({ foo !~ "bar" }[12m])`, + // exp: &rangeAggregationExpr{ + // left: &logRange{ + // left: &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchNotRegexp, "foo", "bar")}}, + // interval: 12 * time.Minute, + // }, + // operation: OpRangeTypeBytesRate, + // }, + // }, + // { + // in: `rate({ foo !~ "bar" }[5h])`, + // exp: &rangeAggregationExpr{ + // left: &logRange{ + // left: &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchNotRegexp, "foo", "bar")}}, + // interval: 5 * time.Hour, + // }, + // operation: "rate", + // }, + // }, + // { + // in: `rate({ foo !~ "bar" }[5d])`, + // exp: &rangeAggregationExpr{ + // left: &logRange{ + // left: &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchNotRegexp, "foo", "bar")}}, + // interval: 5 * 24 * time.Hour, + // }, + // operation: "rate", + // }, + // }, + // { + // in: `count_over_time({ foo !~ "bar" }[1w])`, + // exp: &rangeAggregationExpr{ + // left: &logRange{ + // left: &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchNotRegexp, "foo", "bar")}}, + // interval: 7 * 24 * time.Hour, + // }, + // operation: "count_over_time", + // }, + // }, + // { + // in: `sum(rate({ foo !~ "bar" }[5h]))`, + // exp: mustNewVectorAggregationExpr(&rangeAggregationExpr{ + // left: &logRange{ + // left: &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchNotRegexp, "foo", "bar")}}, + // interval: 5 * time.Hour, + // }, + // operation: "rate", + // }, "sum", nil, nil), + // }, + // { + // in: `sum(rate({ foo !~ "bar" }[1y]))`, + // exp: mustNewVectorAggregationExpr(&rangeAggregationExpr{ + // left: &logRange{ + // left: &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchNotRegexp, "foo", "bar")}}, + // interval: 365 * 24 * time.Hour, + // }, + // operation: "rate", + // }, "sum", nil, nil), + // }, + // { + // in: `avg(count_over_time({ foo !~ "bar" }[5h])) by (bar,foo)`, + // exp: mustNewVectorAggregationExpr(&rangeAggregationExpr{ + // left: &logRange{ + // left: &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchNotRegexp, "foo", "bar")}}, + // interval: 5 * time.Hour, + // }, + // operation: "count_over_time", + // }, "avg", &grouping{ + // without: false, + // groups: []string{"bar", "foo"}, + // }, nil), + // }, + // { + // in: `max without (bar) (count_over_time({ foo !~ "bar" }[5h]))`, + // exp: mustNewVectorAggregationExpr(&rangeAggregationExpr{ + // left: &logRange{ + // left: &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchNotRegexp, "foo", "bar")}}, + // interval: 5 * time.Hour, + // }, + // operation: "count_over_time", + // }, "max", &grouping{ + // without: true, + // groups: []string{"bar"}, + // }, nil), + // }, + // { + // in: `topk(10,count_over_time({ foo !~ "bar" }[5h])) without (bar)`, + // exp: mustNewVectorAggregationExpr(&rangeAggregationExpr{ + // left: &logRange{ + // left: &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchNotRegexp, "foo", "bar")}}, + // interval: 5 * time.Hour, 
+ // }, + // operation: "count_over_time", + // }, "topk", &grouping{ + // without: true, + // groups: []string{"bar"}, + // }, newString("10")), + // }, + // { + // in: `bottomk(30 ,sum(rate({ foo !~ "bar" }[5h])) by (foo))`, + // exp: mustNewVectorAggregationExpr(mustNewVectorAggregationExpr(&rangeAggregationExpr{ + // left: &logRange{ + // left: &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchNotRegexp, "foo", "bar")}}, + // interval: 5 * time.Hour, + // }, + // operation: "rate", + // }, "sum", &grouping{ + // groups: []string{"foo"}, + // without: false, + // }, nil), "bottomk", nil, + // newString("30")), + // }, + // { + // in: `max( sum(count_over_time({ foo !~ "bar" }[5h])) without (foo,bar) ) by (foo)`, + // exp: mustNewVectorAggregationExpr(mustNewVectorAggregationExpr(&rangeAggregationExpr{ + // left: &logRange{ + // left: &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchNotRegexp, "foo", "bar")}}, + // interval: 5 * time.Hour, + // }, + // operation: "count_over_time", + // }, "sum", &grouping{ + // groups: []string{"foo", "bar"}, + // without: true, + // }, nil), "max", &grouping{ + // groups: []string{"foo"}, + // without: false, + // }, nil), + // }, + // { + // in: `unk({ foo !~ "bar" }[5m])`, + // err: ParseError{ + // msg: "syntax error: unexpected IDENTIFIER", + // line: 1, + // col: 1, + // }, + // }, + // { + // in: `rate({ foo !~ "bar" }[5minutes])`, + // err: ParseError{ + // msg: `not a valid duration string: "5minutes"`, + // line: 0, + // col: 22, + // }, + // }, + // { + // in: `rate({ foo !~ "bar" }[5)`, + // err: ParseError{ + // msg: "missing closing ']' in duration", + // line: 0, + // col: 22, + // }, + // }, + // { + // in: `min({ foo !~ "bar" }[5m])`, + // err: ParseError{ + // msg: "syntax error: unexpected DURATION", + // line: 0, + // col: 21, + // }, + // }, + // { + // in: `sum(3 ,count_over_time({ foo !~ "bar" }[5h]))`, + // err: ParseError{ + // msg: "unsupported parameter for operation sum(3,", + // line: 0, + // col: 0, + // }, + // }, + // { + // in: `topk(count_over_time({ foo !~ "bar" }[5h]))`, + // err: ParseError{ + // msg: "parameter required for operation topk", + // line: 0, + // col: 0, + // }, + // }, + // { + // in: `bottomk(he,count_over_time({ foo !~ "bar" }[5h]))`, + // err: ParseError{ + // msg: "syntax error: unexpected IDENTIFIER", + // line: 1, + // col: 9, + // }, + // }, + // { + // in: `bottomk(1.2,count_over_time({ foo !~ "bar" }[5h]))`, + // err: ParseError{ + // msg: "invalid parameter bottomk(1.2,", + // line: 0, + // col: 0, + // }, + // }, + // { + // in: `stddev({ foo !~ "bar" })`, + // err: ParseError{ + // msg: "syntax error: unexpected )", + // line: 1, + // col: 24, + // }, + // }, + // { + // in: `{ foo = "bar", bar != "baz" }`, + // exp: &matchersExpr{matchers: []*labels.Matcher{ + // mustNewMatcher(labels.MatchEqual, "foo", "bar"), + // mustNewMatcher(labels.MatchNotEqual, "bar", "baz"), + // }}, + // }, + // { + // in: `{foo="bar"} |= "baz"`, + // exp: &filterExpr{ + // left: &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}}, + // ty: labels.MatchEqual, + // match: "baz", + // }, + // }, + // { + // in: `{foo="bar"} |= "baz" |~ "blip" != "flip" !~ "flap"`, + // exp: &filterExpr{ + // left: &filterExpr{ + // left: &filterExpr{ + // left: &filterExpr{ + // left: &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}}, + // ty: labels.MatchEqual, + // match: "baz", + // }, + // ty: 
labels.MatchRegexp, + // match: "blip", + // }, + // ty: labels.MatchNotEqual, + // match: "flip", + // }, + // ty: labels.MatchNotRegexp, + // match: "flap", + // }, + // }, + // { + // in: `count_over_time(({foo="bar"} |= "baz" |~ "blip" != "flip" !~ "flap")[5m])`, + // exp: newRangeAggregationExpr( + // &logRange{ + // left: &filterExpr{ + // left: &filterExpr{ + // left: &filterExpr{ + // left: &filterExpr{ + // left: &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}}, + // ty: labels.MatchEqual, + // match: "baz", + // }, + // ty: labels.MatchRegexp, + // match: "blip", + // }, + // ty: labels.MatchNotEqual, + // match: "flip", + // }, + // ty: labels.MatchNotRegexp, + // match: "flap", + // }, + // interval: 5 * time.Minute, + // }, OpRangeTypeCount), + // }, + // { + // in: `bytes_over_time(({foo="bar"} |= "baz" |~ "blip" != "flip" !~ "flap")[5m])`, + // exp: newRangeAggregationExpr( + // &logRange{ + // left: &filterExpr{ + // left: &filterExpr{ + // left: &filterExpr{ + // left: &filterExpr{ + // left: &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}}, + // ty: labels.MatchEqual, + // match: "baz", + // }, + // ty: labels.MatchRegexp, + // match: "blip", + // }, + // ty: labels.MatchNotEqual, + // match: "flip", + // }, + // ty: labels.MatchNotRegexp, + // match: "flap", + // }, + // interval: 5 * time.Minute, + // }, OpRangeTypeBytes), + // }, + // { + // in: `sum(count_over_time(({foo="bar"} |= "baz" |~ "blip" != "flip" !~ "flap")[5m])) by (foo)`, + // exp: mustNewVectorAggregationExpr(newRangeAggregationExpr( + // &logRange{ + // left: &filterExpr{ + // left: &filterExpr{ + // left: &filterExpr{ + // left: &filterExpr{ + // left: &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}}, + // ty: labels.MatchEqual, + // match: "baz", + // }, + // ty: labels.MatchRegexp, + // match: "blip", + // }, + // ty: labels.MatchNotEqual, + // match: "flip", + // }, + // ty: labels.MatchNotRegexp, + // match: "flap", + // }, + // interval: 5 * time.Minute, + // }, OpRangeTypeCount), + // "sum", + // &grouping{ + // without: false, + // groups: []string{"foo"}, + // }, + // nil), + // }, + // { + // in: `sum(bytes_rate(({foo="bar"} |= "baz" |~ "blip" != "flip" !~ "flap")[5m])) by (foo)`, + // exp: mustNewVectorAggregationExpr(newRangeAggregationExpr( + // &logRange{ + // left: &filterExpr{ + // left: &filterExpr{ + // left: &filterExpr{ + // left: &filterExpr{ + // left: &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}}, + // ty: labels.MatchEqual, + // match: "baz", + // }, + // ty: labels.MatchRegexp, + // match: "blip", + // }, + // ty: labels.MatchNotEqual, + // match: "flip", + // }, + // ty: labels.MatchNotRegexp, + // match: "flap", + // }, + // interval: 5 * time.Minute, + // }, OpRangeTypeBytesRate), + // "sum", + // &grouping{ + // without: false, + // groups: []string{"foo"}, + // }, + // nil), + // }, + // { + // in: `topk(5,count_over_time(({foo="bar"} |= "baz" |~ "blip" != "flip" !~ "flap")[5m])) without (foo)`, + // exp: mustNewVectorAggregationExpr(newRangeAggregationExpr( + // &logRange{ + // left: &filterExpr{ + // left: &filterExpr{ + // left: &filterExpr{ + // left: &filterExpr{ + // left: &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}}, + // ty: labels.MatchEqual, + // match: "baz", + // }, + // ty: labels.MatchRegexp, + // match: "blip", + // }, + // ty: 
labels.MatchNotEqual, + // match: "flip", + // }, + // ty: labels.MatchNotRegexp, + // match: "flap", + // }, + // interval: 5 * time.Minute, + // }, OpRangeTypeCount), + // "topk", + // &grouping{ + // without: true, + // groups: []string{"foo"}, + // }, + // newString("5")), + // }, + // { + // in: `topk(5,sum(rate(({foo="bar"} |= "baz" |~ "blip" != "flip" !~ "flap")[5m])) by (app))`, + // exp: mustNewVectorAggregationExpr( + // mustNewVectorAggregationExpr( + // newRangeAggregationExpr( + // &logRange{ + // left: &filterExpr{ + // left: &filterExpr{ + // left: &filterExpr{ + // left: &filterExpr{ + // left: &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}}, + // ty: labels.MatchEqual, + // match: "baz", + // }, + // ty: labels.MatchRegexp, + // match: "blip", + // }, + // ty: labels.MatchNotEqual, + // match: "flip", + // }, + // ty: labels.MatchNotRegexp, + // match: "flap", + // }, + // interval: 5 * time.Minute, + // }, OpRangeTypeRate), + // "sum", + // &grouping{ + // without: false, + // groups: []string{"app"}, + // }, + // nil), + // "topk", + // nil, + // newString("5")), + // }, + // { + // in: `count_over_time({foo="bar"}[5m] |= "baz" |~ "blip" != "flip" !~ "flap")`, + // exp: newRangeAggregationExpr( + // &logRange{ + // left: &filterExpr{ + // left: &filterExpr{ + // left: &filterExpr{ + // left: &filterExpr{ + // left: &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}}, + // ty: labels.MatchEqual, + // match: "baz", + // }, + // ty: labels.MatchRegexp, + // match: "blip", + // }, + // ty: labels.MatchNotEqual, + // match: "flip", + // }, + // ty: labels.MatchNotRegexp, + // match: "flap", + // }, + // interval: 5 * time.Minute, + // }, OpRangeTypeCount), + // }, + // { + // in: `sum(count_over_time({foo="bar"}[5m] |= "baz" |~ "blip" != "flip" !~ "flap")) by (foo)`, + // exp: mustNewVectorAggregationExpr(newRangeAggregationExpr( + // &logRange{ + // left: &filterExpr{ + // left: &filterExpr{ + // left: &filterExpr{ + // left: &filterExpr{ + // left: &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}}, + // ty: labels.MatchEqual, + // match: "baz", + // }, + // ty: labels.MatchRegexp, + // match: "blip", + // }, + // ty: labels.MatchNotEqual, + // match: "flip", + // }, + // ty: labels.MatchNotRegexp, + // match: "flap", + // }, + // interval: 5 * time.Minute, + // }, OpRangeTypeCount), + // "sum", + // &grouping{ + // without: false, + // groups: []string{"foo"}, + // }, + // nil), + // }, + // { + // in: `topk(5,count_over_time({foo="bar"}[5m] |= "baz" |~ "blip" != "flip" !~ "flap")) without (foo)`, + // exp: mustNewVectorAggregationExpr(newRangeAggregationExpr( + // &logRange{ + // left: &filterExpr{ + // left: &filterExpr{ + // left: &filterExpr{ + // left: &filterExpr{ + // left: &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}}, + // ty: labels.MatchEqual, + // match: "baz", + // }, + // ty: labels.MatchRegexp, + // match: "blip", + // }, + // ty: labels.MatchNotEqual, + // match: "flip", + // }, + // ty: labels.MatchNotRegexp, + // match: "flap", + // }, + // interval: 5 * time.Minute, + // }, OpRangeTypeCount), + // "topk", + // &grouping{ + // without: true, + // groups: []string{"foo"}, + // }, + // newString("5")), + // }, + // { + // in: `topk(5,sum(rate({foo="bar"}[5m] |= "baz" |~ "blip" != "flip" !~ "flap")) by (app))`, + // exp: mustNewVectorAggregationExpr( + // mustNewVectorAggregationExpr( + // 
newRangeAggregationExpr( + // &logRange{ + // left: &filterExpr{ + // left: &filterExpr{ + // left: &filterExpr{ + // left: &filterExpr{ + // left: &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}}, + // ty: labels.MatchEqual, + // match: "baz", + // }, + // ty: labels.MatchRegexp, + // match: "blip", + // }, + // ty: labels.MatchNotEqual, + // match: "flip", + // }, + // ty: labels.MatchNotRegexp, + // match: "flap", + // }, + // interval: 5 * time.Minute, + // }, OpRangeTypeRate), + // "sum", + // &grouping{ + // without: false, + // groups: []string{"app"}, + // }, + // nil), + // "topk", + // nil, + // newString("5")), + // }, + // { + // in: `{foo="bar}`, + // err: ParseError{ + // msg: "literal not terminated", + // line: 1, + // col: 6, + // }, + // }, + // { + // in: `{foo="bar"`, + // err: ParseError{ + // msg: "syntax error: unexpected $end, expecting } or ,", + // line: 1, + // col: 11, + // }, + // }, - { - in: `{foo="bar"} |~`, - err: ParseError{ - msg: "syntax error: unexpected $end, expecting STRING", - line: 1, - col: 15, - }, - }, + // { + // in: `{foo="bar"} |~`, + // err: ParseError{ + // msg: "syntax error: unexpected $end, expecting STRING", + // line: 1, + // col: 15, + // }, + // }, - { - in: `{foo="bar"} "foo"`, - err: ParseError{ - msg: "syntax error: unexpected STRING", - line: 1, - col: 13, - }, - }, - { - in: `{foo="bar"} foo`, - err: ParseError{ - msg: "syntax error: unexpected IDENTIFIER", - line: 1, - col: 13, - }, - }, - { - // require left associativity - in: ` - sum(count_over_time({foo="bar"}[5m])) by (foo) / - sum(count_over_time({foo="bar"}[5m])) by (foo) / - sum(count_over_time({foo="bar"}[5m])) by (foo) - `, - exp: mustNewBinOpExpr( - OpTypeDiv, - BinOpOptions{}, - mustNewBinOpExpr( - OpTypeDiv, - BinOpOptions{}, - mustNewVectorAggregationExpr(newRangeAggregationExpr( - &logRange{ - left: &matchersExpr{ - matchers: []*labels.Matcher{ - mustNewMatcher(labels.MatchEqual, "foo", "bar"), - }, - }, - interval: 5 * time.Minute, - }, OpRangeTypeCount), - "sum", - &grouping{ - without: false, - groups: []string{"foo"}, - }, - nil, - ), - mustNewVectorAggregationExpr(newRangeAggregationExpr( - &logRange{ - left: &matchersExpr{ - matchers: []*labels.Matcher{ - mustNewMatcher(labels.MatchEqual, "foo", "bar"), - }, - }, - interval: 5 * time.Minute, - }, OpRangeTypeCount), - "sum", - &grouping{ - without: false, - groups: []string{"foo"}, - }, - nil, - ), - ), - mustNewVectorAggregationExpr(newRangeAggregationExpr( - &logRange{ - left: &matchersExpr{ - matchers: []*labels.Matcher{ - mustNewMatcher(labels.MatchEqual, "foo", "bar"), - }, - }, - interval: 5 * time.Minute, - }, OpRangeTypeCount), - "sum", - &grouping{ - without: false, - groups: []string{"foo"}, - }, - nil, - ), - ), - }, - { - in: ` - sum(count_over_time({foo="bar"}[5m])) by (foo) ^ - sum(count_over_time({foo="bar"}[5m])) by (foo) / - sum(count_over_time({foo="bar"}[5m])) by (foo) - `, - exp: mustNewBinOpExpr( - OpTypeDiv, - BinOpOptions{}, - mustNewBinOpExpr( - OpTypePow, - BinOpOptions{}, - mustNewVectorAggregationExpr(newRangeAggregationExpr( - &logRange{ - left: &matchersExpr{ - matchers: []*labels.Matcher{ - mustNewMatcher(labels.MatchEqual, "foo", "bar"), - }, - }, - interval: 5 * time.Minute, - }, OpRangeTypeCount), - "sum", - &grouping{ - without: false, - groups: []string{"foo"}, - }, - nil, - ), - mustNewVectorAggregationExpr(newRangeAggregationExpr( - &logRange{ - left: &matchersExpr{ - matchers: []*labels.Matcher{ - 
mustNewMatcher(labels.MatchEqual, "foo", "bar"), - }, - }, - interval: 5 * time.Minute, - }, OpRangeTypeCount), - "sum", - &grouping{ - without: false, - groups: []string{"foo"}, - }, - nil, - ), - ), - mustNewVectorAggregationExpr(newRangeAggregationExpr( - &logRange{ - left: &matchersExpr{ - matchers: []*labels.Matcher{ - mustNewMatcher(labels.MatchEqual, "foo", "bar"), - }, - }, - interval: 5 * time.Minute, - }, OpRangeTypeCount), - "sum", - &grouping{ - without: false, - groups: []string{"foo"}, - }, - nil, - ), - ), - }, - { - // operator precedence before left associativity - in: ` - sum(count_over_time({foo="bar"}[5m])) by (foo) + - sum(count_over_time({foo="bar"}[5m])) by (foo) / - sum(count_over_time({foo="bar"}[5m])) by (foo) - `, - exp: mustNewBinOpExpr( - OpTypeAdd, - BinOpOptions{}, - mustNewVectorAggregationExpr(newRangeAggregationExpr( - &logRange{ - left: &matchersExpr{ - matchers: []*labels.Matcher{ - mustNewMatcher(labels.MatchEqual, "foo", "bar"), - }, - }, - interval: 5 * time.Minute, - }, OpRangeTypeCount), - "sum", - &grouping{ - without: false, - groups: []string{"foo"}, - }, - nil, - ), - mustNewBinOpExpr( - OpTypeDiv, - BinOpOptions{}, - mustNewVectorAggregationExpr(newRangeAggregationExpr( - &logRange{ - left: &matchersExpr{ - matchers: []*labels.Matcher{ - mustNewMatcher(labels.MatchEqual, "foo", "bar"), - }, - }, - interval: 5 * time.Minute, - }, OpRangeTypeCount), - "sum", - &grouping{ - without: false, - groups: []string{"foo"}, - }, - nil, - ), - mustNewVectorAggregationExpr(newRangeAggregationExpr( - &logRange{ - left: &matchersExpr{ - matchers: []*labels.Matcher{ - mustNewMatcher(labels.MatchEqual, "foo", "bar"), - }, - }, - interval: 5 * time.Minute, - }, OpRangeTypeCount), - "sum", - &grouping{ - without: false, - groups: []string{"foo"}, - }, - nil, - ), - ), - ), - }, - { - in: `sum by (job) ( - count_over_time({namespace="tns"} |= "level=error"[5m]) - / - count_over_time({namespace="tns"}[5m]) - )`, - exp: mustNewVectorAggregationExpr( - mustNewBinOpExpr(OpTypeDiv, - BinOpOptions{}, - newRangeAggregationExpr( - &logRange{ - left: &filterExpr{ - left: &matchersExpr{ - matchers: []*labels.Matcher{ - mustNewMatcher(labels.MatchEqual, "namespace", "tns"), - }, - }, - match: "level=error", - ty: labels.MatchEqual, - }, - interval: 5 * time.Minute, - }, OpRangeTypeCount), - newRangeAggregationExpr( - &logRange{ - left: &matchersExpr{ - matchers: []*labels.Matcher{ - mustNewMatcher(labels.MatchEqual, "namespace", "tns"), - }, - }, - interval: 5 * time.Minute, - }, OpRangeTypeCount)), OpTypeSum, &grouping{groups: []string{"job"}}, nil), - }, - { - in: `sum by (job) ( - count_over_time({namespace="tns"} |= "level=error"[5m]) - / - count_over_time({namespace="tns"}[5m]) - ) * 100`, - exp: mustNewBinOpExpr(OpTypeMul, BinOpOptions{}, mustNewVectorAggregationExpr( - mustNewBinOpExpr(OpTypeDiv, - BinOpOptions{}, - newRangeAggregationExpr( - &logRange{ - left: &filterExpr{ - left: &matchersExpr{ - matchers: []*labels.Matcher{ - mustNewMatcher(labels.MatchEqual, "namespace", "tns"), - }, - }, - match: "level=error", - ty: labels.MatchEqual, - }, - interval: 5 * time.Minute, - }, OpRangeTypeCount), - newRangeAggregationExpr( - &logRange{ - left: &matchersExpr{ - matchers: []*labels.Matcher{ - mustNewMatcher(labels.MatchEqual, "namespace", "tns"), - }, - }, - interval: 5 * time.Minute, - }, OpRangeTypeCount)), OpTypeSum, &grouping{groups: []string{"job"}}, nil), - mustNewLiteralExpr("100", false), - ), - }, + // { + // in: `{foo="bar"} "foo"`, + // err: ParseError{ + // 
msg: "syntax error: unexpected STRING", + // line: 1, + // col: 13, + // }, + // }, + // { + // in: `{foo="bar"} foo`, + // err: ParseError{ + // msg: "syntax error: unexpected IDENTIFIER", + // line: 1, + // col: 13, + // }, + // }, + // { + // // require left associativity + // in: ` + // sum(count_over_time({foo="bar"}[5m])) by (foo) / + // sum(count_over_time({foo="bar"}[5m])) by (foo) / + // sum(count_over_time({foo="bar"}[5m])) by (foo) + // `, + // exp: mustNewBinOpExpr( + // OpTypeDiv, + // BinOpOptions{}, + // mustNewBinOpExpr( + // OpTypeDiv, + // BinOpOptions{}, + // mustNewVectorAggregationExpr(newRangeAggregationExpr( + // &logRange{ + // left: &matchersExpr{ + // matchers: []*labels.Matcher{ + // mustNewMatcher(labels.MatchEqual, "foo", "bar"), + // }, + // }, + // interval: 5 * time.Minute, + // }, OpRangeTypeCount), + // "sum", + // &grouping{ + // without: false, + // groups: []string{"foo"}, + // }, + // nil, + // ), + // mustNewVectorAggregationExpr(newRangeAggregationExpr( + // &logRange{ + // left: &matchersExpr{ + // matchers: []*labels.Matcher{ + // mustNewMatcher(labels.MatchEqual, "foo", "bar"), + // }, + // }, + // interval: 5 * time.Minute, + // }, OpRangeTypeCount), + // "sum", + // &grouping{ + // without: false, + // groups: []string{"foo"}, + // }, + // nil, + // ), + // ), + // mustNewVectorAggregationExpr(newRangeAggregationExpr( + // &logRange{ + // left: &matchersExpr{ + // matchers: []*labels.Matcher{ + // mustNewMatcher(labels.MatchEqual, "foo", "bar"), + // }, + // }, + // interval: 5 * time.Minute, + // }, OpRangeTypeCount), + // "sum", + // &grouping{ + // without: false, + // groups: []string{"foo"}, + // }, + // nil, + // ), + // ), + // }, + // { + // in: ` + // sum(count_over_time({foo="bar"}[5m])) by (foo) ^ + // sum(count_over_time({foo="bar"}[5m])) by (foo) / + // sum(count_over_time({foo="bar"}[5m])) by (foo) + // `, + // exp: mustNewBinOpExpr( + // OpTypeDiv, + // BinOpOptions{}, + // mustNewBinOpExpr( + // OpTypePow, + // BinOpOptions{}, + // mustNewVectorAggregationExpr(newRangeAggregationExpr( + // &logRange{ + // left: &matchersExpr{ + // matchers: []*labels.Matcher{ + // mustNewMatcher(labels.MatchEqual, "foo", "bar"), + // }, + // }, + // interval: 5 * time.Minute, + // }, OpRangeTypeCount), + // "sum", + // &grouping{ + // without: false, + // groups: []string{"foo"}, + // }, + // nil, + // ), + // mustNewVectorAggregationExpr(newRangeAggregationExpr( + // &logRange{ + // left: &matchersExpr{ + // matchers: []*labels.Matcher{ + // mustNewMatcher(labels.MatchEqual, "foo", "bar"), + // }, + // }, + // interval: 5 * time.Minute, + // }, OpRangeTypeCount), + // "sum", + // &grouping{ + // without: false, + // groups: []string{"foo"}, + // }, + // nil, + // ), + // ), + // mustNewVectorAggregationExpr(newRangeAggregationExpr( + // &logRange{ + // left: &matchersExpr{ + // matchers: []*labels.Matcher{ + // mustNewMatcher(labels.MatchEqual, "foo", "bar"), + // }, + // }, + // interval: 5 * time.Minute, + // }, OpRangeTypeCount), + // "sum", + // &grouping{ + // without: false, + // groups: []string{"foo"}, + // }, + // nil, + // ), + // ), + // }, + // { + // // operator precedence before left associativity + // in: ` + // sum(count_over_time({foo="bar"}[5m])) by (foo) + + // sum(count_over_time({foo="bar"}[5m])) by (foo) / + // sum(count_over_time({foo="bar"}[5m])) by (foo) + // `, + // exp: mustNewBinOpExpr( + // OpTypeAdd, + // BinOpOptions{}, + // mustNewVectorAggregationExpr(newRangeAggregationExpr( + // &logRange{ + // left: 
&matchersExpr{ + // matchers: []*labels.Matcher{ + // mustNewMatcher(labels.MatchEqual, "foo", "bar"), + // }, + // }, + // interval: 5 * time.Minute, + // }, OpRangeTypeCount), + // "sum", + // &grouping{ + // without: false, + // groups: []string{"foo"}, + // }, + // nil, + // ), + // mustNewBinOpExpr( + // OpTypeDiv, + // BinOpOptions{}, + // mustNewVectorAggregationExpr(newRangeAggregationExpr( + // &logRange{ + // left: &matchersExpr{ + // matchers: []*labels.Matcher{ + // mustNewMatcher(labels.MatchEqual, "foo", "bar"), + // }, + // }, + // interval: 5 * time.Minute, + // }, OpRangeTypeCount), + // "sum", + // &grouping{ + // without: false, + // groups: []string{"foo"}, + // }, + // nil, + // ), + // mustNewVectorAggregationExpr(newRangeAggregationExpr( + // &logRange{ + // left: &matchersExpr{ + // matchers: []*labels.Matcher{ + // mustNewMatcher(labels.MatchEqual, "foo", "bar"), + // }, + // }, + // interval: 5 * time.Minute, + // }, OpRangeTypeCount), + // "sum", + // &grouping{ + // without: false, + // groups: []string{"foo"}, + // }, + // nil, + // ), + // ), + // ), + // }, + // { + // in: `sum by (job) ( + // count_over_time({namespace="tns"} |= "level=error"[5m]) + // / + // count_over_time({namespace="tns"}[5m]) + // )`, + // exp: mustNewVectorAggregationExpr( + // mustNewBinOpExpr(OpTypeDiv, + // BinOpOptions{}, + // newRangeAggregationExpr( + // &logRange{ + // left: &filterExpr{ + // left: &matchersExpr{ + // matchers: []*labels.Matcher{ + // mustNewMatcher(labels.MatchEqual, "namespace", "tns"), + // }, + // }, + // match: "level=error", + // ty: labels.MatchEqual, + // }, + // interval: 5 * time.Minute, + // }, OpRangeTypeCount), + // newRangeAggregationExpr( + // &logRange{ + // left: &matchersExpr{ + // matchers: []*labels.Matcher{ + // mustNewMatcher(labels.MatchEqual, "namespace", "tns"), + // }, + // }, + // interval: 5 * time.Minute, + // }, OpRangeTypeCount)), OpTypeSum, &grouping{groups: []string{"job"}}, nil), + // }, + // { + // in: `sum by (job) ( + // count_over_time({namespace="tns"} |= "level=error"[5m]) + // / + // count_over_time({namespace="tns"}[5m]) + // ) * 100`, + // exp: mustNewBinOpExpr(OpTypeMul, BinOpOptions{}, mustNewVectorAggregationExpr( + // mustNewBinOpExpr(OpTypeDiv, + // BinOpOptions{}, + // newRangeAggregationExpr( + // &logRange{ + // left: &filterExpr{ + // left: &matchersExpr{ + // matchers: []*labels.Matcher{ + // mustNewMatcher(labels.MatchEqual, "namespace", "tns"), + // }, + // }, + // match: "level=error", + // ty: labels.MatchEqual, + // }, + // interval: 5 * time.Minute, + // }, OpRangeTypeCount), + // newRangeAggregationExpr( + // &logRange{ + // left: &matchersExpr{ + // matchers: []*labels.Matcher{ + // mustNewMatcher(labels.MatchEqual, "namespace", "tns"), + // }, + // }, + // interval: 5 * time.Minute, + // }, OpRangeTypeCount)), OpTypeSum, &grouping{groups: []string{"job"}}, nil), + // mustNewLiteralExpr("100", false), + // ), + // }, { // reduces binop with two literalExprs in: `sum(count_over_time({foo="bar"}[5m])) by (foo) + 1 / 2`, @@ -990,6 +991,11 @@ func TestParse(t *testing.T) { &literalExpr{value: -1}, ), }, + { + // test signs/ops with equal associativity + in: `{app="foo"} |= "bar" | json | latency >= 250`, + exp: nil, + }, { // ensure binary ops with two literals are reduced recursively in: `1 + 1 + 1`, diff --git a/pkg/logql/shardmapper.go b/pkg/logql/shardmapper.go index e88ad2f8361aa..04a574223ed6a 100644 --- a/pkg/logql/shardmapper.go +++ b/pkg/logql/shardmapper.go @@ -129,8 +129,9 @@ func (m 
ShardMapper) Map(expr Expr, r *shardRecorder) (Expr, error) { switch e := expr.(type) { case *literalExpr: return e, nil - case *matchersExpr, *filterExpr: - return m.mapLogSelectorExpr(e.(LogSelectorExpr), r), nil + //todo(cyriltovena) enable sharding on logqlv2 + // case *matchersExpr, *filterExpr: + // return m.mapLogSelectorExpr(e.(LogSelectorExpr), r), nil case *vectorAggregationExpr: return m.mapVectorAggregationExpr(e, r) case *rangeAggregationExpr: diff --git a/pkg/logql/shardmapper_test.go b/pkg/logql/shardmapper_test.go index 84918da81bb72..48cc1484d011a 100644 --- a/pkg/logql/shardmapper_test.go +++ b/pkg/logql/shardmapper_test.go @@ -207,44 +207,45 @@ func TestMapping(t *testing.T) { }, }, }, - { - in: `{foo="bar"} |= "error"`, - expr: &ConcatLogSelectorExpr{ - DownstreamLogSelectorExpr: DownstreamLogSelectorExpr{ - shard: &astmapper.ShardAnnotation{ - Shard: 0, - Of: 2, - }, - LogSelectorExpr: &filterExpr{ - match: "error", - ty: labels.MatchEqual, - left: &matchersExpr{ - matchers: []*labels.Matcher{ - mustNewMatcher(labels.MatchEqual, "foo", "bar"), - }, - }, - }, - }, - next: &ConcatLogSelectorExpr{ - DownstreamLogSelectorExpr: DownstreamLogSelectorExpr{ - shard: &astmapper.ShardAnnotation{ - Shard: 1, - Of: 2, - }, - LogSelectorExpr: &filterExpr{ - match: "error", - ty: labels.MatchEqual, - left: &matchersExpr{ - matchers: []*labels.Matcher{ - mustNewMatcher(labels.MatchEqual, "foo", "bar"), - }, - }, - }, - }, - next: nil, - }, - }, - }, + // todo(cyriltovena) fix + // { + // in: `{foo="bar"} |= "error"`, + // expr: &ConcatLogSelectorExpr{ + // DownstreamLogSelectorExpr: DownstreamLogSelectorExpr{ + // shard: &astmapper.ShardAnnotation{ + // Shard: 0, + // Of: 2, + // }, + // LogSelectorExpr: &filterExpr{ + // match: "error", + // ty: labels.MatchEqual, + // left: &matchersExpr{ + // matchers: []*labels.Matcher{ + // mustNewMatcher(labels.MatchEqual, "foo", "bar"), + // }, + // }, + // }, + // }, + // next: &ConcatLogSelectorExpr{ + // DownstreamLogSelectorExpr: DownstreamLogSelectorExpr{ + // shard: &astmapper.ShardAnnotation{ + // Shard: 1, + // Of: 2, + // }, + // LogSelectorExpr: &filterExpr{ + // match: "error", + // ty: labels.MatchEqual, + // left: &matchersExpr{ + // matchers: []*labels.Matcher{ + // mustNewMatcher(labels.MatchEqual, "foo", "bar"), + // }, + // }, + // }, + // }, + // next: nil, + // }, + // }, + // }, { in: `rate({foo="bar"}[5m])`, expr: &ConcatSampleExpr{ diff --git a/pkg/logql/test_utils.go b/pkg/logql/test_utils.go index 35b2eeb333428..68010e724b49e 100644 --- a/pkg/logql/test_utils.go +++ b/pkg/logql/test_utils.go @@ -6,7 +6,6 @@ import ( "log" "time" - "github.com/cespare/xxhash/v2" "github.com/cortexproject/cortex/pkg/querier/astmapper" "github.com/prometheus/prometheus/pkg/labels" "github.com/prometheus/prometheus/promql/parser" @@ -33,7 +32,7 @@ func (q MockQuerier) SelectLogs(ctx context.Context, req SelectLogParams) (iter. 
if err != nil { return nil, err } - filter, err := expr.Filter() + pipeline, err := expr.Pipeline() if err != nil { return nil, err } @@ -69,27 +68,7 @@ outer: } // apply the LineFilter - filtered := make([]logproto.Stream, 0, len(matched)) - if filter == nil || filter == TrueFilter { - filtered = matched - } else { - for _, s := range matched { - var entries []logproto.Entry - for _, entry := range s.Entries { - if filter.Filter([]byte(entry.Line)) { - entries = append(entries, entry) - } - } - - if len(entries) > 0 { - filtered = append(filtered, logproto.Stream{ - Labels: s.Labels, - Entries: entries, - }) - } - } - - } + filtered := processStream(matched, pipeline) streamIters := make([]iter.EntryIterator, 0, len(filtered)) for i := range filtered { @@ -110,12 +89,22 @@ outer: return iter.NewHeapIterator(ctx, streamIters, req.Direction), nil } +func processStream(in []logproto.Stream, pipeline Pipeline) []logproto.Stream { + // todo(cyriltovena) + return in +} + +func processSeries(in []logproto.Stream, pipeline Pipeline, ex SampleExtractor) []logproto.Series { + // todo(cyriltovena) + return nil +} + func (q MockQuerier) SelectSamples(ctx context.Context, req SelectSampleParams) (iter.SampleIterator, error) { selector, err := req.LogSelector() if err != nil { return nil, err } - filter, err := selector.Filter() + pipeline, err := selector.Pipeline() if err != nil { return nil, err } @@ -160,30 +149,31 @@ outer: } // apply the LineFilter - filtered := make([]logproto.Series, 0, len(matched)) - for _, s := range matched { - var samples []logproto.Sample - for _, entry := range s.Entries { - if filter == nil || filter.Filter([]byte(entry.Line)) { - v, ok := extractor.Extract([]byte(entry.Line)) - if !ok { - continue - } - samples = append(samples, logproto.Sample{ - Timestamp: entry.Timestamp.UnixNano(), - Value: v, - Hash: xxhash.Sum64([]byte(entry.Line)), - }) - } - } - - if len(samples) > 0 { - filtered = append(filtered, logproto.Series{ - Labels: s.Labels, - Samples: samples, - }) - } - } + filtered := processSeries(matched, pipeline, extractor) + // for _, s := range matched { + // var samples []logproto.Sample + // for _, entry := range s.Entries { + // // todo(cyriltovena) + // // if filter == nil || filter.Filter([]byte(entry.Line)) { + // v, ok := extractor.Extract([]byte(entry.Line)) + // if !ok { + // continue + // } + // samples = append(samples, logproto.Sample{ + // Timestamp: entry.Timestamp.UnixNano(), + // Value: v, + // Hash: xxhash.Sum64([]byte(entry.Line)), + // }) + // // } + // } + + // if len(samples) > 0 { + // filtered = append(filtered, logproto.Series{ + // Labels: s.Labels, + // Samples: samples, + // }) + // } + // } return iter.NewTimeRangedSampleIterator( iter.NewMultiSeriesIterator(ctx, filtered), From 89c489ce978a39705a2e43289f51cf5be309bfdb Mon Sep 17 00:00:00 2001 From: Cyril Tovena Date: Tue, 22 Sep 2020 17:06:20 +0200 Subject: [PATCH 08/45] Fixes the lexer for duration and range Signed-off-by: Cyril Tovena --- pkg/logql/ast.go | 4 +- pkg/logql/expr.y | 10 +- pkg/logql/expr.y.go | 430 ++++++++++++++++++++++-------------------- pkg/logql/lex.go | 54 +++++- pkg/logql/lex_test.go | 19 +- 5 files changed, 297 insertions(+), 220 deletions(-) diff --git a/pkg/logql/ast.go b/pkg/logql/ast.go index 43831b7e6da25..d320352cfd084 100644 --- a/pkg/logql/ast.go +++ b/pkg/logql/ast.go @@ -230,7 +230,9 @@ func newLineFilterExpr(left *lineFilterExpr, ty labels.MatchType, match string) func (e *lineFilterExpr) String() string { var sb strings.Builder - 
sb.WriteString(e.left.String()) + if e.left != nil { + sb.WriteString(e.left.String()) + } switch e.ty { case labels.MatchRegexp: sb.WriteString("|~") diff --git a/pkg/logql/expr.y b/pkg/logql/expr.y index 0be94af99f16c..296027d55b5fe 100644 --- a/pkg/logql/expr.y +++ b/pkg/logql/expr.y @@ -67,7 +67,7 @@ import ( %token IDENTIFIER STRING NUMBER -%token DURATION +%token DURATION RANGE %token MATCHERS LABELS EQ RE NRE OPEN_BRACE CLOSE_BRACE OPEN_BRACKET CLOSE_BRACKET COMMA DOT PIPE_MATCH PIPE_EXACT OPEN_PARENTHESIS CLOSE_PARENTHESIS BY WITHOUT COUNT_OVER_TIME RATE SUM AVG MAX MIN COUNT STDDEV STDVAR BOTTOMK TOPK BYTES_OVER_TIME BYTES_RATE BOOL JSON REGEXP LOGFMT PIPE LINE_FMT LABEL_FMT @@ -106,9 +106,9 @@ logExpr: logRangeExpr: - logExpr DURATION { $$ = newLogRange($1, $2) } - | selector DURATION pipelineExpr { $$ = newLogRange(newPipelineExpr(newMatcherExpr($1), $3), $2 ) } - | selector DURATION { $$ = newLogRange(newMatcherExpr($1), $2 ) } + logExpr RANGE { $$ = newLogRange($1, $2) } + | selector RANGE pipelineExpr { $$ = newLogRange(newPipelineExpr(newMatcherExpr($1), $3), $2 ) } + | selector RANGE { $$ = newLogRange(newMatcherExpr($1), $2 ) } | OPEN_PARENTHESIS logRangeExpr CLOSE_PARENTHESIS { $$ = $2 } | logRangeExpr error ; @@ -200,6 +200,7 @@ durationFilter: | IDENTIFIER LTE DURATION { $$ = labelfilter.NewDuration(labelfilter.FilterLesserThanOrEqual, $1, $3) } | IDENTIFIER NEQ DURATION { $$ = labelfilter.NewDuration(labelfilter.FilterNotEqual, $1, $3) } | IDENTIFIER EQ DURATION { $$ = labelfilter.NewDuration(labelfilter.FilterEqual, $1, $3) } + | IDENTIFIER CMP_EQ DURATION { $$ = labelfilter.NewDuration(labelfilter.FilterEqual, $1, $3) } ; numberFilter: @@ -209,6 +210,7 @@ numberFilter: | IDENTIFIER LTE NUMBER { $$ = labelfilter.NewNumeric(labelfilter.FilterLesserThanOrEqual, $1, mustNewFloat($3))} | IDENTIFIER NEQ NUMBER { $$ = labelfilter.NewNumeric(labelfilter.FilterNotEqual, $1, mustNewFloat($3))} | IDENTIFIER EQ NUMBER { $$ = labelfilter.NewNumeric(labelfilter.FilterEqual, $1, mustNewFloat($3))} + | IDENTIFIER CMP_EQ NUMBER { $$ = labelfilter.NewNumeric(labelfilter.FilterEqual, $1, mustNewFloat($3))} ; // TODO(owen-d): add (on,ignoring) clauses to binOpExpr diff --git a/pkg/logql/expr.y.go b/pkg/logql/expr.y.go index 61d1115b67fa0..c152d929b96b5 100644 --- a/pkg/logql/expr.y.go +++ b/pkg/logql/expr.y.go @@ -49,58 +49,59 @@ const IDENTIFIER = 57346 const STRING = 57347 const NUMBER = 57348 const DURATION = 57349 -const MATCHERS = 57350 -const LABELS = 57351 -const EQ = 57352 -const RE = 57353 -const NRE = 57354 -const OPEN_BRACE = 57355 -const CLOSE_BRACE = 57356 -const OPEN_BRACKET = 57357 -const CLOSE_BRACKET = 57358 -const COMMA = 57359 -const DOT = 57360 -const PIPE_MATCH = 57361 -const PIPE_EXACT = 57362 -const OPEN_PARENTHESIS = 57363 -const CLOSE_PARENTHESIS = 57364 -const BY = 57365 -const WITHOUT = 57366 -const COUNT_OVER_TIME = 57367 -const RATE = 57368 -const SUM = 57369 -const AVG = 57370 -const MAX = 57371 -const MIN = 57372 -const COUNT = 57373 -const STDDEV = 57374 -const STDVAR = 57375 -const BOTTOMK = 57376 -const TOPK = 57377 -const BYTES_OVER_TIME = 57378 -const BYTES_RATE = 57379 -const BOOL = 57380 -const JSON = 57381 -const REGEXP = 57382 -const LOGFMT = 57383 -const PIPE = 57384 -const LINE_FMT = 57385 -const LABEL_FMT = 57386 -const OR = 57387 -const AND = 57388 -const UNLESS = 57389 -const CMP_EQ = 57390 -const NEQ = 57391 -const LT = 57392 -const LTE = 57393 -const GT = 57394 -const GTE = 57395 -const ADD = 57396 -const SUB = 57397 -const MUL = 57398 
-const DIV = 57399 -const MOD = 57400 -const POW = 57401 +const RANGE = 57350 +const MATCHERS = 57351 +const LABELS = 57352 +const EQ = 57353 +const RE = 57354 +const NRE = 57355 +const OPEN_BRACE = 57356 +const CLOSE_BRACE = 57357 +const OPEN_BRACKET = 57358 +const CLOSE_BRACKET = 57359 +const COMMA = 57360 +const DOT = 57361 +const PIPE_MATCH = 57362 +const PIPE_EXACT = 57363 +const OPEN_PARENTHESIS = 57364 +const CLOSE_PARENTHESIS = 57365 +const BY = 57366 +const WITHOUT = 57367 +const COUNT_OVER_TIME = 57368 +const RATE = 57369 +const SUM = 57370 +const AVG = 57371 +const MAX = 57372 +const MIN = 57373 +const COUNT = 57374 +const STDDEV = 57375 +const STDVAR = 57376 +const BOTTOMK = 57377 +const TOPK = 57378 +const BYTES_OVER_TIME = 57379 +const BYTES_RATE = 57380 +const BOOL = 57381 +const JSON = 57382 +const REGEXP = 57383 +const LOGFMT = 57384 +const PIPE = 57385 +const LINE_FMT = 57386 +const LABEL_FMT = 57387 +const OR = 57388 +const AND = 57389 +const UNLESS = 57390 +const CMP_EQ = 57391 +const NEQ = 57392 +const LT = 57393 +const LTE = 57394 +const GT = 57395 +const GTE = 57396 +const ADD = 57397 +const SUB = 57398 +const MUL = 57399 +const DIV = 57400 +const MOD = 57401 +const POW = 57402 var exprToknames = [...]string{ "$end", @@ -110,6 +111,7 @@ var exprToknames = [...]string{ "STRING", "NUMBER", "DURATION", + "RANGE", "MATCHERS", "LABELS", "EQ", @@ -169,7 +171,7 @@ const exprEofCode = 1 const exprErrCode = 2 const exprInitialStackSize = 16 -//line pkg/logql/expr.y:274 +//line pkg/logql/expr.y:276 //line yacctab:1 var exprExca = [...]int{ @@ -178,8 +180,7 @@ var exprExca = [...]int{ -2, 0, -1, 3, 1, 2, - 22, 2, - 45, 2, + 23, 2, 46, 2, 47, 2, 48, 2, @@ -194,9 +195,9 @@ var exprExca = [...]int{ 57, 2, 58, 2, 59, 2, + 60, 2, -2, 0, -1, 56, - 45, 2, 46, 2, 47, 2, 48, 2, @@ -211,72 +212,75 @@ var exprExca = [...]int{ 57, 2, 58, 2, 59, 2, + 60, 2, -2, 0, } const exprPrivate = 57344 -const exprLast = 284 +const exprLast = 297 var exprAct = [...]int{ - 64, 47, 4, 133, 46, 159, 3, 5, 108, 55, + 64, 47, 4, 133, 46, 160, 3, 5, 108, 55, 92, 57, 2, 56, 33, 34, 35, 36, 37, 38, 38, 70, 60, 30, 31, 32, 39, 40, 43, 44, 41, 42, 33, 34, 35, 36, 37, 38, 35, 36, - 37, 38, 186, 104, 106, 107, 182, 63, 85, 65, - 66, 50, 156, 88, 39, 40, 43, 44, 41, 42, - 33, 34, 35, 36, 37, 38, 112, 153, 53, 109, + 37, 38, 189, 104, 106, 107, 185, 63, 85, 65, + 66, 50, 157, 88, 39, 40, 43, 44, 41, 42, + 33, 34, 35, 36, 37, 38, 112, 154, 53, 109, 110, 98, 53, 65, 66, 51, 52, 103, 116, 51, 52, 117, 105, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 96, 49, 138, - 86, 115, 49, 114, 53, 54, 139, 106, 107, 54, - 135, 51, 52, 145, 95, 62, 183, 158, 154, 110, - 155, 185, 161, 31, 32, 39, 40, 43, 44, 41, - 42, 33, 34, 35, 36, 37, 38, 96, 162, 163, - 164, 54, 150, 157, 14, 140, 143, 144, 141, 142, - 135, 11, 177, 176, 95, 165, 183, 180, 178, 6, - 181, 184, 179, 17, 18, 21, 22, 24, 25, 23, - 26, 27, 28, 29, 19, 20, 113, 69, 136, 134, - 85, 11, 68, 11, 147, 169, 168, 188, 150, 111, - 67, 6, 15, 16, 149, 17, 18, 21, 22, 24, - 25, 23, 26, 27, 28, 29, 19, 20, 151, 71, - 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, - 82, 83, 84, 96, 15, 16, 96, 45, 45, 101, - 175, 174, 152, 96, 96, 45, 135, 173, 172, 135, - 95, 100, 148, 95, 102, 171, 170, 99, 99, 147, - 95, 95, 146, 167, 166, 45, 146, 137, 48, 132, - 152, 97, 187, 160, 136, 134, 61, 93, 134, 89, - 91, 90, 59, 94, 61, 87, 10, 9, 13, 8, - 12, 7, 58, 1, + 86, 115, 49, 186, 114, 54, 45, 151, 188, 54, + 62, 151, 153, 146, 158, 95, 150, 159, 155, 110, + 156, 14, 
162, 53, 139, 106, 107, 99, 182, 11, + 51, 52, 152, 89, 91, 90, 45, 6, 163, 164, + 165, 17, 18, 21, 22, 24, 25, 23, 26, 27, + 28, 29, 19, 20, 68, 96, 96, 99, 183, 181, + 54, 184, 145, 140, 143, 144, 141, 142, 96, 135, + 15, 16, 186, 95, 95, 67, 11, 187, 148, 170, + 169, 149, 135, 85, 111, 148, 95, 166, 180, 179, + 191, 31, 32, 39, 40, 43, 44, 41, 42, 33, + 34, 35, 36, 37, 38, 113, 69, 101, 178, 177, + 136, 134, 45, 11, 147, 168, 167, 147, 153, 137, + 100, 6, 132, 102, 97, 17, 18, 21, 22, 24, + 25, 23, 26, 27, 28, 29, 19, 20, 71, 72, + 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, + 83, 84, 96, 96, 15, 16, 176, 175, 174, 173, + 172, 171, 59, 190, 61, 161, 135, 135, 61, 45, + 95, 95, 48, 93, 94, 87, 10, 9, 13, 8, + 12, 7, 58, 1, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 136, 134, 134, } var exprPact = [...]int{ - 138, -1000, -22, 233, -1000, 56, 138, -1000, -1000, -1000, - -1000, 270, 94, 26, -1000, 184, 176, -1000, -1000, -1000, + 115, -1000, -23, 267, -1000, 55, 115, -1000, -1000, -1000, + -1000, 260, 88, 25, -1000, 169, 148, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, - -17, -17, -17, -17, -17, -17, -17, -17, -17, -17, - -17, -17, -17, -17, -17, -1000, 56, -1000, 92, 230, - 256, -1000, -1000, -1000, -1000, 49, 226, -22, 227, 63, - -1000, 33, 168, 170, 82, 80, 57, -1000, -1000, 138, - -1000, 138, 138, 138, 138, 138, 138, 138, 138, 138, - 138, 138, 138, 138, 138, -1000, 254, -1000, 219, -1000, - -1000, 252, -1000, -1000, -1000, 229, 96, -1000, -1000, -1000, - -1000, -1000, 262, -1000, 251, 244, 237, 189, 186, 253, - 60, 168, 30, 126, 138, 259, 259, 77, 6, 6, - -18, -18, -39, -39, -39, -39, -40, -40, -40, -40, - -40, -40, -1000, 219, 229, 229, 229, -1000, 133, 247, - 179, 239, 231, 224, 146, -1000, -1000, -1000, -1000, -1000, - -1000, -1000, -1000, 56, 225, 140, 50, 138, 24, 139, - -1000, 99, 93, 219, 222, -1000, -1000, -1000, -1000, -1000, - -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, 56, -1000, - -1000, 20, -1000, 258, -1000, -1000, 50, -1000, -1000, + -18, -18, -18, -18, -18, -18, -18, -18, -18, -18, + -18, -18, -18, -18, -18, -1000, 55, -1000, 110, 93, + 219, -1000, -1000, -1000, -1000, 48, 134, -23, 205, 62, + -1000, 32, 162, 199, 82, 79, 56, -1000, -1000, 115, + -1000, 115, 115, 115, 115, 115, 115, 115, 115, 115, + 115, 115, 115, 115, 115, -1000, 217, -1000, 248, -1000, + -1000, 214, -1000, -1000, -1000, 152, 113, -1000, -1000, -1000, + -1000, -1000, 264, -1000, 212, 180, 176, 111, 109, 210, + 59, 162, 29, 96, 115, 261, 261, 144, 5, 5, + -19, -19, -40, -40, -40, -40, -41, -41, -41, -41, + -41, -41, -1000, 248, 152, 152, 152, -1000, 164, 209, + 173, 254, 252, 250, 202, 182, -1000, -1000, -1000, -1000, + -1000, -1000, -1000, -1000, 55, 104, 105, 49, 115, 23, + 154, -1000, 85, 151, 248, 249, -1000, -1000, -1000, -1000, + -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, + -1000, 55, -1000, -1000, 19, -1000, 259, -1000, -1000, 49, + -1000, -1000, } var exprPgo = [...]int{ 0, 283, 11, 51, 0, 5, 6, 2, 8, 10, - 282, 281, 280, 7, 279, 278, 277, 276, 177, 275, - 4, 1, 273, 267, 3, 258, + 282, 281, 280, 7, 279, 278, 277, 276, 206, 275, + 4, 1, 274, 273, 3, 272, } var exprR1 = [...]int{ @@ -286,11 +290,11 @@ var exprR1 = [...]int{ 13, 10, 10, 9, 9, 9, 9, 20, 20, 21, 21, 21, 25, 25, 19, 19, 19, 24, 24, 24, 24, 24, 24, 24, 24, 23, 23, 23, 23, 23, - 23, 22, 22, 22, 22, 22, 22, 16, 16, 16, + 23, 23, 22, 22, 22, 22, 22, 22, 22, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, - 16, 16, 18, 18, 17, 17, 17, 15, 15, 15, - 15, 15, 15, 15, 15, 15, 
12, 12, 12, 12, - 5, 5, 4, 4, + 16, 16, 16, 16, 18, 18, 17, 17, 17, 15, + 15, 15, 15, 15, 15, 15, 15, 15, 12, 12, + 12, 12, 5, 5, 4, 4, } var exprR2 = [...]int{ @@ -300,55 +304,57 @@ var exprR2 = [...]int{ 3, 1, 3, 3, 3, 3, 3, 1, 2, 1, 2, 2, 2, 3, 1, 1, 2, 1, 1, 1, 3, 2, 3, 3, 3, 3, 3, 3, 3, 3, - 3, 3, 3, 3, 3, 3, 3, 4, 4, 4, + 3, 3, 3, 3, 3, 3, 3, 3, 3, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, - 4, 4, 0, 1, 1, 2, 2, 1, 1, 1, + 4, 4, 4, 4, 0, 1, 1, 2, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, - 1, 3, 4, 4, + 1, 1, 1, 3, 4, 4, } var exprChk = [...]int{ - -1000, -1, -2, -6, -7, -13, 21, -11, -14, -16, - -17, 13, -12, -15, 6, 54, 55, 25, 26, 36, - 37, 27, 28, 31, 29, 30, 32, 33, 34, 35, - 45, 46, 47, 54, 55, 56, 57, 58, 59, 48, - 49, 52, 53, 50, 51, 2, -20, -21, -25, 42, - -3, 19, 20, 12, 49, -7, -6, -2, -10, 2, - -9, 4, 21, 21, -4, 23, 24, 6, 6, -18, - 38, -18, -18, -18, -18, -18, -18, -18, -18, -18, - -18, -18, -18, -18, -18, -21, -3, -19, -24, 39, - 41, 40, -9, -23, -22, 21, 4, 5, 22, 22, - 14, 2, 17, 14, 10, 49, 11, 12, -8, -6, - -13, 21, -7, 6, 21, 21, 21, -2, -2, -2, + -1000, -1, -2, -6, -7, -13, 22, -11, -14, -16, + -17, 14, -12, -15, 6, 55, 56, 26, 27, 37, + 38, 28, 29, 32, 30, 31, 33, 34, 35, 36, + 46, 47, 48, 55, 56, 57, 58, 59, 60, 49, + 50, 53, 54, 51, 52, 2, -20, -21, -25, 43, + -3, 20, 21, 13, 50, -7, -6, -2, -10, 2, + -9, 4, 22, 22, -4, 24, 25, 6, 6, -18, + 39, -18, -18, -18, -18, -18, -18, -18, -18, -18, + -18, -18, -18, -18, -18, -21, -3, -19, -24, 40, + 42, 41, -9, -23, -22, 22, 4, 5, 23, 23, + 15, 2, 18, 15, 11, 50, 12, 13, -8, -6, + -13, 22, -7, 6, 22, 22, 22, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, - -2, -2, 5, -24, 46, 17, 45, 5, -24, 10, - 49, 52, 53, 50, 51, -9, 5, 5, 5, 5, - 2, 22, 7, 7, -6, -8, 22, 17, -7, -5, - 4, -5, -24, -24, -24, 22, 7, 6, 7, 6, - 7, 6, 7, 6, 7, 6, 7, 6, -20, 22, - -4, -7, 22, 17, 22, 22, 22, 4, -4, + -2, -2, 5, -24, 47, 18, 46, 5, -24, 11, + 50, 53, 54, 51, 52, 49, -9, 5, 5, 5, + 5, 2, 23, 8, 8, -6, -8, 23, 18, -7, + -5, 4, -5, -24, -24, -24, 23, 7, 6, 7, + 6, 7, 6, 7, 6, 7, 6, 7, 6, 7, + 6, -20, 23, -4, -7, 23, 18, 23, 23, 23, + 4, -4, } var exprDef = [...]int{ 0, -2, 1, -2, 3, 9, 0, 4, 5, 6, - 7, 0, 0, 0, 84, 0, 0, 96, 97, 98, - 99, 87, 88, 89, 90, 91, 92, 93, 94, 95, - 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, - 82, 82, 82, 82, 82, 12, 10, 37, 39, 0, + 7, 0, 0, 0, 86, 0, 0, 98, 99, 100, + 101, 89, 90, 91, 92, 93, 94, 95, 96, 97, + 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, + 84, 84, 84, 84, 84, 12, 10, 37, 39, 0, 0, 24, 25, 26, 27, 3, -2, 0, 0, 0, - 31, 0, 0, 0, 0, 0, 0, 85, 86, 0, - 83, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 31, 0, 0, 0, 0, 0, 0, 87, 88, 0, + 85, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 38, 0, 40, 41, 44, 45, 0, 47, 48, 49, 0, 0, 42, 8, 11, 28, 29, 0, 30, 0, 0, 0, 0, 0, 0, - 9, 0, 3, 84, 0, 0, 0, 67, 68, 69, - 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, - 80, 81, 43, 51, 0, 0, 0, 46, 0, 0, - 0, 0, 0, 0, 0, 32, 33, 34, 35, 36, - 17, 18, 13, 15, 0, 0, 19, 0, 3, 0, - 100, 0, 52, 53, 54, 50, 60, 66, 59, 65, - 55, 61, 56, 62, 57, 63, 58, 64, 14, 16, - 21, 3, 20, 0, 102, 103, 22, 101, 23, + 9, 0, 3, 86, 0, 0, 0, 69, 70, 71, + 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, + 82, 83, 43, 51, 0, 0, 0, 46, 0, 0, + 0, 0, 0, 0, 0, 0, 32, 33, 34, 35, + 36, 17, 18, 13, 15, 0, 0, 19, 0, 3, + 0, 102, 0, 52, 53, 54, 50, 60, 67, 59, + 66, 55, 62, 56, 63, 57, 64, 58, 65, 61, + 68, 14, 16, 21, 3, 20, 0, 104, 105, 22, + 103, 23, } var exprTok1 = [...]int{ @@ -361,7 +367,7 @@ var exprTok2 = [...]int{ 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 
40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, - 52, 53, 54, 55, 56, 57, 58, 59, + 52, 53, 54, 55, 56, 57, 58, 59, 60, } var exprTok3 = [...]int{ 0, @@ -1053,259 +1059,271 @@ exprdefault: } case 61: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:206 +//line pkg/logql/expr.y:203 { - exprVAL.NumberFilter = labelfilter.NewNumeric(labelfilter.FilterGreaterThan, exprDollar[1].str, mustNewFloat(exprDollar[3].str)) + exprVAL.DurationFilter = labelfilter.NewDuration(labelfilter.FilterEqual, exprDollar[1].str, exprDollar[3].duration) } case 62: exprDollar = exprS[exprpt-3 : exprpt+1] //line pkg/logql/expr.y:207 { - exprVAL.NumberFilter = labelfilter.NewNumeric(labelfilter.FilterGreaterThanOrEqual, exprDollar[1].str, mustNewFloat(exprDollar[3].str)) + exprVAL.NumberFilter = labelfilter.NewNumeric(labelfilter.FilterGreaterThan, exprDollar[1].str, mustNewFloat(exprDollar[3].str)) } case 63: exprDollar = exprS[exprpt-3 : exprpt+1] //line pkg/logql/expr.y:208 { - exprVAL.NumberFilter = labelfilter.NewNumeric(labelfilter.FilterLesserThan, exprDollar[1].str, mustNewFloat(exprDollar[3].str)) + exprVAL.NumberFilter = labelfilter.NewNumeric(labelfilter.FilterGreaterThanOrEqual, exprDollar[1].str, mustNewFloat(exprDollar[3].str)) } case 64: exprDollar = exprS[exprpt-3 : exprpt+1] //line pkg/logql/expr.y:209 { - exprVAL.NumberFilter = labelfilter.NewNumeric(labelfilter.FilterLesserThanOrEqual, exprDollar[1].str, mustNewFloat(exprDollar[3].str)) + exprVAL.NumberFilter = labelfilter.NewNumeric(labelfilter.FilterLesserThan, exprDollar[1].str, mustNewFloat(exprDollar[3].str)) } case 65: exprDollar = exprS[exprpt-3 : exprpt+1] //line pkg/logql/expr.y:210 { - exprVAL.NumberFilter = labelfilter.NewNumeric(labelfilter.FilterNotEqual, exprDollar[1].str, mustNewFloat(exprDollar[3].str)) + exprVAL.NumberFilter = labelfilter.NewNumeric(labelfilter.FilterLesserThanOrEqual, exprDollar[1].str, mustNewFloat(exprDollar[3].str)) } case 66: exprDollar = exprS[exprpt-3 : exprpt+1] //line pkg/logql/expr.y:211 { - exprVAL.NumberFilter = labelfilter.NewNumeric(labelfilter.FilterEqual, exprDollar[1].str, mustNewFloat(exprDollar[3].str)) + exprVAL.NumberFilter = labelfilter.NewNumeric(labelfilter.FilterNotEqual, exprDollar[1].str, mustNewFloat(exprDollar[3].str)) } case 67: - exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:217 + exprDollar = exprS[exprpt-3 : exprpt+1] +//line pkg/logql/expr.y:212 { - exprVAL.BinOpExpr = mustNewBinOpExpr("or", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) + exprVAL.NumberFilter = labelfilter.NewNumeric(labelfilter.FilterEqual, exprDollar[1].str, mustNewFloat(exprDollar[3].str)) } case 68: - exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:218 + exprDollar = exprS[exprpt-3 : exprpt+1] +//line pkg/logql/expr.y:213 { - exprVAL.BinOpExpr = mustNewBinOpExpr("and", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) + exprVAL.NumberFilter = labelfilter.NewNumeric(labelfilter.FilterEqual, exprDollar[1].str, mustNewFloat(exprDollar[3].str)) } case 69: exprDollar = exprS[exprpt-4 : exprpt+1] //line pkg/logql/expr.y:219 { - exprVAL.BinOpExpr = mustNewBinOpExpr("unless", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) + exprVAL.BinOpExpr = mustNewBinOpExpr("or", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } case 70: exprDollar = exprS[exprpt-4 : exprpt+1] //line pkg/logql/expr.y:220 { - exprVAL.BinOpExpr = mustNewBinOpExpr("+", exprDollar[3].BinOpModifier, exprDollar[1].Expr, 
exprDollar[4].Expr) + exprVAL.BinOpExpr = mustNewBinOpExpr("and", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } case 71: exprDollar = exprS[exprpt-4 : exprpt+1] //line pkg/logql/expr.y:221 { - exprVAL.BinOpExpr = mustNewBinOpExpr("-", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) + exprVAL.BinOpExpr = mustNewBinOpExpr("unless", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } case 72: exprDollar = exprS[exprpt-4 : exprpt+1] //line pkg/logql/expr.y:222 { - exprVAL.BinOpExpr = mustNewBinOpExpr("*", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) + exprVAL.BinOpExpr = mustNewBinOpExpr("+", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } case 73: exprDollar = exprS[exprpt-4 : exprpt+1] //line pkg/logql/expr.y:223 { - exprVAL.BinOpExpr = mustNewBinOpExpr("/", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) + exprVAL.BinOpExpr = mustNewBinOpExpr("-", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } case 74: exprDollar = exprS[exprpt-4 : exprpt+1] //line pkg/logql/expr.y:224 { - exprVAL.BinOpExpr = mustNewBinOpExpr("%", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) + exprVAL.BinOpExpr = mustNewBinOpExpr("*", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } case 75: exprDollar = exprS[exprpt-4 : exprpt+1] //line pkg/logql/expr.y:225 { - exprVAL.BinOpExpr = mustNewBinOpExpr("^", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) + exprVAL.BinOpExpr = mustNewBinOpExpr("/", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } case 76: exprDollar = exprS[exprpt-4 : exprpt+1] //line pkg/logql/expr.y:226 { - exprVAL.BinOpExpr = mustNewBinOpExpr("==", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) + exprVAL.BinOpExpr = mustNewBinOpExpr("%", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } case 77: exprDollar = exprS[exprpt-4 : exprpt+1] //line pkg/logql/expr.y:227 { - exprVAL.BinOpExpr = mustNewBinOpExpr("!=", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) + exprVAL.BinOpExpr = mustNewBinOpExpr("^", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } case 78: exprDollar = exprS[exprpt-4 : exprpt+1] //line pkg/logql/expr.y:228 { - exprVAL.BinOpExpr = mustNewBinOpExpr(">", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) + exprVAL.BinOpExpr = mustNewBinOpExpr("==", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } case 79: exprDollar = exprS[exprpt-4 : exprpt+1] //line pkg/logql/expr.y:229 { - exprVAL.BinOpExpr = mustNewBinOpExpr(">=", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) + exprVAL.BinOpExpr = mustNewBinOpExpr("!=", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } case 80: exprDollar = exprS[exprpt-4 : exprpt+1] //line pkg/logql/expr.y:230 { - exprVAL.BinOpExpr = mustNewBinOpExpr("<", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) + exprVAL.BinOpExpr = mustNewBinOpExpr(">", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } case 81: exprDollar = exprS[exprpt-4 : exprpt+1] //line pkg/logql/expr.y:231 { - exprVAL.BinOpExpr = mustNewBinOpExpr("<=", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) + exprVAL.BinOpExpr = mustNewBinOpExpr(">=", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } case 82: - exprDollar = 
exprS[exprpt-0 : exprpt+1] -//line pkg/logql/expr.y:235 + exprDollar = exprS[exprpt-4 : exprpt+1] +//line pkg/logql/expr.y:232 { - exprVAL.BinOpModifier = BinOpOptions{} + exprVAL.BinOpExpr = mustNewBinOpExpr("<", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } case 83: - exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:236 + exprDollar = exprS[exprpt-4 : exprpt+1] +//line pkg/logql/expr.y:233 { - exprVAL.BinOpModifier = BinOpOptions{ReturnBool: true} + exprVAL.BinOpExpr = mustNewBinOpExpr("<=", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } case 84: - exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:240 + exprDollar = exprS[exprpt-0 : exprpt+1] +//line pkg/logql/expr.y:237 { - exprVAL.LiteralExpr = mustNewLiteralExpr(exprDollar[1].str, false) + exprVAL.BinOpModifier = BinOpOptions{} } case 85: - exprDollar = exprS[exprpt-2 : exprpt+1] -//line pkg/logql/expr.y:241 + exprDollar = exprS[exprpt-1 : exprpt+1] +//line pkg/logql/expr.y:238 { - exprVAL.LiteralExpr = mustNewLiteralExpr(exprDollar[2].str, false) + exprVAL.BinOpModifier = BinOpOptions{ReturnBool: true} } case 86: - exprDollar = exprS[exprpt-2 : exprpt+1] + exprDollar = exprS[exprpt-1 : exprpt+1] //line pkg/logql/expr.y:242 { - exprVAL.LiteralExpr = mustNewLiteralExpr(exprDollar[2].str, true) + exprVAL.LiteralExpr = mustNewLiteralExpr(exprDollar[1].str, false) } case 87: - exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:246 + exprDollar = exprS[exprpt-2 : exprpt+1] +//line pkg/logql/expr.y:243 { - exprVAL.VectorOp = OpTypeSum + exprVAL.LiteralExpr = mustNewLiteralExpr(exprDollar[2].str, false) } case 88: - exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:247 + exprDollar = exprS[exprpt-2 : exprpt+1] +//line pkg/logql/expr.y:244 { - exprVAL.VectorOp = OpTypeAvg + exprVAL.LiteralExpr = mustNewLiteralExpr(exprDollar[2].str, true) } case 89: exprDollar = exprS[exprpt-1 : exprpt+1] //line pkg/logql/expr.y:248 { - exprVAL.VectorOp = OpTypeCount + exprVAL.VectorOp = OpTypeSum } case 90: exprDollar = exprS[exprpt-1 : exprpt+1] //line pkg/logql/expr.y:249 { - exprVAL.VectorOp = OpTypeMax + exprVAL.VectorOp = OpTypeAvg } case 91: exprDollar = exprS[exprpt-1 : exprpt+1] //line pkg/logql/expr.y:250 { - exprVAL.VectorOp = OpTypeMin + exprVAL.VectorOp = OpTypeCount } case 92: exprDollar = exprS[exprpt-1 : exprpt+1] //line pkg/logql/expr.y:251 { - exprVAL.VectorOp = OpTypeStddev + exprVAL.VectorOp = OpTypeMax } case 93: exprDollar = exprS[exprpt-1 : exprpt+1] //line pkg/logql/expr.y:252 { - exprVAL.VectorOp = OpTypeStdvar + exprVAL.VectorOp = OpTypeMin } case 94: exprDollar = exprS[exprpt-1 : exprpt+1] //line pkg/logql/expr.y:253 { - exprVAL.VectorOp = OpTypeBottomK + exprVAL.VectorOp = OpTypeStddev } case 95: exprDollar = exprS[exprpt-1 : exprpt+1] //line pkg/logql/expr.y:254 { - exprVAL.VectorOp = OpTypeTopK + exprVAL.VectorOp = OpTypeStdvar } case 96: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:258 +//line pkg/logql/expr.y:255 { - exprVAL.RangeOp = OpRangeTypeCount + exprVAL.VectorOp = OpTypeBottomK } case 97: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:259 +//line pkg/logql/expr.y:256 { - exprVAL.RangeOp = OpRangeTypeRate + exprVAL.VectorOp = OpTypeTopK } case 98: exprDollar = exprS[exprpt-1 : exprpt+1] //line pkg/logql/expr.y:260 { - exprVAL.RangeOp = OpRangeTypeBytes + exprVAL.RangeOp = OpRangeTypeCount } case 99: exprDollar = exprS[exprpt-1 : exprpt+1] //line pkg/logql/expr.y:261 { - 
exprVAL.RangeOp = OpRangeTypeBytesRate + exprVAL.RangeOp = OpRangeTypeRate } case 100: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:266 +//line pkg/logql/expr.y:262 { - exprVAL.Labels = []string{exprDollar[1].str} + exprVAL.RangeOp = OpRangeTypeBytes } case 101: + exprDollar = exprS[exprpt-1 : exprpt+1] +//line pkg/logql/expr.y:263 + { + exprVAL.RangeOp = OpRangeTypeBytesRate + } + case 102: + exprDollar = exprS[exprpt-1 : exprpt+1] +//line pkg/logql/expr.y:268 + { + exprVAL.Labels = []string{exprDollar[1].str} + } + case 103: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:267 +//line pkg/logql/expr.y:269 { exprVAL.Labels = append(exprDollar[1].Labels, exprDollar[3].str) } - case 102: + case 104: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:271 +//line pkg/logql/expr.y:273 { exprVAL.Grouping = &grouping{without: false, groups: exprDollar[3].Labels} } - case 103: + case 105: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:272 +//line pkg/logql/expr.y:274 { exprVAL.Grouping = &grouping{without: true, groups: exprDollar[3].Labels} } diff --git a/pkg/logql/lex.go b/pkg/logql/lex.go index 768a14d173778..52fd967c1c0cc 100644 --- a/pkg/logql/lex.go +++ b/pkg/logql/lex.go @@ -2,8 +2,10 @@ package logql import ( "strconv" + "strings" "text/scanner" "time" + "unicode" "github.com/prometheus/common/model" ) @@ -78,8 +80,14 @@ func (l *lexer) Lex(lval *exprSymType) int { return 0 case scanner.Int, scanner.Float: - lval.str = l.TokenText() - return NUMBER + numberText := l.TokenText() + duration, ok := tryScanDuration(numberText, &l.Scanner) + if !ok { + lval.str = numberText + return NUMBER + } + lval.duration = duration + return DURATION case scanner.String, scanner.RawString: var err error @@ -102,7 +110,7 @@ func (l *lexer) Lex(lval *exprSymType) int { return 0 } lval.duration = time.Duration(i) - return DURATION + return RANGE } d += string(r) } @@ -126,3 +134,43 @@ func (l *lexer) Lex(lval *exprSymType) int { func (l *lexer) Error(msg string) { l.errs = append(l.errs, newParseError(msg, l.Line, l.Column)) } + +func tryScanDuration(number string, l *scanner.Scanner) (time.Duration, bool) { + var sb strings.Builder + sb.WriteString(number) + // copy the scanner to avoid advancing it in case it's not a duration. + s := *l + consumed := 0 + for r := s.Peek(); r != scanner.EOF && !unicode.IsSpace(r); r = s.Peek() { + if !unicode.IsNumber(r) && !isDurationRune(r) && r != '.' { + break + } + _, _ = sb.WriteRune(r) + _ = s.Next() + consumed++ + } + + if consumed == 0 { + return 0, false + } + // we've found more characters before a whitespace or the end; try to parse them as a duration. + d, err := time.ParseDuration(sb.String()) + if err != nil { + return 0, false + } + // advance the real scanner by the same number of runes, now that we know this is a duration. + for i := 0; i < consumed; i++ { + _ = l.Next() + } + return d, true +} + +func isDurationRune(r rune) bool { + // "ns", "us" (or "µs"), "ms", "s", "m", "h".
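+ // Runes outside this set (besides digits and '.') stop the scan in tryScanDuration; an invalid combination such as "5sm" is still rejected there by time.ParseDuration.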
+ switch r { + case 'n', 's', 'u', 'm', 'h', 'µ': + return true + default: + return false + } +} diff --git a/pkg/logql/lex_test.go b/pkg/logql/lex_test.go index 8c523661fbb61..ddd61750f7638 100644 --- a/pkg/logql/lex_test.go +++ b/pkg/logql/lex_test.go @@ -16,6 +16,13 @@ func TestLex(t *testing.T) { {`{foo="bar"}`, []int{OPEN_BRACE, IDENTIFIER, EQ, STRING, CLOSE_BRACE}}, {"{foo=\"bar\"} |~ `\\w+`", []int{OPEN_BRACE, IDENTIFIER, EQ, STRING, CLOSE_BRACE, PIPE_MATCH, STRING}}, {`{foo="bar"} |~ "\\w+"`, []int{OPEN_BRACE, IDENTIFIER, EQ, STRING, CLOSE_BRACE, PIPE_MATCH, STRING}}, + {`{foo="bar"} |~ "\\w+" | latency > 250ms`, []int{OPEN_BRACE, IDENTIFIER, EQ, STRING, CLOSE_BRACE, PIPE_MATCH, STRING, PIPE, IDENTIFIER, GT, DURATION}}, + {`{foo="bar"} |~ "\\w+" | foo = 0ms`, []int{OPEN_BRACE, IDENTIFIER, EQ, STRING, CLOSE_BRACE, PIPE_MATCH, STRING, PIPE, IDENTIFIER, EQ, DURATION}}, + {`{foo="bar"} |~ "\\w+" | latency > 1h15m30.918273645s`, []int{OPEN_BRACE, IDENTIFIER, EQ, STRING, CLOSE_BRACE, PIPE_MATCH, STRING, PIPE, IDENTIFIER, GT, DURATION}}, + {`{foo="bar"} |~ "\\w+" | latency > 1h0.0m0s`, []int{OPEN_BRACE, IDENTIFIER, EQ, STRING, CLOSE_BRACE, PIPE_MATCH, STRING, PIPE, IDENTIFIER, GT, DURATION}}, + {`{foo="bar"} |~ "\\w+" | latency > 1h0.0m0s or foo ==4.00 and bar ="foo"`, + []int{OPEN_BRACE, IDENTIFIER, EQ, STRING, CLOSE_BRACE, PIPE_MATCH, STRING, + PIPE, IDENTIFIER, GT, DURATION, OR, IDENTIFIER, CMP_EQ, NUMBER, AND, IDENTIFIER, EQ, STRING}}, {`{ foo = "bar" }`, []int{OPEN_BRACE, IDENTIFIER, EQ, STRING, CLOSE_BRACE}}, {`{ foo != "bar" }`, []int{OPEN_BRACE, IDENTIFIER, NEQ, STRING, CLOSE_BRACE}}, {`{ foo =~ "bar" }`, []int{OPEN_BRACE, IDENTIFIER, RE, STRING, CLOSE_BRACE}}, @@ -23,12 +30,12 @@ func TestLex(t *testing.T) { {`{ foo = "bar", bar != "baz" }`, []int{OPEN_BRACE, IDENTIFIER, EQ, STRING, COMMA, IDENTIFIER, NEQ, STRING, CLOSE_BRACE}}, {`{ foo = "ba\"r" }`, []int{OPEN_BRACE, IDENTIFIER, EQ, STRING, CLOSE_BRACE}}, - {`rate({foo="bar"}[10s])`, []int{RATE, OPEN_PARENTHESIS, OPEN_BRACE, IDENTIFIER, EQ, STRING, CLOSE_BRACE, DURATION, CLOSE_PARENTHESIS}}, - {`count_over_time({foo="bar"}[5m])`, []int{COUNT_OVER_TIME, OPEN_PARENTHESIS, OPEN_BRACE, IDENTIFIER, EQ, STRING, CLOSE_BRACE, DURATION, CLOSE_PARENTHESIS}}, - {`sum(count_over_time({foo="bar"}[5m])) by (foo,bar)`, []int{SUM, OPEN_PARENTHESIS, COUNT_OVER_TIME, OPEN_PARENTHESIS, OPEN_BRACE, IDENTIFIER, EQ, STRING, CLOSE_BRACE, DURATION, CLOSE_PARENTHESIS, CLOSE_PARENTHESIS, BY, OPEN_PARENTHESIS, IDENTIFIER, COMMA, IDENTIFIER, CLOSE_PARENTHESIS}}, - {`topk(3,count_over_time({foo="bar"}[5m])) by (foo,bar)`, []int{TOPK, OPEN_PARENTHESIS, NUMBER, COMMA, COUNT_OVER_TIME, OPEN_PARENTHESIS, OPEN_BRACE, IDENTIFIER, EQ, STRING, CLOSE_BRACE, DURATION, CLOSE_PARENTHESIS, CLOSE_PARENTHESIS, BY, OPEN_PARENTHESIS, IDENTIFIER, COMMA, IDENTIFIER, CLOSE_PARENTHESIS}}, - {`bottomk(10,sum(count_over_time({foo="bar"}[5m])) by (foo,bar))`, []int{BOTTOMK, OPEN_PARENTHESIS, NUMBER, COMMA, SUM, OPEN_PARENTHESIS, COUNT_OVER_TIME, OPEN_PARENTHESIS, OPEN_BRACE, IDENTIFIER, EQ, STRING, CLOSE_BRACE, DURATION, CLOSE_PARENTHESIS, CLOSE_PARENTHESIS, BY, OPEN_PARENTHESIS, IDENTIFIER, COMMA, IDENTIFIER, CLOSE_PARENTHESIS, CLOSE_PARENTHESIS}}, - {`sum(max(rate({foo="bar"}[5m])) by (foo,bar)) by (foo)`, []int{SUM, OPEN_PARENTHESIS, MAX, OPEN_PARENTHESIS, RATE, OPEN_PARENTHESIS, OPEN_BRACE, IDENTIFIER, EQ, STRING, CLOSE_BRACE, DURATION, CLOSE_PARENTHESIS, CLOSE_PARENTHESIS, BY, OPEN_PARENTHESIS, IDENTIFIER, COMMA, IDENTIFIER, CLOSE_PARENTHESIS, CLOSE_PARENTHESIS, BY, 
OPEN_PARENTHESIS, IDENTIFIER, CLOSE_PARENTHESIS}}, + {`rate({foo="bar"}[10s])`, []int{RATE, OPEN_PARENTHESIS, OPEN_BRACE, IDENTIFIER, EQ, STRING, CLOSE_BRACE, RANGE, CLOSE_PARENTHESIS}}, + {`count_over_time({foo="bar"}[5m])`, []int{COUNT_OVER_TIME, OPEN_PARENTHESIS, OPEN_BRACE, IDENTIFIER, EQ, STRING, CLOSE_BRACE, RANGE, CLOSE_PARENTHESIS}}, + {`sum(count_over_time({foo="bar"}[5m])) by (foo,bar)`, []int{SUM, OPEN_PARENTHESIS, COUNT_OVER_TIME, OPEN_PARENTHESIS, OPEN_BRACE, IDENTIFIER, EQ, STRING, CLOSE_BRACE, RANGE, CLOSE_PARENTHESIS, CLOSE_PARENTHESIS, BY, OPEN_PARENTHESIS, IDENTIFIER, COMMA, IDENTIFIER, CLOSE_PARENTHESIS}}, + {`topk(3,count_over_time({foo="bar"}[5m])) by (foo,bar)`, []int{TOPK, OPEN_PARENTHESIS, NUMBER, COMMA, COUNT_OVER_TIME, OPEN_PARENTHESIS, OPEN_BRACE, IDENTIFIER, EQ, STRING, CLOSE_BRACE, RANGE, CLOSE_PARENTHESIS, CLOSE_PARENTHESIS, BY, OPEN_PARENTHESIS, IDENTIFIER, COMMA, IDENTIFIER, CLOSE_PARENTHESIS}}, + {`bottomk(10,sum(count_over_time({foo="bar"}[5m])) by (foo,bar))`, []int{BOTTOMK, OPEN_PARENTHESIS, NUMBER, COMMA, SUM, OPEN_PARENTHESIS, COUNT_OVER_TIME, OPEN_PARENTHESIS, OPEN_BRACE, IDENTIFIER, EQ, STRING, CLOSE_BRACE, RANGE, CLOSE_PARENTHESIS, CLOSE_PARENTHESIS, BY, OPEN_PARENTHESIS, IDENTIFIER, COMMA, IDENTIFIER, CLOSE_PARENTHESIS, CLOSE_PARENTHESIS}}, + {`sum(max(rate({foo="bar"}[5m])) by (foo,bar)) by (foo)`, []int{SUM, OPEN_PARENTHESIS, MAX, OPEN_PARENTHESIS, RATE, OPEN_PARENTHESIS, OPEN_BRACE, IDENTIFIER, EQ, STRING, CLOSE_BRACE, RANGE, CLOSE_PARENTHESIS, CLOSE_PARENTHESIS, BY, OPEN_PARENTHESIS, IDENTIFIER, COMMA, IDENTIFIER, CLOSE_PARENTHESIS, CLOSE_PARENTHESIS, BY, OPEN_PARENTHESIS, IDENTIFIER, CLOSE_PARENTHESIS}}, } { t.Run(tc.input, func(t *testing.T) { actual := []int{} From 42381732caf1accd3ff92bc71cd91c62f34ce545 Mon Sep 17 00:00:00 2001 From: Cyril Tovena Date: Wed, 23 Sep 2020 12:31:31 +0200 Subject: [PATCH 09/45] Fixes all tests and adds some for label filters Signed-off-by: Cyril Tovena --- pkg/logql/ast.go | 13 +- pkg/logql/lex_test.go | 2 +- pkg/logql/parser_test.go | 1791 +++++++++++++++++++------------------- 3 files changed, 889 insertions(+), 917 deletions(-) diff --git a/pkg/logql/ast.go b/pkg/logql/ast.go index d320352cfd084..aa948b48f1e64 100644 --- a/pkg/logql/ast.go +++ b/pkg/logql/ast.go @@ -335,16 +335,9 @@ func (e *labelFilterExpr) Pipeline() (Pipeline, error) { }), nil } -// func (e *parserExpr) String() string { -// var sb strings.Builder -// sb.WriteString(e.left.String()) -// sb.WriteString("|") -// sb.WriteString(e.op) -// if e.param != "" { -// sb.WriteString(strconv.Quote(e.param)) -// } -// return sb.String() -// } +func (e *labelFilterExpr) String() string { + return fmt.Sprintf("|%s", e.Filterer.String()) +} func mustNewMatcher(t labels.MatchType, n, v string) *labels.Matcher { m, err := labels.NewMatcher(t, n, v) diff --git a/pkg/logql/lex_test.go b/pkg/logql/lex_test.go index ddd61750f7638..174253ea44601 100644 --- a/pkg/logql/lex_test.go +++ b/pkg/logql/lex_test.go @@ -20,7 +20,7 @@ func TestLex(t *testing.T) { {`{foo="bar"} |~ "\\w+" | foo = 0ms`, []int{OPEN_BRACE, IDENTIFIER, EQ, STRING, CLOSE_BRACE, PIPE_MATCH, STRING, PIPE, IDENTIFIER, EQ, DURATION}}, {`{foo="bar"} |~ "\\w+" | latency > 1h15m30.918273645s`, []int{OPEN_BRACE, IDENTIFIER, EQ, STRING, CLOSE_BRACE, PIPE_MATCH, STRING, PIPE, IDENTIFIER, GT, DURATION}}, {`{foo="bar"} |~ "\\w+" | latency > 1h0.0m0s`, []int{OPEN_BRACE, IDENTIFIER, EQ, STRING, CLOSE_BRACE, PIPE_MATCH, STRING, PIPE, IDENTIFIER, GT, DURATION}}, - {`{foo="bar"} |~ "\\w+" | latency > 1h0.0m0s or
foo ==4.00 and bar ="foo"`, + {`{foo="bar"} |~ "\\w+" | latency > 1h0.0m0s or foo == 4.00 and bar ="foo"`, []int{OPEN_BRACE, IDENTIFIER, EQ, STRING, CLOSE_BRACE, PIPE_MATCH, STRING, PIPE, IDENTIFIER, GT, DURATION, OR, IDENTIFIER, CMP_EQ, NUMBER, AND, IDENTIFIER, EQ, STRING}}, {`{ foo = "bar" }`, []int{OPEN_BRACE, IDENTIFIER, EQ, STRING, CLOSE_BRACE}}, diff --git a/pkg/logql/parser_test.go b/pkg/logql/parser_test.go index 107beabde4107..8dfa9aa099d1f 100644 --- a/pkg/logql/parser_test.go +++ b/pkg/logql/parser_test.go @@ -6,6 +6,7 @@ import ( "testing" "time" + "github.com/grafana/loki/pkg/logql/labelfilter" "github.com/prometheus/prometheus/pkg/labels" "github.com/stretchr/testify/require" ) @@ -40,911 +41,843 @@ func TestParse(t *testing.T) { }, }, }, - // { - // // test [12h] before filter expr - // in: `count_over_time({foo="bar"}[12h] |= "error")`, - // exp: &rangeAggregationExpr{ - // operation: "count_over_time", - // left: &logRange{ - // left: &filterExpr{ - // ty: labels.MatchEqual, - // match: "error", - // left: &matchersExpr{ - // matchers: []*labels.Matcher{ - // mustNewMatcher(labels.MatchEqual, "foo", "bar"), - // }, - // }, - // }, - // interval: 12 * time.Hour, - // }, - // }, - // }, - // { - // // test [12h] after filter expr - // in: `count_over_time({foo="bar"} |= "error" [12h])`, - // exp: &rangeAggregationExpr{ - // operation: "count_over_time", - // left: &logRange{ - // left: &filterExpr{ - // ty: labels.MatchEqual, - // match: "error", - // left: &matchersExpr{ - // matchers: []*labels.Matcher{ - // mustNewMatcher(labels.MatchEqual, "foo", "bar"), - // }, - // }, - // }, - // interval: 12 * time.Hour, - // }, - // }, - // }, - // { - // in: `{foo="bar"}`, - // exp: &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}}, - // }, - // { - // in: `{ foo = "bar" }`, - // exp: &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}}, - // }, - // { - // in: `{ foo != "bar" }`, - // exp: &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchNotEqual, "foo", "bar")}}, - // }, - // { - // in: `{ foo =~ "bar" }`, - // exp: &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchRegexp, "foo", "bar")}}, - // }, - // { - // in: `{ foo !~ "bar" }`, - // exp: &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchNotRegexp, "foo", "bar")}}, - // }, - // { - // in: `count_over_time({ foo !~ "bar" }[12m])`, - // exp: &rangeAggregationExpr{ - // left: &logRange{ - // left: &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchNotRegexp, "foo", "bar")}}, - // interval: 12 * time.Minute, - // }, - // operation: "count_over_time", - // }, - // }, - // { - // in: `bytes_over_time({ foo !~ "bar" }[12m])`, - // exp: &rangeAggregationExpr{ - // left: &logRange{ - // left: &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchNotRegexp, "foo", "bar")}}, - // interval: 12 * time.Minute, - // }, - // operation: OpRangeTypeBytes, - // }, - // }, - // { - // in: `bytes_rate({ foo !~ "bar" }[12m])`, - // exp: &rangeAggregationExpr{ - // left: &logRange{ - // left: &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchNotRegexp, "foo", "bar")}}, - // interval: 12 * time.Minute, - // }, - // operation: OpRangeTypeBytesRate, - // }, - // }, - // { - // in: `rate({ foo !~ "bar" }[5h])`, - // exp: &rangeAggregationExpr{ - // left: &logRange{ - // left: &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchNotRegexp, "foo", 
"bar")}}, - // interval: 5 * time.Hour, - // }, - // operation: "rate", - // }, - // }, - // { - // in: `rate({ foo !~ "bar" }[5d])`, - // exp: &rangeAggregationExpr{ - // left: &logRange{ - // left: &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchNotRegexp, "foo", "bar")}}, - // interval: 5 * 24 * time.Hour, - // }, - // operation: "rate", - // }, - // }, - // { - // in: `count_over_time({ foo !~ "bar" }[1w])`, - // exp: &rangeAggregationExpr{ - // left: &logRange{ - // left: &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchNotRegexp, "foo", "bar")}}, - // interval: 7 * 24 * time.Hour, - // }, - // operation: "count_over_time", - // }, - // }, - // { - // in: `sum(rate({ foo !~ "bar" }[5h]))`, - // exp: mustNewVectorAggregationExpr(&rangeAggregationExpr{ - // left: &logRange{ - // left: &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchNotRegexp, "foo", "bar")}}, - // interval: 5 * time.Hour, - // }, - // operation: "rate", - // }, "sum", nil, nil), - // }, - // { - // in: `sum(rate({ foo !~ "bar" }[1y]))`, - // exp: mustNewVectorAggregationExpr(&rangeAggregationExpr{ - // left: &logRange{ - // left: &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchNotRegexp, "foo", "bar")}}, - // interval: 365 * 24 * time.Hour, - // }, - // operation: "rate", - // }, "sum", nil, nil), - // }, - // { - // in: `avg(count_over_time({ foo !~ "bar" }[5h])) by (bar,foo)`, - // exp: mustNewVectorAggregationExpr(&rangeAggregationExpr{ - // left: &logRange{ - // left: &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchNotRegexp, "foo", "bar")}}, - // interval: 5 * time.Hour, - // }, - // operation: "count_over_time", - // }, "avg", &grouping{ - // without: false, - // groups: []string{"bar", "foo"}, - // }, nil), - // }, - // { - // in: `max without (bar) (count_over_time({ foo !~ "bar" }[5h]))`, - // exp: mustNewVectorAggregationExpr(&rangeAggregationExpr{ - // left: &logRange{ - // left: &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchNotRegexp, "foo", "bar")}}, - // interval: 5 * time.Hour, - // }, - // operation: "count_over_time", - // }, "max", &grouping{ - // without: true, - // groups: []string{"bar"}, - // }, nil), - // }, - // { - // in: `topk(10,count_over_time({ foo !~ "bar" }[5h])) without (bar)`, - // exp: mustNewVectorAggregationExpr(&rangeAggregationExpr{ - // left: &logRange{ - // left: &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchNotRegexp, "foo", "bar")}}, - // interval: 5 * time.Hour, - // }, - // operation: "count_over_time", - // }, "topk", &grouping{ - // without: true, - // groups: []string{"bar"}, - // }, newString("10")), - // }, - // { - // in: `bottomk(30 ,sum(rate({ foo !~ "bar" }[5h])) by (foo))`, - // exp: mustNewVectorAggregationExpr(mustNewVectorAggregationExpr(&rangeAggregationExpr{ - // left: &logRange{ - // left: &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchNotRegexp, "foo", "bar")}}, - // interval: 5 * time.Hour, - // }, - // operation: "rate", - // }, "sum", &grouping{ - // groups: []string{"foo"}, - // without: false, - // }, nil), "bottomk", nil, - // newString("30")), - // }, - // { - // in: `max( sum(count_over_time({ foo !~ "bar" }[5h])) without (foo,bar) ) by (foo)`, - // exp: mustNewVectorAggregationExpr(mustNewVectorAggregationExpr(&rangeAggregationExpr{ - // left: &logRange{ - // left: &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchNotRegexp, "foo", "bar")}}, - // interval: 5 * 
time.Hour, - // }, - // operation: "count_over_time", - // }, "sum", &grouping{ - // groups: []string{"foo", "bar"}, - // without: true, - // }, nil), "max", &grouping{ - // groups: []string{"foo"}, - // without: false, - // }, nil), - // }, - // { - // in: `unk({ foo !~ "bar" }[5m])`, - // err: ParseError{ - // msg: "syntax error: unexpected IDENTIFIER", - // line: 1, - // col: 1, - // }, - // }, - // { - // in: `rate({ foo !~ "bar" }[5minutes])`, - // err: ParseError{ - // msg: `not a valid duration string: "5minutes"`, - // line: 0, - // col: 22, - // }, - // }, - // { - // in: `rate({ foo !~ "bar" }[5)`, - // err: ParseError{ - // msg: "missing closing ']' in duration", - // line: 0, - // col: 22, - // }, - // }, - // { - // in: `min({ foo !~ "bar" }[5m])`, - // err: ParseError{ - // msg: "syntax error: unexpected DURATION", - // line: 0, - // col: 21, - // }, - // }, - // { - // in: `sum(3 ,count_over_time({ foo !~ "bar" }[5h]))`, - // err: ParseError{ - // msg: "unsupported parameter for operation sum(3,", - // line: 0, - // col: 0, - // }, - // }, - // { - // in: `topk(count_over_time({ foo !~ "bar" }[5h]))`, - // err: ParseError{ - // msg: "parameter required for operation topk", - // line: 0, - // col: 0, - // }, - // }, - // { - // in: `bottomk(he,count_over_time({ foo !~ "bar" }[5h]))`, - // err: ParseError{ - // msg: "syntax error: unexpected IDENTIFIER", - // line: 1, - // col: 9, - // }, - // }, - // { - // in: `bottomk(1.2,count_over_time({ foo !~ "bar" }[5h]))`, - // err: ParseError{ - // msg: "invalid parameter bottomk(1.2,", - // line: 0, - // col: 0, - // }, - // }, - // { - // in: `stddev({ foo !~ "bar" })`, - // err: ParseError{ - // msg: "syntax error: unexpected )", - // line: 1, - // col: 24, - // }, - // }, - // { - // in: `{ foo = "bar", bar != "baz" }`, - // exp: &matchersExpr{matchers: []*labels.Matcher{ - // mustNewMatcher(labels.MatchEqual, "foo", "bar"), - // mustNewMatcher(labels.MatchNotEqual, "bar", "baz"), - // }}, - // }, - // { - // in: `{foo="bar"} |= "baz"`, - // exp: &filterExpr{ - // left: &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}}, - // ty: labels.MatchEqual, - // match: "baz", - // }, - // }, - // { - // in: `{foo="bar"} |= "baz" |~ "blip" != "flip" !~ "flap"`, - // exp: &filterExpr{ - // left: &filterExpr{ - // left: &filterExpr{ - // left: &filterExpr{ - // left: &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}}, - // ty: labels.MatchEqual, - // match: "baz", - // }, - // ty: labels.MatchRegexp, - // match: "blip", - // }, - // ty: labels.MatchNotEqual, - // match: "flip", - // }, - // ty: labels.MatchNotRegexp, - // match: "flap", - // }, - // }, - // { - // in: `count_over_time(({foo="bar"} |= "baz" |~ "blip" != "flip" !~ "flap")[5m])`, - // exp: newRangeAggregationExpr( - // &logRange{ - // left: &filterExpr{ - // left: &filterExpr{ - // left: &filterExpr{ - // left: &filterExpr{ - // left: &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}}, - // ty: labels.MatchEqual, - // match: "baz", - // }, - // ty: labels.MatchRegexp, - // match: "blip", - // }, - // ty: labels.MatchNotEqual, - // match: "flip", - // }, - // ty: labels.MatchNotRegexp, - // match: "flap", - // }, - // interval: 5 * time.Minute, - // }, OpRangeTypeCount), - // }, - // { - // in: `bytes_over_time(({foo="bar"} |= "baz" |~ "blip" != "flip" !~ "flap")[5m])`, - // exp: newRangeAggregationExpr( - // &logRange{ - // left: &filterExpr{ - // left: 
&filterExpr{ - // left: &filterExpr{ - // left: &filterExpr{ - // left: &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}}, - // ty: labels.MatchEqual, - // match: "baz", - // }, - // ty: labels.MatchRegexp, - // match: "blip", - // }, - // ty: labels.MatchNotEqual, - // match: "flip", - // }, - // ty: labels.MatchNotRegexp, - // match: "flap", - // }, - // interval: 5 * time.Minute, - // }, OpRangeTypeBytes), - // }, - // { - // in: `sum(count_over_time(({foo="bar"} |= "baz" |~ "blip" != "flip" !~ "flap")[5m])) by (foo)`, - // exp: mustNewVectorAggregationExpr(newRangeAggregationExpr( - // &logRange{ - // left: &filterExpr{ - // left: &filterExpr{ - // left: &filterExpr{ - // left: &filterExpr{ - // left: &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}}, - // ty: labels.MatchEqual, - // match: "baz", - // }, - // ty: labels.MatchRegexp, - // match: "blip", - // }, - // ty: labels.MatchNotEqual, - // match: "flip", - // }, - // ty: labels.MatchNotRegexp, - // match: "flap", - // }, - // interval: 5 * time.Minute, - // }, OpRangeTypeCount), - // "sum", - // &grouping{ - // without: false, - // groups: []string{"foo"}, - // }, - // nil), - // }, - // { - // in: `sum(bytes_rate(({foo="bar"} |= "baz" |~ "blip" != "flip" !~ "flap")[5m])) by (foo)`, - // exp: mustNewVectorAggregationExpr(newRangeAggregationExpr( - // &logRange{ - // left: &filterExpr{ - // left: &filterExpr{ - // left: &filterExpr{ - // left: &filterExpr{ - // left: &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}}, - // ty: labels.MatchEqual, - // match: "baz", - // }, - // ty: labels.MatchRegexp, - // match: "blip", - // }, - // ty: labels.MatchNotEqual, - // match: "flip", - // }, - // ty: labels.MatchNotRegexp, - // match: "flap", - // }, - // interval: 5 * time.Minute, - // }, OpRangeTypeBytesRate), - // "sum", - // &grouping{ - // without: false, - // groups: []string{"foo"}, - // }, - // nil), - // }, - // { - // in: `topk(5,count_over_time(({foo="bar"} |= "baz" |~ "blip" != "flip" !~ "flap")[5m])) without (foo)`, - // exp: mustNewVectorAggregationExpr(newRangeAggregationExpr( - // &logRange{ - // left: &filterExpr{ - // left: &filterExpr{ - // left: &filterExpr{ - // left: &filterExpr{ - // left: &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}}, - // ty: labels.MatchEqual, - // match: "baz", - // }, - // ty: labels.MatchRegexp, - // match: "blip", - // }, - // ty: labels.MatchNotEqual, - // match: "flip", - // }, - // ty: labels.MatchNotRegexp, - // match: "flap", - // }, - // interval: 5 * time.Minute, - // }, OpRangeTypeCount), - // "topk", - // &grouping{ - // without: true, - // groups: []string{"foo"}, - // }, - // newString("5")), - // }, - // { - // in: `topk(5,sum(rate(({foo="bar"} |= "baz" |~ "blip" != "flip" !~ "flap")[5m])) by (app))`, - // exp: mustNewVectorAggregationExpr( - // mustNewVectorAggregationExpr( - // newRangeAggregationExpr( - // &logRange{ - // left: &filterExpr{ - // left: &filterExpr{ - // left: &filterExpr{ - // left: &filterExpr{ - // left: &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}}, - // ty: labels.MatchEqual, - // match: "baz", - // }, - // ty: labels.MatchRegexp, - // match: "blip", - // }, - // ty: labels.MatchNotEqual, - // match: "flip", - // }, - // ty: labels.MatchNotRegexp, - // match: "flap", - // }, - // interval: 5 * time.Minute, - // }, OpRangeTypeRate), - // 
"sum", - // &grouping{ - // without: false, - // groups: []string{"app"}, - // }, - // nil), - // "topk", - // nil, - // newString("5")), - // }, - // { - // in: `count_over_time({foo="bar"}[5m] |= "baz" |~ "blip" != "flip" !~ "flap")`, - // exp: newRangeAggregationExpr( - // &logRange{ - // left: &filterExpr{ - // left: &filterExpr{ - // left: &filterExpr{ - // left: &filterExpr{ - // left: &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}}, - // ty: labels.MatchEqual, - // match: "baz", - // }, - // ty: labels.MatchRegexp, - // match: "blip", - // }, - // ty: labels.MatchNotEqual, - // match: "flip", - // }, - // ty: labels.MatchNotRegexp, - // match: "flap", - // }, - // interval: 5 * time.Minute, - // }, OpRangeTypeCount), - // }, - // { - // in: `sum(count_over_time({foo="bar"}[5m] |= "baz" |~ "blip" != "flip" !~ "flap")) by (foo)`, - // exp: mustNewVectorAggregationExpr(newRangeAggregationExpr( - // &logRange{ - // left: &filterExpr{ - // left: &filterExpr{ - // left: &filterExpr{ - // left: &filterExpr{ - // left: &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}}, - // ty: labels.MatchEqual, - // match: "baz", - // }, - // ty: labels.MatchRegexp, - // match: "blip", - // }, - // ty: labels.MatchNotEqual, - // match: "flip", - // }, - // ty: labels.MatchNotRegexp, - // match: "flap", - // }, - // interval: 5 * time.Minute, - // }, OpRangeTypeCount), - // "sum", - // &grouping{ - // without: false, - // groups: []string{"foo"}, - // }, - // nil), - // }, - // { - // in: `topk(5,count_over_time({foo="bar"}[5m] |= "baz" |~ "blip" != "flip" !~ "flap")) without (foo)`, - // exp: mustNewVectorAggregationExpr(newRangeAggregationExpr( - // &logRange{ - // left: &filterExpr{ - // left: &filterExpr{ - // left: &filterExpr{ - // left: &filterExpr{ - // left: &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}}, - // ty: labels.MatchEqual, - // match: "baz", - // }, - // ty: labels.MatchRegexp, - // match: "blip", - // }, - // ty: labels.MatchNotEqual, - // match: "flip", - // }, - // ty: labels.MatchNotRegexp, - // match: "flap", - // }, - // interval: 5 * time.Minute, - // }, OpRangeTypeCount), - // "topk", - // &grouping{ - // without: true, - // groups: []string{"foo"}, - // }, - // newString("5")), - // }, - // { - // in: `topk(5,sum(rate({foo="bar"}[5m] |= "baz" |~ "blip" != "flip" !~ "flap")) by (app))`, - // exp: mustNewVectorAggregationExpr( - // mustNewVectorAggregationExpr( - // newRangeAggregationExpr( - // &logRange{ - // left: &filterExpr{ - // left: &filterExpr{ - // left: &filterExpr{ - // left: &filterExpr{ - // left: &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}}, - // ty: labels.MatchEqual, - // match: "baz", - // }, - // ty: labels.MatchRegexp, - // match: "blip", - // }, - // ty: labels.MatchNotEqual, - // match: "flip", - // }, - // ty: labels.MatchNotRegexp, - // match: "flap", - // }, - // interval: 5 * time.Minute, - // }, OpRangeTypeRate), - // "sum", - // &grouping{ - // without: false, - // groups: []string{"app"}, - // }, - // nil), - // "topk", - // nil, - // newString("5")), - // }, - // { - // in: `{foo="bar}`, - // err: ParseError{ - // msg: "literal not terminated", - // line: 1, - // col: 6, - // }, - // }, - // { - // in: `{foo="bar"`, - // err: ParseError{ - // msg: "syntax error: unexpected $end, expecting } or ,", - // line: 1, - // col: 11, - // }, - // }, + { + // test [12h] before filter 
expr + in: `count_over_time({foo="bar"}[12h] |= "error")`, + exp: &rangeAggregationExpr{ + operation: "count_over_time", + left: &logRange{ + left: newPipelineExpr( + newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "foo", Value: "bar"}}), + MultiPipelineExpr{ + newLineFilterExpr(nil, labels.MatchEqual, "error"), + }, + ), + interval: 12 * time.Hour, + }, + }, + }, + { + // test [12h] after filter expr + in: `count_over_time({foo="bar"} |= "error" [12h])`, + exp: &rangeAggregationExpr{ + operation: "count_over_time", + left: &logRange{ + left: newPipelineExpr( + newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "foo", Value: "bar"}}), + MultiPipelineExpr{newLineFilterExpr(nil, labels.MatchEqual, "error")}, + ), + interval: 12 * time.Hour, + }, + }, + }, + { + in: `{foo="bar"}`, + exp: &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}}, + }, + { + in: `{ foo = "bar" }`, + exp: &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}}, + }, + { + in: `{ foo != "bar" }`, + exp: &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchNotEqual, "foo", "bar")}}, + }, + { + in: `{ foo =~ "bar" }`, + exp: &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchRegexp, "foo", "bar")}}, + }, + { + in: `{ foo !~ "bar" }`, + exp: &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchNotRegexp, "foo", "bar")}}, + }, + { + in: `count_over_time({ foo !~ "bar" }[12m])`, + exp: &rangeAggregationExpr{ + left: &logRange{ + left: &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchNotRegexp, "foo", "bar")}}, + interval: 12 * time.Minute, + }, + operation: "count_over_time", + }, + }, + { + in: `bytes_over_time({ foo !~ "bar" }[12m])`, + exp: &rangeAggregationExpr{ + left: &logRange{ + left: &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchNotRegexp, "foo", "bar")}}, + interval: 12 * time.Minute, + }, + operation: OpRangeTypeBytes, + }, + }, + { + in: `bytes_rate({ foo !~ "bar" }[12m])`, + exp: &rangeAggregationExpr{ + left: &logRange{ + left: &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchNotRegexp, "foo", "bar")}}, + interval: 12 * time.Minute, + }, + operation: OpRangeTypeBytesRate, + }, + }, + { + in: `rate({ foo !~ "bar" }[5h])`, + exp: &rangeAggregationExpr{ + left: &logRange{ + left: &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchNotRegexp, "foo", "bar")}}, + interval: 5 * time.Hour, + }, + operation: "rate", + }, + }, + { + in: `rate({ foo !~ "bar" }[5d])`, + exp: &rangeAggregationExpr{ + left: &logRange{ + left: &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchNotRegexp, "foo", "bar")}}, + interval: 5 * 24 * time.Hour, + }, + operation: "rate", + }, + }, + { + in: `count_over_time({ foo !~ "bar" }[1w])`, + exp: &rangeAggregationExpr{ + left: &logRange{ + left: &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchNotRegexp, "foo", "bar")}}, + interval: 7 * 24 * time.Hour, + }, + operation: "count_over_time", + }, + }, + { + in: `sum(rate({ foo !~ "bar" }[5h]))`, + exp: mustNewVectorAggregationExpr(&rangeAggregationExpr{ + left: &logRange{ + left: &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchNotRegexp, "foo", "bar")}}, + interval: 5 * time.Hour, + }, + operation: "rate", + }, "sum", nil, nil), + }, + { + in: `sum(rate({ foo !~ "bar" }[1y]))`, + exp: mustNewVectorAggregationExpr(&rangeAggregationExpr{ + left: &logRange{ + 
left: &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchNotRegexp, "foo", "bar")}}, + interval: 365 * 24 * time.Hour, + }, + operation: "rate", + }, "sum", nil, nil), + }, + { + in: `avg(count_over_time({ foo !~ "bar" }[5h])) by (bar,foo)`, + exp: mustNewVectorAggregationExpr(&rangeAggregationExpr{ + left: &logRange{ + left: &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchNotRegexp, "foo", "bar")}}, + interval: 5 * time.Hour, + }, + operation: "count_over_time", + }, "avg", &grouping{ + without: false, + groups: []string{"bar", "foo"}, + }, nil), + }, + { + in: `max without (bar) (count_over_time({ foo !~ "bar" }[5h]))`, + exp: mustNewVectorAggregationExpr(&rangeAggregationExpr{ + left: &logRange{ + left: &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchNotRegexp, "foo", "bar")}}, + interval: 5 * time.Hour, + }, + operation: "count_over_time", + }, "max", &grouping{ + without: true, + groups: []string{"bar"}, + }, nil), + }, + { + in: `topk(10,count_over_time({ foo !~ "bar" }[5h])) without (bar)`, + exp: mustNewVectorAggregationExpr(&rangeAggregationExpr{ + left: &logRange{ + left: &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchNotRegexp, "foo", "bar")}}, + interval: 5 * time.Hour, + }, + operation: "count_over_time", + }, "topk", &grouping{ + without: true, + groups: []string{"bar"}, + }, newString("10")), + }, + { + in: `bottomk(30 ,sum(rate({ foo !~ "bar" }[5h])) by (foo))`, + exp: mustNewVectorAggregationExpr(mustNewVectorAggregationExpr(&rangeAggregationExpr{ + left: &logRange{ + left: &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchNotRegexp, "foo", "bar")}}, + interval: 5 * time.Hour, + }, + operation: "rate", + }, "sum", &grouping{ + groups: []string{"foo"}, + without: false, + }, nil), "bottomk", nil, + newString("30")), + }, + { + in: `max( sum(count_over_time({ foo !~ "bar" }[5h])) without (foo,bar) ) by (foo)`, + exp: mustNewVectorAggregationExpr(mustNewVectorAggregationExpr(&rangeAggregationExpr{ + left: &logRange{ + left: &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchNotRegexp, "foo", "bar")}}, + interval: 5 * time.Hour, + }, + operation: "count_over_time", + }, "sum", &grouping{ + groups: []string{"foo", "bar"}, + without: true, + }, nil), "max", &grouping{ + groups: []string{"foo"}, + without: false, + }, nil), + }, + { + in: `unk({ foo !~ "bar" }[5m])`, + err: ParseError{ + msg: "syntax error: unexpected IDENTIFIER", + line: 1, + col: 1, + }, + }, + { + in: `rate({ foo !~ "bar" }[5minutes])`, + err: ParseError{ + msg: `not a valid duration string: "5minutes"`, + line: 0, + col: 22, + }, + }, + { + in: `rate({ foo !~ "bar" }[5)`, + err: ParseError{ + msg: "missing closing ']' in duration", + line: 0, + col: 22, + }, + }, + { + in: `min({ foo !~ "bar" }[5m])`, + err: ParseError{ + msg: "syntax error: unexpected RANGE", + line: 0, + col: 21, + }, + }, + { + in: `sum(3 ,count_over_time({ foo !~ "bar" }[5h]))`, + err: ParseError{ + msg: "unsupported parameter for operation sum(3,", + line: 0, + col: 0, + }, + }, + { + in: `topk(count_over_time({ foo !~ "bar" }[5h]))`, + err: ParseError{ + msg: "parameter required for operation topk", + line: 0, + col: 0, + }, + }, + { + in: `bottomk(he,count_over_time({ foo !~ "bar" }[5h]))`, + err: ParseError{ + msg: "syntax error: unexpected IDENTIFIER", + line: 1, + col: 9, + }, + }, + { + in: `bottomk(1.2,count_over_time({ foo !~ "bar" }[5h]))`, + err: ParseError{ + msg: "invalid parameter bottomk(1.2,", + line: 0, + 
col: 0, + }, + }, + { + in: `stddev({ foo !~ "bar" })`, + err: ParseError{ + msg: "syntax error: unexpected )", + line: 1, + col: 24, + }, + }, + { + in: `{ foo = "bar", bar != "baz" }`, + exp: &matchersExpr{matchers: []*labels.Matcher{ + mustNewMatcher(labels.MatchEqual, "foo", "bar"), + mustNewMatcher(labels.MatchNotEqual, "bar", "baz"), + }}, + }, + { + in: `{foo="bar"} |= "baz"`, + exp: newPipelineExpr( + newMatcherExpr([]*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}), + MultiPipelineExpr{newLineFilterExpr(nil, labels.MatchEqual, "baz")}, + ), + }, + { + in: `{foo="bar"} |= "baz" |~ "blip" != "flip" !~ "flap"`, + exp: newPipelineExpr( + newMatcherExpr([]*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}), + MultiPipelineExpr{ + newLineFilterExpr( + newLineFilterExpr( + newLineFilterExpr( + newLineFilterExpr(nil, labels.MatchEqual, "baz"), + labels.MatchRegexp, "blip"), + labels.MatchNotEqual, "flip"), + labels.MatchNotRegexp, "flap"), + }, + ), + }, + { + in: `count_over_time(({foo="bar"} |= "baz" |~ "blip" != "flip" !~ "flap")[5m])`, + exp: newRangeAggregationExpr( + &logRange{ + left: newPipelineExpr( + newMatcherExpr([]*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}), + MultiPipelineExpr{ + newLineFilterExpr( + newLineFilterExpr( + newLineFilterExpr( + newLineFilterExpr(nil, labels.MatchEqual, "baz"), + labels.MatchRegexp, "blip"), + labels.MatchNotEqual, "flip"), + labels.MatchNotRegexp, "flap"), + }, + ), + interval: 5 * time.Minute, + }, OpRangeTypeCount), + }, + { + in: `bytes_over_time(({foo="bar"} |= "baz" |~ "blip" != "flip" !~ "flap")[5m])`, + exp: newRangeAggregationExpr( + &logRange{ + left: newPipelineExpr( + newMatcherExpr([]*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}), + MultiPipelineExpr{ + newLineFilterExpr( + newLineFilterExpr( + newLineFilterExpr( + newLineFilterExpr(nil, labels.MatchEqual, "baz"), + labels.MatchRegexp, "blip"), + labels.MatchNotEqual, "flip"), + labels.MatchNotRegexp, "flap"), + }, + ), + interval: 5 * time.Minute, + }, OpRangeTypeBytes), + }, + { + in: `sum(count_over_time(({foo="bar"} |= "baz" |~ "blip" != "flip" !~ "flap")[5m])) by (foo)`, + exp: mustNewVectorAggregationExpr(newRangeAggregationExpr( + &logRange{ + left: newPipelineExpr( + newMatcherExpr([]*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}), + MultiPipelineExpr{ + newLineFilterExpr( + newLineFilterExpr( + newLineFilterExpr( + newLineFilterExpr(nil, labels.MatchEqual, "baz"), + labels.MatchRegexp, "blip"), + labels.MatchNotEqual, "flip"), + labels.MatchNotRegexp, "flap"), + }, + ), + interval: 5 * time.Minute, + }, OpRangeTypeCount), + "sum", + &grouping{ + without: false, + groups: []string{"foo"}, + }, + nil), + }, + { + in: `sum(bytes_rate(({foo="bar"} |= "baz" |~ "blip" != "flip" !~ "flap")[5m])) by (foo)`, + exp: mustNewVectorAggregationExpr(newRangeAggregationExpr( + &logRange{ + left: newPipelineExpr( + newMatcherExpr([]*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}), + MultiPipelineExpr{ + newLineFilterExpr( + newLineFilterExpr( + newLineFilterExpr( + newLineFilterExpr(nil, labels.MatchEqual, "baz"), + labels.MatchRegexp, "blip"), + labels.MatchNotEqual, "flip"), + labels.MatchNotRegexp, "flap"), + }, + ), + interval: 5 * time.Minute, + }, OpRangeTypeBytesRate), + "sum", + &grouping{ + without: false, + groups: []string{"foo"}, + }, + nil), + }, + { + in: `topk(5,count_over_time(({foo="bar"} |= "baz" |~ "blip" != "flip" !~ "flap")[5m])) without (foo)`, + exp: 
mustNewVectorAggregationExpr(newRangeAggregationExpr( + &logRange{ + left: newPipelineExpr( + newMatcherExpr([]*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}), + MultiPipelineExpr{ + newLineFilterExpr( + newLineFilterExpr( + newLineFilterExpr( + newLineFilterExpr(nil, labels.MatchEqual, "baz"), + labels.MatchRegexp, "blip"), + labels.MatchNotEqual, "flip"), + labels.MatchNotRegexp, "flap"), + }, + ), + interval: 5 * time.Minute, + }, OpRangeTypeCount), + "topk", + &grouping{ + without: true, + groups: []string{"foo"}, + }, + newString("5")), + }, + { + in: `topk(5,sum(rate(({foo="bar"} |= "baz" |~ "blip" != "flip" !~ "flap")[5m])) by (app))`, + exp: mustNewVectorAggregationExpr( + mustNewVectorAggregationExpr( + newRangeAggregationExpr( + &logRange{ + left: newPipelineExpr( + newMatcherExpr([]*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}), + MultiPipelineExpr{ + newLineFilterExpr( + newLineFilterExpr( + newLineFilterExpr( + newLineFilterExpr(nil, labels.MatchEqual, "baz"), + labels.MatchRegexp, "blip"), + labels.MatchNotEqual, "flip"), + labels.MatchNotRegexp, "flap"), + }, + ), + interval: 5 * time.Minute, + }, OpRangeTypeRate), + "sum", + &grouping{ + without: false, + groups: []string{"app"}, + }, + nil), + "topk", + nil, + newString("5")), + }, + { + in: `count_over_time({foo="bar"}[5m] |= "baz" |~ "blip" != "flip" !~ "flap")`, + exp: newRangeAggregationExpr( + &logRange{ + left: newPipelineExpr( + newMatcherExpr([]*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}), + MultiPipelineExpr{ + newLineFilterExpr( + newLineFilterExpr( + newLineFilterExpr( + newLineFilterExpr(nil, labels.MatchEqual, "baz"), + labels.MatchRegexp, "blip"), + labels.MatchNotEqual, "flip"), + labels.MatchNotRegexp, "flap"), + }, + ), + interval: 5 * time.Minute, + }, OpRangeTypeCount), + }, + { + in: `sum(count_over_time({foo="bar"}[5m] |= "baz" |~ "blip" != "flip" !~ "flap")) by (foo)`, + exp: mustNewVectorAggregationExpr(newRangeAggregationExpr( + &logRange{ + left: newPipelineExpr( + newMatcherExpr([]*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}), + MultiPipelineExpr{ + newLineFilterExpr( + newLineFilterExpr( + newLineFilterExpr( + newLineFilterExpr(nil, labels.MatchEqual, "baz"), + labels.MatchRegexp, "blip"), + labels.MatchNotEqual, "flip"), + labels.MatchNotRegexp, "flap"), + }, + ), + interval: 5 * time.Minute, + }, OpRangeTypeCount), + "sum", + &grouping{ + without: false, + groups: []string{"foo"}, + }, + nil), + }, + { + in: `topk(5,count_over_time({foo="bar"}[5m] |= "baz" |~ "blip" != "flip" !~ "flap")) without (foo)`, + exp: mustNewVectorAggregationExpr(newRangeAggregationExpr( + &logRange{ + left: newPipelineExpr( + newMatcherExpr([]*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}), + MultiPipelineExpr{ + newLineFilterExpr( + newLineFilterExpr( + newLineFilterExpr( + newLineFilterExpr(nil, labels.MatchEqual, "baz"), + labels.MatchRegexp, "blip"), + labels.MatchNotEqual, "flip"), + labels.MatchNotRegexp, "flap"), + }, + ), + interval: 5 * time.Minute, + }, OpRangeTypeCount), + "topk", + &grouping{ + without: true, + groups: []string{"foo"}, + }, + newString("5")), + }, + { + in: `topk(5,sum(rate({foo="bar"}[5m] |= "baz" |~ "blip" != "flip" !~ "flap")) by (app))`, + exp: mustNewVectorAggregationExpr( + mustNewVectorAggregationExpr( + newRangeAggregationExpr( + &logRange{ + left: newPipelineExpr( + newMatcherExpr([]*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}), + MultiPipelineExpr{ + 
newLineFilterExpr( + newLineFilterExpr( + newLineFilterExpr( + newLineFilterExpr(nil, labels.MatchEqual, "baz"), + labels.MatchRegexp, "blip"), + labels.MatchNotEqual, "flip"), + labels.MatchNotRegexp, "flap"), + }, + ), + interval: 5 * time.Minute, + }, OpRangeTypeRate), + "sum", + &grouping{ + without: false, + groups: []string{"app"}, + }, + nil), + "topk", + nil, + newString("5")), + }, + { + in: `{foo="bar}`, + err: ParseError{ + msg: "literal not terminated", + line: 1, + col: 6, + }, + }, + { + in: `{foo="bar"`, + err: ParseError{ + msg: "syntax error: unexpected $end, expecting } or ,", + line: 1, + col: 11, + }, + }, - // { - // in: `{foo="bar"} |~`, - // err: ParseError{ - // msg: "syntax error: unexpected $end, expecting STRING", - // line: 1, - // col: 15, - // }, - // }, + { + in: `{foo="bar"} |~`, + err: ParseError{ + msg: "syntax error: unexpected $end, expecting STRING", + line: 1, + col: 15, + }, + }, - // { - // in: `{foo="bar"} "foo"`, - // err: ParseError{ - // msg: "syntax error: unexpected STRING", - // line: 1, - // col: 13, - // }, - // }, - // { - // in: `{foo="bar"} foo`, - // err: ParseError{ - // msg: "syntax error: unexpected IDENTIFIER", - // line: 1, - // col: 13, - // }, - // }, - // { - // // require left associativity - // in: ` - // sum(count_over_time({foo="bar"}[5m])) by (foo) / - // sum(count_over_time({foo="bar"}[5m])) by (foo) / - // sum(count_over_time({foo="bar"}[5m])) by (foo) - // `, - // exp: mustNewBinOpExpr( - // OpTypeDiv, - // BinOpOptions{}, - // mustNewBinOpExpr( - // OpTypeDiv, - // BinOpOptions{}, - // mustNewVectorAggregationExpr(newRangeAggregationExpr( - // &logRange{ - // left: &matchersExpr{ - // matchers: []*labels.Matcher{ - // mustNewMatcher(labels.MatchEqual, "foo", "bar"), - // }, - // }, - // interval: 5 * time.Minute, - // }, OpRangeTypeCount), - // "sum", - // &grouping{ - // without: false, - // groups: []string{"foo"}, - // }, - // nil, - // ), - // mustNewVectorAggregationExpr(newRangeAggregationExpr( - // &logRange{ - // left: &matchersExpr{ - // matchers: []*labels.Matcher{ - // mustNewMatcher(labels.MatchEqual, "foo", "bar"), - // }, - // }, - // interval: 5 * time.Minute, - // }, OpRangeTypeCount), - // "sum", - // &grouping{ - // without: false, - // groups: []string{"foo"}, - // }, - // nil, - // ), - // ), - // mustNewVectorAggregationExpr(newRangeAggregationExpr( - // &logRange{ - // left: &matchersExpr{ - // matchers: []*labels.Matcher{ - // mustNewMatcher(labels.MatchEqual, "foo", "bar"), - // }, - // }, - // interval: 5 * time.Minute, - // }, OpRangeTypeCount), - // "sum", - // &grouping{ - // without: false, - // groups: []string{"foo"}, - // }, - // nil, - // ), - // ), - // }, - // { - // in: ` - // sum(count_over_time({foo="bar"}[5m])) by (foo) ^ - // sum(count_over_time({foo="bar"}[5m])) by (foo) / - // sum(count_over_time({foo="bar"}[5m])) by (foo) - // `, - // exp: mustNewBinOpExpr( - // OpTypeDiv, - // BinOpOptions{}, - // mustNewBinOpExpr( - // OpTypePow, - // BinOpOptions{}, - // mustNewVectorAggregationExpr(newRangeAggregationExpr( - // &logRange{ - // left: &matchersExpr{ - // matchers: []*labels.Matcher{ - // mustNewMatcher(labels.MatchEqual, "foo", "bar"), - // }, - // }, - // interval: 5 * time.Minute, - // }, OpRangeTypeCount), - // "sum", - // &grouping{ - // without: false, - // groups: []string{"foo"}, - // }, - // nil, - // ), - // mustNewVectorAggregationExpr(newRangeAggregationExpr( - // &logRange{ - // left: &matchersExpr{ - // matchers: []*labels.Matcher{ - // 
mustNewMatcher(labels.MatchEqual, "foo", "bar"), - // }, - // }, - // interval: 5 * time.Minute, - // }, OpRangeTypeCount), - // "sum", - // &grouping{ - // without: false, - // groups: []string{"foo"}, - // }, - // nil, - // ), - // ), - // mustNewVectorAggregationExpr(newRangeAggregationExpr( - // &logRange{ - // left: &matchersExpr{ - // matchers: []*labels.Matcher{ - // mustNewMatcher(labels.MatchEqual, "foo", "bar"), - // }, - // }, - // interval: 5 * time.Minute, - // }, OpRangeTypeCount), - // "sum", - // &grouping{ - // without: false, - // groups: []string{"foo"}, - // }, - // nil, - // ), - // ), - // }, - // { - // // operator precedence before left associativity - // in: ` - // sum(count_over_time({foo="bar"}[5m])) by (foo) + - // sum(count_over_time({foo="bar"}[5m])) by (foo) / - // sum(count_over_time({foo="bar"}[5m])) by (foo) - // `, - // exp: mustNewBinOpExpr( - // OpTypeAdd, - // BinOpOptions{}, - // mustNewVectorAggregationExpr(newRangeAggregationExpr( - // &logRange{ - // left: &matchersExpr{ - // matchers: []*labels.Matcher{ - // mustNewMatcher(labels.MatchEqual, "foo", "bar"), - // }, - // }, - // interval: 5 * time.Minute, - // }, OpRangeTypeCount), - // "sum", - // &grouping{ - // without: false, - // groups: []string{"foo"}, - // }, - // nil, - // ), - // mustNewBinOpExpr( - // OpTypeDiv, - // BinOpOptions{}, - // mustNewVectorAggregationExpr(newRangeAggregationExpr( - // &logRange{ - // left: &matchersExpr{ - // matchers: []*labels.Matcher{ - // mustNewMatcher(labels.MatchEqual, "foo", "bar"), - // }, - // }, - // interval: 5 * time.Minute, - // }, OpRangeTypeCount), - // "sum", - // &grouping{ - // without: false, - // groups: []string{"foo"}, - // }, - // nil, - // ), - // mustNewVectorAggregationExpr(newRangeAggregationExpr( - // &logRange{ - // left: &matchersExpr{ - // matchers: []*labels.Matcher{ - // mustNewMatcher(labels.MatchEqual, "foo", "bar"), - // }, - // }, - // interval: 5 * time.Minute, - // }, OpRangeTypeCount), - // "sum", - // &grouping{ - // without: false, - // groups: []string{"foo"}, - // }, - // nil, - // ), - // ), - // ), - // }, - // { - // in: `sum by (job) ( - // count_over_time({namespace="tns"} |= "level=error"[5m]) - // / - // count_over_time({namespace="tns"}[5m]) - // )`, - // exp: mustNewVectorAggregationExpr( - // mustNewBinOpExpr(OpTypeDiv, - // BinOpOptions{}, - // newRangeAggregationExpr( - // &logRange{ - // left: &filterExpr{ - // left: &matchersExpr{ - // matchers: []*labels.Matcher{ - // mustNewMatcher(labels.MatchEqual, "namespace", "tns"), - // }, - // }, - // match: "level=error", - // ty: labels.MatchEqual, - // }, - // interval: 5 * time.Minute, - // }, OpRangeTypeCount), - // newRangeAggregationExpr( - // &logRange{ - // left: &matchersExpr{ - // matchers: []*labels.Matcher{ - // mustNewMatcher(labels.MatchEqual, "namespace", "tns"), - // }, - // }, - // interval: 5 * time.Minute, - // }, OpRangeTypeCount)), OpTypeSum, &grouping{groups: []string{"job"}}, nil), - // }, - // { - // in: `sum by (job) ( - // count_over_time({namespace="tns"} |= "level=error"[5m]) - // / - // count_over_time({namespace="tns"}[5m]) - // ) * 100`, - // exp: mustNewBinOpExpr(OpTypeMul, BinOpOptions{}, mustNewVectorAggregationExpr( - // mustNewBinOpExpr(OpTypeDiv, - // BinOpOptions{}, - // newRangeAggregationExpr( - // &logRange{ - // left: &filterExpr{ - // left: &matchersExpr{ - // matchers: []*labels.Matcher{ - // mustNewMatcher(labels.MatchEqual, "namespace", "tns"), - // }, - // }, - // match: "level=error", - // ty: labels.MatchEqual, 
- // }, - // interval: 5 * time.Minute, - // }, OpRangeTypeCount), - // newRangeAggregationExpr( - // &logRange{ - // left: &matchersExpr{ - // matchers: []*labels.Matcher{ - // mustNewMatcher(labels.MatchEqual, "namespace", "tns"), - // }, - // }, - // interval: 5 * time.Minute, - // }, OpRangeTypeCount)), OpTypeSum, &grouping{groups: []string{"job"}}, nil), - // mustNewLiteralExpr("100", false), - // ), - // }, + { + in: `{foo="bar"} "foo"`, + err: ParseError{ + msg: "syntax error: unexpected STRING", + line: 1, + col: 13, + }, + }, + { + in: `{foo="bar"} foo`, + err: ParseError{ + msg: "syntax error: unexpected IDENTIFIER", + line: 1, + col: 13, + }, + }, + { + // require left associativity + in: ` + sum(count_over_time({foo="bar"}[5m])) by (foo) / + sum(count_over_time({foo="bar"}[5m])) by (foo) / + sum(count_over_time({foo="bar"}[5m])) by (foo) + `, + exp: mustNewBinOpExpr( + OpTypeDiv, + BinOpOptions{}, + mustNewBinOpExpr( + OpTypeDiv, + BinOpOptions{}, + mustNewVectorAggregationExpr(newRangeAggregationExpr( + &logRange{ + left: &matchersExpr{ + matchers: []*labels.Matcher{ + mustNewMatcher(labels.MatchEqual, "foo", "bar"), + }, + }, + interval: 5 * time.Minute, + }, OpRangeTypeCount), + "sum", + &grouping{ + without: false, + groups: []string{"foo"}, + }, + nil, + ), + mustNewVectorAggregationExpr(newRangeAggregationExpr( + &logRange{ + left: &matchersExpr{ + matchers: []*labels.Matcher{ + mustNewMatcher(labels.MatchEqual, "foo", "bar"), + }, + }, + interval: 5 * time.Minute, + }, OpRangeTypeCount), + "sum", + &grouping{ + without: false, + groups: []string{"foo"}, + }, + nil, + ), + ), + mustNewVectorAggregationExpr(newRangeAggregationExpr( + &logRange{ + left: &matchersExpr{ + matchers: []*labels.Matcher{ + mustNewMatcher(labels.MatchEqual, "foo", "bar"), + }, + }, + interval: 5 * time.Minute, + }, OpRangeTypeCount), + "sum", + &grouping{ + without: false, + groups: []string{"foo"}, + }, + nil, + ), + ), + }, + { + in: ` + sum(count_over_time({foo="bar"}[5m])) by (foo) ^ + sum(count_over_time({foo="bar"}[5m])) by (foo) / + sum(count_over_time({foo="bar"}[5m])) by (foo) + `, + exp: mustNewBinOpExpr( + OpTypeDiv, + BinOpOptions{}, + mustNewBinOpExpr( + OpTypePow, + BinOpOptions{}, + mustNewVectorAggregationExpr(newRangeAggregationExpr( + &logRange{ + left: &matchersExpr{ + matchers: []*labels.Matcher{ + mustNewMatcher(labels.MatchEqual, "foo", "bar"), + }, + }, + interval: 5 * time.Minute, + }, OpRangeTypeCount), + "sum", + &grouping{ + without: false, + groups: []string{"foo"}, + }, + nil, + ), + mustNewVectorAggregationExpr(newRangeAggregationExpr( + &logRange{ + left: &matchersExpr{ + matchers: []*labels.Matcher{ + mustNewMatcher(labels.MatchEqual, "foo", "bar"), + }, + }, + interval: 5 * time.Minute, + }, OpRangeTypeCount), + "sum", + &grouping{ + without: false, + groups: []string{"foo"}, + }, + nil, + ), + ), + mustNewVectorAggregationExpr(newRangeAggregationExpr( + &logRange{ + left: &matchersExpr{ + matchers: []*labels.Matcher{ + mustNewMatcher(labels.MatchEqual, "foo", "bar"), + }, + }, + interval: 5 * time.Minute, + }, OpRangeTypeCount), + "sum", + &grouping{ + without: false, + groups: []string{"foo"}, + }, + nil, + ), + ), + }, + { + // operator precedence before left associativity + in: ` + sum(count_over_time({foo="bar"}[5m])) by (foo) + + sum(count_over_time({foo="bar"}[5m])) by (foo) / + sum(count_over_time({foo="bar"}[5m])) by (foo) + `, + exp: mustNewBinOpExpr( + OpTypeAdd, + BinOpOptions{}, + mustNewVectorAggregationExpr(newRangeAggregationExpr( + &logRange{ + 
left: &matchersExpr{ + matchers: []*labels.Matcher{ + mustNewMatcher(labels.MatchEqual, "foo", "bar"), + }, + }, + interval: 5 * time.Minute, + }, OpRangeTypeCount), + "sum", + &grouping{ + without: false, + groups: []string{"foo"}, + }, + nil, + ), + mustNewBinOpExpr( + OpTypeDiv, + BinOpOptions{}, + mustNewVectorAggregationExpr(newRangeAggregationExpr( + &logRange{ + left: &matchersExpr{ + matchers: []*labels.Matcher{ + mustNewMatcher(labels.MatchEqual, "foo", "bar"), + }, + }, + interval: 5 * time.Minute, + }, OpRangeTypeCount), + "sum", + &grouping{ + without: false, + groups: []string{"foo"}, + }, + nil, + ), + mustNewVectorAggregationExpr(newRangeAggregationExpr( + &logRange{ + left: &matchersExpr{ + matchers: []*labels.Matcher{ + mustNewMatcher(labels.MatchEqual, "foo", "bar"), + }, + }, + interval: 5 * time.Minute, + }, OpRangeTypeCount), + "sum", + &grouping{ + without: false, + groups: []string{"foo"}, + }, + nil, + ), + ), + ), + }, + { + in: `sum by (job) ( + count_over_time({namespace="tns"} |= "level=error"[5m]) + / + count_over_time({namespace="tns"}[5m]) + )`, + exp: mustNewVectorAggregationExpr( + mustNewBinOpExpr(OpTypeDiv, + BinOpOptions{}, + newRangeAggregationExpr( + &logRange{ + left: newPipelineExpr( + newMatcherExpr([]*labels.Matcher{ + mustNewMatcher(labels.MatchEqual, "namespace", "tns"), + }), + MultiPipelineExpr{ + newLineFilterExpr(nil, labels.MatchEqual, "level=error"), + }), + interval: 5 * time.Minute, + }, OpRangeTypeCount), + newRangeAggregationExpr( + &logRange{ + left: &matchersExpr{ + matchers: []*labels.Matcher{ + mustNewMatcher(labels.MatchEqual, "namespace", "tns"), + }, + }, + interval: 5 * time.Minute, + }, OpRangeTypeCount)), OpTypeSum, &grouping{groups: []string{"job"}}, nil), + }, + { + in: `sum by (job) ( + count_over_time({namespace="tns"} |= "level=error"[5m]) + / + count_over_time({namespace="tns"}[5m]) + ) * 100`, + exp: mustNewBinOpExpr(OpTypeMul, BinOpOptions{}, mustNewVectorAggregationExpr( + mustNewBinOpExpr(OpTypeDiv, + BinOpOptions{}, + newRangeAggregationExpr( + &logRange{ + left: newPipelineExpr( + newMatcherExpr([]*labels.Matcher{ + mustNewMatcher(labels.MatchEqual, "namespace", "tns"), + }), + MultiPipelineExpr{ + newLineFilterExpr(nil, labels.MatchEqual, "level=error"), + }), + interval: 5 * time.Minute, + }, OpRangeTypeCount), + newRangeAggregationExpr( + &logRange{ + left: &matchersExpr{ + matchers: []*labels.Matcher{ + mustNewMatcher(labels.MatchEqual, "namespace", "tns"), + }, + }, + interval: 5 * time.Minute, + }, OpRangeTypeCount)), OpTypeSum, &grouping{groups: []string{"job"}}, nil), + mustNewLiteralExpr("100", false), + ), + }, { // reduces binop with two literalExprs in: `sum(count_over_time({foo="bar"}[5m])) by (foo) + 1 / 2`, @@ -992,9 +925,55 @@ func TestParse(t *testing.T) { ), }, { - // test signs/ops with equal associativity - in: `{app="foo"} |= "bar" | json | latency >= 250`, - exp: nil, + in: `{app="foo"} |= "bar" | json | latency >= 250ms or ( status_code < 500 and status_code > 200)`, + exp: &pipelineExpr{ + left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}), + pipeline: MultiPipelineExpr{ + newLineFilterExpr(nil, labels.MatchEqual, "bar"), + newLabelParserExpr(OpParserTypeJSON, ""), + &labelFilterExpr{ + Filterer: labelfilter.NewOr( + labelfilter.NewDuration(labelfilter.FilterGreaterThanOrEqual, "latency", 250*time.Millisecond), + labelfilter.NewAnd( + labelfilter.NewNumeric(labelfilter.FilterLesserThan, "status_code", 500.0), + 
labelfilter.NewNumeric(labelfilter.FilterGreaterThan, "status_code", 200.0), + ), + ), + }, + }, + }, + }, + { + in: `{app="foo"} |= "bar" | json | latency >= 250ms or ( status_code < 500 and status_code > 200) + | foo="bar" buzz!="blip", blop=~"boop" or fuzz==5`, + exp: &pipelineExpr{ + left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}), + pipeline: MultiPipelineExpr{ + newLineFilterExpr(nil, labels.MatchEqual, "bar"), + newLabelParserExpr(OpParserTypeJSON, ""), + &labelFilterExpr{ + Filterer: labelfilter.NewOr( + labelfilter.NewDuration(labelfilter.FilterGreaterThanOrEqual, "latency", 250*time.Millisecond), + labelfilter.NewAnd( + labelfilter.NewNumeric(labelfilter.FilterLesserThan, "status_code", 500.0), + labelfilter.NewNumeric(labelfilter.FilterGreaterThan, "status_code", 200.0), + ), + ), + }, + &labelFilterExpr{ + Filterer: labelfilter.NewAnd( + labelfilter.NewString(mustNewMatcher(labels.MatchEqual, "foo", "bar")), + labelfilter.NewAnd( + labelfilter.NewString(mustNewMatcher(labels.MatchNotEqual, "buzz", "blip")), + labelfilter.NewOr( + labelfilter.NewString(mustNewMatcher(labels.MatchRegexp, "blop", "boop")), + labelfilter.NewNumeric(labelfilter.FilterEqual, "fuzz", 5), + ), + ), + ), + }, + }, + }, }, { // ensure binary ops with two literals are reduced recursively From 88ad10405ecf9a8d42c7453d2feaae7f52dd6429 Mon Sep 17 00:00:00 2001 From: Cyril Tovena Date: Wed, 23 Sep 2020 17:15:35 +0200 Subject: [PATCH 10/45] Add label and line format. Signed-off-by: Cyril Tovena --- pkg/logql/ast.go | 97 +++++++ pkg/logql/expr.y | 32 ++- pkg/logql/expr.y.go | 575 +++++++++++++++++++++------------------ pkg/logql/fmt.go | 41 +++ pkg/logql/lex.go | 4 + pkg/logql/parser_test.go | 56 ++++ 6 files changed, 536 insertions(+), 269 deletions(-) create mode 100644 pkg/logql/fmt.go diff --git a/pkg/logql/ast.go b/pkg/logql/ast.go index aa948b48f1e64..0e457da008915 100644 --- a/pkg/logql/ast.go +++ b/pkg/logql/ast.go @@ -1,10 +1,12 @@ package logql import ( + "bytes" "context" "fmt" "strconv" "strings" + "text/template" "time" "github.com/prometheus/common/model" @@ -339,6 +341,98 @@ func (e *labelFilterExpr) String() string { return fmt.Sprintf("|%s", e.Filterer.String()) } +type lineFmtExpr struct { + value string + implicit +} + +func newLineFmtExpr(value string) *lineFmtExpr { + + return &lineFmtExpr{ + value: value, + // t: t, + } +} + +func (e *lineFmtExpr) Pipeline() (Pipeline, error) { + t, err := template.New("line").Funcs(functionMap).Parse(e.value) + if err != nil { + return nil, err + } + return PipelineFunc(func(line []byte, lbs labels.Labels) ([]byte, labels.Labels, bool) { + buf := &bytes.Buffer{} + //todo (cyriltovena): handle error + _ = t.Execute(buf, lbs.Map()) + return buf.Bytes(), lbs, true + }), nil +} + +func (e *lineFmtExpr) String() string { + return fmt.Sprintf("| line_format %s", strconv.Quote(e.value)) +} + +type labelFmt struct { + name string + + value string + rename bool +} + +func newRenameLabelFmt(old, new string) labelFmt { + return labelFmt{ + name: old, + rename: true, + value: new, + } +} +func newTemplateLabelFmt(dst, template string) labelFmt { + return labelFmt{ + name: dst, + rename: false, + value: template, + } +} + +type labelFmtExpr struct { + formats []labelFmt + + implicit +} + +func newLabelFmtExpr(fmts []labelFmt) *labelFmtExpr { + return &labelFmtExpr{ + formats: fmts, + } +} + +func (e *labelFmtExpr) Pipeline() (Pipeline, error) { + // todo: implement the label_format pipeline.
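+ // Until that is implemented, this stage is a passthrough: the line and its labels are returned unchanged so the rest of the pipeline keeps working.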
+ return PipelineFunc(func(line []byte, lbs labels.Labels) ([]byte, labels.Labels, bool) { + // buf := &bytes.Buffer{} + // //todo (cyriltovena): handle error + // _ = e.t.Execute(buf, lbs.Map()) + return line, lbs, true + }), nil +} + +func (e *labelFmtExpr) String() string { + var sb strings.Builder + sb.WriteString("| label_format ") + for i, f := range e.formats { + sb.WriteString(f.name) + sb.WriteString("=") + if f.rename { + sb.WriteString(f.value) + } else { + sb.WriteString(strconv.Quote(f.value)) + } + if i+1 != len(e.formats) { + sb.WriteString(",") + } + } + return sb.String() +} + func mustNewMatcher(t labels.MatchType, n, v string) *labels.Matcher { m, err := labels.NewMatcher(t, n, v) if err != nil { @@ -427,6 +521,9 @@ const ( OpParserTypeJSON = "json" OpParserTypeLogfmt = "logfmt" OpParserTypeRegexp = "regexp" + + OpFmtLine = "line_format" + OpFmtLabel = "label_format" ) func IsComparisonOperator(op string) bool { diff --git a/pkg/logql/expr.y b/pkg/logql/expr.y index 296027d55b5fe..7ac01a249048f 100644 --- a/pkg/logql/expr.y +++ b/pkg/logql/expr.y @@ -36,6 +36,10 @@ import ( NumberFilter labelfilter.Filterer DurationFilter labelfilter.Filterer LabelFilter labelfilter.Filterer + LineFormatExpr *lineFmtExpr + LabelFormatExpr *labelFmtExpr + LabelFormat labelFmt + LabelsFormat []labelFmt } %start root @@ -64,6 +68,10 @@ import ( %type durationFilter %type labelFilter %type lineFilters +%type lineFormatExpr +%type labelFormatExpr +%type labelFormat +%type labelsFormat %token IDENTIFIER STRING NUMBER @@ -159,8 +167,8 @@ pipelineStage: lineFilters { $$ = $1 } | PIPE labelParser { $$ = $2 } | PIPE labelFilter { $$ = &labelFilterExpr{Filterer: $2 }} -// | PIPE lineFormat -// | PIPE labelFormat + | PIPE lineFormatExpr { $$ = $2 } + | PIPE labelFormatExpr { $$ = $2 } lineFilters: filter STRING { $$ = newLineFilterExpr(nil, $1, $2 ) } @@ -172,15 +180,19 @@ labelParser: | REGEXP STRING { $$ = newLabelParserExpr(OpParserTypeRegexp, $2) } ; -// lineFormat: -// LINE_FMT IDENTIFIER -// | LINE_FMT STRING -// ; +lineFormatExpr: LINE_FMT STRING { $$ = newLineFmtExpr($2) }; -// labelFormat: -// LABEL_FMT IDENTIFIER EQ IDENTIFIER -// | LABEL_FMT IDENTIFIER EQ STRING -// ; +labelFormat: + IDENTIFIER EQ IDENTIFIER { $$ = newRenameLabelFmt($1, $3)} + | IDENTIFIER EQ STRING { $$ = newTemplateLabelFmt($1, $3)} + ; + +labelsFormat: + labelFormat { $$ = []labelFmt{ $1 } } + | labelsFormat COMMA labelFormat { $$ = append($1, $3) } + ; + +labelFormatExpr: LABEL_FMT labelsFormat { $$ = newLabelFmtExpr($2) }; labelFilter: matcher { $$ = labelfilter.NewString($1) } diff --git a/pkg/logql/expr.y.go b/pkg/logql/expr.y.go index c152d929b96b5..4d2b949c8823f 100644 --- a/pkg/logql/expr.y.go +++ b/pkg/logql/expr.y.go @@ -43,6 +43,10 @@ type exprSymType struct { NumberFilter labelfilter.Filterer DurationFilter labelfilter.Filterer LabelFilter labelfilter.Filterer + LineFormatExpr *lineFmtExpr + LabelFormatExpr *labelFmtExpr + LabelFormat labelFmt + LabelsFormat []labelFmt } const IDENTIFIER = 57346 @@ -171,7 +175,7 @@ const exprEofCode = 1 const exprErrCode = 2 const exprInitialStackSize = 16 -//line pkg/logql/expr.y:276 +//line pkg/logql/expr.y:288 //line yacctab:1 var exprExca = [...]int{ @@ -218,69 +222,70 @@ var exprExca = [...]int{ const exprPrivate = 57344 -const exprLast = 297 +const exprLast = 300 var exprAct = [...]int{ - 64, 47, 4, 133, 46, 160, 3, 5, 108, 55, - 92, 57, 2, 56, 33, 34, 35, 36, 37, 38, - 38, 70, 60, 30, 31, 32, 39, 40, 43, 44, - 41, 42, 33, 34, 35, 36, 37, 38, 35, 36, - 37, 38, 189, 
104, 106, 107, 185, 63, 85, 65, - 66, 50, 157, 88, 39, 40, 43, 44, 41, 42, - 33, 34, 35, 36, 37, 38, 112, 154, 53, 109, - 110, 98, 53, 65, 66, 51, 52, 103, 116, 51, - 52, 117, 105, 118, 119, 120, 121, 122, 123, 124, - 125, 126, 127, 128, 129, 130, 131, 96, 49, 138, - 86, 115, 49, 186, 114, 54, 45, 151, 188, 54, - 62, 151, 153, 146, 158, 95, 150, 159, 155, 110, - 156, 14, 162, 53, 139, 106, 107, 99, 182, 11, - 51, 52, 152, 89, 91, 90, 45, 6, 163, 164, - 165, 17, 18, 21, 22, 24, 25, 23, 26, 27, - 28, 29, 19, 20, 68, 96, 96, 99, 183, 181, - 54, 184, 145, 140, 143, 144, 141, 142, 96, 135, - 15, 16, 186, 95, 95, 67, 11, 187, 148, 170, - 169, 149, 135, 85, 111, 148, 95, 166, 180, 179, - 191, 31, 32, 39, 40, 43, 44, 41, 42, 33, - 34, 35, 36, 37, 38, 113, 69, 101, 178, 177, - 136, 134, 45, 11, 147, 168, 167, 147, 153, 137, - 100, 6, 132, 102, 97, 17, 18, 21, 22, 24, - 25, 23, 26, 27, 28, 29, 19, 20, 71, 72, - 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, - 83, 84, 96, 96, 15, 16, 176, 175, 174, 173, - 172, 171, 59, 190, 61, 161, 135, 135, 61, 45, - 95, 95, 48, 93, 94, 87, 10, 9, 13, 8, - 12, 7, 58, 1, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 136, 134, 134, + 64, 47, 145, 4, 46, 137, 168, 3, 5, 112, + 55, 94, 57, 2, 56, 33, 34, 35, 36, 37, + 38, 38, 70, 60, 30, 31, 32, 39, 40, 43, + 44, 41, 42, 33, 34, 35, 36, 37, 38, 35, + 36, 37, 38, 53, 108, 110, 111, 196, 85, 202, + 51, 52, 198, 50, 63, 88, 65, 66, 100, 65, + 66, 53, 195, 100, 165, 102, 120, 116, 51, 52, + 113, 114, 139, 49, 196, 119, 97, 174, 159, 197, + 54, 97, 121, 109, 122, 123, 124, 125, 126, 127, + 128, 129, 130, 131, 132, 133, 134, 135, 54, 192, + 140, 138, 86, 142, 31, 32, 39, 40, 43, 44, + 41, 42, 33, 34, 35, 36, 37, 38, 154, 159, + 45, 45, 167, 163, 114, 164, 161, 170, 11, 118, + 62, 14, 176, 147, 110, 111, 115, 68, 175, 11, + 160, 103, 103, 107, 171, 172, 173, 6, 166, 67, + 100, 17, 18, 21, 22, 24, 25, 23, 26, 27, + 28, 29, 19, 20, 158, 100, 193, 191, 97, 105, + 194, 153, 148, 151, 152, 149, 150, 157, 199, 139, + 15, 16, 104, 97, 117, 106, 91, 93, 92, 156, + 98, 99, 11, 85, 156, 180, 179, 155, 178, 177, + 6, 190, 189, 204, 17, 18, 21, 22, 24, 25, + 23, 26, 27, 28, 29, 19, 20, 39, 40, 43, + 44, 41, 42, 33, 34, 35, 36, 37, 38, 69, + 45, 188, 187, 15, 16, 155, 161, 100, 200, 201, + 162, 143, 100, 186, 185, 53, 184, 183, 182, 181, + 141, 139, 51, 52, 136, 97, 139, 101, 203, 146, + 97, 71, 72, 73, 74, 75, 76, 77, 78, 79, + 80, 81, 82, 83, 84, 49, 59, 169, 61, 140, + 138, 61, 54, 45, 144, 138, 90, 89, 48, 95, + 96, 87, 10, 9, 13, 8, 12, 7, 58, 1, } var exprPact = [...]int{ - 115, -1000, -23, 267, -1000, 55, 115, -1000, -1000, -1000, - -1000, 260, 88, 25, -1000, 169, 148, -1000, -1000, -1000, + 125, -1000, -22, 281, -1000, 30, 125, -1000, -1000, -1000, + -1000, 274, 108, 32, -1000, 143, 131, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, - -18, -18, -18, -18, -18, -18, -18, -18, -18, -18, - -18, -18, -18, -18, -18, -1000, 55, -1000, 110, 93, - 219, -1000, -1000, -1000, -1000, 48, 134, -23, 205, 62, - -1000, 32, 162, 199, 82, 79, 56, -1000, -1000, 115, - -1000, 115, 115, 115, 115, 115, 115, 115, 115, 115, - 115, 115, 115, 115, 115, -1000, 217, -1000, 248, -1000, - -1000, 214, -1000, -1000, -1000, 152, 113, -1000, -1000, -1000, - -1000, -1000, 264, -1000, 212, 180, 176, 111, 109, 210, - 59, 162, 29, 96, 115, 261, 261, 144, 5, 5, - -19, -19, -40, -40, -40, -40, -41, -41, -41, -41, - -41, -41, -1000, 248, 152, 152, 152, -1000, 164, 209, - 173, 254, 252, 250, 202, 182, -1000, -1000, -1000, -1000, - -1000, -1000, 
-1000, -1000, 55, 104, 105, 49, 115, 23, - 154, -1000, 85, 151, 248, 249, -1000, -1000, -1000, -1000, + -17, -17, -17, -17, -17, -17, -17, -17, -17, -17, + -17, -17, -17, -17, -17, -1000, 30, -1000, 48, 146, + 252, -1000, -1000, -1000, -1000, 42, 119, -22, 167, 128, + -1000, 33, 114, 178, 107, 53, 44, -1000, -1000, 125, + -1000, 125, 125, 125, 125, 125, 125, 125, 125, 125, + 125, 125, 125, 125, 125, -1000, 249, -1000, 233, -1000, + -1000, -1000, -1000, 245, -1000, -1000, -1000, 59, 236, 255, + 122, -1000, -1000, -1000, -1000, -1000, 277, -1000, 230, 184, + 172, 159, 117, 228, 232, 114, 41, 130, 125, 273, + 273, 57, 168, 168, -18, -18, -39, -39, -39, -39, + -40, -40, -40, -40, -40, -40, -1000, 233, 59, 59, + 59, -1000, 54, -1000, 120, -1000, 121, 192, 189, 242, + 240, 237, 225, 195, -1000, -1000, -1000, -1000, -1000, -1000, + -1000, -1000, 30, 118, 76, 35, 125, 39, 56, -1000, + 29, 161, 233, 238, -1000, 255, 234, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, - -1000, 55, -1000, -1000, 19, -1000, 259, -1000, -1000, 49, - -1000, -1000, + -1000, 30, -1000, -1000, 26, -1000, 254, -1000, -1000, -1000, + -1000, -1000, 35, -1000, -1000, } var exprPgo = [...]int{ - 0, 283, 11, 51, 0, 5, 6, 2, 8, 10, - 282, 281, 280, 7, 279, 278, 277, 276, 206, 275, - 4, 1, 274, 273, 3, 272, + 0, 299, 12, 53, 0, 6, 7, 3, 9, 11, + 298, 297, 296, 8, 295, 294, 293, 292, 229, 291, + 4, 1, 290, 289, 5, 288, 287, 286, 2, 284, } var exprR1 = [...]int{ @@ -288,13 +293,14 @@ var exprR1 = [...]int{ 6, 6, 6, 8, 8, 8, 8, 8, 11, 14, 14, 14, 14, 14, 3, 3, 3, 3, 13, 13, 13, 10, 10, 9, 9, 9, 9, 20, 20, 21, - 21, 21, 25, 25, 19, 19, 19, 24, 24, 24, - 24, 24, 24, 24, 24, 23, 23, 23, 23, 23, - 23, 23, 22, 22, 22, 22, 22, 22, 22, 16, + 21, 21, 21, 21, 25, 25, 19, 19, 19, 26, + 28, 28, 29, 29, 27, 24, 24, 24, 24, 24, + 24, 24, 24, 23, 23, 23, 23, 23, 23, 23, + 22, 22, 22, 22, 22, 22, 22, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, - 16, 16, 16, 16, 18, 18, 17, 17, 17, 15, - 15, 15, 15, 15, 15, 15, 15, 15, 12, 12, - 12, 12, 5, 5, 4, 4, + 16, 16, 18, 18, 17, 17, 17, 15, 15, 15, + 15, 15, 15, 15, 15, 15, 12, 12, 12, 12, + 5, 5, 4, 4, } var exprR2 = [...]int{ @@ -302,13 +308,14 @@ var exprR2 = [...]int{ 2, 3, 2, 2, 3, 2, 3, 2, 4, 4, 5, 5, 6, 7, 1, 1, 1, 1, 3, 3, 3, 1, 3, 3, 3, 3, 3, 1, 2, 1, - 2, 2, 2, 3, 1, 1, 2, 1, 1, 1, - 3, 2, 3, 3, 3, 3, 3, 3, 3, 3, - 3, 3, 3, 3, 3, 3, 3, 3, 3, 4, + 2, 2, 2, 2, 2, 3, 1, 1, 2, 2, + 3, 3, 1, 3, 2, 1, 1, 1, 3, 2, + 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 3, 3, 3, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, - 4, 4, 4, 4, 0, 1, 1, 2, 2, 1, + 4, 4, 0, 1, 1, 2, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, - 1, 1, 1, 3, 4, 4, + 1, 3, 4, 4, } var exprChk = [...]int{ @@ -320,41 +327,43 @@ var exprChk = [...]int{ -3, 20, 21, 13, 50, -7, -6, -2, -10, 2, -9, 4, 22, 22, -4, 24, 25, 6, 6, -18, 39, -18, -18, -18, -18, -18, -18, -18, -18, -18, - -18, -18, -18, -18, -18, -21, -3, -19, -24, 40, - 42, 41, -9, -23, -22, 22, 4, 5, 23, 23, - 15, 2, 18, 15, 11, 50, 12, 13, -8, -6, - -13, 22, -7, 6, 22, 22, 22, -2, -2, -2, - -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, - -2, -2, 5, -24, 47, 18, 46, 5, -24, 11, - 50, 53, 54, 51, 52, 49, -9, 5, 5, 5, - 5, 2, 23, 8, 8, -6, -8, 23, 18, -7, - -5, 4, -5, -24, -24, -24, 23, 7, 6, 7, + -18, -18, -18, -18, -18, -21, -3, -19, -24, -26, + -27, 40, 42, 41, -9, -23, -22, 22, 44, 45, + 4, 5, 23, 23, 15, 2, 18, 15, 11, 50, + 12, 13, -8, -6, -13, 22, -7, 6, 22, 22, + 22, -2, -2, -2, -2, -2, -2, -2, -2, -2, + -2, -2, -2, -2, -2, -2, 5, -24, 47, 18, + 
46, 5, -24, 5, -29, -28, 4, 11, 50, 53, + 54, 51, 52, 49, -9, 5, 5, 5, 5, 2, + 23, 8, 8, -6, -8, 23, 18, -7, -5, 4, + -5, -24, -24, -24, 23, 18, 11, 7, 6, 7, 6, 7, 6, 7, 6, 7, 6, 7, 6, 7, - 6, -20, 23, -4, -7, 23, 18, 23, 23, 23, - 4, -4, + 6, -20, 23, -4, -7, 23, 18, 23, 23, -28, + 4, 5, 23, 4, -4, } var exprDef = [...]int{ 0, -2, 1, -2, 3, 9, 0, 4, 5, 6, - 7, 0, 0, 0, 86, 0, 0, 98, 99, 100, - 101, 89, 90, 91, 92, 93, 94, 95, 96, 97, - 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, - 84, 84, 84, 84, 84, 12, 10, 37, 39, 0, + 7, 0, 0, 0, 94, 0, 0, 106, 107, 108, + 109, 97, 98, 99, 100, 101, 102, 103, 104, 105, + 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, + 92, 92, 92, 92, 92, 12, 10, 37, 39, 0, 0, 24, 25, 26, 27, 3, -2, 0, 0, 0, - 31, 0, 0, 0, 0, 0, 0, 87, 88, 0, - 85, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 38, 0, 40, 41, 44, - 45, 0, 47, 48, 49, 0, 0, 42, 8, 11, - 28, 29, 0, 30, 0, 0, 0, 0, 0, 0, - 9, 0, 3, 86, 0, 0, 0, 69, 70, 71, - 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, - 82, 83, 43, 51, 0, 0, 0, 46, 0, 0, - 0, 0, 0, 0, 0, 0, 32, 33, 34, 35, - 36, 17, 18, 13, 15, 0, 0, 19, 0, 3, - 0, 102, 0, 52, 53, 54, 50, 60, 67, 59, - 66, 55, 62, 56, 63, 57, 64, 58, 65, 61, - 68, 14, 16, 21, 3, 20, 0, 104, 105, 22, - 103, 23, + 31, 0, 0, 0, 0, 0, 0, 95, 96, 0, + 93, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 38, 0, 40, 41, 42, + 43, 46, 47, 0, 55, 56, 57, 0, 0, 0, + 0, 44, 8, 11, 28, 29, 0, 30, 0, 0, + 0, 0, 0, 0, 9, 0, 3, 94, 0, 0, + 0, 77, 78, 79, 80, 81, 82, 83, 84, 85, + 86, 87, 88, 89, 90, 91, 45, 59, 0, 0, + 0, 48, 0, 49, 54, 52, 0, 0, 0, 0, + 0, 0, 0, 0, 32, 33, 34, 35, 36, 17, + 18, 13, 15, 0, 0, 19, 0, 3, 0, 110, + 0, 60, 61, 62, 58, 0, 0, 68, 75, 67, + 74, 63, 70, 64, 71, 65, 72, 66, 73, 69, + 76, 14, 16, 21, 3, 20, 0, 112, 113, 53, + 50, 51, 22, 111, 23, } var exprTok1 = [...]int{ @@ -712,618 +721,666 @@ exprdefault: case 1: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:85 +//line pkg/logql/expr.y:93 { exprlex.(*lexer).expr = exprDollar[1].Expr } case 2: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:88 +//line pkg/logql/expr.y:96 { exprVAL.Expr = exprDollar[1].LogExpr } case 3: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:89 +//line pkg/logql/expr.y:97 { exprVAL.Expr = exprDollar[1].MetricExpr } case 4: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:93 +//line pkg/logql/expr.y:101 { exprVAL.MetricExpr = exprDollar[1].RangeAggregationExpr } case 5: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:94 +//line pkg/logql/expr.y:102 { exprVAL.MetricExpr = exprDollar[1].VectorAggregationExpr } case 6: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:95 +//line pkg/logql/expr.y:103 { exprVAL.MetricExpr = exprDollar[1].BinOpExpr } case 7: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:96 +//line pkg/logql/expr.y:104 { exprVAL.MetricExpr = exprDollar[1].LiteralExpr } case 8: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:97 +//line pkg/logql/expr.y:105 { exprVAL.MetricExpr = exprDollar[2].MetricExpr } case 9: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:101 +//line pkg/logql/expr.y:109 { exprVAL.LogExpr = newMatcherExpr(exprDollar[1].Selector) } case 10: exprDollar = exprS[exprpt-2 : exprpt+1] -//line pkg/logql/expr.y:102 +//line pkg/logql/expr.y:110 { exprVAL.LogExpr = newPipelineExpr(newMatcherExpr(exprDollar[1].Selector), exprDollar[2].PipelineExpr) } case 11: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:103 +//line 
pkg/logql/expr.y:111 { exprVAL.LogExpr = exprDollar[2].LogExpr } case 13: exprDollar = exprS[exprpt-2 : exprpt+1] -//line pkg/logql/expr.y:109 +//line pkg/logql/expr.y:117 { exprVAL.LogRangeExpr = newLogRange(exprDollar[1].LogExpr, exprDollar[2].duration) } case 14: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:110 +//line pkg/logql/expr.y:118 { exprVAL.LogRangeExpr = newLogRange(newPipelineExpr(newMatcherExpr(exprDollar[1].Selector), exprDollar[3].PipelineExpr), exprDollar[2].duration) } case 15: exprDollar = exprS[exprpt-2 : exprpt+1] -//line pkg/logql/expr.y:111 +//line pkg/logql/expr.y:119 { exprVAL.LogRangeExpr = newLogRange(newMatcherExpr(exprDollar[1].Selector), exprDollar[2].duration) } case 16: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:112 +//line pkg/logql/expr.y:120 { exprVAL.LogRangeExpr = exprDollar[2].LogRangeExpr } case 18: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:116 +//line pkg/logql/expr.y:124 { exprVAL.RangeAggregationExpr = newRangeAggregationExpr(exprDollar[3].LogRangeExpr, exprDollar[1].RangeOp) } case 19: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:120 +//line pkg/logql/expr.y:128 { exprVAL.VectorAggregationExpr = mustNewVectorAggregationExpr(exprDollar[3].MetricExpr, exprDollar[1].VectorOp, nil, nil) } case 20: exprDollar = exprS[exprpt-5 : exprpt+1] -//line pkg/logql/expr.y:121 +//line pkg/logql/expr.y:129 { exprVAL.VectorAggregationExpr = mustNewVectorAggregationExpr(exprDollar[4].MetricExpr, exprDollar[1].VectorOp, exprDollar[2].Grouping, nil) } case 21: exprDollar = exprS[exprpt-5 : exprpt+1] -//line pkg/logql/expr.y:122 +//line pkg/logql/expr.y:130 { exprVAL.VectorAggregationExpr = mustNewVectorAggregationExpr(exprDollar[3].MetricExpr, exprDollar[1].VectorOp, exprDollar[5].Grouping, nil) } case 22: exprDollar = exprS[exprpt-6 : exprpt+1] -//line pkg/logql/expr.y:124 +//line pkg/logql/expr.y:132 { exprVAL.VectorAggregationExpr = mustNewVectorAggregationExpr(exprDollar[5].MetricExpr, exprDollar[1].VectorOp, nil, &exprDollar[3].str) } case 23: exprDollar = exprS[exprpt-7 : exprpt+1] -//line pkg/logql/expr.y:125 +//line pkg/logql/expr.y:133 { exprVAL.VectorAggregationExpr = mustNewVectorAggregationExpr(exprDollar[5].MetricExpr, exprDollar[1].VectorOp, exprDollar[7].Grouping, &exprDollar[3].str) } case 24: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:129 +//line pkg/logql/expr.y:137 { exprVAL.Filter = labels.MatchRegexp } case 25: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:130 +//line pkg/logql/expr.y:138 { exprVAL.Filter = labels.MatchEqual } case 26: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:131 +//line pkg/logql/expr.y:139 { exprVAL.Filter = labels.MatchNotRegexp } case 27: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:132 +//line pkg/logql/expr.y:140 { exprVAL.Filter = labels.MatchNotEqual } case 28: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:136 +//line pkg/logql/expr.y:144 { exprVAL.Selector = exprDollar[2].Matchers } case 29: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:137 +//line pkg/logql/expr.y:145 { exprVAL.Selector = exprDollar[2].Matchers } case 30: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:138 +//line pkg/logql/expr.y:146 { } case 31: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:142 +//line pkg/logql/expr.y:150 { exprVAL.Matchers = []*labels.Matcher{exprDollar[1].Matcher} } case 32: exprDollar = 
exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:143 +//line pkg/logql/expr.y:151 { exprVAL.Matchers = append(exprDollar[1].Matchers, exprDollar[3].Matcher) } case 33: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:147 +//line pkg/logql/expr.y:155 { exprVAL.Matcher = mustNewMatcher(labels.MatchEqual, exprDollar[1].str, exprDollar[3].str) } case 34: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:148 +//line pkg/logql/expr.y:156 { exprVAL.Matcher = mustNewMatcher(labels.MatchNotEqual, exprDollar[1].str, exprDollar[3].str) } case 35: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:149 +//line pkg/logql/expr.y:157 { exprVAL.Matcher = mustNewMatcher(labels.MatchRegexp, exprDollar[1].str, exprDollar[3].str) } case 36: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:150 +//line pkg/logql/expr.y:158 { exprVAL.Matcher = mustNewMatcher(labels.MatchNotRegexp, exprDollar[1].str, exprDollar[3].str) } case 37: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:154 +//line pkg/logql/expr.y:162 { exprVAL.PipelineExpr = MultiPipelineExpr{exprDollar[1].PipelineStage} } case 38: exprDollar = exprS[exprpt-2 : exprpt+1] -//line pkg/logql/expr.y:155 +//line pkg/logql/expr.y:163 { exprVAL.PipelineExpr = append(exprDollar[1].PipelineExpr, exprDollar[2].PipelineStage) } case 39: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:159 +//line pkg/logql/expr.y:167 { exprVAL.PipelineStage = exprDollar[1].LineFilters } case 40: exprDollar = exprS[exprpt-2 : exprpt+1] -//line pkg/logql/expr.y:160 +//line pkg/logql/expr.y:168 { exprVAL.PipelineStage = exprDollar[2].LabelParser } case 41: exprDollar = exprS[exprpt-2 : exprpt+1] -//line pkg/logql/expr.y:161 +//line pkg/logql/expr.y:169 { exprVAL.PipelineStage = &labelFilterExpr{Filterer: exprDollar[2].LabelFilter} } case 42: exprDollar = exprS[exprpt-2 : exprpt+1] -//line pkg/logql/expr.y:166 +//line pkg/logql/expr.y:170 { - exprVAL.LineFilters = newLineFilterExpr(nil, exprDollar[1].Filter, exprDollar[2].str) + exprVAL.PipelineStage = exprDollar[2].LineFormatExpr } case 43: + exprDollar = exprS[exprpt-2 : exprpt+1] +//line pkg/logql/expr.y:171 + { + exprVAL.PipelineStage = exprDollar[2].LabelFormatExpr + } + case 44: + exprDollar = exprS[exprpt-2 : exprpt+1] +//line pkg/logql/expr.y:174 + { + exprVAL.LineFilters = newLineFilterExpr(nil, exprDollar[1].Filter, exprDollar[2].str) + } + case 45: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:167 +//line pkg/logql/expr.y:175 { exprVAL.LineFilters = newLineFilterExpr(exprDollar[1].LineFilters, exprDollar[2].Filter, exprDollar[3].str) } - case 44: + case 46: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:170 +//line pkg/logql/expr.y:178 { exprVAL.LabelParser = newLabelParserExpr(OpParserTypeJSON, "") } - case 45: + case 47: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:171 +//line pkg/logql/expr.y:179 { exprVAL.LabelParser = newLabelParserExpr(OpParserTypeLogfmt, "") } - case 46: + case 48: exprDollar = exprS[exprpt-2 : exprpt+1] -//line pkg/logql/expr.y:172 +//line pkg/logql/expr.y:180 { exprVAL.LabelParser = newLabelParserExpr(OpParserTypeRegexp, exprDollar[2].str) } - case 47: - exprDollar = exprS[exprpt-1 : exprpt+1] + case 49: + exprDollar = exprS[exprpt-2 : exprpt+1] +//line pkg/logql/expr.y:183 + { + exprVAL.LineFormatExpr = newLineFmtExpr(exprDollar[2].str) + } + case 50: + exprDollar = exprS[exprpt-3 : exprpt+1] //line pkg/logql/expr.y:186 + { + exprVAL.LabelFormat = 
newRenameLabelFmt(exprDollar[1].str, exprDollar[3].str) + } + case 51: + exprDollar = exprS[exprpt-3 : exprpt+1] +//line pkg/logql/expr.y:187 + { + exprVAL.LabelFormat = newTemplateLabelFmt(exprDollar[1].str, exprDollar[3].str) + } + case 52: + exprDollar = exprS[exprpt-1 : exprpt+1] +//line pkg/logql/expr.y:191 + { + exprVAL.LabelsFormat = []labelFmt{exprDollar[1].LabelFormat} + } + case 53: + exprDollar = exprS[exprpt-3 : exprpt+1] +//line pkg/logql/expr.y:192 + { + exprVAL.LabelsFormat = append(exprDollar[1].LabelsFormat, exprDollar[3].LabelFormat) + } + case 54: + exprDollar = exprS[exprpt-2 : exprpt+1] +//line pkg/logql/expr.y:195 + { + exprVAL.LabelFormatExpr = newLabelFmtExpr(exprDollar[2].LabelsFormat) + } + case 55: + exprDollar = exprS[exprpt-1 : exprpt+1] +//line pkg/logql/expr.y:198 { exprVAL.LabelFilter = labelfilter.NewString(exprDollar[1].Matcher) } - case 48: + case 56: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:187 +//line pkg/logql/expr.y:199 { exprVAL.LabelFilter = exprDollar[1].DurationFilter } - case 49: + case 57: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:188 +//line pkg/logql/expr.y:200 { exprVAL.LabelFilter = exprDollar[1].NumberFilter } - case 50: + case 58: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:189 +//line pkg/logql/expr.y:201 { exprVAL.LabelFilter = exprDollar[2].LabelFilter } - case 51: + case 59: exprDollar = exprS[exprpt-2 : exprpt+1] -//line pkg/logql/expr.y:190 +//line pkg/logql/expr.y:202 { exprVAL.LabelFilter = labelfilter.NewAnd(exprDollar[1].LabelFilter, exprDollar[2].LabelFilter) } - case 52: + case 60: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:191 +//line pkg/logql/expr.y:203 { exprVAL.LabelFilter = labelfilter.NewAnd(exprDollar[1].LabelFilter, exprDollar[3].LabelFilter) } - case 53: + case 61: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:192 +//line pkg/logql/expr.y:204 { exprVAL.LabelFilter = labelfilter.NewAnd(exprDollar[1].LabelFilter, exprDollar[3].LabelFilter) } - case 54: + case 62: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:193 +//line pkg/logql/expr.y:205 { exprVAL.LabelFilter = labelfilter.NewOr(exprDollar[1].LabelFilter, exprDollar[3].LabelFilter) } - case 55: + case 63: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:197 +//line pkg/logql/expr.y:209 { exprVAL.DurationFilter = labelfilter.NewDuration(labelfilter.FilterGreaterThan, exprDollar[1].str, exprDollar[3].duration) } - case 56: + case 64: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:198 +//line pkg/logql/expr.y:210 { exprVAL.DurationFilter = labelfilter.NewDuration(labelfilter.FilterGreaterThanOrEqual, exprDollar[1].str, exprDollar[3].duration) } - case 57: + case 65: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:199 +//line pkg/logql/expr.y:211 { exprVAL.DurationFilter = labelfilter.NewDuration(labelfilter.FilterLesserThan, exprDollar[1].str, exprDollar[3].duration) } - case 58: + case 66: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:200 +//line pkg/logql/expr.y:212 { exprVAL.DurationFilter = labelfilter.NewDuration(labelfilter.FilterLesserThanOrEqual, exprDollar[1].str, exprDollar[3].duration) } - case 59: + case 67: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:201 +//line pkg/logql/expr.y:213 { exprVAL.DurationFilter = labelfilter.NewDuration(labelfilter.FilterNotEqual, exprDollar[1].str, exprDollar[3].duration) } - case 60: + case 68: exprDollar = 
exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:202 +//line pkg/logql/expr.y:214 { exprVAL.DurationFilter = labelfilter.NewDuration(labelfilter.FilterEqual, exprDollar[1].str, exprDollar[3].duration) } - case 61: + case 69: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:203 +//line pkg/logql/expr.y:215 { exprVAL.DurationFilter = labelfilter.NewDuration(labelfilter.FilterEqual, exprDollar[1].str, exprDollar[3].duration) } - case 62: + case 70: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:207 +//line pkg/logql/expr.y:219 { exprVAL.NumberFilter = labelfilter.NewNumeric(labelfilter.FilterGreaterThan, exprDollar[1].str, mustNewFloat(exprDollar[3].str)) } - case 63: + case 71: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:208 +//line pkg/logql/expr.y:220 { exprVAL.NumberFilter = labelfilter.NewNumeric(labelfilter.FilterGreaterThanOrEqual, exprDollar[1].str, mustNewFloat(exprDollar[3].str)) } - case 64: + case 72: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:209 +//line pkg/logql/expr.y:221 { exprVAL.NumberFilter = labelfilter.NewNumeric(labelfilter.FilterLesserThan, exprDollar[1].str, mustNewFloat(exprDollar[3].str)) } - case 65: + case 73: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:210 +//line pkg/logql/expr.y:222 { exprVAL.NumberFilter = labelfilter.NewNumeric(labelfilter.FilterLesserThanOrEqual, exprDollar[1].str, mustNewFloat(exprDollar[3].str)) } - case 66: + case 74: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:211 +//line pkg/logql/expr.y:223 { exprVAL.NumberFilter = labelfilter.NewNumeric(labelfilter.FilterNotEqual, exprDollar[1].str, mustNewFloat(exprDollar[3].str)) } - case 67: + case 75: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:212 +//line pkg/logql/expr.y:224 { exprVAL.NumberFilter = labelfilter.NewNumeric(labelfilter.FilterEqual, exprDollar[1].str, mustNewFloat(exprDollar[3].str)) } - case 68: + case 76: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:213 +//line pkg/logql/expr.y:225 { exprVAL.NumberFilter = labelfilter.NewNumeric(labelfilter.FilterEqual, exprDollar[1].str, mustNewFloat(exprDollar[3].str)) } - case 69: + case 77: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:219 +//line pkg/logql/expr.y:231 { exprVAL.BinOpExpr = mustNewBinOpExpr("or", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } - case 70: + case 78: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:220 +//line pkg/logql/expr.y:232 { exprVAL.BinOpExpr = mustNewBinOpExpr("and", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } - case 71: + case 79: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:221 +//line pkg/logql/expr.y:233 { exprVAL.BinOpExpr = mustNewBinOpExpr("unless", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } - case 72: + case 80: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:222 +//line pkg/logql/expr.y:234 { exprVAL.BinOpExpr = mustNewBinOpExpr("+", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } - case 73: + case 81: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:223 +//line pkg/logql/expr.y:235 { exprVAL.BinOpExpr = mustNewBinOpExpr("-", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } - case 74: + case 82: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:224 +//line pkg/logql/expr.y:236 { exprVAL.BinOpExpr = 
mustNewBinOpExpr("*", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } - case 75: + case 83: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:225 +//line pkg/logql/expr.y:237 { exprVAL.BinOpExpr = mustNewBinOpExpr("/", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } - case 76: + case 84: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:226 +//line pkg/logql/expr.y:238 { exprVAL.BinOpExpr = mustNewBinOpExpr("%", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } - case 77: + case 85: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:227 +//line pkg/logql/expr.y:239 { exprVAL.BinOpExpr = mustNewBinOpExpr("^", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } - case 78: + case 86: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:228 +//line pkg/logql/expr.y:240 { exprVAL.BinOpExpr = mustNewBinOpExpr("==", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } - case 79: + case 87: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:229 +//line pkg/logql/expr.y:241 { exprVAL.BinOpExpr = mustNewBinOpExpr("!=", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } - case 80: + case 88: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:230 +//line pkg/logql/expr.y:242 { exprVAL.BinOpExpr = mustNewBinOpExpr(">", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } - case 81: + case 89: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:231 +//line pkg/logql/expr.y:243 { exprVAL.BinOpExpr = mustNewBinOpExpr(">=", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } - case 82: + case 90: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:232 +//line pkg/logql/expr.y:244 { exprVAL.BinOpExpr = mustNewBinOpExpr("<", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } - case 83: + case 91: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:233 +//line pkg/logql/expr.y:245 { exprVAL.BinOpExpr = mustNewBinOpExpr("<=", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } - case 84: + case 92: exprDollar = exprS[exprpt-0 : exprpt+1] -//line pkg/logql/expr.y:237 +//line pkg/logql/expr.y:249 { exprVAL.BinOpModifier = BinOpOptions{} } - case 85: + case 93: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:238 +//line pkg/logql/expr.y:250 { exprVAL.BinOpModifier = BinOpOptions{ReturnBool: true} } - case 86: + case 94: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:242 +//line pkg/logql/expr.y:254 { exprVAL.LiteralExpr = mustNewLiteralExpr(exprDollar[1].str, false) } - case 87: + case 95: exprDollar = exprS[exprpt-2 : exprpt+1] -//line pkg/logql/expr.y:243 +//line pkg/logql/expr.y:255 { exprVAL.LiteralExpr = mustNewLiteralExpr(exprDollar[2].str, false) } - case 88: + case 96: exprDollar = exprS[exprpt-2 : exprpt+1] -//line pkg/logql/expr.y:244 +//line pkg/logql/expr.y:256 { exprVAL.LiteralExpr = mustNewLiteralExpr(exprDollar[2].str, true) } - case 89: + case 97: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:248 +//line pkg/logql/expr.y:260 { exprVAL.VectorOp = OpTypeSum } - case 90: + case 98: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:249 +//line pkg/logql/expr.y:261 { exprVAL.VectorOp = OpTypeAvg } - case 91: + case 99: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:250 +//line pkg/logql/expr.y:262 { 
exprVAL.VectorOp = OpTypeCount } - case 92: + case 100: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:251 +//line pkg/logql/expr.y:263 { exprVAL.VectorOp = OpTypeMax } - case 93: + case 101: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:252 +//line pkg/logql/expr.y:264 { exprVAL.VectorOp = OpTypeMin } - case 94: + case 102: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:253 +//line pkg/logql/expr.y:265 { exprVAL.VectorOp = OpTypeStddev } - case 95: + case 103: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:254 +//line pkg/logql/expr.y:266 { exprVAL.VectorOp = OpTypeStdvar } - case 96: + case 104: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:255 +//line pkg/logql/expr.y:267 { exprVAL.VectorOp = OpTypeBottomK } - case 97: + case 105: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:256 +//line pkg/logql/expr.y:268 { exprVAL.VectorOp = OpTypeTopK } - case 98: + case 106: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:260 +//line pkg/logql/expr.y:272 { exprVAL.RangeOp = OpRangeTypeCount } - case 99: + case 107: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:261 +//line pkg/logql/expr.y:273 { exprVAL.RangeOp = OpRangeTypeRate } - case 100: + case 108: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:262 +//line pkg/logql/expr.y:274 { exprVAL.RangeOp = OpRangeTypeBytes } - case 101: + case 109: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:263 +//line pkg/logql/expr.y:275 { exprVAL.RangeOp = OpRangeTypeBytesRate } - case 102: + case 110: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:268 +//line pkg/logql/expr.y:280 { exprVAL.Labels = []string{exprDollar[1].str} } - case 103: + case 111: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:269 +//line pkg/logql/expr.y:281 { exprVAL.Labels = append(exprDollar[1].Labels, exprDollar[3].str) } - case 104: + case 112: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:273 +//line pkg/logql/expr.y:285 { exprVAL.Grouping = &grouping{without: false, groups: exprDollar[3].Labels} } - case 105: + case 113: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:274 +//line pkg/logql/expr.y:286 { exprVAL.Grouping = &grouping{without: true, groups: exprDollar[3].Labels} } diff --git a/pkg/logql/fmt.go b/pkg/logql/fmt.go new file mode 100644 index 0000000000000..9f8f5a277160f --- /dev/null +++ b/pkg/logql/fmt.go @@ -0,0 +1,41 @@ +package logql + +import ( + "regexp" + "strings" + "text/template" +) + +var ( + functionMap = template.FuncMap{ + "ToLower": strings.ToLower, + "ToUpper": strings.ToUpper, + "Replace": strings.Replace, + "Trim": strings.Trim, + "TrimLeft": strings.TrimLeft, + "TrimRight": strings.TrimRight, + "TrimPrefix": strings.TrimPrefix, + "TrimSuffix": strings.TrimSuffix, + "TrimSpace": strings.TrimSpace, + "regexReplaceAll": func(regex string, s string, repl string) string { + r := regexp.MustCompile(regex) + return r.ReplaceAllString(s, repl) + }, + "regexReplaceAllLiteral": func(regex string, s string, repl string) string { + r := regexp.MustCompile(regex) + return r.ReplaceAllLiteralString(s, repl) + }, + } +) + +// type Formatter interface { +// Format([]byte, labels.Labels) ([]byte, labels.Labels) +// } + +// type line struct { +// tpl *template.Template +// } + +// func NewLineFmt() *line { + +// } diff --git a/pkg/logql/lex.go b/pkg/logql/lex.go index 52fd967c1c0cc..f0e5f1240cc0a 100644 --- 
a/pkg/logql/lex.go +++ b/pkg/logql/lex.go @@ -64,6 +64,10 @@ var tokens = map[string]int{ OpParserTypeJSON: JSON, OpParserTypeRegexp: REGEXP, OpParserTypeLogfmt: LOGFMT, + + // fmt + OpFmtLabel: LABEL_FMT, + OpFmtLine: LINE_FMT, } type lexer struct { diff --git a/pkg/logql/parser_test.go b/pkg/logql/parser_test.go index 8dfa9aa099d1f..46282fff5e0ca 100644 --- a/pkg/logql/parser_test.go +++ b/pkg/logql/parser_test.go @@ -975,6 +975,62 @@ func TestParse(t *testing.T) { }, }, }, + { + in: `{app="foo"} |= "bar" | line_format "blip{{ .foo }}blop"`, + exp: &pipelineExpr{ + left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}), + pipeline: MultiPipelineExpr{ + newLineFilterExpr(nil, labels.MatchEqual, "bar"), + newLineFmtExpr("blip{{ .foo }}blop"), + }, + }, + }, + { + in: `{app="foo"} |= "bar" | json | latency >= 250ms or ( status_code < 500 and status_code > 200) + | line_format "blip{{ .foo }}blop {{.status_code}}"`, + exp: &pipelineExpr{ + left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}), + pipeline: MultiPipelineExpr{ + newLineFilterExpr(nil, labels.MatchEqual, "bar"), + newLabelParserExpr(OpParserTypeJSON, ""), + &labelFilterExpr{ + Filterer: labelfilter.NewOr( + labelfilter.NewDuration(labelfilter.FilterGreaterThanOrEqual, "latency", 250*time.Millisecond), + labelfilter.NewAnd( + labelfilter.NewNumeric(labelfilter.FilterLesserThan, "status_code", 500.0), + labelfilter.NewNumeric(labelfilter.FilterGreaterThan, "status_code", 200.0), + ), + ), + }, + newLineFmtExpr("blip{{ .foo }}blop {{.status_code}}"), + }, + }, + }, + { + in: `{app="foo"} |= "bar" | json | latency >= 250ms or ( status_code < 500 and status_code > 200) + | line_format "blip{{ .foo }}blop {{.status_code}}" | label_format foo=bar,status_code="buzz{{.bar}}"`, + exp: &pipelineExpr{ + left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}), + pipeline: MultiPipelineExpr{ + newLineFilterExpr(nil, labels.MatchEqual, "bar"), + newLabelParserExpr(OpParserTypeJSON, ""), + &labelFilterExpr{ + Filterer: labelfilter.NewOr( + labelfilter.NewDuration(labelfilter.FilterGreaterThanOrEqual, "latency", 250*time.Millisecond), + labelfilter.NewAnd( + labelfilter.NewNumeric(labelfilter.FilterLesserThan, "status_code", 500.0), + labelfilter.NewNumeric(labelfilter.FilterGreaterThan, "status_code", 200.0), + ), + ), + }, + newLineFmtExpr("blip{{ .foo }}blop {{.status_code}}"), + newLabelFmtExpr([]labelFmt{ + newRenameLabelFmt("foo", "bar"), + newTemplateLabelFmt("status_code", "buzz{{.bar}}"), + }), + }, + }, + }, { // ensure binary ops with two literals are reduced recursively in: `1 + 1 + 1`, From c7791a43c1cb4f5821974be97d81c5a02b0d42e7 Mon Sep 17 00:00:00 2001 From: Cyril Tovena Date: Thu, 24 Sep 2020 18:35:01 +0200 Subject: [PATCH 11/45] Add tests for fmt label and line with validations. 
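
These tests pin down the observable behaviour of both formatters:
line_format rebuilds the whole log line from a Go text/template fed with
the current label set, while label_format either renames a label (foo=bar)
or templates a new value (foo="{{.bar}}"); a label name may appear only
once per stage. A rough sketch of the intended usage (the label names are
made up for illustration):

    {app="foo"} | logfmt | line_format "{{.level | ToUpper}}: {{.msg}}"
    {app="foo"} | logfmt | label_format status=code, env="{{.cluster | ToUpper}}"

The first query rewrites each line from its extracted labels; the second
renames code to status and derives env from a template.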
Signed-off-by: Cyril Tovena --- pkg/logql/ast.go | 62 +++++----------------- pkg/logql/fmt.go | 119 +++++++++++++++++++++++++++++++++++++++--- pkg/logql/fmt_test.go | 99 +++++++++++++++++++++++++++++++++++ 3 files changed, 224 insertions(+), 56 deletions(-) create mode 100644 pkg/logql/fmt_test.go diff --git a/pkg/logql/ast.go b/pkg/logql/ast.go index 0e457da008915..19644d711b2b4 100644 --- a/pkg/logql/ast.go +++ b/pkg/logql/ast.go @@ -1,12 +1,10 @@ package logql import ( - "bytes" "context" "fmt" "strconv" "strings" - "text/template" "time" "github.com/prometheus/common/model" @@ -338,7 +336,7 @@ func (e *labelFilterExpr) Pipeline() (Pipeline, error) { } func (e *labelFilterExpr) String() string { - return fmt.Sprintf("|%s", e.Filterer.String()) + return fmt.Sprintf("| %s", e.Filterer.String()) } type lineFmtExpr struct { @@ -347,50 +345,24 @@ type lineFmtExpr struct { } func newLineFmtExpr(value string) *lineFmtExpr { - return &lineFmtExpr{ value: value, - // t: t, } } func (e *lineFmtExpr) Pipeline() (Pipeline, error) { - t, err := template.New("line").Funcs(functionMap).Parse(e.value) + f, err := newLineFormatter(e.value) if err != nil { return nil, err } return PipelineFunc(func(line []byte, lbs labels.Labels) ([]byte, labels.Labels, bool) { - buf := &bytes.Buffer{} - //todo (cyriltovena): handle error - _ = t.Execute(buf, lbs.Map()) - return buf.Bytes(), lbs, true + line, lbs = f.Format(line, lbs) + return line, lbs, true }), nil } func (e *lineFmtExpr) String() string { - return fmt.Sprintf("| line_format %s", strconv.Quote(e.value)) -} - -type labelFmt struct { - name string - - value string - rename bool -} - -func newRenameLabelFmt(old, new string) labelFmt { - return labelFmt{ - name: old, - rename: true, - value: new, - } -} -func newTemplateLabelFmt(dst, template string) labelFmt { - return labelFmt{ - name: dst, - rename: true, - value: template, - } + return fmt.Sprintf("| %s %s", OpFmtLine, strconv.Quote(e.value)) } type labelFmtExpr struct { @@ -400,24 +372,27 @@ type labelFmtExpr struct { } func newLabelFmtExpr(fmts []labelFmt) *labelFmtExpr { + if err := validate(fmts); err != nil { + panic(newParseError(err.Error(), 0, 0)) + } return &labelFmtExpr{ formats: fmts, } } func (e *labelFmtExpr) Pipeline() (Pipeline, error) { - //todo pipeline for labels. 
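+	// Compile the label templates once up front; the returned stage
+	// reuses them for every log entry.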
+ f, err := newLabelsFormatter(e.formats) + if err != nil { + return nil, err + } return PipelineFunc(func(line []byte, lbs labels.Labels) ([]byte, labels.Labels, bool) { - // buf := &bytes.Buffer{} - // //todo (cyriltovena): handle error - // _ = e.t.Execute(buf, lbs.Map()) - return line, lbs, true + return line, f.Format(lbs), true }), nil } func (e *labelFmtExpr) String() string { var sb strings.Builder - sb.WriteString("| label_format ") + sb.WriteString(fmt.Sprintf("| %s ", OpFmtLabel)) for i, f := range e.formats { sb.WriteString(f.name) sb.WriteString("=") @@ -469,15 +444,6 @@ func newLogRange(left LogSelectorExpr, interval time.Duration) *logRange { } } -// func addFilterToLogRangeExpr(left *logRange, ty labels.MatchType, match string) *logRange { -// left.left = &filterExpr{ -// left: left.left, -// ty: ty, -// match: match, -// } -// return left -// } - const ( // vector ops OpTypeSum = "sum" diff --git a/pkg/logql/fmt.go b/pkg/logql/fmt.go index 9f8f5a277160f..1e96aa180cc62 100644 --- a/pkg/logql/fmt.go +++ b/pkg/logql/fmt.go @@ -1,9 +1,13 @@ package logql import ( + "bytes" + "fmt" "regexp" "strings" "text/template" + + "github.com/prometheus/prometheus/pkg/labels" ) var ( @@ -28,14 +32,113 @@ var ( } ) -// type Formatter interface { -// Format([]byte, labels.Labels) ([]byte, labels.Labels) -// } +type lineFormatter struct { + *template.Template + buf *bytes.Buffer +} + +func newLineFormatter(tmpl string) (*lineFormatter, error) { + t, err := template.New(OpFmtLine).Option("missingkey=zero").Funcs(functionMap).Parse(tmpl) + if err != nil { + return nil, fmt.Errorf("invalid line template: %s", err) + } + return &lineFormatter{ + Template: t, + buf: bytes.NewBuffer(make([]byte, 4096)), + }, nil +} + +func (lf *lineFormatter) Format(_ []byte, lbs labels.Labels) ([]byte, labels.Labels) { + lf.buf.Reset() + // todo(cyriltovena) handle error + _ = lf.Template.Execute(lf.buf, lbs.Map()) + // todo we might want to reuse the input line. + res := make([]byte, len(lf.buf.Bytes())) + copy(res, lf.buf.Bytes()) + return res, lbs +} -// type line struct { -// tpl *template.Template -// } +type labelFmt struct { + name string + + value string + rename bool +} + +func newRenameLabelFmt(dst, target string) labelFmt { + return labelFmt{ + name: dst, + rename: true, + value: target, + } +} +func newTemplateLabelFmt(dst, template string) labelFmt { + return labelFmt{ + name: dst, + rename: false, + value: template, + } +} -// func NewLineFmt() *line { +type labelFormatter struct { + *template.Template + labelFmt +} -// } +type labelsFormatter struct { + formats []labelFormatter + builder *labels.Builder + buf *bytes.Buffer +} + +func newLabelsFormatter(fmts []labelFmt) (*labelsFormatter, error) { + if err := validate(fmts); err != nil { + return nil, err + } + formats := make([]labelFormatter, 0, len(fmts)) + for _, fm := range fmts { + toAdd := labelFormatter{labelFmt: fm} + if !fm.rename { + t, err := template.New(OpFmtLabel).Option("missingkey=zero").Funcs(functionMap).Parse(fm.value) + if err != nil { + return nil, fmt.Errorf("invalid template for label '%s': %s", fm.name, err) + } + toAdd.Template = t + } + formats = append(formats, toAdd) + } + return &labelsFormatter{ + formats: formats, + builder: labels.NewBuilder(nil), + buf: bytes.NewBuffer(make([]byte, 1024)), + }, nil +} + +func validate(fmts []labelFmt) error { + // it would be too confusing to rename and change the same label value. + // To avoid confusion we allow to have a label name only once per stage. 
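+	// For example `| label_format foo=bar,foo="{{.blip}}"` must be rejected.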
+ uniqueLabelName := map[string]struct{}{} + for _, f := range fmts { + if _, ok := uniqueLabelName[f.name]; ok { + return fmt.Errorf("multiple label name '%s' not allowed in a single format operation", f.name) + } + uniqueLabelName[f.name] = struct{}{} + } + return nil +} + +func (lf *labelsFormatter) Format(lbs labels.Labels) labels.Labels { + lf.builder.Reset(lbs) + for _, f := range lf.formats { + if f.rename { + lf.builder.Set(f.name, lbs.Get(f.value)) + lf.builder.Del(f.value) + continue + } + lf.buf.Reset() + //todo (cyriltovena): handle error + _ = f.Template.Execute(lf.buf, lbs.Map()) + lf.builder.Set(f.name, lf.buf.String()) + } + return lf.builder.Labels() +} diff --git a/pkg/logql/fmt_test.go b/pkg/logql/fmt_test.go new file mode 100644 index 0000000000000..63be2ee553dee --- /dev/null +++ b/pkg/logql/fmt_test.go @@ -0,0 +1,99 @@ +package logql + +import ( + "sort" + "testing" + + "github.com/prometheus/prometheus/pkg/labels" + "github.com/stretchr/testify/require" +) + +func Test_lineFormatter_Format(t *testing.T) { + tests := []struct { + name string + fmter *lineFormatter + lbs labels.Labels + + want []byte + wantLbs labels.Labels + }{ + { + "combining", + newMustLineFormatter("foo{{.foo}}buzz{{ .bar }}"), + labels.Labels{{Name: "foo", Value: "blip"}, {Name: "bar", Value: "blop"}}, + []byte("fooblipbuzzblop"), + labels.Labels{{Name: "foo", Value: "blip"}, {Name: "bar", Value: "blop"}}, + }, + { + "missing", + newMustLineFormatter("foo {{.foo}}buzz{{ .bar }}"), + labels.Labels{{Name: "bar", Value: "blop"}}, + []byte("foo buzzblop"), + labels.Labels{{Name: "bar", Value: "blop"}}, + }, + { + "function", + newMustLineFormatter("foo {{.foo | ToUpper }} buzz{{ .bar }}"), + labels.Labels{{Name: "foo", Value: "blip"}, {Name: "bar", Value: "blop"}}, + []byte("foo BLIP buzzblop"), + labels.Labels{{Name: "foo", Value: "blip"}, {Name: "bar", Value: "blop"}}, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + outLine, outLbs := tt.fmter.Format(nil, tt.lbs) + require.Equal(t, tt.want, outLine) + sort.Sort(tt.wantLbs) + sort.Sort(outLbs) + require.Equal(t, tt.wantLbs, outLbs) + }) + } +} + +func newMustLineFormatter(tmpl string) *lineFormatter { + l, err := newLineFormatter(tmpl) + if err != nil { + panic(err) + } + return l +} + +func Test_labelsFormatter_Format(t *testing.T) { + tests := []struct { + name string + fmter *labelsFormatter + in labels.Labels + want labels.Labels + }{ + { + "combined with template", + mustNewLabelsFormatter([]labelFmt{newTemplateLabelFmt("foo", "{{.foo}} and {{.bar}}")}), + labels.Labels{{Name: "foo", Value: "blip"}, {Name: "bar", Value: "blop"}}, + labels.Labels{{Name: "foo", Value: "blip and blop"}, {Name: "bar", Value: "blop"}}, + }, + { + "combined with template and rename", + mustNewLabelsFormatter([]labelFmt{ + newTemplateLabelFmt("blip", "{{.foo}} and {{.bar}}"), + newRenameLabelFmt("bar", "foo"), + }), + labels.Labels{{Name: "foo", Value: "blip"}, {Name: "bar", Value: "blop"}}, + labels.Labels{{Name: "blip", Value: "blip and blop"}, {Name: "bar", Value: "blip"}}, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + sort.Sort(tt.want) + out := tt.fmter.Format(tt.in) + require.Equal(t, tt.want, out) + }) + } +} + +func mustNewLabelsFormatter(fmts []labelFmt) *labelsFormatter { + lf, err := newLabelsFormatter(fmts) + if err != nil { + panic(err) + } + return lf +} From 0651e251ca730a093174325aee95c3291cf76dea Mon Sep 17 00:00:00 2001 From: Cyril Tovena Date: Fri, 25 Sep 2020 17:43:09 +0200 Subject: 
[PATCH 12/45] Polishing parsers and add some more test cases Signed-off-by: Cyril Tovena --- pkg/logql/fmt_test.go | 27 +++++++++++++++++++++++++++ pkg/logql/labels_parser.go | 17 ++++++++++++++++- pkg/logql/labels_parser_test.go | 15 +++++++++++++++ 3 files changed, 58 insertions(+), 1 deletion(-) diff --git a/pkg/logql/fmt_test.go b/pkg/logql/fmt_test.go index 63be2ee553dee..ec984b26addf6 100644 --- a/pkg/logql/fmt_test.go +++ b/pkg/logql/fmt_test.go @@ -80,6 +80,15 @@ func Test_labelsFormatter_Format(t *testing.T) { labels.Labels{{Name: "foo", Value: "blip"}, {Name: "bar", Value: "blop"}}, labels.Labels{{Name: "blip", Value: "blip and blop"}, {Name: "bar", Value: "blip"}}, }, + { + "fn", + mustNewLabelsFormatter([]labelFmt{ + newTemplateLabelFmt("blip", "{{.foo | ToUpper }} and {{.bar}}"), + newRenameLabelFmt("bar", "foo"), + }), + labels.Labels{{Name: "foo", Value: "blip"}, {Name: "bar", Value: "blop"}}, + labels.Labels{{Name: "blip", Value: "BLIP and blop"}, {Name: "bar", Value: "blip"}}, + }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { @@ -97,3 +106,21 @@ func mustNewLabelsFormatter(fmts []labelFmt) *labelsFormatter { } return lf } + +func Test_validate(t *testing.T) { + tests := []struct { + name string + fmts []labelFmt + wantErr bool + }{ + {"no dup", []labelFmt{newRenameLabelFmt("foo", "bar"), newRenameLabelFmt("bar", "foo")}, false}, + {"dup", []labelFmt{newRenameLabelFmt("foo", "bar"), newRenameLabelFmt("foo", "blip")}, true}, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + if err := validate(tt.fmts); (err != nil) != tt.wantErr { + t.Errorf("validate() error = %v, wantErr %v", err, tt.wantErr) + } + }) + } +} diff --git a/pkg/logql/labels_parser.go b/pkg/logql/labels_parser.go index ae79a742e2232..cd9a13201292f 100644 --- a/pkg/logql/labels_parser.go +++ b/pkg/logql/labels_parser.go @@ -1,6 +1,7 @@ package logql import ( + "bytes" "errors" "fmt" "regexp" @@ -8,6 +9,7 @@ import ( "github.com/grafana/loki/pkg/logql/logfmt" jsoniter "github.com/json-iterator/go" + "github.com/prometheus/common/model" "github.com/prometheus/prometheus/pkg/labels" ) @@ -24,6 +26,10 @@ const ( var ( errMissingCapture = errors.New("at least one named capture must be supplied") NoopLabelParser = noopParser{} + + underscore = []byte("_") + point = []byte(".") + dash = []byte("-") ) type LabelParser interface { @@ -103,9 +109,17 @@ func NewRegexpParser(re string) (*regexpParser, error) { return nil, errMissingCapture } nameIndex := map[int]string{} + uniqueNames := map[string]struct{}{} for i, n := range regex.SubexpNames() { if n != "" { + if !model.LabelName(n).IsValid() { + return nil, fmt.Errorf("invalid extracted label name '%s'", n) + } + if _, ok := uniqueNames[n]; ok { + return nil, fmt.Errorf("duplicate extracted label name '%s'", n) + } nameIndex[i] = n + uniqueNames[n] = struct{}{} } } if len(nameIndex) == 0 { @@ -153,7 +167,8 @@ func (l *logfmtParser) Parse(line []byte, lbs labels.Labels) labels.Labels { l.dec.Reset(line) for l.dec.ScanKeyval() { - addLabel(l.builder, lbs)(string(l.dec.Key()), string(l.dec.Value())) + k := string(bytes.ReplaceAll(bytes.ReplaceAll(l.dec.Key(), point, underscore), dash, underscore)) + addLabel(l.builder, lbs)(k, string(l.dec.Value())) } if l.dec.Err() != nil { l.builder.Set(errorLabel, errLogfmt) diff --git a/pkg/logql/labels_parser_test.go b/pkg/logql/labels_parser_test.go index 595cdebfc498f..4f952ab885b8d 100644 --- a/pkg/logql/labels_parser_test.go +++ b/pkg/logql/labels_parser_test.go @@ -91,6 +91,8 @@ func 
TestNewRegexpParser(t *testing.T) { {"sub but not named", "f(.*) (foo|bar|buzz)", true}, {"named and unamed", "blah (.*) (?P)", false}, {"named", "blah (.*) (?Pfoo)(?Pbarr)", false}, + {"invalid name", "blah (.*) (?Pfoo)(?Pbarr)", true}, + {"duplicate", "blah (.*) (?Pfoo)(?Pbarr)", true}, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { @@ -244,6 +246,19 @@ func Test_logfmtParser_Parse(t *testing.T) { labels.Label{Name: "foobar", Value: "10ms"}, }, }, + { + "invalid key names", + []byte(`foo="foo bar" foo.bar=10ms test-dash=foo`), + labels.Labels{ + labels.Label{Name: "foo", Value: "bar"}, + }, + labels.Labels{ + labels.Label{Name: "foo", Value: "bar"}, + labels.Label{Name: "foo_extracted", Value: "foo bar"}, + labels.Label{Name: "foo_bar", Value: "10ms"}, + labels.Label{Name: "test_dash", Value: "foo"}, + }, + }, } p := NewLogfmtParser() for _, tt := range tests { From 4c0570d58c7f327a57f232af8b8e4f59d5781413 Mon Sep 17 00:00:00 2001 From: Cyril Tovena Date: Tue, 29 Sep 2020 18:12:10 +0200 Subject: [PATCH 13/45] Finish the unwrap parser, still need to add more tests Signed-off-by: Cyril Tovena --- pkg/logql/ast.go | 49 +- pkg/logql/expr.y | 47 +- pkg/logql/expr.y.go | 837 +++++++++++++++++++--------------- pkg/logql/functions.go | 87 +++- pkg/logql/lex.go | 65 +-- pkg/logql/lex_test.go | 4 + pkg/logql/parser.go | 2 + pkg/logql/parser_test.go | 41 ++ pkg/logql/series_extractor.go | 45 +- 9 files changed, 758 insertions(+), 419 deletions(-) diff --git a/pkg/logql/ast.go b/pkg/logql/ast.go index 19644d711b2b4..53b200280deaa 100644 --- a/pkg/logql/ast.go +++ b/pkg/logql/ast.go @@ -424,23 +424,37 @@ func mustNewFloat(s string) float64 { return n } +type unwrapExpr struct { + identifier string +} + +func newUnwrapExpr(id string) *unwrapExpr { + return &unwrapExpr{identifier: id} +} + type logRange struct { left LogSelectorExpr interval time.Duration + + unwrap *unwrapExpr } // impls Stringer func (r logRange) String() string { var sb strings.Builder sb.WriteString(r.left.String()) + if r.unwrap != nil { + sb.WriteString(fmt.Sprintf("%s %s %s", OpPipe, OpUnwrap, r.unwrap.identifier)) + } sb.WriteString(fmt.Sprintf("[%v]", model.Duration(r.interval))) return sb.String() } -func newLogRange(left LogSelectorExpr, interval time.Duration) *logRange { +func newLogRange(left LogSelectorExpr, interval time.Duration, u *unwrapExpr) *logRange { return &logRange{ left: left, interval: interval, + unwrap: u, } } @@ -461,6 +475,13 @@ const ( OpRangeTypeRate = "rate" OpRangeTypeBytes = "bytes_over_time" OpRangeTypeBytesRate = "bytes_rate" + OpRangeTypeAvg = "avg_over_time" + OpRangeTypeSum = "sum_over_time" + OpRangeTypeMin = "min_over_time" + OpRangeTypeMax = "max_over_time" + OpRangeTypeStdvar = "stdvar_over_time" + OpRangeTypeStddev = "stddev_over_time" + OpRangeTypeQuantile = "quantile_over_time" // binops - logical/set OpTypeOr = "or" @@ -490,6 +511,9 @@ const ( OpFmtLine = "line_format" OpFmtLabel = "label_format" + + OpPipe = "|" + OpUnwrap = "unwrap" ) func IsComparisonOperator(op string) bool { @@ -528,16 +552,37 @@ type rangeAggregationExpr struct { } func newRangeAggregationExpr(left *logRange, operation string) SampleExpr { - return &rangeAggregationExpr{ + e := &rangeAggregationExpr{ left: left, operation: operation, } + if err := e.validate(); err != nil { + panic(newParseError(err.Error(), 0, 0)) + } + return e } func (e *rangeAggregationExpr) Selector() LogSelectorExpr { return e.left.left } +func (e rangeAggregationExpr) validate() error { + if e.left.unwrap != nil { + switch 
e.operation { + case OpRangeTypeAvg, OpRangeTypeSum, OpRangeTypeMax, OpRangeTypeMin, OpRangeTypeStddev, OpRangeTypeStdvar, OpRangeTypeQuantile: + return nil + default: + return fmt.Errorf("invalid aggregation %s with unwrap", e.operation) + } + } + switch e.operation { + case OpRangeTypeBytes, OpRangeTypeBytesRate, OpRangeTypeCount, OpRangeTypeRate: + return nil + default: + return fmt.Errorf("invalid aggregation %s without unwrap", e.operation) + } +} + // impls Stringer func (e *rangeAggregationExpr) String() string { return formatOperation(e.operation, nil, e.left.String()) diff --git a/pkg/logql/expr.y b/pkg/logql/expr.y index 7ac01a249048f..16da95fa1b289 100644 --- a/pkg/logql/expr.y +++ b/pkg/logql/expr.y @@ -40,6 +40,7 @@ import ( LabelFormatExpr *labelFmtExpr LabelFormat labelFmt LabelsFormat []labelFmt + UnwrapExpr *unwrapExpr } %start root @@ -72,13 +73,14 @@ import ( %type labelFormatExpr %type labelFormat %type labelsFormat - +%type unwrapExpr %token IDENTIFIER STRING NUMBER %token DURATION RANGE %token MATCHERS LABELS EQ RE NRE OPEN_BRACE CLOSE_BRACE OPEN_BRACKET CLOSE_BRACKET COMMA DOT PIPE_MATCH PIPE_EXACT OPEN_PARENTHESIS CLOSE_PARENTHESIS BY WITHOUT COUNT_OVER_TIME RATE SUM AVG MAX MIN COUNT STDDEV STDVAR BOTTOMK TOPK - BYTES_OVER_TIME BYTES_RATE BOOL JSON REGEXP LOGFMT PIPE LINE_FMT LABEL_FMT + BYTES_OVER_TIME BYTES_RATE BOOL JSON REGEXP LOGFMT PIPE LINE_FMT LABEL_FMT UNWRAP AVG_OVER_TIME SUM_OVER_TIME MIN_OVER_TIME + MAX_OVER_TIME STDVAR_OVER_TIME STDDEV_OVER_TIME // Operators are listed with increasing precedence. %left OR @@ -109,19 +111,30 @@ logExpr: selector { $$ = newMatcherExpr($1)} | selector pipelineExpr { $$ = newPipelineExpr(newMatcherExpr($1), $2)} | OPEN_PARENTHESIS logExpr CLOSE_PARENTHESIS { $$ = $2 } - | logExpr error ; - logRangeExpr: - logExpr RANGE { $$ = newLogRange($1, $2) } - | selector RANGE pipelineExpr { $$ = newLogRange(newPipelineExpr(newMatcherExpr($1), $3), $2 ) } - | selector RANGE { $$ = newLogRange(newMatcherExpr($1), $2 ) } - | OPEN_PARENTHESIS logRangeExpr CLOSE_PARENTHESIS { $$ = $2 } + selector RANGE { $$ = newLogRange(newMatcherExpr($1), $2, nil) } + | OPEN_PARENTHESIS selector CLOSE_PARENTHESIS RANGE { $$ = newLogRange(newMatcherExpr($2), $4, nil) } + | selector RANGE unwrapExpr { $$ = newLogRange(newMatcherExpr($1), $2 , $3) } + | OPEN_PARENTHESIS selector CLOSE_PARENTHESIS RANGE unwrapExpr { $$ = newLogRange(newMatcherExpr($2), $4 , $5) } + | selector unwrapExpr RANGE { $$ = newLogRange(newMatcherExpr($1), $3, $2 ) } + | OPEN_PARENTHESIS selector unwrapExpr CLOSE_PARENTHESIS RANGE { $$ = newLogRange(newMatcherExpr($2), $5, $3 ) } + | selector pipelineExpr RANGE { $$ = newLogRange(newPipelineExpr(newMatcherExpr($1), $2), $3, nil ) } + | OPEN_PARENTHESIS selector pipelineExpr CLOSE_PARENTHESIS RANGE { $$ = newLogRange(newPipelineExpr(newMatcherExpr($2), $3), $5, nil ) } + | selector pipelineExpr unwrapExpr RANGE { $$ = newLogRange(newPipelineExpr(newMatcherExpr($1), $2), $4, $3) } + | OPEN_PARENTHESIS selector pipelineExpr unwrapExpr CLOSE_PARENTHESIS RANGE { $$ = newLogRange(newPipelineExpr(newMatcherExpr($2), $3), $6, $4) } + | selector RANGE pipelineExpr { $$ = newLogRange(newPipelineExpr(newMatcherExpr($1), $3), $2, nil) } + | selector RANGE pipelineExpr unwrapExpr { $$ = newLogRange(newPipelineExpr(newMatcherExpr($1), $3), $2, $4 ) } + | OPEN_PARENTHESIS logRangeExpr CLOSE_PARENTHESIS { $$ = $2 } | logRangeExpr error ; -rangeAggregationExpr: rangeOp OPEN_PARENTHESIS logRangeExpr CLOSE_PARENTHESIS { $$ = 
newRangeAggregationExpr($3,$1) }; +unwrapExpr: + PIPE UNWRAP IDENTIFIER { $$ = newUnwrapExpr($3)}; + +rangeAggregationExpr: + rangeOp OPEN_PARENTHESIS logRangeExpr CLOSE_PARENTHESIS { $$ = newRangeAggregationExpr($3,$1) } ; vectorAggregationExpr: // Aggregations with 1 argument. @@ -169,6 +182,7 @@ pipelineStage: | PIPE labelFilter { $$ = &labelFilterExpr{Filterer: $2 }} | PIPE lineFormatExpr { $$ = $2 } | PIPE labelFormatExpr { $$ = $2 } + ; lineFilters: filter STRING { $$ = newLineFilterExpr(nil, $1, $2 ) } @@ -190,6 +204,7 @@ labelFormat: labelsFormat: labelFormat { $$ = []labelFmt{ $1 } } | labelsFormat COMMA labelFormat { $$ = append($1, $3) } + | labelsFormat COMMA error ; labelFormatExpr: LABEL_FMT labelsFormat { $$ = newLabelFmtExpr($2) }; @@ -269,10 +284,16 @@ vectorOp: ; rangeOp: - COUNT_OVER_TIME { $$ = OpRangeTypeCount } - | RATE { $$ = OpRangeTypeRate } - | BYTES_OVER_TIME { $$ = OpRangeTypeBytes } - | BYTES_RATE { $$ = OpRangeTypeBytesRate } + COUNT_OVER_TIME { $$ = OpRangeTypeCount } + | RATE { $$ = OpRangeTypeRate } + | BYTES_OVER_TIME { $$ = OpRangeTypeBytes } + | BYTES_RATE { $$ = OpRangeTypeBytesRate } + | AVG_OVER_TIME { $$ = OpRangeTypeAvg } + | SUM_OVER_TIME { $$ = OpRangeTypeSum } + | MIN_OVER_TIME { $$ = OpRangeTypeMin } + | MAX_OVER_TIME { $$ = OpRangeTypeMax } + | STDVAR_OVER_TIME { $$ = OpRangeTypeStdvar } + | STDDEV_OVER_TIME { $$ = OpRangeTypeStddev } ; diff --git a/pkg/logql/expr.y.go b/pkg/logql/expr.y.go index 4d2b949c8823f..05d97cc5e92a9 100644 --- a/pkg/logql/expr.y.go +++ b/pkg/logql/expr.y.go @@ -47,6 +47,7 @@ type exprSymType struct { LabelFormatExpr *labelFmtExpr LabelFormat labelFmt LabelsFormat []labelFmt + UnwrapExpr *unwrapExpr } const IDENTIFIER = 57346 @@ -91,21 +92,28 @@ const LOGFMT = 57384 const PIPE = 57385 const LINE_FMT = 57386 const LABEL_FMT = 57387 -const OR = 57388 -const AND = 57389 -const UNLESS = 57390 -const CMP_EQ = 57391 -const NEQ = 57392 -const LT = 57393 -const LTE = 57394 -const GT = 57395 -const GTE = 57396 -const ADD = 57397 -const SUB = 57398 -const MUL = 57399 -const DIV = 57400 -const MOD = 57401 -const POW = 57402 +const UNWRAP = 57388 +const AVG_OVER_TIME = 57389 +const SUM_OVER_TIME = 57390 +const MIN_OVER_TIME = 57391 +const MAX_OVER_TIME = 57392 +const STDVAR_OVER_TIME = 57393 +const STDDEV_OVER_TIME = 57394 +const OR = 57395 +const AND = 57396 +const UNLESS = 57397 +const CMP_EQ = 57398 +const NEQ = 57399 +const LT = 57400 +const LTE = 57401 +const GT = 57402 +const GTE = 57403 +const ADD = 57404 +const SUB = 57405 +const MUL = 57406 +const DIV = 57407 +const MOD = 57408 +const POW = 57409 var exprToknames = [...]string{ "$end", @@ -153,6 +161,13 @@ var exprToknames = [...]string{ "PIPE", "LINE_FMT", "LABEL_FMT", + "UNWRAP", + "AVG_OVER_TIME", + "SUM_OVER_TIME", + "MIN_OVER_TIME", + "MAX_OVER_TIME", + "STDVAR_OVER_TIME", + "STDDEV_OVER_TIME", "OR", "AND", "UNLESS", @@ -175,195 +190,178 @@ const exprEofCode = 1 const exprErrCode = 2 const exprInitialStackSize = 16 -//line pkg/logql/expr.y:288 +//line pkg/logql/expr.y:309 //line yacctab:1 var exprExca = [...]int{ -1, 1, 1, -1, -2, 0, - -1, 3, - 1, 2, - 23, 2, - 46, 2, - 47, 2, - 48, 2, - 49, 2, - 50, 2, - 51, 2, - 52, 2, - 53, 2, - 54, 2, - 55, 2, - 56, 2, - 57, 2, - 58, 2, - 59, 2, - 60, 2, - -2, 0, - -1, 56, - 46, 2, - 47, 2, - 48, 2, - 49, 2, - 50, 2, - 51, 2, - 52, 2, - 53, 2, - 54, 2, - 55, 2, - 56, 2, - 57, 2, - 58, 2, - 59, 2, - 60, 2, - -2, 0, } const exprPrivate = 57344 -const exprLast = 300 +const exprLast = 363 var exprAct = [...]int{ - 64, 47, 145, 4, 46, 
137, 168, 3, 5, 112, - 55, 94, 57, 2, 56, 33, 34, 35, 36, 37, - 38, 38, 70, 60, 30, 31, 32, 39, 40, 43, - 44, 41, 42, 33, 34, 35, 36, 37, 38, 35, - 36, 37, 38, 53, 108, 110, 111, 196, 85, 202, - 51, 52, 198, 50, 63, 88, 65, 66, 100, 65, - 66, 53, 195, 100, 165, 102, 120, 116, 51, 52, - 113, 114, 139, 49, 196, 119, 97, 174, 159, 197, - 54, 97, 121, 109, 122, 123, 124, 125, 126, 127, - 128, 129, 130, 131, 132, 133, 134, 135, 54, 192, - 140, 138, 86, 142, 31, 32, 39, 40, 43, 44, - 41, 42, 33, 34, 35, 36, 37, 38, 154, 159, - 45, 45, 167, 163, 114, 164, 161, 170, 11, 118, - 62, 14, 176, 147, 110, 111, 115, 68, 175, 11, - 160, 103, 103, 107, 171, 172, 173, 6, 166, 67, - 100, 17, 18, 21, 22, 24, 25, 23, 26, 27, - 28, 29, 19, 20, 158, 100, 193, 191, 97, 105, - 194, 153, 148, 151, 152, 149, 150, 157, 199, 139, - 15, 16, 104, 97, 117, 106, 91, 93, 92, 156, - 98, 99, 11, 85, 156, 180, 179, 155, 178, 177, - 6, 190, 189, 204, 17, 18, 21, 22, 24, 25, - 23, 26, 27, 28, 29, 19, 20, 39, 40, 43, - 44, 41, 42, 33, 34, 35, 36, 37, 38, 69, - 45, 188, 187, 15, 16, 155, 161, 100, 200, 201, - 162, 143, 100, 186, 185, 53, 184, 183, 182, 181, - 141, 139, 51, 52, 136, 97, 139, 101, 203, 146, - 97, 71, 72, 73, 74, 75, 76, 77, 78, 79, - 80, 81, 82, 83, 84, 49, 59, 169, 61, 140, - 138, 61, 54, 45, 144, 138, 90, 89, 48, 95, - 96, 87, 10, 9, 13, 8, 12, 7, 58, 1, + 166, 69, 149, 52, 4, 141, 51, 174, 5, 117, + 99, 60, 62, 2, 39, 40, 41, 42, 43, 44, + 44, 14, 65, 41, 42, 43, 44, 202, 227, 11, + 75, 58, 55, 224, 151, 115, 116, 6, 56, 57, + 209, 17, 18, 27, 28, 30, 31, 29, 32, 33, + 34, 35, 19, 20, 105, 90, 105, 113, 115, 116, + 93, 168, 21, 22, 23, 24, 25, 26, 143, 124, + 143, 171, 102, 120, 102, 59, 118, 15, 16, 157, + 152, 155, 156, 153, 154, 108, 91, 125, 107, 126, + 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, + 137, 138, 139, 114, 142, 144, 142, 123, 146, 45, + 46, 49, 50, 47, 48, 39, 40, 41, 42, 43, + 44, 210, 158, 58, 122, 167, 212, 173, 169, 170, + 56, 57, 176, 36, 37, 38, 45, 46, 49, 50, + 47, 48, 39, 40, 41, 42, 43, 44, 177, 178, + 179, 37, 38, 45, 46, 49, 50, 47, 48, 39, + 40, 41, 42, 43, 44, 67, 197, 59, 201, 181, + 204, 90, 198, 207, 93, 200, 205, 208, 58, 68, + 58, 70, 71, 218, 213, 56, 57, 56, 57, 199, + 222, 210, 70, 71, 105, 163, 211, 11, 230, 217, + 105, 112, 90, 172, 221, 119, 223, 121, 54, 90, + 168, 182, 102, 232, 143, 11, 206, 229, 102, 228, + 163, 226, 59, 6, 59, 220, 231, 17, 18, 27, + 28, 30, 31, 29, 32, 33, 34, 35, 19, 20, + 74, 164, 110, 160, 186, 185, 162, 218, 21, 22, + 23, 24, 25, 26, 199, 109, 73, 165, 111, 215, + 216, 200, 58, 15, 16, 72, 58, 196, 195, 56, + 57, 161, 203, 56, 57, 159, 184, 183, 76, 77, + 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, + 88, 89, 168, 160, 3, 165, 168, 105, 225, 105, + 58, 61, 194, 193, 192, 191, 59, 56, 57, 159, + 59, 143, 214, 105, 150, 102, 180, 102, 190, 189, + 188, 187, 147, 145, 140, 106, 64, 219, 66, 175, + 168, 102, 66, 150, 148, 96, 98, 97, 95, 103, + 104, 202, 94, 53, 59, 100, 144, 142, 101, 96, + 98, 97, 92, 103, 104, 10, 9, 13, 8, 12, + 7, 63, 1, } var exprPact = [...]int{ - 125, -1000, -22, 281, -1000, 30, 125, -1000, -1000, -1000, - -1000, 274, 108, 32, -1000, 143, 131, -1000, -1000, -1000, + 15, -1000, 80, -1000, -1000, 165, 15, -1000, -1000, -1000, + -1000, 324, 143, 157, -1000, 259, 250, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, - -17, -17, -17, -17, -17, -17, -17, -17, -17, -17, - -17, -17, -17, -17, -17, -1000, 30, -1000, 48, 146, - 252, -1000, -1000, -1000, -1000, 42, 119, -22, 167, 128, - -1000, 
33, 114, 178, 107, 53, 44, -1000, -1000, 125, - -1000, 125, 125, 125, 125, 125, 125, 125, 125, 125, - 125, 125, 125, 125, 125, -1000, 249, -1000, 233, -1000, - -1000, -1000, -1000, 245, -1000, -1000, -1000, 59, 236, 255, - 122, -1000, -1000, -1000, -1000, -1000, 277, -1000, 230, 184, - 172, 159, 117, 228, 232, 114, 41, 130, 125, 273, - 273, 57, 168, 168, -18, -18, -39, -39, -39, -39, - -40, -40, -40, -40, -40, -40, -1000, 233, 59, 59, - 59, -1000, 54, -1000, 120, -1000, 121, 192, 189, 242, - 240, 237, 225, 195, -1000, -1000, -1000, -1000, -1000, -1000, - -1000, -1000, 30, 118, 76, 35, 125, 39, 56, -1000, - 29, 161, 233, 238, -1000, 255, 234, -1000, -1000, -1000, - -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, - -1000, 30, -1000, -1000, 26, -1000, 254, -1000, -1000, -1000, - -1000, -1000, 35, -1000, -1000, + -1000, -1000, -1000, -1000, -1000, -1000, -9, -9, -9, -9, + -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, + -9, 165, -1000, 110, 309, 320, -1000, -1000, -1000, -1000, + 65, 62, 80, 240, 186, -1000, 46, 183, 201, 102, + 85, 47, -1000, -1000, 15, -1000, 15, 15, 15, 15, + 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, + -1000, 319, -1000, 52, -1000, -1000, -1000, -1000, 318, -1000, + -1000, -1000, 190, 317, 329, 23, -1000, -1000, -1000, -1000, + -1000, 328, -1000, 304, 288, 266, 241, 218, 287, 183, + 48, 185, 15, 325, 325, 97, 53, 53, -41, -41, + -47, -47, -47, -47, -48, -48, -48, -48, -48, -48, + -1000, 52, 190, 190, 190, -1000, 293, -1000, 151, -1000, + 200, 270, 238, 314, 312, 298, 296, 261, -1000, -1000, + -1000, -1000, -1000, -1000, -1000, 18, 246, 253, 295, 249, + 193, 168, 15, 17, 173, -1000, 103, 196, 52, 50, + -1000, 310, 255, -1000, -1000, -1000, -1000, -1000, -1000, -1000, + -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, 18, -1000, + -1000, 239, 323, 217, 181, 167, -1000, -1000, 10, -1000, + 294, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, + -15, 211, 209, 175, 168, -1000, -1000, -19, -1000, -1000, + 205, -1000, -1000, } var exprPgo = [...]int{ - 0, 299, 12, 53, 0, 6, 7, 3, 9, 11, - 298, 297, 296, 8, 295, 294, 293, 292, 229, 291, - 4, 1, 290, 289, 5, 288, 287, 286, 2, 284, + 0, 362, 12, 32, 1, 7, 294, 4, 9, 10, + 361, 360, 359, 8, 358, 357, 356, 355, 240, 352, + 6, 3, 348, 345, 5, 343, 342, 338, 2, 334, + 0, } var exprR1 = [...]int{ 0, 1, 2, 2, 7, 7, 7, 7, 7, 6, - 6, 6, 6, 8, 8, 8, 8, 8, 11, 14, - 14, 14, 14, 14, 3, 3, 3, 3, 13, 13, - 13, 10, 10, 9, 9, 9, 9, 20, 20, 21, - 21, 21, 21, 21, 25, 25, 19, 19, 19, 26, - 28, 28, 29, 29, 27, 24, 24, 24, 24, 24, + 6, 6, 8, 8, 8, 8, 8, 8, 8, 8, + 8, 8, 8, 8, 8, 8, 30, 11, 14, 14, + 14, 14, 14, 3, 3, 3, 3, 13, 13, 13, + 10, 10, 9, 9, 9, 9, 20, 20, 21, 21, + 21, 21, 21, 25, 25, 19, 19, 19, 26, 28, + 28, 29, 29, 29, 27, 24, 24, 24, 24, 24, 24, 24, 24, 23, 23, 23, 23, 23, 23, 23, 22, 22, 22, 22, 22, 22, 22, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 18, 18, 17, 17, 17, 15, 15, 15, 15, 15, 15, 15, 15, 15, 12, 12, 12, 12, - 5, 5, 4, 4, + 12, 12, 12, 12, 12, 12, 5, 5, 4, 4, } var exprR2 = [...]int{ 0, 1, 1, 1, 1, 1, 1, 1, 3, 1, - 2, 3, 2, 2, 3, 2, 3, 2, 4, 4, - 5, 5, 6, 7, 1, 1, 1, 1, 3, 3, - 3, 1, 3, 3, 3, 3, 3, 1, 2, 1, - 2, 2, 2, 2, 2, 3, 1, 1, 2, 2, - 3, 3, 1, 3, 2, 1, 1, 1, 3, 2, + 2, 3, 2, 4, 3, 5, 3, 5, 3, 5, + 4, 6, 3, 4, 3, 2, 3, 4, 4, 5, + 5, 6, 7, 1, 1, 1, 1, 3, 3, 3, + 1, 3, 3, 3, 3, 3, 1, 2, 1, 2, + 2, 2, 2, 2, 3, 1, 1, 2, 2, 3, + 3, 1, 3, 3, 2, 1, 1, 1, 3, 2, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 0, 1, 1, 2, 2, 1, 1, 1, 
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, - 1, 3, 4, 4, + 1, 1, 1, 1, 1, 1, 1, 3, 4, 4, } var exprChk = [...]int{ -1000, -1, -2, -6, -7, -13, 22, -11, -14, -16, - -17, 14, -12, -15, 6, 55, 56, 26, 27, 37, - 38, 28, 29, 32, 30, 31, 33, 34, 35, 36, - 46, 47, 48, 55, 56, 57, 58, 59, 60, 49, - 50, 53, 54, 51, 52, 2, -20, -21, -25, 43, - -3, 20, 21, 13, 50, -7, -6, -2, -10, 2, - -9, 4, 22, 22, -4, 24, 25, 6, 6, -18, - 39, -18, -18, -18, -18, -18, -18, -18, -18, -18, - -18, -18, -18, -18, -18, -21, -3, -19, -24, -26, - -27, 40, 42, 41, -9, -23, -22, 22, 44, 45, - 4, 5, 23, 23, 15, 2, 18, 15, 11, 50, - 12, 13, -8, -6, -13, 22, -7, 6, 22, 22, - 22, -2, -2, -2, -2, -2, -2, -2, -2, -2, - -2, -2, -2, -2, -2, -2, 5, -24, 47, 18, - 46, 5, -24, 5, -29, -28, 4, 11, 50, 53, - 54, 51, 52, 49, -9, 5, 5, 5, 5, 2, - 23, 8, 8, -6, -8, 23, 18, -7, -5, 4, - -5, -24, -24, -24, 23, 18, 11, 7, 6, 7, - 6, 7, 6, 7, 6, 7, 6, 7, 6, 7, - 6, -20, 23, -4, -7, 23, 18, 23, 23, -28, - 4, 5, 23, 4, -4, + -17, 14, -12, -15, 6, 62, 63, 26, 27, 37, + 38, 47, 48, 49, 50, 51, 52, 28, 29, 32, + 30, 31, 33, 34, 35, 36, 53, 54, 55, 62, + 63, 64, 65, 66, 67, 56, 57, 60, 61, 58, + 59, -20, -21, -25, 43, -3, 20, 21, 13, 57, + -7, -6, -2, -10, 2, -9, 4, 22, 22, -4, + 24, 25, 6, 6, -18, 39, -18, -18, -18, -18, + -18, -18, -18, -18, -18, -18, -18, -18, -18, -18, + -21, -3, -19, -24, -26, -27, 40, 42, 41, -9, + -23, -22, 22, 44, 45, 4, 5, 23, 23, 15, + 2, 18, 15, 11, 57, 12, 13, -8, -13, 22, + -7, 6, 22, 22, 22, -2, -2, -2, -2, -2, + -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, + 5, -24, 54, 18, 53, 5, -24, 5, -29, -28, + 4, 11, 57, 60, 61, 58, 59, 56, -9, 5, + 5, 5, 5, 2, 23, 8, -30, -20, 43, -13, + -8, 23, 18, -7, -5, 4, -5, -24, -24, -24, + 23, 18, 11, 7, 6, 7, 6, 7, 6, 7, + 6, 7, 6, 7, 6, 7, 6, -30, -20, 8, + 8, -30, 46, 23, -30, -20, 23, -4, -7, 23, + 18, 23, 23, -28, 2, 4, 5, -30, 8, 4, + 8, 23, 23, -30, 23, 4, -30, 43, 8, 8, + 23, -4, 8, } var exprDef = [...]int{ - 0, -2, 1, -2, 3, 9, 0, 4, 5, 6, - 7, 0, 0, 0, 94, 0, 0, 106, 107, 108, - 109, 97, 98, 99, 100, 101, 102, 103, 104, 105, - 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, - 92, 92, 92, 92, 92, 12, 10, 37, 39, 0, - 0, 24, 25, 26, 27, 3, -2, 0, 0, 0, - 31, 0, 0, 0, 0, 0, 0, 95, 96, 0, - 93, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 38, 0, 40, 41, 42, - 43, 46, 47, 0, 55, 56, 57, 0, 0, 0, - 0, 44, 8, 11, 28, 29, 0, 30, 0, 0, - 0, 0, 0, 0, 9, 0, 3, 94, 0, 0, - 0, 77, 78, 79, 80, 81, 82, 83, 84, 85, - 86, 87, 88, 89, 90, 91, 45, 59, 0, 0, - 0, 48, 0, 49, 54, 52, 0, 0, 0, 0, - 0, 0, 0, 0, 32, 33, 34, 35, 36, 17, - 18, 13, 15, 0, 0, 19, 0, 3, 0, 110, - 0, 60, 61, 62, 58, 0, 0, 68, 75, 67, - 74, 63, 70, 64, 71, 65, 72, 66, 73, 69, - 76, 14, 16, 21, 3, 20, 0, 112, 113, 53, - 50, 51, 22, 111, 23, + 0, -2, 1, 2, 3, 9, 0, 4, 5, 6, + 7, 0, 0, 0, 104, 0, 0, 116, 117, 118, + 119, 120, 121, 122, 123, 124, 125, 107, 108, 109, + 110, 111, 112, 113, 114, 115, 102, 102, 102, 102, + 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, + 102, 10, 46, 48, 0, 0, 33, 34, 35, 36, + 3, 2, 0, 0, 0, 40, 0, 0, 0, 0, + 0, 0, 105, 106, 0, 103, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 47, 0, 49, 50, 51, 52, 55, 56, 0, 65, + 66, 67, 0, 0, 0, 0, 53, 8, 11, 37, + 38, 0, 39, 0, 0, 0, 0, 0, 0, 0, + 3, 104, 0, 0, 0, 87, 88, 89, 90, 91, + 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, + 54, 69, 0, 0, 0, 57, 0, 58, 64, 61, + 0, 0, 0, 0, 0, 0, 0, 0, 41, 42, + 43, 44, 45, 25, 27, 12, 0, 0, 0, 0, + 0, 28, 0, 3, 0, 126, 0, 70, 71, 72, + 68, 0, 0, 78, 85, 77, 84, 73, 80, 74, + 81, 75, 82, 76, 83, 79, 86, 14, 22, 16, + 18, 0, 0, 0, 0, 0, 24, 30, 
3, 29, + 0, 128, 129, 62, 63, 59, 60, 23, 20, 26, + 13, 0, 0, 0, 31, 127, 15, 0, 17, 19, + 0, 32, 21, } var exprTok1 = [...]int{ @@ -376,7 +374,8 @@ var exprTok2 = [...]int{ 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, - 52, 53, 54, 55, 56, 57, 58, 59, 60, + 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, + 62, 63, 64, 65, 66, 67, } var exprTok3 = [...]int{ 0, @@ -721,666 +720,762 @@ exprdefault: case 1: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:93 +//line pkg/logql/expr.y:95 { exprlex.(*lexer).expr = exprDollar[1].Expr } case 2: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:96 +//line pkg/logql/expr.y:98 { exprVAL.Expr = exprDollar[1].LogExpr } case 3: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:97 +//line pkg/logql/expr.y:99 { exprVAL.Expr = exprDollar[1].MetricExpr } case 4: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:101 +//line pkg/logql/expr.y:103 { exprVAL.MetricExpr = exprDollar[1].RangeAggregationExpr } case 5: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:102 +//line pkg/logql/expr.y:104 { exprVAL.MetricExpr = exprDollar[1].VectorAggregationExpr } case 6: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:103 +//line pkg/logql/expr.y:105 { exprVAL.MetricExpr = exprDollar[1].BinOpExpr } case 7: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:104 +//line pkg/logql/expr.y:106 { exprVAL.MetricExpr = exprDollar[1].LiteralExpr } case 8: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:105 +//line pkg/logql/expr.y:107 { exprVAL.MetricExpr = exprDollar[2].MetricExpr } case 9: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:109 +//line pkg/logql/expr.y:111 { exprVAL.LogExpr = newMatcherExpr(exprDollar[1].Selector) } case 10: exprDollar = exprS[exprpt-2 : exprpt+1] -//line pkg/logql/expr.y:110 +//line pkg/logql/expr.y:112 { exprVAL.LogExpr = newPipelineExpr(newMatcherExpr(exprDollar[1].Selector), exprDollar[2].PipelineExpr) } case 11: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:111 +//line pkg/logql/expr.y:113 { exprVAL.LogExpr = exprDollar[2].LogExpr } - case 13: + case 12: exprDollar = exprS[exprpt-2 : exprpt+1] //line pkg/logql/expr.y:117 { - exprVAL.LogRangeExpr = newLogRange(exprDollar[1].LogExpr, exprDollar[2].duration) + exprVAL.LogRangeExpr = newLogRange(newMatcherExpr(exprDollar[1].Selector), exprDollar[2].duration, nil) + } + case 13: + exprDollar = exprS[exprpt-4 : exprpt+1] +//line pkg/logql/expr.y:118 + { + exprVAL.LogRangeExpr = newLogRange(newMatcherExpr(exprDollar[2].Selector), exprDollar[4].duration, nil) } case 14: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:118 +//line pkg/logql/expr.y:119 { - exprVAL.LogRangeExpr = newLogRange(newPipelineExpr(newMatcherExpr(exprDollar[1].Selector), exprDollar[3].PipelineExpr), exprDollar[2].duration) + exprVAL.LogRangeExpr = newLogRange(newMatcherExpr(exprDollar[1].Selector), exprDollar[2].duration, exprDollar[3].UnwrapExpr) } case 15: - exprDollar = exprS[exprpt-2 : exprpt+1] -//line pkg/logql/expr.y:119 + exprDollar = exprS[exprpt-5 : exprpt+1] +//line pkg/logql/expr.y:120 { - exprVAL.LogRangeExpr = newLogRange(newMatcherExpr(exprDollar[1].Selector), exprDollar[2].duration) + exprVAL.LogRangeExpr = newLogRange(newMatcherExpr(exprDollar[2].Selector), exprDollar[4].duration, exprDollar[5].UnwrapExpr) } case 16: exprDollar = exprS[exprpt-3 : exprpt+1] -//line 
pkg/logql/expr.y:120 +//line pkg/logql/expr.y:121 { - exprVAL.LogRangeExpr = exprDollar[2].LogRangeExpr + exprVAL.LogRangeExpr = newLogRange(newMatcherExpr(exprDollar[1].Selector), exprDollar[3].duration, exprDollar[2].UnwrapExpr) + } + case 17: + exprDollar = exprS[exprpt-5 : exprpt+1] +//line pkg/logql/expr.y:122 + { + exprVAL.LogRangeExpr = newLogRange(newMatcherExpr(exprDollar[2].Selector), exprDollar[5].duration, exprDollar[3].UnwrapExpr) } case 18: - exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:124 + exprDollar = exprS[exprpt-3 : exprpt+1] +//line pkg/logql/expr.y:123 { - exprVAL.RangeAggregationExpr = newRangeAggregationExpr(exprDollar[3].LogRangeExpr, exprDollar[1].RangeOp) + exprVAL.LogRangeExpr = newLogRange(newPipelineExpr(newMatcherExpr(exprDollar[1].Selector), exprDollar[2].PipelineExpr), exprDollar[3].duration, nil) } case 19: + exprDollar = exprS[exprpt-5 : exprpt+1] +//line pkg/logql/expr.y:124 + { + exprVAL.LogRangeExpr = newLogRange(newPipelineExpr(newMatcherExpr(exprDollar[2].Selector), exprDollar[3].PipelineExpr), exprDollar[5].duration, nil) + } + case 20: + exprDollar = exprS[exprpt-4 : exprpt+1] +//line pkg/logql/expr.y:125 + { + exprVAL.LogRangeExpr = newLogRange(newPipelineExpr(newMatcherExpr(exprDollar[1].Selector), exprDollar[2].PipelineExpr), exprDollar[4].duration, exprDollar[3].UnwrapExpr) + } + case 21: + exprDollar = exprS[exprpt-6 : exprpt+1] +//line pkg/logql/expr.y:126 + { + exprVAL.LogRangeExpr = newLogRange(newPipelineExpr(newMatcherExpr(exprDollar[2].Selector), exprDollar[3].PipelineExpr), exprDollar[6].duration, exprDollar[4].UnwrapExpr) + } + case 22: + exprDollar = exprS[exprpt-3 : exprpt+1] +//line pkg/logql/expr.y:127 + { + exprVAL.LogRangeExpr = newLogRange(newPipelineExpr(newMatcherExpr(exprDollar[1].Selector), exprDollar[3].PipelineExpr), exprDollar[2].duration, nil) + } + case 23: exprDollar = exprS[exprpt-4 : exprpt+1] //line pkg/logql/expr.y:128 + { + exprVAL.LogRangeExpr = newLogRange(newPipelineExpr(newMatcherExpr(exprDollar[1].Selector), exprDollar[3].PipelineExpr), exprDollar[2].duration, exprDollar[4].UnwrapExpr) + } + case 24: + exprDollar = exprS[exprpt-3 : exprpt+1] +//line pkg/logql/expr.y:129 + { + exprVAL.LogRangeExpr = exprDollar[2].LogRangeExpr + } + case 26: + exprDollar = exprS[exprpt-3 : exprpt+1] +//line pkg/logql/expr.y:134 + { + exprVAL.UnwrapExpr = newUnwrapExpr(exprDollar[3].str) + } + case 27: + exprDollar = exprS[exprpt-4 : exprpt+1] +//line pkg/logql/expr.y:137 + { + exprVAL.RangeAggregationExpr = newRangeAggregationExpr(exprDollar[3].LogRangeExpr, exprDollar[1].RangeOp) + } + case 28: + exprDollar = exprS[exprpt-4 : exprpt+1] +//line pkg/logql/expr.y:141 { exprVAL.VectorAggregationExpr = mustNewVectorAggregationExpr(exprDollar[3].MetricExpr, exprDollar[1].VectorOp, nil, nil) } - case 20: + case 29: exprDollar = exprS[exprpt-5 : exprpt+1] -//line pkg/logql/expr.y:129 +//line pkg/logql/expr.y:142 { exprVAL.VectorAggregationExpr = mustNewVectorAggregationExpr(exprDollar[4].MetricExpr, exprDollar[1].VectorOp, exprDollar[2].Grouping, nil) } - case 21: + case 30: exprDollar = exprS[exprpt-5 : exprpt+1] -//line pkg/logql/expr.y:130 +//line pkg/logql/expr.y:143 { exprVAL.VectorAggregationExpr = mustNewVectorAggregationExpr(exprDollar[3].MetricExpr, exprDollar[1].VectorOp, exprDollar[5].Grouping, nil) } - case 22: + case 31: exprDollar = exprS[exprpt-6 : exprpt+1] -//line pkg/logql/expr.y:132 +//line pkg/logql/expr.y:145 { exprVAL.VectorAggregationExpr = 
mustNewVectorAggregationExpr(exprDollar[5].MetricExpr, exprDollar[1].VectorOp, nil, &exprDollar[3].str) } - case 23: + case 32: exprDollar = exprS[exprpt-7 : exprpt+1] -//line pkg/logql/expr.y:133 +//line pkg/logql/expr.y:146 { exprVAL.VectorAggregationExpr = mustNewVectorAggregationExpr(exprDollar[5].MetricExpr, exprDollar[1].VectorOp, exprDollar[7].Grouping, &exprDollar[3].str) } - case 24: + case 33: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:137 +//line pkg/logql/expr.y:150 { exprVAL.Filter = labels.MatchRegexp } - case 25: + case 34: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:138 +//line pkg/logql/expr.y:151 { exprVAL.Filter = labels.MatchEqual } - case 26: + case 35: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:139 +//line pkg/logql/expr.y:152 { exprVAL.Filter = labels.MatchNotRegexp } - case 27: + case 36: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:140 +//line pkg/logql/expr.y:153 { exprVAL.Filter = labels.MatchNotEqual } - case 28: + case 37: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:144 +//line pkg/logql/expr.y:157 { exprVAL.Selector = exprDollar[2].Matchers } - case 29: + case 38: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:145 +//line pkg/logql/expr.y:158 { exprVAL.Selector = exprDollar[2].Matchers } - case 30: + case 39: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:146 +//line pkg/logql/expr.y:159 { } - case 31: + case 40: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:150 +//line pkg/logql/expr.y:163 { exprVAL.Matchers = []*labels.Matcher{exprDollar[1].Matcher} } - case 32: + case 41: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:151 +//line pkg/logql/expr.y:164 { exprVAL.Matchers = append(exprDollar[1].Matchers, exprDollar[3].Matcher) } - case 33: + case 42: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:155 +//line pkg/logql/expr.y:168 { exprVAL.Matcher = mustNewMatcher(labels.MatchEqual, exprDollar[1].str, exprDollar[3].str) } - case 34: + case 43: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:156 +//line pkg/logql/expr.y:169 { exprVAL.Matcher = mustNewMatcher(labels.MatchNotEqual, exprDollar[1].str, exprDollar[3].str) } - case 35: + case 44: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:157 +//line pkg/logql/expr.y:170 { exprVAL.Matcher = mustNewMatcher(labels.MatchRegexp, exprDollar[1].str, exprDollar[3].str) } - case 36: + case 45: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:158 +//line pkg/logql/expr.y:171 { exprVAL.Matcher = mustNewMatcher(labels.MatchNotRegexp, exprDollar[1].str, exprDollar[3].str) } - case 37: + case 46: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:162 +//line pkg/logql/expr.y:175 { exprVAL.PipelineExpr = MultiPipelineExpr{exprDollar[1].PipelineStage} } - case 38: + case 47: exprDollar = exprS[exprpt-2 : exprpt+1] -//line pkg/logql/expr.y:163 +//line pkg/logql/expr.y:176 { exprVAL.PipelineExpr = append(exprDollar[1].PipelineExpr, exprDollar[2].PipelineStage) } - case 39: + case 48: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:167 +//line pkg/logql/expr.y:180 { exprVAL.PipelineStage = exprDollar[1].LineFilters } - case 40: + case 49: exprDollar = exprS[exprpt-2 : exprpt+1] -//line pkg/logql/expr.y:168 +//line pkg/logql/expr.y:181 { exprVAL.PipelineStage = exprDollar[2].LabelParser } - case 41: + case 50: exprDollar = exprS[exprpt-2 : exprpt+1] -//line 
pkg/logql/expr.y:169 +//line pkg/logql/expr.y:182 { exprVAL.PipelineStage = &labelFilterExpr{Filterer: exprDollar[2].LabelFilter} } - case 42: + case 51: exprDollar = exprS[exprpt-2 : exprpt+1] -//line pkg/logql/expr.y:170 +//line pkg/logql/expr.y:183 { exprVAL.PipelineStage = exprDollar[2].LineFormatExpr } - case 43: + case 52: exprDollar = exprS[exprpt-2 : exprpt+1] -//line pkg/logql/expr.y:171 +//line pkg/logql/expr.y:184 { exprVAL.PipelineStage = exprDollar[2].LabelFormatExpr } - case 44: + case 53: exprDollar = exprS[exprpt-2 : exprpt+1] -//line pkg/logql/expr.y:174 +//line pkg/logql/expr.y:188 { exprVAL.LineFilters = newLineFilterExpr(nil, exprDollar[1].Filter, exprDollar[2].str) } - case 45: + case 54: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:175 +//line pkg/logql/expr.y:189 { exprVAL.LineFilters = newLineFilterExpr(exprDollar[1].LineFilters, exprDollar[2].Filter, exprDollar[3].str) } - case 46: + case 55: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:178 +//line pkg/logql/expr.y:192 { exprVAL.LabelParser = newLabelParserExpr(OpParserTypeJSON, "") } - case 47: + case 56: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:179 +//line pkg/logql/expr.y:193 { exprVAL.LabelParser = newLabelParserExpr(OpParserTypeLogfmt, "") } - case 48: + case 57: exprDollar = exprS[exprpt-2 : exprpt+1] -//line pkg/logql/expr.y:180 +//line pkg/logql/expr.y:194 { exprVAL.LabelParser = newLabelParserExpr(OpParserTypeRegexp, exprDollar[2].str) } - case 49: + case 58: exprDollar = exprS[exprpt-2 : exprpt+1] -//line pkg/logql/expr.y:183 +//line pkg/logql/expr.y:197 { exprVAL.LineFormatExpr = newLineFmtExpr(exprDollar[2].str) } - case 50: + case 59: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:186 +//line pkg/logql/expr.y:200 { exprVAL.LabelFormat = newRenameLabelFmt(exprDollar[1].str, exprDollar[3].str) } - case 51: + case 60: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:187 +//line pkg/logql/expr.y:201 { exprVAL.LabelFormat = newTemplateLabelFmt(exprDollar[1].str, exprDollar[3].str) } - case 52: + case 61: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:191 +//line pkg/logql/expr.y:205 { exprVAL.LabelsFormat = []labelFmt{exprDollar[1].LabelFormat} } - case 53: + case 62: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:192 +//line pkg/logql/expr.y:206 { exprVAL.LabelsFormat = append(exprDollar[1].LabelsFormat, exprDollar[3].LabelFormat) } - case 54: + case 64: exprDollar = exprS[exprpt-2 : exprpt+1] -//line pkg/logql/expr.y:195 +//line pkg/logql/expr.y:210 { exprVAL.LabelFormatExpr = newLabelFmtExpr(exprDollar[2].LabelsFormat) } - case 55: + case 65: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:198 +//line pkg/logql/expr.y:213 { exprVAL.LabelFilter = labelfilter.NewString(exprDollar[1].Matcher) } - case 56: + case 66: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:199 +//line pkg/logql/expr.y:214 { exprVAL.LabelFilter = exprDollar[1].DurationFilter } - case 57: + case 67: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:200 +//line pkg/logql/expr.y:215 { exprVAL.LabelFilter = exprDollar[1].NumberFilter } - case 58: + case 68: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:201 +//line pkg/logql/expr.y:216 { exprVAL.LabelFilter = exprDollar[2].LabelFilter } - case 59: + case 69: exprDollar = exprS[exprpt-2 : exprpt+1] -//line pkg/logql/expr.y:202 +//line pkg/logql/expr.y:217 { exprVAL.LabelFilter = 
labelfilter.NewAnd(exprDollar[1].LabelFilter, exprDollar[2].LabelFilter) } - case 60: + case 70: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:203 +//line pkg/logql/expr.y:218 { exprVAL.LabelFilter = labelfilter.NewAnd(exprDollar[1].LabelFilter, exprDollar[3].LabelFilter) } - case 61: + case 71: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:204 +//line pkg/logql/expr.y:219 { exprVAL.LabelFilter = labelfilter.NewAnd(exprDollar[1].LabelFilter, exprDollar[3].LabelFilter) } - case 62: + case 72: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:205 +//line pkg/logql/expr.y:220 { exprVAL.LabelFilter = labelfilter.NewOr(exprDollar[1].LabelFilter, exprDollar[3].LabelFilter) } - case 63: + case 73: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:209 +//line pkg/logql/expr.y:224 { exprVAL.DurationFilter = labelfilter.NewDuration(labelfilter.FilterGreaterThan, exprDollar[1].str, exprDollar[3].duration) } - case 64: + case 74: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:210 +//line pkg/logql/expr.y:225 { exprVAL.DurationFilter = labelfilter.NewDuration(labelfilter.FilterGreaterThanOrEqual, exprDollar[1].str, exprDollar[3].duration) } - case 65: + case 75: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:211 +//line pkg/logql/expr.y:226 { exprVAL.DurationFilter = labelfilter.NewDuration(labelfilter.FilterLesserThan, exprDollar[1].str, exprDollar[3].duration) } - case 66: + case 76: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:212 +//line pkg/logql/expr.y:227 { exprVAL.DurationFilter = labelfilter.NewDuration(labelfilter.FilterLesserThanOrEqual, exprDollar[1].str, exprDollar[3].duration) } - case 67: + case 77: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:213 +//line pkg/logql/expr.y:228 { exprVAL.DurationFilter = labelfilter.NewDuration(labelfilter.FilterNotEqual, exprDollar[1].str, exprDollar[3].duration) } - case 68: + case 78: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:214 +//line pkg/logql/expr.y:229 { exprVAL.DurationFilter = labelfilter.NewDuration(labelfilter.FilterEqual, exprDollar[1].str, exprDollar[3].duration) } - case 69: + case 79: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:215 +//line pkg/logql/expr.y:230 { exprVAL.DurationFilter = labelfilter.NewDuration(labelfilter.FilterEqual, exprDollar[1].str, exprDollar[3].duration) } - case 70: + case 80: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:219 +//line pkg/logql/expr.y:234 { exprVAL.NumberFilter = labelfilter.NewNumeric(labelfilter.FilterGreaterThan, exprDollar[1].str, mustNewFloat(exprDollar[3].str)) } - case 71: + case 81: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:220 +//line pkg/logql/expr.y:235 { exprVAL.NumberFilter = labelfilter.NewNumeric(labelfilter.FilterGreaterThanOrEqual, exprDollar[1].str, mustNewFloat(exprDollar[3].str)) } - case 72: + case 82: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:221 +//line pkg/logql/expr.y:236 { exprVAL.NumberFilter = labelfilter.NewNumeric(labelfilter.FilterLesserThan, exprDollar[1].str, mustNewFloat(exprDollar[3].str)) } - case 73: + case 83: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:222 +//line pkg/logql/expr.y:237 { exprVAL.NumberFilter = labelfilter.NewNumeric(labelfilter.FilterLesserThanOrEqual, exprDollar[1].str, mustNewFloat(exprDollar[3].str)) } - case 74: + case 84: exprDollar = exprS[exprpt-3 : exprpt+1] -//line 
pkg/logql/expr.y:223 +//line pkg/logql/expr.y:238 { exprVAL.NumberFilter = labelfilter.NewNumeric(labelfilter.FilterNotEqual, exprDollar[1].str, mustNewFloat(exprDollar[3].str)) } - case 75: + case 85: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:224 +//line pkg/logql/expr.y:239 { exprVAL.NumberFilter = labelfilter.NewNumeric(labelfilter.FilterEqual, exprDollar[1].str, mustNewFloat(exprDollar[3].str)) } - case 76: + case 86: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:225 +//line pkg/logql/expr.y:240 { exprVAL.NumberFilter = labelfilter.NewNumeric(labelfilter.FilterEqual, exprDollar[1].str, mustNewFloat(exprDollar[3].str)) } - case 77: + case 87: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:231 +//line pkg/logql/expr.y:246 { exprVAL.BinOpExpr = mustNewBinOpExpr("or", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } - case 78: + case 88: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:232 +//line pkg/logql/expr.y:247 { exprVAL.BinOpExpr = mustNewBinOpExpr("and", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } - case 79: + case 89: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:233 +//line pkg/logql/expr.y:248 { exprVAL.BinOpExpr = mustNewBinOpExpr("unless", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } - case 80: + case 90: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:234 +//line pkg/logql/expr.y:249 { exprVAL.BinOpExpr = mustNewBinOpExpr("+", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } - case 81: + case 91: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:235 +//line pkg/logql/expr.y:250 { exprVAL.BinOpExpr = mustNewBinOpExpr("-", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } - case 82: + case 92: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:236 +//line pkg/logql/expr.y:251 { exprVAL.BinOpExpr = mustNewBinOpExpr("*", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } - case 83: + case 93: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:237 +//line pkg/logql/expr.y:252 { exprVAL.BinOpExpr = mustNewBinOpExpr("/", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } - case 84: + case 94: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:238 +//line pkg/logql/expr.y:253 { exprVAL.BinOpExpr = mustNewBinOpExpr("%", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } - case 85: + case 95: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:239 +//line pkg/logql/expr.y:254 { exprVAL.BinOpExpr = mustNewBinOpExpr("^", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } - case 86: + case 96: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:240 +//line pkg/logql/expr.y:255 { exprVAL.BinOpExpr = mustNewBinOpExpr("==", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } - case 87: + case 97: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:241 +//line pkg/logql/expr.y:256 { exprVAL.BinOpExpr = mustNewBinOpExpr("!=", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } - case 88: + case 98: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:242 +//line pkg/logql/expr.y:257 { exprVAL.BinOpExpr = mustNewBinOpExpr(">", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } - case 89: + case 99: exprDollar = exprS[exprpt-4 
: exprpt+1] -//line pkg/logql/expr.y:243 +//line pkg/logql/expr.y:258 { exprVAL.BinOpExpr = mustNewBinOpExpr(">=", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } - case 90: + case 100: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:244 +//line pkg/logql/expr.y:259 { exprVAL.BinOpExpr = mustNewBinOpExpr("<", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } - case 91: + case 101: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:245 +//line pkg/logql/expr.y:260 { exprVAL.BinOpExpr = mustNewBinOpExpr("<=", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } - case 92: + case 102: exprDollar = exprS[exprpt-0 : exprpt+1] -//line pkg/logql/expr.y:249 +//line pkg/logql/expr.y:264 { exprVAL.BinOpModifier = BinOpOptions{} } - case 93: + case 103: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:250 +//line pkg/logql/expr.y:265 { exprVAL.BinOpModifier = BinOpOptions{ReturnBool: true} } - case 94: + case 104: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:254 +//line pkg/logql/expr.y:269 { exprVAL.LiteralExpr = mustNewLiteralExpr(exprDollar[1].str, false) } - case 95: + case 105: exprDollar = exprS[exprpt-2 : exprpt+1] -//line pkg/logql/expr.y:255 +//line pkg/logql/expr.y:270 { exprVAL.LiteralExpr = mustNewLiteralExpr(exprDollar[2].str, false) } - case 96: + case 106: exprDollar = exprS[exprpt-2 : exprpt+1] -//line pkg/logql/expr.y:256 +//line pkg/logql/expr.y:271 { exprVAL.LiteralExpr = mustNewLiteralExpr(exprDollar[2].str, true) } - case 97: + case 107: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:260 +//line pkg/logql/expr.y:275 { exprVAL.VectorOp = OpTypeSum } - case 98: + case 108: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:261 +//line pkg/logql/expr.y:276 { exprVAL.VectorOp = OpTypeAvg } - case 99: + case 109: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:262 +//line pkg/logql/expr.y:277 { exprVAL.VectorOp = OpTypeCount } - case 100: + case 110: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:263 +//line pkg/logql/expr.y:278 { exprVAL.VectorOp = OpTypeMax } - case 101: + case 111: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:264 +//line pkg/logql/expr.y:279 { exprVAL.VectorOp = OpTypeMin } - case 102: + case 112: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:265 +//line pkg/logql/expr.y:280 { exprVAL.VectorOp = OpTypeStddev } - case 103: + case 113: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:266 +//line pkg/logql/expr.y:281 { exprVAL.VectorOp = OpTypeStdvar } - case 104: + case 114: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:267 +//line pkg/logql/expr.y:282 { exprVAL.VectorOp = OpTypeBottomK } - case 105: + case 115: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:268 +//line pkg/logql/expr.y:283 { exprVAL.VectorOp = OpTypeTopK } - case 106: + case 116: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:272 +//line pkg/logql/expr.y:287 { exprVAL.RangeOp = OpRangeTypeCount } - case 107: + case 117: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:273 +//line pkg/logql/expr.y:288 { exprVAL.RangeOp = OpRangeTypeRate } - case 108: + case 118: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:274 +//line pkg/logql/expr.y:289 { exprVAL.RangeOp = OpRangeTypeBytes } - case 109: + case 119: exprDollar = exprS[exprpt-1 : exprpt+1] -//line 
pkg/logql/expr.y:275 +//line pkg/logql/expr.y:290 { exprVAL.RangeOp = OpRangeTypeBytesRate } - case 110: + case 120: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:280 +//line pkg/logql/expr.y:291 + { + exprVAL.RangeOp = OpRangeTypeAvg + } + case 121: + exprDollar = exprS[exprpt-1 : exprpt+1] +//line pkg/logql/expr.y:292 + { + exprVAL.RangeOp = OpRangeTypeSum + } + case 122: + exprDollar = exprS[exprpt-1 : exprpt+1] +//line pkg/logql/expr.y:293 + { + exprVAL.RangeOp = OpRangeTypeMin + } + case 123: + exprDollar = exprS[exprpt-1 : exprpt+1] +//line pkg/logql/expr.y:294 + { + exprVAL.RangeOp = OpRangeTypeMax + } + case 124: + exprDollar = exprS[exprpt-1 : exprpt+1] +//line pkg/logql/expr.y:295 + { + exprVAL.RangeOp = OpRangeTypeStdvar + } + case 125: + exprDollar = exprS[exprpt-1 : exprpt+1] +//line pkg/logql/expr.y:296 + { + exprVAL.RangeOp = OpRangeTypeStddev + } + case 126: + exprDollar = exprS[exprpt-1 : exprpt+1] +//line pkg/logql/expr.y:301 { exprVAL.Labels = []string{exprDollar[1].str} } - case 111: + case 127: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:281 +//line pkg/logql/expr.y:302 { exprVAL.Labels = append(exprDollar[1].Labels, exprDollar[3].str) } - case 112: + case 128: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:285 +//line pkg/logql/expr.y:306 { exprVAL.Grouping = &grouping{without: false, groups: exprDollar[3].Labels} } - case 113: + case 129: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:286 +//line pkg/logql/expr.y:307 { exprVAL.Grouping = &grouping{without: true, groups: exprDollar[3].Labels} } diff --git a/pkg/logql/functions.go b/pkg/logql/functions.go index 852352c9eb138..7afdaa524dd6d 100644 --- a/pkg/logql/functions.go +++ b/pkg/logql/functions.go @@ -2,6 +2,7 @@ package logql import ( "fmt" + "math" "time" "github.com/prometheus/prometheus/promql" @@ -10,6 +11,12 @@ import ( const unsupportedErr = "unsupported range vector aggregation operation: %s" func (r rangeAggregationExpr) Extractor() (SampleExtractor, error) { + if err := r.validate(); err != nil { + return nil, err + } + if r.left.unwrap != nil { + return newLabelSampleExtractor(r.left.unwrap.identifier), nil + } switch r.operation { case OpRangeTypeRate, OpRangeTypeCount: return ExtractCount, nil @@ -28,8 +35,18 @@ func (r rangeAggregationExpr) aggregator() (RangeVectorAggregator, error) { return countOverTime, nil case OpRangeTypeBytesRate: return rateLogBytes(r.left.interval), nil - case OpRangeTypeBytes: + case OpRangeTypeBytes, OpRangeTypeSum: return sumOverTime, nil + case OpRangeTypeAvg: + return avgOverTime, nil + case OpRangeTypeMax: + return maxOverTime, nil + case OpRangeTypeMin: + return minOverTime, nil + case OpRangeTypeStddev: + return stddevOverTime, nil + case OpRangeTypeStdvar: + return stdvarOverTime, nil default: return nil, fmt.Errorf(unsupportedErr, r.operation) } @@ -61,3 +78,71 @@ func sumOverTime(samples []promql.Point) float64 { } return sum } + +func avgOverTime(samples []promql.Point) float64 { + var mean, count float64 + for _, v := range samples { + count++ + if math.IsInf(mean, 0) { + if math.IsInf(v.V, 0) && (mean > 0) == (v.V > 0) { + // The `mean` and `v.V` values are `Inf` of the same sign. They + // can't be subtracted, but the value of `mean` is correct + // already. + continue + } + if !math.IsInf(v.V, 0) && !math.IsNaN(v.V) { + // At this stage, the mean is an infinite. If the added + // value is neither an Inf or a Nan, we can keep that mean + // value. 
+ // This is required because our calculation below removes + // the mean value, which would look like Inf += x - Inf and + // end up as a NaN. + continue + } + } + mean += v.V/count - mean/count + } + return mean +} + +func maxOverTime(samples []promql.Point) float64 { + max := samples[0].V + for _, v := range samples { + if v.V > max || math.IsNaN(max) { + max = v.V + } + } + return max +} + +func minOverTime(samples []promql.Point) float64 { + min := samples[0].V + for _, v := range samples { + if v.V < min || math.IsNaN(min) { + min = v.V + } + } + return min +} + +func stdvarOverTime(samples []promql.Point) float64 { + var aux, count, mean float64 + for _, v := range samples { + count++ + delta := v.V - mean + mean += delta / count + aux += delta * (v.V - mean) + } + return aux / count +} + +func stddevOverTime(samples []promql.Point) float64 { + var aux, count, mean float64 + for _, v := range samples { + count++ + delta := v.V - mean + mean += delta / count + aux += delta * (v.V - mean) + } + return math.Sqrt(aux / count) +} diff --git a/pkg/logql/lex.go b/pkg/logql/lex.go index f0e5f1240cc0a..065893d57e4bd 100644 --- a/pkg/logql/lex.go +++ b/pkg/logql/lex.go @@ -11,37 +11,48 @@ import ( ) var tokens = map[string]int{ - ",": COMMA, - ".": DOT, - "{": OPEN_BRACE, - "}": CLOSE_BRACE, - "=": EQ, - OpTypeNEQ: NEQ, - "=~": RE, - "!~": NRE, - "|=": PIPE_EXACT, - "|~": PIPE_MATCH, - "|": PIPE, - "(": OPEN_PARENTHESIS, - ")": CLOSE_PARENTHESIS, - "by": BY, - "without": WITHOUT, - "bool": BOOL, - "[": OPEN_BRACKET, - "]": CLOSE_BRACKET, + ",": COMMA, + ".": DOT, + "{": OPEN_BRACE, + "}": CLOSE_BRACE, + "=": EQ, + OpTypeNEQ: NEQ, + "=~": RE, + "!~": NRE, + "|=": PIPE_EXACT, + "|~": PIPE_MATCH, + OpPipe: PIPE, + OpUnwrap: UNWRAP, + "(": OPEN_PARENTHESIS, + ")": CLOSE_PARENTHESIS, + "by": BY, + "without": WITHOUT, + "bool": BOOL, + "[": OPEN_BRACKET, + "]": CLOSE_BRACKET, + + // range vec ops OpRangeTypeRate: RATE, OpRangeTypeCount: COUNT_OVER_TIME, OpRangeTypeBytesRate: BYTES_RATE, OpRangeTypeBytes: BYTES_OVER_TIME, - OpTypeSum: SUM, - OpTypeAvg: AVG, - OpTypeMax: MAX, - OpTypeMin: MIN, - OpTypeCount: COUNT, - OpTypeStddev: STDDEV, - OpTypeStdvar: STDVAR, - OpTypeBottomK: BOTTOMK, - OpTypeTopK: TOPK, + OpRangeTypeAvg: AVG_OVER_TIME, + OpRangeTypeSum: SUM_OVER_TIME, + OpRangeTypeMin: MIN_OVER_TIME, + OpRangeTypeMax: MAX_OVER_TIME, + OpRangeTypeStdvar: STDVAR_OVER_TIME, + OpRangeTypeStddev: STDDEV_OVER_TIME, + + // vec ops + OpTypeSum: SUM, + OpTypeAvg: AVG, + OpTypeMax: MAX, + OpTypeMin: MIN, + OpTypeCount: COUNT, + OpTypeStddev: STDDEV, + OpTypeStdvar: STDVAR, + OpTypeBottomK: BOTTOMK, + OpTypeTopK: TOPK, // binops OpTypeOr: OR, diff --git a/pkg/logql/lex_test.go b/pkg/logql/lex_test.go index 174253ea44601..c347e3b51eb86 100644 --- a/pkg/logql/lex_test.go +++ b/pkg/logql/lex_test.go @@ -23,6 +23,9 @@ func TestLex(t *testing.T) { {`{foo="bar"} |~ "\\w+" | latency > 1h0.0m0s or foo == 4.00 and bar ="foo"`, []int{OPEN_BRACE, IDENTIFIER, EQ, STRING, CLOSE_BRACE, PIPE_MATCH, STRING, PIPE, IDENTIFIER, GT, DURATION, OR, IDENTIFIER, CMP_EQ, NUMBER, AND, IDENTIFIER, EQ, STRING}}, + {`{foo="bar"} |~ "\\w+" | latency > 1h0.0m0s or foo == 4.00 and bar ="foo" | unwrap foo`, + []int{OPEN_BRACE, IDENTIFIER, EQ, STRING, CLOSE_BRACE, PIPE_MATCH, STRING, + PIPE, IDENTIFIER, GT, DURATION, OR, IDENTIFIER, CMP_EQ, NUMBER, AND, IDENTIFIER, EQ, STRING, PIPE, UNWRAP, IDENTIFIER}}, {`{ foo = "bar" }`, []int{OPEN_BRACE, IDENTIFIER, EQ, STRING, CLOSE_BRACE}}, {`{ foo != "bar" }`, []int{OPEN_BRACE, IDENTIFIER, NEQ, 
STRING, CLOSE_BRACE}}, {`{ foo =~ "bar" }`, []int{OPEN_BRACE, IDENTIFIER, RE, STRING, CLOSE_BRACE}}, @@ -32,6 +35,7 @@ func TestLex(t *testing.T) { {`{ foo = "ba\"r" }`, []int{OPEN_BRACE, IDENTIFIER, EQ, STRING, CLOSE_BRACE}}, {`rate({foo="bar"}[10s])`, []int{RATE, OPEN_PARENTHESIS, OPEN_BRACE, IDENTIFIER, EQ, STRING, CLOSE_BRACE, RANGE, CLOSE_PARENTHESIS}}, {`count_over_time({foo="bar"}[5m])`, []int{COUNT_OVER_TIME, OPEN_PARENTHESIS, OPEN_BRACE, IDENTIFIER, EQ, STRING, CLOSE_BRACE, RANGE, CLOSE_PARENTHESIS}}, + {`count_over_time({foo="bar"} |~ "\\w+" | unwrap foo[5m])`, []int{COUNT_OVER_TIME, OPEN_PARENTHESIS, OPEN_BRACE, IDENTIFIER, EQ, STRING, CLOSE_BRACE, PIPE_MATCH, STRING, PIPE, UNWRAP, IDENTIFIER, RANGE, CLOSE_PARENTHESIS}}, {`sum(count_over_time({foo="bar"}[5m])) by (foo,bar)`, []int{SUM, OPEN_PARENTHESIS, COUNT_OVER_TIME, OPEN_PARENTHESIS, OPEN_BRACE, IDENTIFIER, EQ, STRING, CLOSE_BRACE, RANGE, CLOSE_PARENTHESIS, CLOSE_PARENTHESIS, BY, OPEN_PARENTHESIS, IDENTIFIER, COMMA, IDENTIFIER, CLOSE_PARENTHESIS}}, {`topk(3,count_over_time({foo="bar"}[5m])) by (foo,bar)`, []int{TOPK, OPEN_PARENTHESIS, NUMBER, COMMA, COUNT_OVER_TIME, OPEN_PARENTHESIS, OPEN_BRACE, IDENTIFIER, EQ, STRING, CLOSE_BRACE, RANGE, CLOSE_PARENTHESIS, CLOSE_PARENTHESIS, BY, OPEN_PARENTHESIS, IDENTIFIER, COMMA, IDENTIFIER, CLOSE_PARENTHESIS}}, {`bottomk(10,sum(count_over_time({foo="bar"}[5m])) by (foo,bar))`, []int{BOTTOMK, OPEN_PARENTHESIS, NUMBER, COMMA, SUM, OPEN_PARENTHESIS, COUNT_OVER_TIME, OPEN_PARENTHESIS, OPEN_BRACE, IDENTIFIER, EQ, STRING, CLOSE_BRACE, RANGE, CLOSE_PARENTHESIS, CLOSE_PARENTHESIS, BY, OPEN_PARENTHESIS, IDENTIFIER, COMMA, IDENTIFIER, CLOSE_PARENTHESIS, CLOSE_PARENTHESIS}}, diff --git a/pkg/logql/parser.go b/pkg/logql/parser.go index 164a6e3a09e49..cecc0f578ebf4 100644 --- a/pkg/logql/parser.go +++ b/pkg/logql/parser.go @@ -12,6 +12,8 @@ import ( func init() { // Improve the error messages coming out of yacc. exprErrorVerbose = true + // uncomment when you need to understand yacc rule tree. 
+	// exprDebug = 3
 	for str, tok := range tokens {
 		exprToknames[tok-exprPrivate+1] = str
 	}
diff --git a/pkg/logql/parser_test.go b/pkg/logql/parser_test.go
index 46282fff5e0ca..c821fa8c491e2 100644
--- a/pkg/logql/parser_test.go
+++ b/pkg/logql/parser_test.go
@@ -1031,6 +1031,47 @@ func TestParse(t *testing.T) {
 				},
 			},
 		},
+		{
+			in: `count_over_time({app="foo"} |= "bar" | json | latency >= 250ms or ( status_code < 500 and status_code > 200)
+			| line_format "blip{{ .foo }}blop {{.status_code}}" | label_format foo=bar,status_code="buzz{{.bar}}"[5m])`,
+			exp: newRangeAggregationExpr(
+				newLogRange(&pipelineExpr{
+					left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}),
+					pipeline: MultiPipelineExpr{
+						newLineFilterExpr(nil, labels.MatchEqual, "bar"),
+						newLabelParserExpr(OpParserTypeJSON, ""),
+						&labelFilterExpr{
+							Filterer: labelfilter.NewOr(
+								labelfilter.NewDuration(labelfilter.FilterGreaterThanOrEqual, "latency", 250*time.Millisecond),
+								labelfilter.NewAnd(
+									labelfilter.NewNumeric(labelfilter.FilterLesserThan, "status_code", 500.0),
+									labelfilter.NewNumeric(labelfilter.FilterGreaterThan, "status_code", 200.0),
+								),
+							),
+						},
+						newLineFmtExpr("blip{{ .foo }}blop {{.status_code}}"),
+						newLabelFmtExpr([]labelFmt{
+							newRenameLabelFmt("foo", "bar"),
+							newTemplateLabelFmt("status_code", "buzz{{.bar}}"),
+						}),
+					},
+				},
+					5*time.Minute,
+					nil),
+				OpRangeTypeCount,
+			),
+		},
+		{
+			in: `sum_over_time({app="foo"} |= "bar" | json | latency >= 250ms or ( status_code < 500 and status_code > 200)
+			| line_format "blip{{ .foo }}blop {{.status_code}}" | label_format foo=bar,status_code="buzz{{.bar}}"[5m])`,
+			exp: nil,
+			err: ParseError{msg: "invalid aggregation sum_over_time without unwrap"},
+		},
+		{
+			in:  `count_over_time({app="foo"} |= "foo" | json | unwrap foo [5m])`,
+			exp: nil,
+			err: ParseError{msg: "invalid aggregation count_over_time with unwrap"},
+		},
 		{
 			// ensure binary ops with two literals are reduced recursively
 			in: `1 + 1 + 1`,
diff --git a/pkg/logql/series_extractor.go b/pkg/logql/series_extractor.go
index 615982871445d..b04edc10e22f6 100644
--- a/pkg/logql/series_extractor.go
+++ b/pkg/logql/series_extractor.go
@@ -1,5 +1,11 @@
 package logql
 
+import (
+	"strconv"
+
+	"github.com/prometheus/prometheus/pkg/labels"
+)
+
 var (
 	ExtractBytes = bytesSampleExtractor{}
 	ExtractCount = countSampleExtractor{}
 
-// SampleExtractor transforms a log entry into a sample.
-// In case of failure the second return value will be false.
+// SampleExtractor transforms a log entry into a sample.
+// It returns the sample value together with the labels of the resulting
+// series; an extractor may amend those labels (unwrap, for instance,
+// consumes the sampled label).
 type SampleExtractor interface {
-	Extract(line []byte) (float64, bool)
+	Extract(line []byte, lbs labels.Labels) (float64, labels.Labels)
 }
 
 type countSampleExtractor struct{}
 
-func (countSampleExtractor) Extract(line []byte) (float64, bool) {
-	return 1., true
+func (countSampleExtractor) Extract(line []byte, lbs labels.Labels) (float64, labels.Labels) {
+	return 1., lbs
 }
 
 type bytesSampleExtractor struct{}
 
-func (bytesSampleExtractor) Extract(line []byte) (float64, bool) {
-	return float64(len(line)), true
+func (bytesSampleExtractor) Extract(line []byte, lbs labels.Labels) (float64, labels.Labels) {
+	return float64(len(line)), lbs
+}
+
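The extractor below is the evaluation-side half of unwrap: instead of deriving the sample from the line itself (count, bytes), it parses the unwrapped label's value as a float and removes that label from the output series. A hypothetical call, only to illustrate the contract:

	ex := newLabelSampleExtractor("latency")
	v, out := ex.Extract(nil, labels.FromStrings("app", "foo", "latency", "0.25"))
	// v == 0.25 and out == {app="foo"}: the unwrapped label is consumed.

+type labelSampleExtractor struct {
+	labelName string
+
+	builder *labels.Builder
+}
+
+func (l *labelSampleExtractor) Extract(_ []byte, lbs labels.Labels) (float64, labels.Labels) {
+	stringValue := lbs.Get(l.labelName)
+	if stringValue == "" {
+		// todo(cyriltovena) handle errors.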
+ return 0, lbs + } + f, err := strconv.ParseFloat(stringValue, 64) + if err != nil { + // todo(cyriltovena) handle errors. + return 0, lbs + } + l.builder.Reset(lbs) + l.builder.Del(l.labelName) + return f, l.builder.Labels() +} + +func newLabelSampleExtractor(labelName string) *labelSampleExtractor { + return &labelSampleExtractor{ + labelName: labelName, + builder: labels.NewBuilder(nil), + } } From 01e93c0cfacdf0584cb19da27d0f879b09e502e6 Mon Sep 17 00:00:00 2001 From: Cyril Tovena Date: Tue, 29 Sep 2020 18:13:42 +0200 Subject: [PATCH 14/45] Indent this hell. Signed-off-by: Cyril Tovena --- pkg/logql/expr.y | 26 +++++++++++++------------- 1 file changed, 13 insertions(+), 13 deletions(-) diff --git a/pkg/logql/expr.y b/pkg/logql/expr.y index 16da95fa1b289..2e78eb5b31003 100644 --- a/pkg/logql/expr.y +++ b/pkg/logql/expr.y @@ -114,19 +114,19 @@ logExpr: ; logRangeExpr: - selector RANGE { $$ = newLogRange(newMatcherExpr($1), $2, nil) } - | OPEN_PARENTHESIS selector CLOSE_PARENTHESIS RANGE { $$ = newLogRange(newMatcherExpr($2), $4, nil) } - | selector RANGE unwrapExpr { $$ = newLogRange(newMatcherExpr($1), $2 , $3) } - | OPEN_PARENTHESIS selector CLOSE_PARENTHESIS RANGE unwrapExpr { $$ = newLogRange(newMatcherExpr($2), $4 , $5) } - | selector unwrapExpr RANGE { $$ = newLogRange(newMatcherExpr($1), $3, $2 ) } - | OPEN_PARENTHESIS selector unwrapExpr CLOSE_PARENTHESIS RANGE { $$ = newLogRange(newMatcherExpr($2), $5, $3 ) } - | selector pipelineExpr RANGE { $$ = newLogRange(newPipelineExpr(newMatcherExpr($1), $2), $3, nil ) } - | OPEN_PARENTHESIS selector pipelineExpr CLOSE_PARENTHESIS RANGE { $$ = newLogRange(newPipelineExpr(newMatcherExpr($2), $3), $5, nil ) } - | selector pipelineExpr unwrapExpr RANGE { $$ = newLogRange(newPipelineExpr(newMatcherExpr($1), $2), $4, $3) } - | OPEN_PARENTHESIS selector pipelineExpr unwrapExpr CLOSE_PARENTHESIS RANGE { $$ = newLogRange(newPipelineExpr(newMatcherExpr($2), $3), $6, $4) } - | selector RANGE pipelineExpr { $$ = newLogRange(newPipelineExpr(newMatcherExpr($1), $3), $2, nil) } - | selector RANGE pipelineExpr unwrapExpr { $$ = newLogRange(newPipelineExpr(newMatcherExpr($1), $3), $2, $4 ) } - | OPEN_PARENTHESIS logRangeExpr CLOSE_PARENTHESIS { $$ = $2 } + selector RANGE { $$ = newLogRange(newMatcherExpr($1), $2, nil) } + | OPEN_PARENTHESIS selector CLOSE_PARENTHESIS RANGE { $$ = newLogRange(newMatcherExpr($2), $4, nil) } + | selector RANGE unwrapExpr { $$ = newLogRange(newMatcherExpr($1), $2 , $3) } + | OPEN_PARENTHESIS selector CLOSE_PARENTHESIS RANGE unwrapExpr { $$ = newLogRange(newMatcherExpr($2), $4 , $5) } + | selector unwrapExpr RANGE { $$ = newLogRange(newMatcherExpr($1), $3, $2 ) } + | OPEN_PARENTHESIS selector unwrapExpr CLOSE_PARENTHESIS RANGE { $$ = newLogRange(newMatcherExpr($2), $5, $3 ) } + | selector pipelineExpr RANGE { $$ = newLogRange(newPipelineExpr(newMatcherExpr($1), $2), $3, nil ) } + | OPEN_PARENTHESIS selector pipelineExpr CLOSE_PARENTHESIS RANGE { $$ = newLogRange(newPipelineExpr(newMatcherExpr($2), $3), $5, nil ) } + | selector pipelineExpr unwrapExpr RANGE { $$ = newLogRange(newPipelineExpr(newMatcherExpr($1), $2), $4, $3) } + | OPEN_PARENTHESIS selector pipelineExpr unwrapExpr CLOSE_PARENTHESIS RANGE { $$ = newLogRange(newPipelineExpr(newMatcherExpr($2), $3), $6, $4) } + | selector RANGE pipelineExpr { $$ = newLogRange(newPipelineExpr(newMatcherExpr($1), $3), $2, nil) } + | selector RANGE pipelineExpr unwrapExpr { $$ = newLogRange(newPipelineExpr(newMatcherExpr($1), $3), $2, $4 ) } + | OPEN_PARENTHESIS logRangeExpr 
CLOSE_PARENTHESIS { $$ = $2 } | logRangeExpr error ; From e455c8871bdee167a40adfa05f7f844b179b0de2 Mon Sep 17 00:00:00 2001 From: Cyril Tovena Date: Tue, 29 Sep 2020 18:22:22 +0200 Subject: [PATCH 15/45] Moar tests and it works. Signed-off-by: Cyril Tovena --- pkg/logql/parser_test.go | 84 ++++++++++++++++++++++++++++++++++++++++ 1 file changed, 84 insertions(+) diff --git a/pkg/logql/parser_test.go b/pkg/logql/parser_test.go index c821fa8c491e2..b119b38597514 100644 --- a/pkg/logql/parser_test.go +++ b/pkg/logql/parser_test.go @@ -1072,6 +1072,90 @@ func TestParse(t *testing.T) { exp: nil, err: ParseError{msg: "invalid aggregation count_over_time with unwrap"}, }, + { + in: `stdvar_over_time({app="foo"} |= "bar" | json | latency >= 250ms or ( status_code < 500 and status_code > 200) + | line_format "blip{{ .foo }}blop {{.status_code}}" | label_format foo=bar,status_code="buzz{{.bar}}" | unwrap foo [5m])`, + exp: newRangeAggregationExpr( + newLogRange(&pipelineExpr{ + left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}), + pipeline: MultiPipelineExpr{ + newLineFilterExpr(nil, labels.MatchEqual, "bar"), + newLabelParserExpr(OpParserTypeJSON, ""), + &labelFilterExpr{ + Filterer: labelfilter.NewOr( + labelfilter.NewDuration(labelfilter.FilterGreaterThanOrEqual, "latency", 250*time.Millisecond), + labelfilter.NewAnd( + labelfilter.NewNumeric(labelfilter.FilterLesserThan, "status_code", 500.0), + labelfilter.NewNumeric(labelfilter.FilterGreaterThan, "status_code", 200.0), + ), + ), + }, + newLineFmtExpr("blip{{ .foo }}blop {{.status_code}}"), + newLabelFmtExpr([]labelFmt{ + newRenameLabelFmt("foo", "bar"), + newTemplateLabelFmt("status_code", "buzz{{.bar}}"), + }), + }, + }, + 5*time.Minute, + newUnwrapExpr("foo")), + OpRangeTypeStdvar, + ), + }, + { + in: `stddev_over_time({app="foo"} |= "bar" | unwrap bar [5m])`, + exp: newRangeAggregationExpr( + newLogRange(&pipelineExpr{ + left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}), + pipeline: MultiPipelineExpr{ + newLineFilterExpr(nil, labels.MatchEqual, "bar"), + }, + }, + 5*time.Minute, + newUnwrapExpr("bar")), + OpRangeTypeStddev, + ), + }, + { + in: `min_over_time({app="foo"} | unwrap bar [5m])`, + exp: newRangeAggregationExpr( + newLogRange( + newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}), + 5*time.Minute, + newUnwrapExpr("bar")), + OpRangeTypeMin, + ), + }, + { + in: `max_over_time(({app="foo"} |= "bar" | json | latency >= 250ms or ( status_code < 500 and status_code > 200) + | line_format "blip{{ .foo }}blop {{.status_code}}" | label_format foo=bar,status_code="buzz{{.bar}}" | unwrap foo )[5m])`, + exp: newRangeAggregationExpr( + newLogRange(&pipelineExpr{ + left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}), + pipeline: MultiPipelineExpr{ + newLineFilterExpr(nil, labels.MatchEqual, "bar"), + newLabelParserExpr(OpParserTypeJSON, ""), + &labelFilterExpr{ + Filterer: labelfilter.NewOr( + labelfilter.NewDuration(labelfilter.FilterGreaterThanOrEqual, "latency", 250*time.Millisecond), + labelfilter.NewAnd( + labelfilter.NewNumeric(labelfilter.FilterLesserThan, "status_code", 500.0), + labelfilter.NewNumeric(labelfilter.FilterGreaterThan, "status_code", 200.0), + ), + ), + }, + newLineFmtExpr("blip{{ .foo }}blop {{.status_code}}"), + newLabelFmtExpr([]labelFmt{ + newRenameLabelFmt("foo", "bar"), + newTemplateLabelFmt("status_code", "buzz{{.bar}}"), + }), + }, + }, + 5*time.Minute, 
+ newUnwrapExpr("foo")), + OpRangeTypeMax, + ), + }, { // ensure binary ops with two literals are reduced recursively in: `1 + 1 + 1`, From 8bc18e55cb07dc098ed28dd68a7dd4f541852948 Mon Sep 17 00:00:00 2001 From: Cyril Tovena Date: Wed, 30 Sep 2020 12:55:26 +0200 Subject: [PATCH 16/45] Add more tests which lead me to find a bug in the lexer Signed-off-by: Cyril Tovena --- pkg/logql/ast.go | 26 +++++++++++++++++------ pkg/logql/ast_test.go | 13 ++++++++++-- pkg/logql/lex.go | 2 +- pkg/logql/parser_test.go | 45 +++++++++++++++++++++++++++++++++++++++- 4 files changed, 76 insertions(+), 10 deletions(-) diff --git a/pkg/logql/ast.go b/pkg/logql/ast.go index 53b200280deaa..bde029f329801 100644 --- a/pkg/logql/ast.go +++ b/pkg/logql/ast.go @@ -124,15 +124,24 @@ func (m MultiPipelineExpr) Pipeline() (Pipeline, error) { if err != nil { return nil, err } + if p == NoopPipeline { + continue + } c = append(c, p) } + if len(c) == 0 { + return NoopPipeline, nil + } return c, nil } func (m MultiPipelineExpr) String() string { var sb strings.Builder - for _, e := range m { + for i, e := range m { sb.WriteString(e.String()) + if i+1 != len(m) { + sb.WriteString(" ") + } } return sb.String() } @@ -174,7 +183,7 @@ func (e *matchersExpr) String() string { for i, m := range e.matchers { sb.WriteString(m.String()) if i+1 != len(e.matchers) { - sb.WriteString(",") + sb.WriteString(", ") } } sb.WriteString("}") @@ -205,6 +214,7 @@ func (e *pipelineExpr) Matchers() []*labels.Matcher { func (e *pipelineExpr) String() string { var sb strings.Builder sb.WriteString(e.left.String()) + sb.WriteString(" ") sb.WriteString(e.pipeline.String()) return sb.String() } @@ -232,6 +242,7 @@ func (e *lineFilterExpr) String() string { var sb strings.Builder if e.left != nil { sb.WriteString(e.left.String()) + sb.WriteString(" ") } switch e.ty { case labels.MatchRegexp: @@ -243,6 +254,7 @@ func (e *lineFilterExpr) String() string { case labels.MatchNotEqual: sb.WriteString("!=") } + sb.WriteString(" ") sb.WriteString(strconv.Quote(e.match)) return sb.String() } @@ -314,9 +326,11 @@ func (e *labelParserExpr) Pipeline() (Pipeline, error) { func (e *labelParserExpr) String() string { var sb strings.Builder - sb.WriteString("|") + sb.WriteString(OpPipe) + sb.WriteString(" ") sb.WriteString(e.op) if e.param != "" { + sb.WriteString(" ") sb.WriteString(strconv.Quote(e.param)) } return sb.String() @@ -336,7 +350,7 @@ func (e *labelFilterExpr) Pipeline() (Pipeline, error) { } func (e *labelFilterExpr) String() string { - return fmt.Sprintf("| %s", e.Filterer.String()) + return fmt.Sprintf("%s %s", OpPipe, e.Filterer.String()) } type lineFmtExpr struct { @@ -362,7 +376,7 @@ func (e *lineFmtExpr) Pipeline() (Pipeline, error) { } func (e *lineFmtExpr) String() string { - return fmt.Sprintf("| %s %s", OpFmtLine, strconv.Quote(e.value)) + return fmt.Sprintf("%s %s %s", OpPipe, OpFmtLine, strconv.Quote(e.value)) } type labelFmtExpr struct { @@ -392,7 +406,7 @@ func (e *labelFmtExpr) Pipeline() (Pipeline, error) { func (e *labelFmtExpr) String() string { var sb strings.Builder - sb.WriteString(fmt.Sprintf("| %s ", OpFmtLabel)) + sb.WriteString(fmt.Sprintf("%s %s ", OpPipe, OpFmtLabel)) for i, f := range e.formats { sb.WriteString(f.name) sb.WriteString("=") diff --git a/pkg/logql/ast_test.go b/pkg/logql/ast_test.go index c14c0c9acdf95..bfcda028d5db6 100644 --- a/pkg/logql/ast_test.go +++ b/pkg/logql/ast_test.go @@ -1,7 +1,6 @@ package logql import ( - "strings" "testing" "github.com/prometheus/prometheus/pkg/labels" @@ -27,6 +26,7 @@ func 
Test_logSelectorExpr_String(t *testing.T) { {`{foo="bar", bar!="baz"} != "bip" !~ ".+bop" | json`, true}, {`{foo="bar"} |= "baz" |~ "blip" != "flip" !~ "flap" | logfmt`, true}, {`{foo="bar"} |= "baz" |~ "blip" != "flip" !~ "flap" | regexp "(?P<foo>foo|bar)"`, true}, + {`{foo="bar"} |= "baz" |~ "blip" != "flip" !~ "flap" | regexp "(?P<foo>foo|bar)" | foo<5.01 , bar>20ms or foo="bar" | line_format "blip{{.boop}}bap" | label_format foo=bar,bar="blip{{.blop}}"`, true}, } for _, tt := range tests { @@ -42,7 +42,7 @@ func Test_logSelectorExpr_String(t *testing.T) { t.Fatalf("failed to get filter: %s", err) } require.Equal(t, tt.expectFilter, p != NoopPipeline) - if expr.String() != strings.Replace(tt.selector, " ", "", -1) { + if expr.String() != tt.selector { t.Fatalf("error expected: %s got: %s", tt.selector, expr.String()) } }) @@ -61,6 +61,7 @@ func Test_SampleExpr_String(t *testing.T) { `sum(count_over_time({job="mysql"} | regexp "(?P<foo>foo|bar)" [5m]))`, `topk(10,sum(rate({region="us-east1"}[5m])) by (name))`, `avg( rate( ( {job="nginx"} |= "GET" ) [10s] ) ) by (region)`, + `avg(min_over_time({job="nginx"} |= "GET" | unwrap foo[10s])) by (region)`, `sum by (cluster) (count_over_time({job="mysql"}[5m]))`, `sum by (cluster) (count_over_time({job="mysql"}[5m])) / sum by (cluster) (count_over_time({job="postgres"}[5m])) `, ` sum(count_over_time({job="mysql"}[5m])) / count_over_time({namespace="tns"}[5m]) )`, + `stdvar_over_time({app="foo"} |= "bar" | json | latency >= 250ms or ( status_code < 500 and status_code > 200) + | line_format "blip{{ .foo }}blop {{.status_code}}" | label_format foo=bar,status_code="buzz{{.bar}}" | unwrap foo [5m])`, + `sum_over_time({namespace="tns"} |= "level=error" | json |foo>=5,bar<25ms|unwrap latency [5m])`, + `sum by (job) ( + sum_over_time({namespace="tns"} |= "level=error" | json | foo=5 and bar<25ms | unwrap latency[5m]) + / + count_over_time({namespace="tns"} | logfmt | label_format foo=bar[5m]) + )`, } { t.Run(tc, func(t *testing.T) { expr, err := ParseExpr(tc) diff --git a/pkg/logql/lex.go b/pkg/logql/lex.go index 065893d57e4bd..a5d00977c5842 100644 --- a/pkg/logql/lex.go +++ b/pkg/logql/lex.go @@ -174,7 +174,7 @@ func tryScanDuration(number string, l *scanner.Scanner) (time.Duration, bool) { return 0, false } // we need to consume the scanner, now that we know this is a duration.
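// (Context for the one-character fix below: the numeric prefix has already
// been scanned; `consumed` counts how many further runes were peeked to
// complete the duration literal. Advancing with `i <= consumed` therefore
// eats one rune past the duration and corrupts the token that follows it,
// e.g. the closing bracket of `[5m]`. Bounding the loop with `<` consumes
// exactly the peeked runes.)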
- for i := 0; i <= consumed; i++ { + for i := 0; i < consumed; i++ { _ = l.Next() } return d, true diff --git a/pkg/logql/parser_test.go b/pkg/logql/parser_test.go index b119b38597514..f86e8b82794f5 100644 --- a/pkg/logql/parser_test.go +++ b/pkg/logql/parser_test.go @@ -6,9 +6,10 @@ import ( "testing" "time" - "github.com/grafana/loki/pkg/logql/labelfilter" "github.com/prometheus/prometheus/pkg/labels" "github.com/stretchr/testify/require" + + "github.com/grafana/loki/pkg/logql/labelfilter" ) func newString(s string) *string { @@ -1102,6 +1103,48 @@ func TestParse(t *testing.T) { OpRangeTypeStdvar, ), }, + { + in: `sum_over_time({namespace="tns"} |= "level=error" | json |foo>=5,bar<25ms| unwrap latency [5m])`, + exp: newRangeAggregationExpr( + newLogRange(&pipelineExpr{ + left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "namespace", Value: "tns"}}), + pipeline: MultiPipelineExpr{ + newLineFilterExpr(nil, labels.MatchEqual, "level=error"), + newLabelParserExpr(OpParserTypeJSON, ""), + &labelFilterExpr{ + Filterer: labelfilter.NewAnd( + labelfilter.NewNumeric(labelfilter.FilterGreaterThanOrEqual, "foo", 5), + labelfilter.NewDuration(labelfilter.FilterLesserThan, "bar", 25*time.Millisecond), + ), + }, + }, + }, + 5*time.Minute, + newUnwrapExpr("latency")), + OpRangeTypeSum, + ), + }, + { + in: `sum_over_time({namespace="tns"} |= "level=error" | json |foo==5,bar<25ms| unwrap latency [5m])`, + exp: newRangeAggregationExpr( + newLogRange(&pipelineExpr{ + left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "namespace", Value: "tns"}}), + pipeline: MultiPipelineExpr{ + newLineFilterExpr(nil, labels.MatchEqual, "level=error"), + newLabelParserExpr(OpParserTypeJSON, ""), + &labelFilterExpr{ + Filterer: labelfilter.NewAnd( + labelfilter.NewNumeric(labelfilter.FilterEqual, "foo", 5), + labelfilter.NewDuration(labelfilter.FilterLesserThan, "bar", 25*time.Millisecond), + ), + }, + }, + }, + 5*time.Minute, + newUnwrapExpr("latency")), + OpRangeTypeSum, + ), + }, { in: `stddev_over_time({app="foo"} |= "bar" | unwrap bar [5m])`, exp: newRangeAggregationExpr( From 08d2cf7811c2c990dce50cac4ba45697fb11732d Mon Sep 17 00:00:00 2001 From: Cyril Tovena Date: Wed, 30 Sep 2020 16:05:20 +0200 Subject: [PATCH 17/45] Add more tests and fix all engine tests Signed-off-by: Cyril Tovena --- pkg/logql/engine_test.go | 3 +- pkg/logql/series_extractor_test.go | 38 +++++++++++++++ pkg/logql/shardmapper.go | 4 +- pkg/logql/shardmapper_test.go | 78 +++++++++++++++--------------- pkg/logql/test_utils.go | 53 ++++++++++++++++++-- 5 files changed, 129 insertions(+), 47 deletions(-) create mode 100644 pkg/logql/series_extractor_test.go diff --git a/pkg/logql/engine_test.go b/pkg/logql/engine_test.go index 268592fc13322..4741734d0da4f 100644 --- a/pkg/logql/engine_test.go +++ b/pkg/logql/engine_test.go @@ -5,6 +5,7 @@ import ( "errors" "fmt" "math" + "strings" // "math" "testing" @@ -1817,7 +1818,7 @@ func paramsID(p interface{}) string { if err != nil { panic(err) } - return string(b) + return strings.ReplaceAll(string(b), " ", "") } type logData struct { diff --git a/pkg/logql/series_extractor_test.go b/pkg/logql/series_extractor_test.go new file mode 100644 index 0000000000000..3f5dd86adebee --- /dev/null +++ b/pkg/logql/series_extractor_test.go @@ -0,0 +1,38 @@ +package logql + +import ( + "reflect" + "testing" + + "github.com/prometheus/prometheus/pkg/labels" +) + +func Test_labelSampleExtractor_Extract(t *testing.T) { + tests := []struct { + name string + ex *labelSampleExtractor + 
in labels.Labels + want float64 + wantLbs labels.Labels + }{ + { + "convert float", + newLabelSampleExtractor("foo"), + labels.Labels{labels.Label{Name: "foo", Value: "15.0"}}, + 15, + labels.Labels{}, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + + outval, outlbs := tt.ex.Extract([]byte(""), tt.in) + if outval != tt.want { + t.Errorf("labelSampleExtractor.Extract() val = %v, want %v", outval, tt.want) + } + if !reflect.DeepEqual(outlbs, tt.wantLbs) { + t.Errorf("labelSampleExtractor.Extract() lbs = %v, want %v", outlbs, tt.wantLbs) + } + }) + } +} diff --git a/pkg/logql/shardmapper.go b/pkg/logql/shardmapper.go index 04a574223ed6a..19c74407f8ca1 100644 --- a/pkg/logql/shardmapper.go +++ b/pkg/logql/shardmapper.go @@ -130,8 +130,8 @@ func (m ShardMapper) Map(expr Expr, r *shardRecorder) (Expr, error) { case *literalExpr: return e, nil //todo(cyriltovena) enable sharding on logqlv2 - // case *matchersExpr, *filterExpr: - // return m.mapLogSelectorExpr(e.(LogSelectorExpr), r), nil + case *matchersExpr, *pipelineExpr: + return m.mapLogSelectorExpr(e.(LogSelectorExpr), r), nil case *vectorAggregationExpr: return m.mapVectorAggregationExpr(e, r) case *rangeAggregationExpr: diff --git a/pkg/logql/shardmapper_test.go b/pkg/logql/shardmapper_test.go index 48cc1484d011a..59c8be1de5295 100644 --- a/pkg/logql/shardmapper_test.go +++ b/pkg/logql/shardmapper_test.go @@ -1,6 +1,7 @@ package logql import ( + "strings" "testing" "time" @@ -130,6 +131,10 @@ func TestMappingStrings(t *testing.T) { in: `{foo="bar"}`, out: `downstream<{foo="bar"}, shard=0_of_2> ++ downstream<{foo="bar"}, shard=1_of_2>`, }, + { + in: `{foo="bar"} |= "foo" |~ "bar" | json | latency >= 10s or foo<5 and bar="t" | line_format "b{{.blip}}"`, + out: `downstream<{foo="bar"} |="foo" |~"bar" | json | latency>=10s or foo<5,bar="t"| line_format "b{{.blip}}",shard=0_of_2>++downstream<{foo="bar"} |="foo" |~"bar" | json | latency>=10s or foo<5, bar="t" | line_format "b{{.blip}}",shard=1_of_2>`, + }, { in: `sum(rate({foo="bar"}[1m]))`, out: `sum(downstream ++ downstream)`, @@ -162,7 +167,7 @@ func TestMappingStrings(t *testing.T) { mapped, err := m.Map(ast, nilMetrics.shardRecorder()) require.Nil(t, err) - require.Equal(t, tc.out, mapped.String()) + require.Equal(t, strings.ReplaceAll(tc.out, " ", ""), strings.ReplaceAll(mapped.String(), " ", "")) }) } @@ -207,45 +212,38 @@ func TestMapping(t *testing.T) { }, }, }, - // todo(cyriltovena) fix - // { - // in: `{foo="bar"} |= "error"`, - // expr: &ConcatLogSelectorExpr{ - // DownstreamLogSelectorExpr: DownstreamLogSelectorExpr{ - // shard: &astmapper.ShardAnnotation{ - // Shard: 0, - // Of: 2, - // }, - // LogSelectorExpr: &filterExpr{ - // match: "error", - // ty: labels.MatchEqual, - // left: &matchersExpr{ - // matchers: []*labels.Matcher{ - // mustNewMatcher(labels.MatchEqual, "foo", "bar"), - // }, - // }, - // }, - // }, - // next: &ConcatLogSelectorExpr{ - // DownstreamLogSelectorExpr: DownstreamLogSelectorExpr{ - // shard: &astmapper.ShardAnnotation{ - // Shard: 1, - // Of: 2, - // }, - // LogSelectorExpr: &filterExpr{ - // match: "error", - // ty: labels.MatchEqual, - // left: &matchersExpr{ - // matchers: []*labels.Matcher{ - // mustNewMatcher(labels.MatchEqual, "foo", "bar"), - // }, - // }, - // }, - // }, - // next: nil, - // }, - // }, - // }, + { + in: `{foo="bar"} |= "error"`, + expr: &ConcatLogSelectorExpr{ + DownstreamLogSelectorExpr: DownstreamLogSelectorExpr{ + shard: &astmapper.ShardAnnotation{ + Shard: 0, + Of: 2, + }, + LogSelectorExpr: 
newPipelineExpr( + newMatcherExpr([]*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}), + MultiPipelineExpr{ + newLineFilterExpr(nil, labels.MatchEqual, "error"), + }, + ), + }, + next: &ConcatLogSelectorExpr{ + DownstreamLogSelectorExpr: DownstreamLogSelectorExpr{ + shard: &astmapper.ShardAnnotation{ + Shard: 1, + Of: 2, + }, + LogSelectorExpr: newPipelineExpr( + newMatcherExpr([]*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}), + MultiPipelineExpr{ + newLineFilterExpr(nil, labels.MatchEqual, "error"), + }, + ), + }, + next: nil, + }, + }, + }, { in: `rate({foo="bar"}[5m])`, expr: &ConcatSampleExpr{ diff --git a/pkg/logql/test_utils.go b/pkg/logql/test_utils.go index 68010e724b49e..77d7e471fb1ed 100644 --- a/pkg/logql/test_utils.go +++ b/pkg/logql/test_utils.go @@ -90,13 +90,58 @@ outer: } func processStream(in []logproto.Stream, pipeline Pipeline) []logproto.Stream { - // todo(cyriltovena) - return in + resByStream := map[string]*logproto.Stream{} + + for _, stream := range in { + for _, e := range stream.Entries { + if l, out, ok := pipeline.Process([]byte(e.Line), mustParseLabels(stream.Labels)); ok { + var s *logproto.Stream + var found bool + s, found = resByStream[out.String()] + if !found { + s = &logproto.Stream{Labels: out.String()} + resByStream[out.String()] = s + } + s.Entries = append(s.Entries, logproto.Entry{ + Timestamp: e.Timestamp, + Line: string(l), + }) + } + } + } + streams := []logproto.Stream{} + for _, stream := range resByStream { + streams = append(streams, *stream) + } + return streams } func processSeries(in []logproto.Stream, pipeline Pipeline, ex SampleExtractor) []logproto.Series { - // todo(cyriltovena) - return nil + resBySeries := map[string]*logproto.Series{} + + for _, stream := range in { + for _, e := range stream.Entries { + if l, out, ok := pipeline.Process([]byte(e.Line), mustParseLabels(stream.Labels)); ok { + f, lbs := ex.Extract(l, out) + var s *logproto.Series + var found bool + s, found = resBySeries[lbs.String()] + if !found { + s = &logproto.Series{Labels: lbs.String()} + resBySeries[lbs.String()] = s + } + s.Samples = append(s.Samples, logproto.Sample{ + Timestamp: e.Timestamp.UnixNano(), + Value: f, + }) + } + } + } + series := []logproto.Series{} + for _, s := range resBySeries { + series = append(series, *s) + } + return series } func (q MockQuerier) SelectSamples(ctx context.Context, req SelectSampleParams) (iter.SampleIterator, error) { From b8014176797f22d00a2d2c71c781bdbff3febc21 Mon Sep 17 00:00:00 2001 From: Cyril Tovena Date: Wed, 30 Sep 2020 17:16:09 +0200 Subject: [PATCH 18/45] Fixes match stage in promtail pipelines. 
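The match stage previously applied only a line filter to each entry. A LogQL
v2 selector can carry a whole pipeline (parsers, label filters, line and label
formatting), so the stage now runs the selector's Pipeline and rewrites both
the entry and its label set before invoking the nested stages. In sketch form,
the flow implemented below is:

    if newLine, newLabels, ok := m.pipeline.Process([]byte(*entry), labels.FromMap(util.ModelLabelSetToMap(lbs))); ok {
        switch m.action {
        case MatchActionDrop:
            lbs[dropLabel] = model.LabelValue(m.dropReason) // mark, so the API handler skips it
        case MatchActionKeep:
            *entry = string(newLine) // adopt the rewritten line and labels, then run sub-stages
        }
    }

The new ModelLabelSetToMap/MapToModelLabelSet helpers convert without
allocating: model.LabelSet and map[string]string share an identical memory
layout (key and value are both string-kinded types), so a single
unsafe.Pointer cast reinterprets the map in place.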
Signed-off-by: Cyril Tovena --- pkg/logentry/stages/match.go | 31 ++++++++++------ pkg/util/conv.go | 16 ++++++-- pkg/util/conv_test.go | 71 ++++++++++++++++++++++++++++++++++++ 3 files changed, 103 insertions(+), 15 deletions(-) diff --git a/pkg/logentry/stages/match.go b/pkg/logentry/stages/match.go index 5d9f5eae4001b..cbb68b06dd038 100644 --- a/pkg/logentry/stages/match.go +++ b/pkg/logentry/stages/match.go @@ -12,6 +12,7 @@ import ( "github.com/prometheus/common/model" "github.com/grafana/loki/pkg/logql" + "github.com/grafana/loki/pkg/util" ) const ( @@ -95,9 +96,9 @@ func newMatcherStage(logger log.Logger, jobName *string, config interface{}, reg } } - filter, err := selector.Filter() + pipeline, err := selector.Pipeline() if err != nil { - return nil, errors.Wrap(err, "error parsing filter") + return nil, errors.Wrap(err, "error parsing pipeline") } dropReason := "match_stage" @@ -108,9 +109,9 @@ func newMatcherStage(logger log.Logger, jobName *string, config interface{}, reg return &matcherStage{ dropReason: dropReason, matchers: selector.Matchers(), - pipeline: pl, + stage: pl, action: cfg.Action, - filter: filter, + pipeline: pipeline, }, nil } @@ -118,25 +119,33 @@ func newMatcherStage(logger log.Logger, jobName *string, config interface{}, reg type matcherStage struct { dropReason string matchers []*labels.Matcher - filter logql.LineFilter - pipeline Stage + pipeline logql.Pipeline + stage Stage action string } // Process implements Stage -func (m *matcherStage) Process(labels model.LabelSet, extracted map[string]interface{}, t *time.Time, entry *string) { +func (m *matcherStage) Process(lbs model.LabelSet, extracted map[string]interface{}, t *time.Time, entry *string) { for _, filter := range m.matchers { - if !filter.Matches(string(labels[model.LabelName(filter.Name)])) { + if !filter.Matches(string(lbs[model.LabelName(filter.Name)])) { return } } - if m.filter == nil || m.filter.Filter([]byte(*entry)) { + + if newLine, newLabels, ok := m.pipeline.Process([]byte(*entry), labels.FromMap(util.ModelLabelSetToMap(lbs))); ok { switch m.action { case MatchActionDrop: // Adds the drop label to not be sent by the api.EntryHandler - labels[dropLabel] = model.LabelValue(m.dropReason) + lbs[dropLabel] = model.LabelValue(m.dropReason) case MatchActionKeep: - m.pipeline.Process(labels, extracted, t, entry) + *entry = string(newLine) + for k := range lbs { + delete(lbs, k) + } + for _, l := range newLabels { + lbs[model.LabelName(l.Name)] = model.LabelValue(l.Value) + } + m.stage.Process(lbs, extracted, t, entry) } } } diff --git a/pkg/util/conv.go b/pkg/util/conv.go index 5938d4398302c..952989659a7ad 100644 --- a/pkg/util/conv.go +++ b/pkg/util/conv.go @@ -5,6 +5,7 @@ import ( "sort" "strings" "time" + "unsafe" "github.com/cortexproject/cortex/pkg/ingester/client" "github.com/prometheus/common/model" @@ -38,11 +39,18 @@ func ToClientLabels(labels string) ([]client.LabelAdapter, error) { // ModelLabelSetToMap convert a model.LabelSet to a map[string]string func ModelLabelSetToMap(m model.LabelSet) map[string]string { - result := map[string]string{} - for k, v := range m { - result[string(k)] = string(v) + if len(m) == 0 { + return map[string]string{} } - return result + return *(*map[string]string)(unsafe.Pointer(&m)) +} + +// MapToModelLabelSet converts a map into a model.LabelSet +func MapToModelLabelSet(m map[string]string) model.LabelSet { + if len(m) == 0 { + return model.LabelSet{} + } + return *(*map[model.LabelName]model.LabelValue)(unsafe.Pointer(&m)) } // RoundToMilliseconds 
returns milliseconds precision time from nanoseconds. diff --git a/pkg/util/conv_test.go b/pkg/util/conv_test.go index ab27d230b26f9..de6d08ce0c9af 100644 --- a/pkg/util/conv_test.go +++ b/pkg/util/conv_test.go @@ -57,3 +57,74 @@ func TestRoundToMilliseconds(t *testing.T) { }) } } + +func TestModelLabelSetToMap(t *testing.T) { + + tests := []struct { + name string + m model.LabelSet + want map[string]string + }{ + { + "nil", + nil, + map[string]string{}, + }, + { + "one", + model.LabelSet{model.LabelName("foo"): model.LabelValue("bar")}, + map[string]string{"foo": "bar"}, + }, + { + "two", + model.LabelSet{ + model.LabelName("foo"): model.LabelValue("bar"), + model.LabelName("buzz"): model.LabelValue("fuzz"), + }, + map[string]string{ + "foo": "bar", + "buzz": "fuzz", + }, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + if got := ModelLabelSetToMap(tt.m); !reflect.DeepEqual(got, tt.want) { + t.Errorf("ModelLabelSetToMap() = %v, want %v", got, tt.want) + } + }) + } +} + +func TestMapToModelLabelSet(t *testing.T) { + tests := []struct { + name string + args map[string]string + want model.LabelSet + }{ + {"nil", nil, model.LabelSet{}}, + { + "one", + map[string]string{"foo": "bar"}, + model.LabelSet{model.LabelName("foo"): model.LabelValue("bar")}, + }, + { + "two", + map[string]string{ + "foo": "bar", + "buzz": "fuzz", + }, + model.LabelSet{ + model.LabelName("foo"): model.LabelValue("bar"), + model.LabelName("buzz"): model.LabelValue("fuzz"), + }, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + if got := MapToModelLabelSet(tt.args); !reflect.DeepEqual(got, tt.want) { + t.Errorf("MapToModelLabelSet() = %v, want %v", got, tt.want) + } + }) + } +} From 850b0036b49764ff253eb760a8412b4b6ee5bd42 Mon Sep 17 00:00:00 2001 From: Cyril Tovena Date: Thu, 1 Oct 2020 12:14:29 +0200 Subject: [PATCH 19/45] Hook Pipeline into ingester, tailer and storage. Signed-off-by: Cyril Tovena --- pkg/chunkenc/dumb_chunk.go | 4 +- pkg/chunkenc/interface.go | 8 +- pkg/chunkenc/memchunk.go | 135 ++++++++++++++-------------- pkg/chunkenc/memchunk_test.go | 38 ++++---- pkg/ingester/chunk_test.go | 4 +- pkg/ingester/flush_test.go | 2 +- pkg/ingester/instance.go | 17 +--- pkg/ingester/stream.go | 12 +-- pkg/ingester/stream_test.go | 4 +- pkg/ingester/tailer.go | 72 ++++++++++----- pkg/ingester/transfer_test.go | 3 +- pkg/logql/ast.go | 33 +++++++ pkg/querier/http.go | 6 +- pkg/querier/queryrange/roundtrip.go | 15 ++-- pkg/storage/batch.go | 22 ++--- pkg/storage/batch_test.go | 8 +- pkg/storage/lazy_chunk.go | 14 ++- pkg/storage/lazy_chunk_test.go | 6 +- pkg/storage/store.go | 46 +++++----- pkg/storage/store_test.go | 2 +- pkg/util/conv.go | 18 ++++ 21 files changed, 272 insertions(+), 197 deletions(-) diff --git a/pkg/chunkenc/dumb_chunk.go b/pkg/chunkenc/dumb_chunk.go index 2651dc347ddd9..ad4173e54d137 100644 --- a/pkg/chunkenc/dumb_chunk.go +++ b/pkg/chunkenc/dumb_chunk.go @@ -69,7 +69,7 @@ func (c *dumbChunk) Utilization() float64 { // Returns an iterator that goes from _most_ recent to _least_ recent (ie, // backwards). 
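// The signature change below is the core of this commit: the separate
// (logql.LineFilter, logql.LabelParser) pair threaded through every iterator
// collapses into a single logql.Pipeline that chains filtering, parsing and
// formatting. The contract relied on throughout this series is, in sketch
// form:
//
//	type Pipeline interface {
//		// Process returns the possibly rewritten line and label set,
//		// and false when the entry is filtered out.
//		Process(line []byte, lbs labels.Labels) ([]byte, labels.Labels, bool)
//	}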
-func (c *dumbChunk) Iterator(_ context.Context, from, through time.Time, direction logproto.Direction, _ labels.Labels, _ logql.LineFilter, _ logql.LabelParser) (iter.EntryIterator, error) { +func (c *dumbChunk) Iterator(_ context.Context, from, through time.Time, direction logproto.Direction, _ labels.Labels, _ logql.Pipeline) (iter.EntryIterator, error) { i := sort.Search(len(c.entries), func(i int) bool { return !from.After(c.entries[i].Timestamp) }) @@ -94,7 +94,7 @@ func (c *dumbChunk) Iterator(_ context.Context, from, through time.Time, directi }, nil } -func (c *dumbChunk) SampleIterator(_ context.Context, from, through time.Time, _ labels.Labels, _ logql.LineFilter, _ logql.SampleExtractor, _ logql.LabelParser) iter.SampleIterator { +func (c *dumbChunk) SampleIterator(_ context.Context, from, through time.Time, _ labels.Labels, _ logql.Pipeline, _ logql.SampleExtractor) iter.SampleIterator { return nil } diff --git a/pkg/chunkenc/interface.go b/pkg/chunkenc/interface.go index 7722065e0ee7f..676420f11c362 100644 --- a/pkg/chunkenc/interface.go +++ b/pkg/chunkenc/interface.go @@ -99,8 +99,8 @@ type Chunk interface { Bounds() (time.Time, time.Time) SpaceFor(*logproto.Entry) bool Append(*logproto.Entry) error - Iterator(ctx context.Context, mintT, maxtT time.Time, direction logproto.Direction, lbs labels.Labels, filter logql.LineFilter, parser logql.LabelParser) (iter.EntryIterator, error) - SampleIterator(ctx context.Context, from, through time.Time, lbs labels.Labels, filter logql.LineFilter, extractor logql.SampleExtractor, parser logql.LabelParser) iter.SampleIterator + Iterator(ctx context.Context, mintT, maxtT time.Time, direction logproto.Direction, lbs labels.Labels, pipeline logql.Pipeline) (iter.EntryIterator, error) + SampleIterator(ctx context.Context, from, through time.Time, lbs labels.Labels, pipeline logql.Pipeline, extractor logql.SampleExtractor) iter.SampleIterator // Returns the list of blocks in the chunks. Blocks(mintT, maxtT time.Time) []Block Size() int @@ -123,7 +123,7 @@ type Block interface { // Entries is the amount of entries in the block. Entries() int // Iterator returns an entry iterator for the block. - Iterator(ctx context.Context, lbs labels.Labels, filter logql.LineFilter, parser logql.LabelParser) iter.EntryIterator + Iterator(ctx context.Context, lbs labels.Labels, pipeline logql.Pipeline) iter.EntryIterator // SampleIterator returns a sample iterator for the block. - SampleIterator(ctx context.Context, lbs labels.Labels, filter logql.LineFilter, extractor logql.SampleExtractor, parser logql.LabelParser) iter.SampleIterator + SampleIterator(ctx context.Context, lbs labels.Labels, pipeline logql.Pipeline, extractor logql.SampleExtractor) iter.SampleIterator } diff --git a/pkg/chunkenc/memchunk.go b/pkg/chunkenc/memchunk.go index 917171bfe2402..949e8468f5f9f 100644 --- a/pkg/chunkenc/memchunk.go +++ b/pkg/chunkenc/memchunk.go @@ -476,7 +476,7 @@ func (c *MemChunk) Bounds() (fromT, toT time.Time) { } // Iterator implements Chunk. 
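// Because a pipeline stage (json, logfmt, label_format, ...) may rewrite the
// label set, one chunk can now fan out into several result streams. The head
// and block iterators below therefore group entries by the hash of the labels
// returned from Process rather than by the stream's original labels.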
-func (c *MemChunk) Iterator(ctx context.Context, mintT, maxtT time.Time, direction logproto.Direction, lbs labels.Labels, filter logql.LineFilter, parser logql.LabelParser) (iter.EntryIterator, error) { +func (c *MemChunk) Iterator(ctx context.Context, mintT, maxtT time.Time, direction logproto.Direction, lbs labels.Labels, pipeline logql.Pipeline) (iter.EntryIterator, error) { mint, maxt := mintT.UnixNano(), maxtT.UnixNano() its := make([]iter.EntryIterator, 0, len(c.blocks)+1) @@ -484,11 +484,11 @@ func (c *MemChunk) Iterator(ctx context.Context, mintT, maxtT time.Time, directi if maxt < b.mint || b.maxt < mint { continue } - its = append(its, b.Iterator(ctx, lbs, filter, parser)) + its = append(its, b.Iterator(ctx, lbs, pipeline)) } if !c.head.isEmpty() { - its = append(its, c.head.iterator(ctx, direction, mint, maxt, lbs, filter, parser)) + its = append(its, c.head.iterator(ctx, direction, mint, maxt, lbs, pipeline)) } iterForward := iter.NewTimeRangedIterator( @@ -505,7 +505,7 @@ func (c *MemChunk) Iterator(ctx context.Context, mintT, maxtT time.Time, directi } // Iterator implements Chunk. -func (c *MemChunk) SampleIterator(ctx context.Context, from, through time.Time, lbs labels.Labels, filter logql.LineFilter, extractor logql.SampleExtractor, parser logql.LabelParser) iter.SampleIterator { +func (c *MemChunk) SampleIterator(ctx context.Context, from, through time.Time, lbs labels.Labels, pipeline logql.Pipeline, extractor logql.SampleExtractor) iter.SampleIterator { mint, maxt := from.UnixNano(), through.UnixNano() its := make([]iter.SampleIterator, 0, len(c.blocks)+1) @@ -513,11 +513,11 @@ func (c *MemChunk) SampleIterator(ctx context.Context, from, through time.Time, if maxt < b.mint || b.maxt < mint { continue } - its = append(its, b.SampleIterator(ctx, lbs, filter, extractor, parser)) + its = append(its, b.SampleIterator(ctx, lbs, pipeline, extractor)) } if !c.head.isEmpty() { - its = append(its, c.head.sampleIterator(ctx, mint, maxt, lbs, filter, extractor, parser)) + its = append(its, c.head.sampleIterator(ctx, mint, maxt, lbs, pipeline, extractor)) } return iter.NewTimeRangedSampleIterator( @@ -540,18 +540,18 @@ func (c *MemChunk) Blocks(mintT, maxtT time.Time) []Block { return blocks } -func (b block) Iterator(ctx context.Context, lbs labels.Labels, filter logql.LineFilter, parser logql.LabelParser) iter.EntryIterator { +func (b block) Iterator(ctx context.Context, lbs labels.Labels, pipeline logql.Pipeline) iter.EntryIterator { if len(b.b) == 0 { return iter.NoopIterator } - return newEntryIterator(ctx, b.readers, b.b, lbs, filter, parser) + return newEntryIterator(ctx, b.readers, b.b, lbs, pipeline) } -func (b block) SampleIterator(ctx context.Context, lbs labels.Labels, filter logql.LineFilter, extractor logql.SampleExtractor, parser logql.LabelParser) iter.SampleIterator { +func (b block) SampleIterator(ctx context.Context, lbs labels.Labels, pipeline logql.Pipeline, extractor logql.SampleExtractor) iter.SampleIterator { if len(b.b) == 0 { return iter.NoopIterator } - return newSampleIterator(ctx, b.readers, b.b, lbs, filter, extractor, parser) + return newSampleIterator(ctx, b.readers, b.b, lbs, pipeline, extractor) } func (b block) Offset() int { @@ -568,7 +568,7 @@ func (b block) MaxTime() int64 { return b.maxt } -func (hb *headBlock) iterator(ctx context.Context, direction logproto.Direction, mint, maxt int64, lbs labels.Labels, filter logql.LineFilter, parser logql.LabelParser) iter.EntryIterator { +func (hb *headBlock) iterator(ctx context.Context, direction 
logproto.Direction, mint, maxt int64, lbs labels.Labels, pipeline logql.Pipeline) iter.EntryIterator { if hb.isEmpty() || (maxt < hb.mint || hb.maxt < mint) { return iter.NoopIterator } @@ -584,22 +584,23 @@ func (hb *headBlock) iterator(ctx context.Context, direction logproto.Direction, for _, e := range hb.entries { chunkStats.HeadChunkBytes += int64(len(e.s)) line := []byte(e.s) - if filter == nil || filter.Filter(line) { - parsedLbs := parser.Parse(line, lbs) - var ok bool - var stream *logproto.Stream - lhash := parsedLbs.Hash() - if stream, ok = streams[lhash]; !ok { - stream = &logproto.Stream{ - Labels: parsedLbs.String(), - } - streams[lhash] = stream + newLine, parsedLbs, ok := pipeline.Process(line, lbs) + if !ok { + continue + } + var stream *logproto.Stream + lhash := parsedLbs.Hash() + if stream, ok = streams[lhash]; !ok { + stream = &logproto.Stream{ + Labels: parsedLbs.String(), } - stream.Entries = append(stream.Entries, logproto.Entry{ - Timestamp: time.Unix(0, e.t), - Line: e.s, - }) + streams[lhash] = stream } + stream.Entries = append(stream.Entries, logproto.Entry{ + Timestamp: time.Unix(0, e.t), + Line: string(newLine), + }) + } if len(streams) == 0 { @@ -612,7 +613,7 @@ func (hb *headBlock) iterator(ctx context.Context, direction logproto.Direction, return iter.NewStreamsIterator(ctx, streamsResult, direction) } -func (hb *headBlock) sampleIterator(ctx context.Context, mint, maxt int64, lbs labels.Labels, filter logql.LineFilter, extractor logql.SampleExtractor, parser logql.LabelParser) iter.SampleIterator { +func (hb *headBlock) sampleIterator(ctx context.Context, mint, maxt int64, lbs labels.Labels, pipeline logql.Pipeline, extractor logql.SampleExtractor) iter.SampleIterator { if hb.isEmpty() || (maxt < hb.mint || hb.maxt < mint) { return iter.NoopIterator } @@ -622,26 +623,26 @@ func (hb *headBlock) sampleIterator(ctx context.Context, mint, maxt int64, lbs l for _, e := range hb.entries { chunkStats.HeadChunkBytes += int64(len(e.s)) line := []byte(e.s) - if filter == nil || filter.Filter(line) { - parsedLbs := parser.Parse(line, lbs) - if value, ok := extractor.Extract([]byte(e.s)); ok { - var ok bool - var s *logproto.Series - lhash := parsedLbs.Hash() - if s, ok = series[lhash]; !ok { - s = &logproto.Series{ - Labels: parsedLbs.String(), - } - series[lhash] = s - } - s.Samples = append(s.Samples, logproto.Sample{ - Timestamp: e.t, - Value: value, - Hash: xxhash.Sum64([]byte(e.s)), - }) - + newLine, parsedLabels, ok := pipeline.Process(line, lbs) + if !ok { + continue + } + var value float64 + var found bool + value, parsedLabels = extractor.Extract(newLine, parsedLabels) + var s *logproto.Series + lhash := parsedLabels.Hash() + if s, found = series[lhash]; !found { + s = &logproto.Series{ + Labels: parsedLabels.String(), } + series[lhash] = s } + s.Samples = append(s.Samples, logproto.Sample{ + Timestamp: e.t, + Value: value, + Hash: xxhash.Sum64([]byte(e.s)), + }) } if len(series) == 0 { @@ -668,17 +669,16 @@ type bufferedIterator struct { buf []byte // The buffer for a single entry. currLine []byte // the current line, this is the same as the buffer but sliced the the line size. 
currTs int64 - currLabels string + currLabels labels.Labels consumed bool closed bool - baseLbs labels.Labels - parser logql.LabelParser - filter logql.LineFilter + baseLbs labels.Labels + pipeline logql.Pipeline } -func newBufferedIterator(ctx context.Context, pool ReaderPool, b []byte, lbs labels.Labels, filter logql.LineFilter, parser logql.LabelParser) *bufferedIterator { +func newBufferedIterator(ctx context.Context, pool ReaderPool, b []byte, lbs labels.Labels, pipeline logql.Pipeline) *bufferedIterator { chunkStats := stats.GetChunkData(ctx) chunkStats.CompressedBytes += int64(len(b)) return &bufferedIterator{ @@ -687,11 +687,10 @@ func newBufferedIterator(ctx context.Context, pool ReaderPool, b []byte, lbs lab reader: nil, // will be initialized later bufReader: nil, // will be initialized later pool: pool, - filter: filter, + pipeline: pipeline, decBuf: make([]byte, binary.MaxVarintLen64), consumed: true, baseLbs: lbs, - parser: parser, } } @@ -711,14 +710,16 @@ func (si *bufferedIterator) Next() bool { // we decode always the line length and ts as varint si.stats.DecompressedBytes += int64(len(line)) + 2*binary.MaxVarintLen64 si.stats.DecompressedLines++ - if si.filter != nil && !si.filter.Filter(line) { + + newLine, lbs, ok := si.pipeline.Process(line, si.baseLbs) + if !ok { continue } si.currTs = ts - si.currLine = line + si.currLine = newLine si.consumed = false // todo(cyriltovena) add cache for building the string of labels via some sort of decode context. - si.currLabels = si.parser.Parse(line, si.baseLbs).String() + si.currLabels = lbs return true } } @@ -803,11 +804,11 @@ func (si *bufferedIterator) close() { si.decBuf = nil } -func (si *bufferedIterator) Labels() string { return si.currLabels } +func (si *bufferedIterator) Labels() string { return si.currLabels.String() } -func newEntryIterator(ctx context.Context, pool ReaderPool, b []byte, lbs labels.Labels, filter logql.LineFilter, parser logql.LabelParser) iter.EntryIterator { +func newEntryIterator(ctx context.Context, pool ReaderPool, b []byte, lbs labels.Labels, pipeline logql.Pipeline) iter.EntryIterator { return &entryBufferedIterator{ - bufferedIterator: newBufferedIterator(ctx, pool, b, lbs, filter, parser), + bufferedIterator: newBufferedIterator(ctx, pool, b, lbs, pipeline), } } @@ -825,9 +826,9 @@ func (e *entryBufferedIterator) Entry() logproto.Entry { return e.cur } -func newSampleIterator(ctx context.Context, pool ReaderPool, b []byte, lbs labels.Labels, filter logql.LineFilter, extractor logql.SampleExtractor, parser logql.LabelParser) iter.SampleIterator { +func newSampleIterator(ctx context.Context, pool ReaderPool, b []byte, lbs labels.Labels, pipeline logql.Pipeline, extractor logql.SampleExtractor) iter.SampleIterator { it := &sampleBufferedIterator{ - bufferedIterator: newBufferedIterator(ctx, pool, b, lbs, filter, parser), + bufferedIterator: newBufferedIterator(ctx, pool, b, lbs, pipeline), extractor: extractor, } return it @@ -835,20 +836,24 @@ func newSampleIterator(ctx context.Context, pool ReaderPool, b []byte, lbs label type sampleBufferedIterator struct { *bufferedIterator + extractor logql.SampleExtractor - cur logproto.Sample - currValue float64 + + cur logproto.Sample + currLabels string + currValue float64 } func (e *sampleBufferedIterator) Next() bool { - var ok bool + var newLabels labels.Labels for e.bufferedIterator.Next() { - if e.currValue, ok = e.extractor.Extract(e.currLine); ok { - return true - } + e.currValue, newLabels = e.extractor.Extract(e.currLine, 
e.bufferedIterator.currLabels) + e.currLabels = newLabels.String() + return true } return false } +func (e *sampleBufferedIterator) Labels() string { return e.currLabels } func (e *sampleBufferedIterator) Sample() logproto.Sample { if !e.consumed { diff --git a/pkg/chunkenc/memchunk_test.go b/pkg/chunkenc/memchunk_test.go index ddb71cabf0f5d..1637c1931eab8 100644 --- a/pkg/chunkenc/memchunk_test.go +++ b/pkg/chunkenc/memchunk_test.go @@ -113,7 +113,7 @@ func TestBlock(t *testing.T) { } } - it, err := chk.Iterator(context.Background(), time.Unix(0, 0), time.Unix(0, math.MaxInt64), logproto.FORWARD, nil, nil, logql.NoopLabelParser) + it, err := chk.Iterator(context.Background(), time.Unix(0, 0), time.Unix(0, math.MaxInt64), logproto.FORWARD, nil, logql.NoopPipeline) require.NoError(t, err) idx := 0 @@ -128,7 +128,7 @@ func TestBlock(t *testing.T) { require.NoError(t, it.Close()) require.Equal(t, len(cases), idx) - sampleIt := chk.SampleIterator(context.Background(), time.Unix(0, 0), time.Unix(0, math.MaxInt64), nil, nil, logql.ExtractCount, logql.NoopLabelParser) + sampleIt := chk.SampleIterator(context.Background(), time.Unix(0, 0), time.Unix(0, math.MaxInt64), nil, logql.NoopPipeline, logql.ExtractCount) idx = 0 for sampleIt.Next() { s := sampleIt.Sample() @@ -143,7 +143,7 @@ func TestBlock(t *testing.T) { require.Equal(t, len(cases), idx) t.Run("bounded-iteration", func(t *testing.T) { - it, err := chk.Iterator(context.Background(), time.Unix(0, 3), time.Unix(0, 7), logproto.FORWARD, nil, nil, logql.NoopLabelParser) + it, err := chk.Iterator(context.Background(), time.Unix(0, 3), time.Unix(0, 7), logproto.FORWARD, nil, logql.NoopPipeline) require.NoError(t, err) idx := 2 @@ -176,7 +176,7 @@ func TestReadFormatV1(t *testing.T) { t.Fatal(err) } - it, err := r.Iterator(context.Background(), time.Unix(0, 0), time.Unix(0, math.MaxInt64), logproto.FORWARD, nil, nil, logql.NoopLabelParser) + it, err := r.Iterator(context.Background(), time.Unix(0, 0), time.Unix(0, math.MaxInt64), logproto.FORWARD, nil, logql.NoopPipeline) if err != nil { t.Fatal(err) } @@ -203,7 +203,7 @@ func TestRoundtripV2(t *testing.T) { assertLines := func(c *MemChunk) { require.Equal(t, enc, c.Encoding()) - it, err := c.Iterator(context.Background(), time.Unix(0, 0), time.Unix(0, math.MaxInt64), logproto.FORWARD, nil, nil, logql.NoopLabelParser) + it, err := c.Iterator(context.Background(), time.Unix(0, 0), time.Unix(0, math.MaxInt64), logproto.FORWARD, nil, logql.NoopPipeline) if err != nil { t.Fatal(err) } @@ -265,7 +265,7 @@ func TestSerialization(t *testing.T) { bc, err := NewByteChunk(byt, testBlockSize, testTargetSize) require.NoError(t, err) - it, err := bc.Iterator(context.Background(), time.Unix(0, 0), time.Unix(0, math.MaxInt64), logproto.FORWARD, nil, nil, logql.NoopLabelParser) + it, err := bc.Iterator(context.Background(), time.Unix(0, 0), time.Unix(0, math.MaxInt64), logproto.FORWARD, nil, logql.NoopPipeline) require.NoError(t, err) for i := 0; i < numSamples; i++ { require.True(t, it.Next()) @@ -276,7 +276,7 @@ func TestSerialization(t *testing.T) { } require.NoError(t, it.Error()) - sampleIt := bc.SampleIterator(context.Background(), time.Unix(0, 0), time.Unix(0, math.MaxInt64), nil, nil, logql.ExtractCount, logql.NoopLabelParser) + sampleIt := bc.SampleIterator(context.Background(), time.Unix(0, 0), time.Unix(0, math.MaxInt64), nil, logql.NoopPipeline, logql.ExtractCount) for i := 0; i < numSamples; i++ { require.True(t, sampleIt.Next(), i) @@ -319,7 +319,7 @@ func TestChunkFilling(t *testing.T) { 
require.Equal(t, int64(lines), i) - it, err := chk.Iterator(context.Background(), time.Unix(0, 0), time.Unix(0, 100), logproto.FORWARD, nil, nil, logql.NoopLabelParser) + it, err := chk.Iterator(context.Background(), time.Unix(0, 0), time.Unix(0, 100), logproto.FORWARD, nil, logql.NoopPipeline) require.NoError(t, err) i = 0 for it.Next() { @@ -462,7 +462,7 @@ func TestChunkStats(t *testing.T) { expectedSize := (inserted * len(entry.Line)) + (inserted * 2 * binary.MaxVarintLen64) ctx := stats.NewContext(context.Background()) - it, err := c.Iterator(ctx, first.Add(-time.Hour), entry.Timestamp.Add(time.Hour), logproto.BACKWARD, nil, logql.LineFilterFunc(func(line []byte) bool { return false }), logql.NoopLabelParser) + it, err := c.Iterator(ctx, first.Add(-time.Hour), entry.Timestamp.Add(time.Hour), logproto.BACKWARD, nil, logql.NoopPipeline) if err != nil { t.Fatal(err) } @@ -491,7 +491,7 @@ func TestChunkStats(t *testing.T) { t.Fatal(err) } ctx = stats.NewContext(context.Background()) - it, err = cb.Iterator(ctx, first.Add(-time.Hour), entry.Timestamp.Add(time.Hour), logproto.BACKWARD, nil, logql.LineFilterFunc(func(line []byte) bool { return false }), logql.NoopLabelParser) + it, err = cb.Iterator(ctx, first.Add(-time.Hour), entry.Timestamp.Add(time.Hour), logproto.BACKWARD, nil, logql.NoopPipeline) if err != nil { t.Fatal(err) } @@ -539,7 +539,7 @@ func TestIteratorClose(t *testing.T) { } { c := NewMemChunk(enc, testBlockSize, testTargetSize) inserted := fillChunk(c) - iter, err := c.Iterator(context.Background(), time.Unix(0, 0), time.Unix(0, inserted), logproto.BACKWARD, nil, nil, logql.NoopLabelParser) + iter, err := c.Iterator(context.Background(), time.Unix(0, 0), time.Unix(0, inserted), logproto.BACKWARD, nil, logql.NoopPipeline) if err != nil { t.Fatal(err) } @@ -590,7 +590,7 @@ func BenchmarkRead(b *testing.B) { for n := 0; n < b.N; n++ { for _, c := range chunks { // use forward iterator for benchmark -- backward iterator does extra allocations by keeping entries in memory - iterator, err := c.Iterator(context.Background(), time.Unix(0, 0), time.Now(), logproto.FORWARD, nil, nil, logql.NoopLabelParser) + iterator, err := c.Iterator(context.Background(), time.Unix(0, 0), time.Now(), logproto.FORWARD, nil, logql.NoopPipeline) if err != nil { panic(err) } @@ -615,7 +615,7 @@ func BenchmarkBackwardIterator(b *testing.B) { _ = fillChunk(c) b.ResetTimer() for n := 0; n < b.N; n++ { - iterator, err := c.Iterator(context.Background(), time.Unix(0, 0), time.Now(), logproto.BACKWARD, nil, nil, logql.NoopLabelParser) + iterator, err := c.Iterator(context.Background(), time.Unix(0, 0), time.Now(), logproto.BACKWARD, nil, logql.NoopPipeline) if err != nil { panic(err) } @@ -636,9 +636,7 @@ func TestGenerateDataSize(t *testing.T) { bytesRead := uint64(0) for _, c := range chunks { // use forward iterator for benchmark -- backward iterator does extra allocations by keeping entries in memory - iterator, err := c.Iterator(context.TODO(), time.Unix(0, 0), time.Now(), logproto.FORWARD, nil, logql.LineFilterFunc(func(line []byte) bool { - return true // return all - }), logql.NoopLabelParser) + iterator, err := c.Iterator(context.TODO(), time.Unix(0, 0), time.Now(), logproto.FORWARD, nil, logql.NoopPipeline) if err != nil { panic(err) } @@ -672,7 +670,7 @@ func BenchmarkHeadBlockIterator(b *testing.B) { b.ResetTimer() for n := 0; n < b.N; n++ { - iter := h.iterator(context.Background(), logproto.BACKWARD, 0, math.MaxInt64, nil, nil, logql.NoopLabelParser) + iter := h.iterator(context.Background(), 
logproto.BACKWARD, 0, math.MaxInt64, nil, logql.NoopPipeline) for iter.Next() { _ = iter.Entry() @@ -731,7 +729,7 @@ func TestMemChunk_IteratorBounds(t *testing.T) { c := createChunk() // testing headchunk - it, err := c.Iterator(context.Background(), tt.mint, tt.maxt, tt.direction, nil, nil, logql.NoopLabelParser) + it, err := c.Iterator(context.Background(), tt.mint, tt.maxt, tt.direction, nil, logql.NoopPipeline) require.NoError(t, err) for i := range tt.expect { require.Equal(t, tt.expect[i], it.Next()) @@ -740,7 +738,7 @@ func TestMemChunk_IteratorBounds(t *testing.T) { // testing chunk blocks require.NoError(t, c.cut()) - it, err = c.Iterator(context.Background(), tt.mint, tt.maxt, tt.direction, nil, nil, logql.NoopLabelParser) + it, err = c.Iterator(context.Background(), tt.mint, tt.maxt, tt.direction, nil, logql.NoopPipeline) require.NoError(t, err) for i := range tt.expect { require.Equal(t, tt.expect[i], it.Next()) @@ -759,7 +757,7 @@ func TestMemchunkLongLine(t *testing.T) { for i := 1; i <= 10; i++ { require.NoError(t, c.Append(&logproto.Entry{Timestamp: time.Unix(0, int64(i)), Line: strings.Repeat("e", 200000)})) } - it, err := c.Iterator(context.Background(), time.Unix(0, 0), time.Unix(0, 100), logproto.FORWARD, nil, nil, logql.NoopLabelParser) + it, err := c.Iterator(context.Background(), time.Unix(0, 0), time.Unix(0, 100), logproto.FORWARD, nil, logql.NoopPipeline) require.NoError(t, err) for i := 1; i <= 10; i++ { require.True(t, it.Next()) diff --git a/pkg/ingester/chunk_test.go b/pkg/ingester/chunk_test.go index e4c1da942e4c9..fbd1e1028d54c 100644 --- a/pkg/ingester/chunk_test.go +++ b/pkg/ingester/chunk_test.go @@ -64,7 +64,7 @@ func TestIterator(t *testing.T) { for i := 0; i < entries; i++ { from := rand.Intn(entries - 1) len := rand.Intn(entries-from) + 1 - iter, err := chunk.Iterator(context.TODO(), time.Unix(int64(from), 0), time.Unix(int64(from+len), 0), logproto.FORWARD, labels.Labels{}, nil, logql.NoopLabelParser) + iter, err := chunk.Iterator(context.TODO(), time.Unix(int64(from), 0), time.Unix(int64(from+len), 0), logproto.FORWARD, labels.Labels{}, logql.NoopPipeline) require.NoError(t, err) testIteratorForward(t, iter, int64(from), int64(from+len)) _ = iter.Close() @@ -73,7 +73,7 @@ func TestIterator(t *testing.T) { for i := 0; i < entries; i++ { from := rand.Intn(entries - 1) len := rand.Intn(entries-from) + 1 - iter, err := chunk.Iterator(context.TODO(), time.Unix(int64(from), 0), time.Unix(int64(from+len), 0), logproto.BACKWARD, labels.Labels{}, nil, logql.NoopLabelParser) + iter, err := chunk.Iterator(context.TODO(), time.Unix(int64(from), 0), time.Unix(int64(from+len), 0), logproto.BACKWARD, labels.Labels{}, logql.NoopPipeline) require.NoError(t, err) testIteratorBackward(t, iter, int64(from), int64(from+len)) _ = iter.Close() diff --git a/pkg/ingester/flush_test.go b/pkg/ingester/flush_test.go index 6563ec60cabc5..986b6ef1dbc13 100644 --- a/pkg/ingester/flush_test.go +++ b/pkg/ingester/flush_test.go @@ -320,7 +320,7 @@ func (s *testStore) getChunksForUser(userID string) []chunk.Chunk { } func buildStreamsFromChunk(t *testing.T, lbs string, chk chunkenc.Chunk) logproto.Stream { - it, err := chk.Iterator(context.TODO(), time.Unix(0, 0), time.Unix(1000, 0), logproto.FORWARD, labels.Labels{}, nil, logql.NoopLabelParser) + it, err := chk.Iterator(context.TODO(), time.Unix(0, 0), time.Unix(1000, 0), logproto.FORWARD, labels.Labels{}, logql.NoopPipeline) require.NoError(t, err) stream := logproto.Stream{ diff --git a/pkg/ingester/instance.go 
b/pkg/ingester/instance.go index 9022b05d61471..a873e8dc79ca5 100644 --- a/pkg/ingester/instance.go +++ b/pkg/ingester/instance.go @@ -200,11 +200,7 @@ func (i *instance) Query(ctx context.Context, req logql.SelectLogParams) ([]iter if err != nil { return nil, err } - filter, err := expr.Filter() - if err != nil { - return nil, err - } - parser, err := expr.Parser() + pipeline, err := expr.Pipeline() if err != nil { return nil, err } @@ -215,7 +211,7 @@ func (i *instance) Query(ctx context.Context, req logql.SelectLogParams) ([]iter expr.Matchers(), func(stream *stream) error { ingStats.TotalChunksMatched += int64(len(stream.chunks)) - iter, err := stream.Iterator(ctx, req.Start, req.End, req.Direction, filter, parser) + iter, err := stream.Iterator(ctx, req.Start, req.End, req.Direction, pipeline) if err != nil { return err } @@ -235,7 +231,7 @@ func (i *instance) QuerySample(ctx context.Context, req logql.SelectSampleParams if err != nil { return nil, err } - filter, err := expr.Selector().Filter() + pipeline, err := expr.Selector().Pipeline() if err != nil { return nil, err } @@ -244,18 +240,13 @@ func (i *instance) QuerySample(ctx context.Context, req logql.SelectSampleParams return nil, err } - parser, err := expr.Selector().Parser() - if err != nil { - return nil, err - } - ingStats := stats.GetIngesterData(ctx) var iters []iter.SampleIterator err = i.forMatchingStreams( expr.Selector().Matchers(), func(stream *stream) error { ingStats.TotalChunksMatched += int64(len(stream.chunks)) - iter, err := stream.SampleIterator(ctx, req.Start, req.End, filter, extractor, parser) + iter, err := stream.SampleIterator(ctx, req.Start, req.End, pipeline, extractor) if err != nil { return err } diff --git a/pkg/ingester/stream.go b/pkg/ingester/stream.go index 151ccbbc79080..a1b1cd079d90b 100644 --- a/pkg/ingester/stream.go +++ b/pkg/ingester/stream.go @@ -184,7 +184,9 @@ func (s *stream) Push(ctx context.Context, entries []logproto.Entry, synchronize closedTailers = append(closedTailers, tailer.getID()) continue } - tailer.send(stream) + if err := tailer.send(stream); err != nil { + level.Error(util.WithContext(ctx, util.Logger)).Log("msg", "failed to send stream to tailer", "err", err) + } } s.tailerMtx.RUnlock() @@ -256,10 +258,10 @@ func (s *stream) cutChunkForSynchronization(entryTimestamp, prevEntryTimestamp t } // Returns an iterator. -func (s *stream) Iterator(ctx context.Context, from, through time.Time, direction logproto.Direction, filter logql.LineFilter, parser logql.LabelParser) (iter.EntryIterator, error) { +func (s *stream) Iterator(ctx context.Context, from, through time.Time, direction logproto.Direction, pipeline logql.Pipeline) (iter.EntryIterator, error) { iterators := make([]iter.EntryIterator, 0, len(s.chunks)) for _, c := range s.chunks { - itr, err := c.chunk.Iterator(ctx, from, through, direction, s.labels, filter, parser) + itr, err := c.chunk.Iterator(ctx, from, through, direction, s.labels, pipeline) if err != nil { return nil, err } @@ -278,10 +280,10 @@ func (s *stream) Iterator(ctx context.Context, from, through time.Time, directio } // Returns an SampleIterator. 
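// A SampleExtractor is the metric-query counterpart of the pipeline: it turns
// an already-processed line into a sample value, e.g. unwrapping a label into
// a float. Its contract, as used by this series (sketch):
//
//	type SampleExtractor interface {
//		Extract(line []byte, lbs labels.Labels) (float64, labels.Labels)
//	}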
-func (s *stream) SampleIterator(ctx context.Context, from, through time.Time, filter logql.LineFilter, extractor logql.SampleExtractor, parser logql.LabelParser) (iter.SampleIterator, error) { +func (s *stream) SampleIterator(ctx context.Context, from, through time.Time, pipeline logql.Pipeline, extractor logql.SampleExtractor) (iter.SampleIterator, error) { iterators := make([]iter.SampleIterator, 0, len(s.chunks)) for _, c := range s.chunks { - if itr := c.chunk.SampleIterator(ctx, from, through, s.labels, filter, extractor, parser); itr != nil { + if itr := c.chunk.SampleIterator(ctx, from, through, s.labels, pipeline, extractor); itr != nil { iterators = append(iterators, itr) } } diff --git a/pkg/ingester/stream_test.go b/pkg/ingester/stream_test.go index bb3e83a4b65e6..ffaa422725e08 100644 --- a/pkg/ingester/stream_test.go +++ b/pkg/ingester/stream_test.go @@ -120,7 +120,7 @@ func TestStreamIterator(t *testing.T) { for i := 0; i < 100; i++ { from := rand.Intn(chunks*entries - 1) len := rand.Intn(chunks*entries-from) + 1 - iter, err := s.Iterator(context.TODO(), time.Unix(int64(from), 0), time.Unix(int64(from+len), 0), logproto.FORWARD, nil, logql.NoopLabelParser) + iter, err := s.Iterator(context.TODO(), time.Unix(int64(from), 0), time.Unix(int64(from+len), 0), logproto.FORWARD, logql.NoopPipeline) require.NotNil(t, iter) require.NoError(t, err) testIteratorForward(t, iter, int64(from), int64(from+len)) @@ -130,7 +130,7 @@ func TestStreamIterator(t *testing.T) { for i := 0; i < 100; i++ { from := rand.Intn(entries - 1) len := rand.Intn(chunks*entries-from) + 1 - iter, err := s.Iterator(context.TODO(), time.Unix(int64(from), 0), time.Unix(int64(from+len), 0), logproto.BACKWARD, nil, logql.NoopLabelParser) + iter, err := s.Iterator(context.TODO(), time.Unix(int64(from), 0), time.Unix(int64(from+len), 0), logproto.BACKWARD, logql.NoopPipeline) require.NotNil(t, iter) require.NoError(t, err) testIteratorBackward(t, iter, int64(from), int64(from+len)) diff --git a/pkg/ingester/tailer.go b/pkg/ingester/tailer.go index 647ce0822aab7..8c03c8fcc644e 100644 --- a/pkg/ingester/tailer.go +++ b/pkg/ingester/tailer.go @@ -22,7 +22,7 @@ type tailer struct { id uint32 orgID string matchers []*labels.Matcher - filter logql.LineFilter + pipeline logql.Pipeline expr logql.Expr sendChan chan *logproto.Stream @@ -44,7 +44,7 @@ func newTailer(orgID, query string, conn logproto.Querier_TailServer) (*tailer, if err != nil { return nil, err } - filter, err := expr.Filter() + pipeline, err := expr.Pipeline() if err != nil { return nil, err } @@ -53,7 +53,7 @@ func newTailer(orgID, query string, conn logproto.Querier_TailServer) (*tailer, return &tailer{ orgID: orgID, matchers: matchers, - filter: filter, + pipeline: pipeline, sendChan: make(chan *logproto.Stream, bufferSizeForTailResponse), conn: conn, droppedStreams: []*logproto.DroppedStream{}, @@ -103,47 +103,71 @@ func (t *tailer) loop() { } } -func (t *tailer) send(stream logproto.Stream) { +func (t *tailer) send(stream logproto.Stream) error { if t.isClosed() { - return + return nil } // if we are already dropping streams due to blocked connection, drop new streams directly to save some effort if blockedSince := t.blockedSince(); blockedSince != nil { if blockedSince.Before(time.Now().Add(-time.Second * 15)) { t.close() - return + return nil } t.dropStream(stream) - return + return nil } - t.filterEntriesInStream(&stream) - - if len(stream.Entries) == 0 { - return + streams, err := t.processStream(stream) + if err != nil { + return err } - - select { - 
case t.sendChan <- &stream: - default: - t.dropStream(stream) + if len(streams) == 0 { + return nil + } + for _, s := range streams { + select { + case t.sendChan <- &logproto.Stream{Labels: s.Labels, Entries: s.Entries}: + default: + t.dropStream(s) + } } + return nil } -func (t *tailer) filterEntriesInStream(stream *logproto.Stream) { +func (t *tailer) processStream(stream logproto.Stream) ([]logproto.Stream, error) { // Optimization: skip filtering entirely, if no filter is set - if t.filter == nil { - return + if t.pipeline == logql.NoopPipeline { + return []logproto.Stream{stream}, nil + } + streams := map[uint64]*logproto.Stream{} + lbs, err := util.ParseLabels(stream.Labels) + if err != nil { + return nil, err } - - var filteredEntries []logproto.Entry for _, e := range stream.Entries { - if t.filter.Filter([]byte(e.Line)) { - filteredEntries = append(filteredEntries, e) + newLine, parsedLbs, ok := t.pipeline.Process([]byte(e.Line), lbs) + if !ok { + continue } + var stream *logproto.Stream + lhash := parsedLbs.Hash() + if stream, ok = streams[lhash]; !ok { + stream = &logproto.Stream{ + Labels: parsedLbs.String(), + } + streams[lhash] = stream + } + stream.Entries = append(stream.Entries, logproto.Entry{ + Timestamp: e.Timestamp, + Line: string(newLine), + }) + } + streamsResult := make([]logproto.Stream, 0, len(streams)) + for _, stream := range streams { + streamsResult = append(streamsResult, *stream) } - stream.Entries = filteredEntries + return streamsResult, nil } // Returns true if tailer is interested in the passed labelset diff --git a/pkg/ingester/transfer_test.go b/pkg/ingester/transfer_test.go index a00350fde7f7a..694bb38876bfb 100644 --- a/pkg/ingester/transfer_test.go +++ b/pkg/ingester/transfer_test.go @@ -95,8 +95,7 @@ func TestTransferOut(t *testing.T) { time.Unix(0, 0), time.Unix(10, 0), logproto.FORWARD, - logql.LineFilterFunc(func([]byte) bool { return true }), - logql.NoopLabelParser, + logql.NoopPipeline, ) if !assert.NoError(t, err) { continue diff --git a/pkg/logql/ast.go b/pkg/logql/ast.go index bde029f329801..a1e56bd573e2c 100644 --- a/pkg/logql/ast.go +++ b/pkg/logql/ast.go @@ -76,6 +76,7 @@ type Querier interface { type LogSelectorExpr interface { Matchers() []*labels.Matcher PipelineExpr + HasFilter() bool Expr } @@ -194,6 +195,10 @@ func (e *matchersExpr) Pipeline() (Pipeline, error) { return NoopPipeline, nil } +func (e *matchersExpr) HasFilter() bool { + return false +} + type pipelineExpr struct { pipeline MultiPipelineExpr left *matchersExpr @@ -223,6 +228,18 @@ func (e *pipelineExpr) Pipeline() (Pipeline, error) { return e.pipeline.Pipeline() } +func (e *pipelineExpr) HasFilter() bool { + for _, p := range e.pipeline { + switch p.(type) { + case *lineFilterExpr, *labelFilterExpr: + return true + default: + continue + } + } + return false +} + type lineFilterExpr struct { left *lineFilterExpr ty labels.MatchType @@ -238,6 +255,21 @@ func newLineFilterExpr(left *lineFilterExpr, ty labels.MatchType, match string) } } +// AddFilterExpr adds a filter expression to a logselector expression. 
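// This is what keeps the deprecated HTTP `regexp` parameter working on top of
// the new AST: the legacy filter is appended to the selector as a |~ stage.
// For example (hypothetical query, sketch):
//
//	expr, _ := ParseLogSelector(`{app="nginx"}`)
//	expr, _ = AddFilterExpr(expr, labels.MatchRegexp, "err.*")
//	// expr.String() => `{app="nginx"} |~ "err.*"`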
+func AddFilterExpr(expr LogSelectorExpr, ty labels.MatchType, match string) (LogSelectorExpr, error) { + filter := newLineFilterExpr(nil, ty, match) + switch e := expr.(type) { + case *matchersExpr: + return newPipelineExpr(e, MultiPipelineExpr{filter}), nil + case *pipelineExpr: + e.pipeline = append(e.pipeline, filter) + return e, nil + default: + return nil, fmt.Errorf("unknown LogSelector: %+v", expr) + } + +} + func (e *lineFilterExpr) String() string { var sb strings.Builder if e.left != nil { @@ -809,6 +841,7 @@ func (e *literalExpr) String() string { // to facilitate sum types. We'll be type switching when evaluating them anyways // and they will only be present in binary operation legs. func (e *literalExpr) Selector() LogSelectorExpr { return e } +func (e *literalExpr) HasFilter() bool { return false } func (e *literalExpr) Operations() []string { return nil } func (e *literalExpr) Pipeline() (Pipeline, error) { return NoopPipeline, nil } func (e *literalExpr) Matchers() []*labels.Matcher { return nil } diff --git a/pkg/querier/http.go b/pkg/querier/http.go index 6290429a1992a..d9da166c26086 100644 --- a/pkg/querier/http.go +++ b/pkg/querier/http.go @@ -336,7 +336,11 @@ func parseRegexQuery(httpRequest *http.Request) (string, error) { if err != nil { return "", err } - query = logql.NewFilterExpr(expr, labels.MatchRegexp, regexp).String() + newExpr, err := logql.AddFilterExpr(expr, labels.MatchRegexp, regexp) + if err != nil { + return "", err + } + query = newExpr.String() } return query, nil } diff --git a/pkg/querier/queryrange/roundtrip.go b/pkg/querier/queryrange/roundtrip.go index f7b0b9ecf91b6..97054dafce120 100644 --- a/pkg/querier/queryrange/roundtrip.go +++ b/pkg/querier/queryrange/roundtrip.go @@ -122,14 +122,14 @@ func (r roundTripper) RoundTrip(req *http.Request) (*http.Response, error) { case logql.SampleExpr: return r.metric.RoundTrip(req) case logql.LogSelectorExpr: - filter, err := transformRegexQuery(req, e).Filter() + expr, err := transformRegexQuery(req, e) if err != nil { return nil, httpgrpc.Errorf(http.StatusBadRequest, err.Error()) } if err := validateLimits(req, rangeQuery.Limit, r.limits); err != nil { return nil, err } - if filter == nil { + if !expr.HasFilter() { return r.next.RoundTrip(req) } return r.log.RoundTrip(req) @@ -155,18 +155,21 @@ func (r roundTripper) RoundTrip(req *http.Request) (*http.Response, error) { } // transformRegexQuery backports the old regexp params into the v1 query format -func transformRegexQuery(req *http.Request, expr logql.LogSelectorExpr) logql.LogSelectorExpr { +func transformRegexQuery(req *http.Request, expr logql.LogSelectorExpr) (logql.LogSelectorExpr, error) { regexp := req.Form.Get("regexp") if regexp != "" { - expr = logql.NewFilterExpr(expr, labels.MatchRegexp, regexp) + filterExpr, err := logql.AddFilterExpr(expr, labels.MatchRegexp, regexp) + if err != nil { + return nil, err + } params := req.URL.Query() - params.Set("query", expr.String()) + params.Set("query", filterExpr.String()) req.URL.RawQuery = params.Encode() // force the form and query to be parsed again.
req.Form = nil req.PostForm = nil } - return expr + return expr, nil } // validates log entries limits diff --git a/pkg/storage/batch.go b/pkg/storage/batch.go index 5b3a242ef1560..4a7920022e424 100644 --- a/pkg/storage/batch.go +++ b/pkg/storage/batch.go @@ -350,8 +350,7 @@ type logBatchIterator struct { ctx context.Context metrics *ChunkMetrics matchers []*labels.Matcher - filter logql.LineFilter - parser logql.LabelParser + pipeline logql.Pipeline } func newLogBatchIterator( @@ -360,8 +359,7 @@ func newLogBatchIterator( chunks []*LazyChunk, batchSize int, matchers []*labels.Matcher, - filter logql.LineFilter, - parser logql.LabelParser, + pipeline logql.Pipeline, direction logproto.Direction, start, end time.Time, ) (iter.EntryIterator, error) { @@ -371,8 +369,7 @@ func newLogBatchIterator( matchers = removeMatchersByName(matchers, labels.MetricName, astmapper.ShardLabel) logbatch := &logBatchIterator{ matchers: matchers, - filter: filter, - parser: parser, + pipeline: pipeline, metrics: metrics, ctx: ctx, } @@ -428,7 +425,7 @@ func (it *logBatchIterator) buildHeapIterator(chks [][]*LazyChunk, from, through if !chks[i][j].IsValid { continue } - iterator, err := chks[i][j].Iterator(it.ctx, from, through, it.direction, it.filter, it.parser, nextChunk) + iterator, err := chks[i][j].Iterator(it.ctx, from, through, it.direction, it.pipeline, nextChunk) if err != nil { return nil, err } @@ -451,8 +448,7 @@ type sampleBatchIterator struct { ctx context.Context metrics *ChunkMetrics matchers []*labels.Matcher - filter logql.LineFilter - parser logql.LabelParser + pipeline logql.Pipeline extractor logql.SampleExtractor } @@ -462,8 +458,7 @@ func newSampleBatchIterator( chunks []*LazyChunk, batchSize int, matchers []*labels.Matcher, - filter logql.LineFilter, - parser logql.LabelParser, + pipeline logql.Pipeline, extractor logql.SampleExtractor, start, end time.Time, ) (iter.SampleIterator, error) { @@ -474,8 +469,7 @@ func newSampleBatchIterator( samplebatch := &sampleBatchIterator{ matchers: matchers, - filter: filter, - parser: parser, + pipeline: pipeline, extractor: extractor, metrics: metrics, ctx: ctx, @@ -529,7 +523,7 @@ func (it *sampleBatchIterator) buildHeapIterator(chks [][]*LazyChunk, from, thro if !chks[i][j].IsValid { continue } - iterator, err := chks[i][j].SampleIterator(it.ctx, from, through, it.filter, it.extractor, it.parser, nextChunk) + iterator, err := chks[i][j].SampleIterator(it.ctx, from, through, it.pipeline, it.extractor, nextChunk) if err != nil { return nil, err } diff --git a/pkg/storage/batch_test.go b/pkg/storage/batch_test.go index 095c60a95a803..485b981fd89e1 100644 --- a/pkg/storage/batch_test.go +++ b/pkg/storage/batch_test.go @@ -956,7 +956,7 @@ func Test_newLogBatchChunkIterator(t *testing.T) { for name, tt := range tests { tt := tt t.Run(name, func(t *testing.T) { - it, err := newLogBatchIterator(context.Background(), NilMetrics, tt.chunks, tt.batchSize, newMatchers(tt.matchers), nil, logql.NoopLabelParser, tt.direction, tt.start, tt.end) + it, err := newLogBatchIterator(context.Background(), NilMetrics, tt.chunks, tt.batchSize, newMatchers(tt.matchers), logql.NoopPipeline, tt.direction, tt.start, tt.end) require.NoError(t, err) streams, _, err := iter.ReadBatch(it, 1000) _ = it.Close() @@ -1241,7 +1241,7 @@ func Test_newSampleBatchChunkIterator(t *testing.T) { for name, tt := range tests { tt := tt t.Run(name, func(t *testing.T) { - it, err := newSampleBatchIterator(context.Background(), NilMetrics, tt.chunks, tt.batchSize, newMatchers(tt.matchers), nil, 
logql.NoopLabelParser, logql.ExtractCount, tt.start, tt.end) + it, err := newSampleBatchIterator(context.Background(), NilMetrics, tt.chunks, tt.batchSize, newMatchers(tt.matchers), logql.NoopPipeline, logql.ExtractCount, tt.start, tt.end) require.NoError(t, err) series, _, err := iter.ReadSampleBatch(it, 1000) _ = it.Close() @@ -1448,8 +1448,8 @@ func TestBuildHeapIterator(t *testing.T) { batchChunkIterator: &batchChunkIterator{ direction: logproto.FORWARD, }, - ctx: ctx, - parser: logql.NoopLabelParser, + ctx: ctx, + pipeline: logql.NoopPipeline, } it, err := b.buildHeapIterator(tc.input, from, from.Add(6*time.Millisecond), nil) if err != nil { diff --git a/pkg/storage/lazy_chunk.go b/pkg/storage/lazy_chunk.go index 2a96f6063b241..6683f46f1f81b 100644 --- a/pkg/storage/lazy_chunk.go +++ b/pkg/storage/lazy_chunk.go @@ -33,8 +33,7 @@ func (c *LazyChunk) Iterator( ctx context.Context, from, through time.Time, direction logproto.Direction, - filter logql.LineFilter, - parser logql.LabelParser, + pipeline logql.Pipeline, nextChunk *LazyChunk, ) (iter.EntryIterator, error) { @@ -61,7 +60,7 @@ func (c *LazyChunk) Iterator( // if the block is overlapping cache it with the next chunk boundaries. if nextChunk != nil && IsBlockOverlapping(b, nextChunk, direction) { // todo(cyriltovena) we can avoid to drop the metric name for each chunks since many chunks have the same metric/labelset. - it := newCachedIterator(b.Iterator(ctx, dropLabels(c.Chunk.Metric, labels.MetricName), filter, parser), b.Entries()) + it := newCachedIterator(b.Iterator(ctx, dropLabels(c.Chunk.Metric, labels.MetricName), pipeline), b.Entries()) its = append(its, it) if c.overlappingBlocks == nil { c.overlappingBlocks = make(map[int]*cachedIterator) @@ -73,7 +72,7 @@ func (c *LazyChunk) Iterator( delete(c.overlappingBlocks, b.Offset()) } // non-overlapping block with the next chunk are not cached. - its = append(its, b.Iterator(ctx, dropLabels(c.Chunk.Metric, labels.MetricName), filter, parser)) + its = append(its, b.Iterator(ctx, dropLabels(c.Chunk.Metric, labels.MetricName), pipeline)) } // build the final iterator bound to the requested time range. @@ -96,9 +95,8 @@ func (c *LazyChunk) Iterator( func (c *LazyChunk) SampleIterator( ctx context.Context, from, through time.Time, - filter logql.LineFilter, + pipeline logql.Pipeline, extractor logql.SampleExtractor, - parser logql.LabelParser, nextChunk *LazyChunk, ) (iter.SampleIterator, error) { @@ -125,7 +123,7 @@ func (c *LazyChunk) SampleIterator( // if the block is overlapping cache it with the next chunk boundaries. if nextChunk != nil && IsBlockOverlapping(b, nextChunk, logproto.FORWARD) { // todo(cyriltovena) we can avoid to drop the metric name for each chunks since many chunks have the same metric/labelset. - it := newCachedSampleIterator(b.SampleIterator(ctx, dropLabels(c.Chunk.Metric, labels.MetricName), filter, extractor, parser), b.Entries()) + it := newCachedSampleIterator(b.SampleIterator(ctx, dropLabels(c.Chunk.Metric, labels.MetricName), pipeline, extractor), b.Entries()) its = append(its, it) if c.overlappingSampleBlocks == nil { c.overlappingSampleBlocks = make(map[int]*cachedSampleIterator) @@ -137,7 +135,7 @@ func (c *LazyChunk) SampleIterator( delete(c.overlappingSampleBlocks, b.Offset()) } // non-overlapping block with the next chunk are not cached. 
- its = append(its, b.SampleIterator(ctx, dropLabels(c.Chunk.Metric, labels.MetricName), filter, extractor, parser)) + its = append(its, b.SampleIterator(ctx, dropLabels(c.Chunk.Metric, labels.MetricName), pipeline, extractor)) } // build the final iterator bound to the requested time range. diff --git a/pkg/storage/lazy_chunk_test.go b/pkg/storage/lazy_chunk_test.go index 0ee570e49b85b..1ec863bbc3a42 100644 --- a/pkg/storage/lazy_chunk_test.go +++ b/pkg/storage/lazy_chunk_test.go @@ -46,7 +46,7 @@ func TestLazyChunkIterator(t *testing.T) { }, } { t.Run(fmt.Sprintf("%d", i), func(t *testing.T) { - it, err := tc.chunk.Iterator(context.Background(), time.Unix(0, 0), time.Unix(1000, 0), logproto.FORWARD, logql.TrueFilter, logql.NoopLabelParser, nil) + it, err := tc.chunk.Iterator(context.Background(), time.Unix(0, 0), time.Unix(1000, 0), logproto.FORWARD, logql.NoopPipeline, nil) require.Nil(t, err) streams, _, err := iter.ReadBatch(it, 1000) require.Nil(t, err) @@ -174,10 +174,10 @@ func (fakeBlock) Entries() int { return 0 } func (fakeBlock) Offset() int { return 0 } func (f fakeBlock) MinTime() int64 { return f.mint } func (f fakeBlock) MaxTime() int64 { return f.maxt } -func (fakeBlock) Iterator(context.Context, labels.Labels, logql.LineFilter, logql.LabelParser) iter.EntryIterator { +func (fakeBlock) Iterator(context.Context, labels.Labels, logql.Pipeline) iter.EntryIterator { return nil } -func (fakeBlock) SampleIterator(context.Context, labels.Labels, logql.LineFilter, logql.SampleExtractor, logql.LabelParser) iter.SampleIterator { +func (fakeBlock) SampleIterator(context.Context, labels.Labels, logql.Pipeline, logql.SampleExtractor) iter.SampleIterator { return nil } diff --git a/pkg/storage/store.go b/pkg/storage/store.go index 91760147650e9..72a07701ce5f2 100644 --- a/pkg/storage/store.go +++ b/pkg/storage/store.go @@ -108,28 +108,23 @@ func NewTableClient(name string, cfg Config) (chunk.TableClient, error) { // decodeReq sanitizes an incoming request, rounds bounds, appends the __name__ matcher, // and adds the "__cortex_shard__" label if this is a sharded query. // todo(cyriltovena) refactor this. 
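 // With this change decodeReq no longer extracts a line filter or label parser;
 // it returns only (matchers, from, through, error), and callers build the full
 // Pipeline from the query's LogSelector, as SelectLogs and SelectSamples below do.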
-func decodeReq(req logql.QueryParams) ([]*labels.Matcher, logql.LineFilter, logql.LabelParser, model.Time, model.Time, error) { +func decodeReq(req logql.QueryParams) ([]*labels.Matcher, model.Time, model.Time, error) { expr, err := req.LogSelector() if err != nil { - return nil, nil, nil, 0, 0, err - } - - filter, err := expr.Filter() - if err != nil { - return nil, nil, nil, 0, 0, err + return nil, 0, 0, err } matchers := expr.Matchers() nameLabelMatcher, err := labels.NewMatcher(labels.MatchEqual, labels.MetricName, "logs") if err != nil { - return nil, nil, nil, 0, 0, err + return nil, 0, 0, err } matchers = append(matchers, nameLabelMatcher) if shards := req.GetShards(); shards != nil { parsed, err := logql.ParseShards(shards) if err != nil { - return nil, nil, nil, 0, 0, err + return nil, 0, 0, err } for _, s := range parsed { shardMatcher, err := labels.NewMatcher( @@ -138,7 +133,7 @@ func decodeReq(req logql.QueryParams) ([]*labels.Matcher, logql.LineFilter, logq s.String(), ) if err != nil { - return nil, nil, nil, 0, 0, err + return nil, 0, 0, err } matchers = append(matchers, shardMatcher) @@ -148,13 +143,9 @@ func decodeReq(req logql.QueryParams) ([]*labels.Matcher, logql.LineFilter, logq break // nolint:staticcheck } } - p, err := expr.Parser() - if err != nil { - return nil, nil, nil, 0, 0, err - } from, through := util.RoundToMilliseconds(req.GetStart(), req.GetEnd()) - return matchers, filter, p, from, through, nil + return matchers, from, through, nil } // lazyChunks is an internal function used to resolve a set of lazy chunks from the store without actually loading them. It's used internally by `LazyQuery` and `GetSeries` @@ -208,7 +199,7 @@ func (s *store) GetSeries(ctx context.Context, req logql.SelectLogParams) ([]log matchers = []*labels.Matcher{nameLabelMatcher} } else { var err error - matchers, _, _, from, through, err = decodeReq(req) + matchers, from, through, err = decodeReq(req) if err != nil { return nil, err } @@ -276,7 +267,7 @@ func (s *store) GetSeries(ctx context.Context, req logql.SelectLogParams) ([]log // SelectLogs returns an iterator that will query the store for more chunks while iterating instead of fetching all chunks upfront // for that request. 
func (s *store) SelectLogs(ctx context.Context, req logql.SelectLogParams) (iter.EntryIterator, error) { - matchers, filter, parser, from, through, err := decodeReq(req) + matchers, from, through, err := decodeReq(req) if err != nil { return nil, err } @@ -286,16 +277,26 @@ func (s *store) SelectLogs(ctx context.Context, req logql.SelectLogParams) (iter return nil, err } + expr, err := req.LogSelector() + if err != nil { + return nil, err + } + + pipeline, err := expr.Pipeline() + if err != nil { + return nil, err + } + if len(lazyChunks) == 0 { return iter.NoopIterator, nil } - return newLogBatchIterator(ctx, s.chunkMetrics, lazyChunks, s.cfg.MaxChunkBatchSize, matchers, filter, parser, req.Direction, req.Start, req.End) + return newLogBatchIterator(ctx, s.chunkMetrics, lazyChunks, s.cfg.MaxChunkBatchSize, matchers, pipeline, req.Direction, req.Start, req.End) } func (s *store) SelectSamples(ctx context.Context, req logql.SelectSampleParams) (iter.SampleIterator, error) { - matchers, filter, parser, from, through, err := decodeReq(req) + matchers, from, through, err := decodeReq(req) if err != nil { return nil, err } @@ -305,6 +306,11 @@ func (s *store) SelectSamples(ctx context.Context, req logql.SelectSampleParams) return nil, err } + pipeline, err := expr.Selector().Pipeline() + if err != nil { + return nil, err + } + extractor, err := expr.Extractor() if err != nil { return nil, err @@ -318,7 +324,7 @@ func (s *store) SelectSamples(ctx context.Context, req logql.SelectSampleParams) if len(lazyChunks) == 0 { return iter.NoopIterator, nil } - return newSampleBatchIterator(ctx, s.chunkMetrics, lazyChunks, s.cfg.MaxChunkBatchSize, matchers, filter, parser, extractor, req.Start, req.End) + return newSampleBatchIterator(ctx, s.chunkMetrics, lazyChunks, s.cfg.MaxChunkBatchSize, matchers, pipeline, extractor, req.Start, req.End) } func (s *store) GetSchemaConfigs() []chunk.PeriodConfig { diff --git a/pkg/storage/store_test.go b/pkg/storage/store_test.go index 615490279ad84..953af2d3ff348 100644 --- a/pkg/storage/store_test.go +++ b/pkg/storage/store_test.go @@ -728,7 +728,7 @@ func Test_store_decodeReq_Matchers(t *testing.T) { } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - ms, _, _, _, _, err := decodeReq(logql.SelectLogParams{QueryRequest: tt.req}) + ms, _, _, err := decodeReq(logql.SelectLogParams{QueryRequest: tt.req}) if err != nil { t.Errorf("store.GetSeries() error = %v", err) return diff --git a/pkg/util/conv.go b/pkg/util/conv.go index 952989659a7ad..0fec5e392ad3f 100644 --- a/pkg/util/conv.go +++ b/pkg/util/conv.go @@ -37,6 +37,24 @@ func ToClientLabels(labels string) ([]client.LabelAdapter, error) { return result, nil } +// ParseLabels parses labels from a string using logql parser. 
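+// For example, ParseLabels(`{app="foo", env="prod"}`) yields the label set
+// {app="foo", env="prod"}, sorted by label name; only the Name/Value pairs of
+// the parsed matchers are kept, the matcher types are not distinguished.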
+func ParseLabels(lbs string) (labels.Labels, error) {
+	matchers, err := logql.ParseMatchers(lbs)
+	if err != nil {
+		return nil, err
+	}
+
+	result := make(labels.Labels, 0, len(matchers))
+	for _, m := range matchers {
+		result = append(result, labels.Label{
+			Name:  m.Name,
+			Value: m.Value,
+		})
+	}
+	sort.Sort(result)
+	return result, nil
+}
+
 // ModelLabelSetToMap convert a model.LabelSet to a map[string]string
 func ModelLabelSetToMap(m model.LabelSet) map[string]string {
 	if len(m) == 0 {

From 31c26c035930a959bb096e9234f508dcfbd135b0 Mon Sep 17 00:00:00 2001
From: Cyril Tovena
Date: Thu, 1 Oct 2020 22:28:30 +0200
Subject: [PATCH 20/45] Correctly set up sharding for logqlv2

Signed-off-by: Cyril Tovena
---
 pkg/logql/shardmapper.go      | 27 ++++++++++++++++++++++++++-
 pkg/logql/shardmapper_test.go | 16 ++++++++++++++++
 2 files changed, 42 insertions(+), 1 deletion(-)

diff --git a/pkg/logql/shardmapper.go b/pkg/logql/shardmapper.go
index 19c74407f8ca1..eef0b9e4aa2e7 100644
--- a/pkg/logql/shardmapper.go
+++ b/pkg/logql/shardmapper.go
@@ -129,7 +129,6 @@ func (m ShardMapper) Map(expr Expr, r *shardRecorder) (Expr, error) {
 	switch e := expr.(type) {
 	case *literalExpr:
 		return e, nil
-	//todo(cyriltovena) enable sharding on logqlv2
 	case *matchersExpr, *pipelineExpr:
 		return m.mapLogSelectorExpr(e.(LogSelectorExpr), r), nil
 	case *vectorAggregationExpr:
@@ -279,6 +278,13 @@ func (m ShardMapper) mapVectorAggregationExpr(expr *vectorAggregationExpr, r *sh
 }
 
 func (m ShardMapper) mapRangeAggregationExpr(expr *rangeAggregationExpr, r *shardRecorder) SampleExpr {
+	if hasLabelModifier(expr) {
+		// If an expr can modify labels, multiple shards can return the same labelset.
+		// When this happens the merge strategy needs to be different from a simple concatenation.
+		// For instance, for rates we need to sum data from different shards for the same series.
+		// Since we currently support only concatenation as a merge strategy, we skip those queries.
+		return expr
+	}
 	switch expr.operation {
 	case OpRangeTypeCount, OpRangeTypeRate, OpRangeTypeBytesRate, OpRangeTypeBytes:
 		// count_over_time(x) -> count_over_time(x, shard=1) ++ count_over_time(x, shard=2)...
@@ -290,6 +296,22 @@ func (m ShardMapper) mapRangeAggregationExpr(expr *rangeAggregationExpr, r *shar
 	}
 }
 
+// hasLabelModifier tells whether an expression contains pipeline stages that can modify
+// stream labels; parsers, for instance, introduce new labels but do not alter the original ones.
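+// For example, `rate({foo="bar"} | json [5m])` remains shardable (json only adds
+// extracted labels), whereas `rate({foo="bar"} | json | label_format foo=bar [5m])`
+// is returned unmapped, as the shardmapper tests below exercise.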
+func hasLabelModifier(expr *rangeAggregationExpr) bool { + switch ex := expr.left.left.(type) { + case *matchersExpr: + return false + case *pipelineExpr: + for _, p := range ex.pipeline { + if _, ok := p.(*labelFmtExpr); ok { + return true + } + } + } + return false +} + // isShardable returns false if any of the listed operation types are not shardable and true otherwise func isShardable(ops []string) bool { for _, op := range ops { @@ -329,6 +351,9 @@ var shardableOps = map[string]bool{ OpRangeTypeRate: true, OpRangeTypeBytes: true, OpRangeTypeBytesRate: true, + OpRangeTypeSum: true, + OpRangeTypeMax: true, + OpRangeTypeMin: true, // binops - arith OpTypeAdd: true, diff --git a/pkg/logql/shardmapper_test.go b/pkg/logql/shardmapper_test.go index 59c8be1de5295..d4ad835997bf5 100644 --- a/pkg/logql/shardmapper_test.go +++ b/pkg/logql/shardmapper_test.go @@ -151,6 +151,14 @@ func TestMappingStrings(t *testing.T) { in: `sum(max(rate({foo="bar"}[5m])))`, out: `sum(max(downstream ++ downstream))`, }, + { + in: `sum(max(rate({foo="bar"} | json | label_format foo=bar [5m])))`, + out: `sum(max(rate({foo="bar"} | json | label_format foo=bar [5m])))`, + }, + { + in: `rate({foo="bar"} | json | label_format foo=bar [5m])`, + out: `rate({foo="bar"} | json | label_format foo=bar [5m])`, + }, { in: `{foo="bar"} |= "id=123"`, out: `downstream<{foo="bar"}|="id=123", shard=0_of_2> ++ downstream<{foo="bar"}|="id=123", shard=1_of_2>`, @@ -159,6 +167,14 @@ func TestMappingStrings(t *testing.T) { in: `sum by (cluster) (rate({foo="bar"} |= "id=123" [5m]))`, out: `sum by(cluster)(downstream ++ downstream)`, }, + { + in: `sum by (cluster) (sum_over_time({foo="bar"} |= "id=123" | logfmt | unwrap latency [5m]))`, + out: `sum by(cluster)(downstream ++ downstream)`, + }, + { + in: `sum by (cluster) (stddev_over_time({foo="bar"} |= "id=123" | logfmt | unwrap latency [5m]))`, + out: `sum by (cluster) (stddev_over_time({foo="bar"} |= "id=123" | logfmt | unwrap latency [5m]))`, + }, } { t.Run(tc.in, func(t *testing.T) { ast, err := ParseExpr(tc.in) From b5e11d0fce5344675eaaa1d508b4c601c0fa7f42 Mon Sep 17 00:00:00 2001 From: Cyril Tovena Date: Fri, 2 Oct 2020 14:42:24 +0200 Subject: [PATCH 21/45] Fixes precedences issue with label filters and add moar tests :v: --- pkg/logql/ast_test.go | 2 +- pkg/logql/labelfilter/filter.go | 2 + pkg/logql/labelfilter/filter_test.go | 136 +++++++++++++++++++++++++++ pkg/logql/parser_test.go | 94 ++++++++++++++++++ pkg/logql/shardmapper_test.go | 2 +- 5 files changed, 234 insertions(+), 2 deletions(-) create mode 100644 pkg/logql/labelfilter/filter_test.go diff --git a/pkg/logql/ast_test.go b/pkg/logql/ast_test.go index bfcda028d5db6..8282ca479c7f0 100644 --- a/pkg/logql/ast_test.go +++ b/pkg/logql/ast_test.go @@ -26,7 +26,7 @@ func Test_logSelectorExpr_String(t *testing.T) { {`{foo="bar", bar!="baz"} != "bip" !~ ".+bop" | json`, true}, {`{foo="bar"} |= "baz" |~ "blip" != "flip" !~ "flap" | logfmt`, true}, {`{foo="bar"} |= "baz" |~ "blip" != "flip" !~ "flap" | regexp "(?Pfoo|bar)"`, true}, - {`{foo="bar"} |= "baz" |~ "blip" != "flip" !~ "flap" | regexp "(?Pfoo|bar)" | foo<5.01 , bar>20ms or foo="bar" | line_format "blip{{.boop}}bap" | label_format foo=bar,bar="blip{{.blop}}"`, true}, + {`{foo="bar"} |= "baz" |~ "blip" != "flip" !~ "flap" | regexp "(?Pfoo|bar)" | ( ( foo<5.01 , bar>20ms ) or foo="bar" ) | line_format "blip{{.boop}}bap" | label_format foo=bar,bar="blip{{.blop}}"`, true}, } for _, tt := range tests { diff --git a/pkg/logql/labelfilter/filter.go 
b/pkg/logql/labelfilter/filter.go index 3a71355ca58c5..84f120055e083 100644 --- a/pkg/logql/labelfilter/filter.go +++ b/pkg/logql/labelfilter/filter.go @@ -53,6 +53,7 @@ func (b *Binary) Filter(lbs labels.Labels) (bool, error) { func (b *Binary) String() string { var sb strings.Builder + sb.WriteString("( ") sb.WriteString(b.Left.String()) if b.and { sb.WriteString(" , ") @@ -60,5 +61,6 @@ func (b *Binary) String() string { sb.WriteString(" or ") } sb.WriteString(b.Right.String()) + sb.WriteString(" )") return sb.String() } diff --git a/pkg/logql/labelfilter/filter_test.go b/pkg/logql/labelfilter/filter_test.go new file mode 100644 index 0000000000000..2da0bf5cf77d6 --- /dev/null +++ b/pkg/logql/labelfilter/filter_test.go @@ -0,0 +1,136 @@ +package labelfilter + +import ( + "testing" + "time" + + "github.com/prometheus/prometheus/pkg/labels" + "github.com/stretchr/testify/require" +) + +func TestBinary_Filter(t *testing.T) { + + tests := []struct { + f *Binary + lbs labels.Labels + want bool + wantErr bool + }{ + { + NewAnd(NewNumeric(FilterEqual, "foo", 5), NewDuration(FilterEqual, "bar", 1*time.Second)), + labels.Labels{labels.Label{Name: "foo", Value: "5"}, labels.Label{Name: "bar", Value: "1s"}}, + true, + false, + }, + { + NewAnd( + NewNumeric(FilterEqual, "foo", 5), + NewDuration(FilterEqual, "bar", 1*time.Second), + ), + labels.Labels{labels.Label{Name: "foo", Value: "6"}, labels.Label{Name: "bar", Value: "1s"}}, + false, + false, + }, + { + NewAnd( + NewNumeric(FilterEqual, "foo", 5), + NewDuration(FilterEqual, "bar", 1*time.Second), + ), + labels.Labels{labels.Label{Name: "foo", Value: "5"}, labels.Label{Name: "bar", Value: "2s"}}, + false, + false, + }, + { + NewAnd( + NewString(labels.MustNewMatcher(labels.MatchEqual, "foo", "5")), + NewDuration(FilterEqual, "bar", 1*time.Second), + ), + labels.Labels{labels.Label{Name: "foo", Value: "5"}, labels.Label{Name: "bar", Value: "1s"}}, + true, + false, + }, + { + NewAnd( + NewString(labels.MustNewMatcher(labels.MatchEqual, "foo", "5")), + NewDuration(FilterEqual, "bar", 1*time.Second), + ), + labels.Labels{labels.Label{Name: "foo", Value: "6"}, labels.Label{Name: "bar", Value: "1s"}}, + false, + false, + }, + { + NewAnd( + NewOr( + NewDuration(FilterGreaterThan, "duration", 1*time.Second), + NewNumeric(FilterNotEqual, "status", 200), + ), + NewString(labels.MustNewMatcher(labels.MatchNotEqual, "method", "POST")), + ), + labels.Labels{ + {Name: "duration", Value: "2s"}, + {Name: "status", Value: "200"}, + {Name: "method", Value: "GET"}, + }, + true, + false, + }, + { + NewAnd( + NewOr( + NewDuration(FilterGreaterThan, "duration", 1*time.Second), + NewNumeric(FilterNotEqual, "status", 200), + ), + NewString(labels.MustNewMatcher(labels.MatchNotEqual, "method", "POST")), + ), + labels.Labels{ + {Name: "duration", Value: "2s"}, + {Name: "status", Value: "200"}, + {Name: "method", Value: "POST"}, + }, + false, + false, + }, + { + NewAnd( + NewOr( + NewDuration(FilterGreaterThan, "duration", 1*time.Second), + NewNumeric(FilterNotEqual, "status", 200), + ), + NewString(labels.MustNewMatcher(labels.MatchNotEqual, "method", "POST")), + ), + labels.Labels{ + {Name: "duration", Value: "2s"}, + {Name: "status", Value: "500"}, + {Name: "method", Value: "POST"}, + }, + false, + false, + }, + { + NewAnd( + NewOr( + NewDuration(FilterGreaterThan, "duration", 3*time.Second), + NewNumeric(FilterNotEqual, "status", 200), + ), + NewString(labels.MustNewMatcher(labels.MatchNotEqual, "method", "POST")), + ), + labels.Labels{ + {Name: "duration", Value: 
"2s"}, + {Name: "status", Value: "200"}, + {Name: "method", Value: "POST"}, + }, + false, + false, + }, + } + for _, tt := range tests { + t.Run(tt.f.String(), func(t *testing.T) { + got, err := tt.f.Filter(tt.lbs) + if (err != nil) != tt.wantErr { + t.Errorf("Binary.Filter() error = %v, wantErr %v", err, tt.wantErr) + return + } + require.Equal(t, got, tt.want, tt.lbs) + }) + } +} diff --git a/pkg/logql/parser_test.go b/pkg/logql/parser_test.go index f86e8b82794f5..6579282e5859a 100644 --- a/pkg/logql/parser_test.go +++ b/pkg/logql/parser_test.go @@ -944,6 +944,82 @@ func TestParse(t *testing.T) { }, }, }, + { + in: `{app="foo"} |= "bar" | json | (duration > 1s or status!= 200) and method!="POST"`, + exp: &pipelineExpr{ + left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}), + pipeline: MultiPipelineExpr{ + newLineFilterExpr(nil, labels.MatchEqual, "bar"), + newLabelParserExpr(OpParserTypeJSON, ""), + &labelFilterExpr{ + Filterer: labelfilter.NewAnd( + labelfilter.NewOr( + labelfilter.NewDuration(labelfilter.FilterGreaterThan, "duration", 1*time.Second), + labelfilter.NewNumeric(labelfilter.FilterNotEqual, "status", 200.0), + ), + labelfilter.NewString(mustNewMatcher(labels.MatchNotEqual, "method", "POST")), + ), + }, + }, + }, + }, + { + in: `{app="foo"} |= "bar" | json | ( status_code < 500 and status_code > 200) or latency >= 250ms `, + exp: &pipelineExpr{ + left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}), + pipeline: MultiPipelineExpr{ + newLineFilterExpr(nil, labels.MatchEqual, "bar"), + newLabelParserExpr(OpParserTypeJSON, ""), + &labelFilterExpr{ + Filterer: labelfilter.NewOr( + labelfilter.NewAnd( + labelfilter.NewNumeric(labelfilter.FilterLesserThan, "status_code", 500.0), + labelfilter.NewNumeric(labelfilter.FilterGreaterThan, "status_code", 200.0), + ), + labelfilter.NewDuration(labelfilter.FilterGreaterThanOrEqual, "latency", 250*time.Millisecond), + ), + }, + }, + }, + }, + { + in: `{app="foo"} |= "bar" | json | ( status_code < 500 or status_code > 200) and latency >= 250ms `, + exp: &pipelineExpr{ + left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}), + pipeline: MultiPipelineExpr{ + newLineFilterExpr(nil, labels.MatchEqual, "bar"), + newLabelParserExpr(OpParserTypeJSON, ""), + &labelFilterExpr{ + Filterer: labelfilter.NewAnd( + labelfilter.NewOr( + labelfilter.NewNumeric(labelfilter.FilterLesserThan, "status_code", 500.0), + labelfilter.NewNumeric(labelfilter.FilterGreaterThan, "status_code", 200.0), + ), + labelfilter.NewDuration(labelfilter.FilterGreaterThanOrEqual, "latency", 250*time.Millisecond), + ), + }, + }, + }, + }, + { + in: `{app="foo"} |= "bar" | json | status_code < 500 or status_code > 200 and latency >= 250ms `, + exp: &pipelineExpr{ + left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}), + pipeline: MultiPipelineExpr{ + newLineFilterExpr(nil, labels.MatchEqual, "bar"), + newLabelParserExpr(OpParserTypeJSON, ""), + &labelFilterExpr{ + Filterer: labelfilter.NewOr( + labelfilter.NewNumeric(labelfilter.FilterLesserThan, "status_code", 500.0), + labelfilter.NewAnd( + labelfilter.NewNumeric(labelfilter.FilterGreaterThan, "status_code", 200.0), + labelfilter.NewDuration(labelfilter.FilterGreaterThanOrEqual, "latency", 250*time.Millisecond), + ), + ), + }, + }, + }, + }, { in: `{app="foo"} |= "bar" | json | latency >= 250ms or ( status_code < 500 and status_code > 200) | foo="bar" buzz!="blip", 
blop=~"boop" or fuzz==5`, @@ -1460,3 +1536,21 @@ func TestIsParseError(t *testing.T) { }) } } + +func Test_PipelineCombined(t *testing.T) { + query := `{job="cortex-ops/query-frontend"} |= "logging.go" | logfmt | line_format "{{.msg}}" | regexp "(?P\\w+) (?P[\\w|/]+) \\((?P\\d+?)\\) (?P.*)" | (duration > 1s or status!=200) and method!="POST" | line_format "{{.duration}}|{{.method}}|{{.status}}"` + + expr, err := ParseLogSelector(query) + require.Nil(t, err) + + p, err := expr.Pipeline() + require.Nil(t, err) + + _, lbs, ok := p.Process([]byte(`level=debug ts=2020-10-02T10:10:42.092268913Z caller=logging.go:66 traceID=a9d4d8a928d8db1 msg="POST /api/prom/api/v1/query_range (200) 1.5s"`), labels.Labels{}) + require.False(t, ok) + require.Equal( + t, + labels.Labels{labels.Label{Name: "caller", Value: "logging.go:66"}, labels.Label{Name: "duration", Value: "1.5s"}, labels.Label{Name: "level", Value: "debug"}, labels.Label{Name: "method", Value: "POST"}, labels.Label{Name: "msg", Value: "POST /api/prom/api/v1/query_range (200) 1.5s"}, labels.Label{Name: "path", Value: "/api/prom/api/v1/query_range"}, labels.Label{Name: "status", Value: "200"}, labels.Label{Name: "traceID", Value: "a9d4d8a928d8db1"}, labels.Label{Name: "ts", Value: "2020-10-02T10:10:42.092268913Z"}}, + lbs, + ) +} diff --git a/pkg/logql/shardmapper_test.go b/pkg/logql/shardmapper_test.go index d4ad835997bf5..745b63300c6dd 100644 --- a/pkg/logql/shardmapper_test.go +++ b/pkg/logql/shardmapper_test.go @@ -133,7 +133,7 @@ func TestMappingStrings(t *testing.T) { }, { in: `{foo="bar"} |= "foo" |~ "bar" | json | latency >= 10s or foo<5 and bar="t" | line_format "b{{.blip}}"`, - out: `downstream<{foo="bar"} |="foo" |~"bar" | json | latency>=10s or foo<5,bar="t"| line_format "b{{.blip}}",shard=0_of_2>++downstream<{foo="bar"} |="foo" |~"bar" | json | latency>=10s or foo<5, bar="t" | line_format "b{{.blip}}",shard=1_of_2>`, + out: `downstream<{foo="bar"} |="foo" |~"bar" | json | (latency>=10s or (foo<5,bar="t"))| line_format "b{{.blip}}",shard=0_of_2>++downstream<{foo="bar"} |="foo" |~"bar" | json | (latency>=10s or (foo<5, bar="t")) | line_format "b{{.blip}}",shard=1_of_2>`, }, { in: `sum(rate({foo="bar"}[1m]))`, From 0fd6018545d3e5df736555a518b01175d1fba02c Mon Sep 17 00:00:00 2001 From: Cyril Tovena Date: Fri, 2 Oct 2020 20:43:12 +0200 Subject: [PATCH 22/45] Adds quantile_over_time, grouping for non associate range aggregation parsing and moar tests --- pkg/logql/ast.go | 45 ++- pkg/logql/expr.y | 9 +- pkg/logql/expr.y.go | 715 ++++++++++++++++++++------------------- pkg/logql/functions.go | 43 +++ pkg/logql/lex.go | 1 + pkg/logql/parser_test.go | 292 ++++++++++++++-- 6 files changed, 728 insertions(+), 377 deletions(-) diff --git a/pkg/logql/ast.go b/pkg/logql/ast.go index a1e56bd573e2c..baa4d3e8e6ed7 100644 --- a/pkg/logql/ast.go +++ b/pkg/logql/ast.go @@ -594,13 +594,35 @@ type SampleExpr interface { type rangeAggregationExpr struct { left *logRange operation string + + params *float64 + grouping *grouping implicit } -func newRangeAggregationExpr(left *logRange, operation string) SampleExpr { +func newRangeAggregationExpr(left *logRange, operation string, gr *grouping, stringParams *string) SampleExpr { + var params *float64 + if stringParams != nil { + if operation != OpRangeTypeQuantile { + panic(newParseError(fmt.Sprintf("parameter %s not supported for operation %s", *stringParams, operation), 0, 0)) + } + var err error + params = new(float64) + *params, err = strconv.ParseFloat(*stringParams, 64) + if err != nil { + 
panic(newParseError(fmt.Sprintf("invalid parameter for operation %s: %s", operation, err), 0, 0)) + } + + } else { + if operation == OpRangeTypeQuantile { + panic(newParseError(fmt.Sprintf("parameter required for operation %s", operation), 0, 0)) + } + } e := &rangeAggregationExpr{ left: left, operation: operation, + grouping: gr, + params: params, } if err := e.validate(); err != nil { panic(newParseError(err.Error(), 0, 0)) @@ -613,6 +635,13 @@ func (e *rangeAggregationExpr) Selector() LogSelectorExpr { } func (e rangeAggregationExpr) validate() error { + if e.grouping != nil { + switch e.operation { + case OpRangeTypeAvg, OpRangeTypeStddev, OpRangeTypeStdvar, OpRangeTypeQuantile: + default: + return fmt.Errorf("grouping not allowed for %s aggregation", e.operation) + } + } if e.left.unwrap != nil { switch e.operation { case OpRangeTypeAvg, OpRangeTypeSum, OpRangeTypeMax, OpRangeTypeMin, OpRangeTypeStddev, OpRangeTypeStdvar, OpRangeTypeQuantile: @@ -631,7 +660,19 @@ func (e rangeAggregationExpr) validate() error { // impls Stringer func (e *rangeAggregationExpr) String() string { - return formatOperation(e.operation, nil, e.left.String()) + var sb strings.Builder + sb.WriteString(e.operation) + sb.WriteString("(") + if e.params != nil { + sb.WriteString(strconv.FormatFloat(*e.params, 'f', -1, 64)) + sb.WriteString(",") + } + sb.WriteString(e.left.String()) + sb.WriteString(")") + if e.grouping != nil { + sb.WriteString(e.grouping.String()) + } + return sb.String() } // impl SampleExpr diff --git a/pkg/logql/expr.y b/pkg/logql/expr.y index 2e78eb5b31003..ba7418bea33c8 100644 --- a/pkg/logql/expr.y +++ b/pkg/logql/expr.y @@ -80,7 +80,7 @@ import ( %token MATCHERS LABELS EQ RE NRE OPEN_BRACE CLOSE_BRACE OPEN_BRACKET CLOSE_BRACKET COMMA DOT PIPE_MATCH PIPE_EXACT OPEN_PARENTHESIS CLOSE_PARENTHESIS BY WITHOUT COUNT_OVER_TIME RATE SUM AVG MAX MIN COUNT STDDEV STDVAR BOTTOMK TOPK BYTES_OVER_TIME BYTES_RATE BOOL JSON REGEXP LOGFMT PIPE LINE_FMT LABEL_FMT UNWRAP AVG_OVER_TIME SUM_OVER_TIME MIN_OVER_TIME - MAX_OVER_TIME STDVAR_OVER_TIME STDDEV_OVER_TIME + MAX_OVER_TIME STDVAR_OVER_TIME STDDEV_OVER_TIME QUANTILE_OVER_TIME // Operators are listed with increasing precedence. %left OR @@ -134,7 +134,11 @@ unwrapExpr: PIPE UNWRAP IDENTIFIER { $$ = newUnwrapExpr($3)}; rangeAggregationExpr: - rangeOp OPEN_PARENTHESIS logRangeExpr CLOSE_PARENTHESIS { $$ = newRangeAggregationExpr($3,$1) } ; + rangeOp OPEN_PARENTHESIS logRangeExpr CLOSE_PARENTHESIS { $$ = newRangeAggregationExpr($3, $1, nil, nil) } + | rangeOp OPEN_PARENTHESIS NUMBER COMMA logRangeExpr CLOSE_PARENTHESIS { $$ = newRangeAggregationExpr($5, $1, nil, &$3) } + | rangeOp OPEN_PARENTHESIS logRangeExpr CLOSE_PARENTHESIS grouping { $$ = newRangeAggregationExpr($3, $1, $5, nil) } + | rangeOp OPEN_PARENTHESIS NUMBER COMMA logRangeExpr CLOSE_PARENTHESIS grouping { $$ = newRangeAggregationExpr($5, $1, $7, &$3) } + ; vectorAggregationExpr: // Aggregations with 1 argument. 
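With the two new productions above, a range aggregation accepts an optional
leading NUMBER parameter and an optional trailing grouping, so all four of the
following parse (quantile_over_time being the only operation that takes the
parameter, per the validation added in ast.go):

    count_over_time({app="foo"}[5m])
    avg_over_time({app="foo"} | logfmt | unwrap latency [5m]) by (cluster)
    quantile_over_time(0.99, {app="foo"} | logfmt | unwrap latency [5m])
    quantile_over_time(0.99, {app="foo"} | logfmt | unwrap latency [5m]) by (cluster)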
@@ -294,6 +298,7 @@ rangeOp: | MAX_OVER_TIME { $$ = OpRangeTypeMax } | STDVAR_OVER_TIME { $$ = OpRangeTypeStdvar } | STDDEV_OVER_TIME { $$ = OpRangeTypeStddev } + | QUANTILE_OVER_TIME { $$ = OpRangeTypeQuantile } ; diff --git a/pkg/logql/expr.y.go b/pkg/logql/expr.y.go index 05d97cc5e92a9..bd0e33c273d30 100644 --- a/pkg/logql/expr.y.go +++ b/pkg/logql/expr.y.go @@ -99,21 +99,22 @@ const MIN_OVER_TIME = 57391 const MAX_OVER_TIME = 57392 const STDVAR_OVER_TIME = 57393 const STDDEV_OVER_TIME = 57394 -const OR = 57395 -const AND = 57396 -const UNLESS = 57397 -const CMP_EQ = 57398 -const NEQ = 57399 -const LT = 57400 -const LTE = 57401 -const GT = 57402 -const GTE = 57403 -const ADD = 57404 -const SUB = 57405 -const MUL = 57406 -const DIV = 57407 -const MOD = 57408 -const POW = 57409 +const QUANTILE_OVER_TIME = 57395 +const OR = 57396 +const AND = 57397 +const UNLESS = 57398 +const CMP_EQ = 57399 +const NEQ = 57400 +const LT = 57401 +const LTE = 57402 +const GT = 57403 +const GTE = 57404 +const ADD = 57405 +const SUB = 57406 +const MUL = 57407 +const DIV = 57408 +const MOD = 57409 +const POW = 57410 var exprToknames = [...]string{ "$end", @@ -168,6 +169,7 @@ var exprToknames = [...]string{ "MAX_OVER_TIME", "STDVAR_OVER_TIME", "STDDEV_OVER_TIME", + "QUANTILE_OVER_TIME", "OR", "AND", "UNLESS", @@ -190,7 +192,7 @@ const exprEofCode = 1 const exprErrCode = 2 const exprInitialStackSize = 16 -//line pkg/logql/expr.y:309 +//line pkg/logql/expr.y:314 //line yacctab:1 var exprExca = [...]int{ @@ -201,167 +203,170 @@ var exprExca = [...]int{ const exprPrivate = 57344 -const exprLast = 363 +const exprLast = 375 var exprAct = [...]int{ - 166, 69, 149, 52, 4, 141, 51, 174, 5, 117, - 99, 60, 62, 2, 39, 40, 41, 42, 43, 44, - 44, 14, 65, 41, 42, 43, 44, 202, 227, 11, - 75, 58, 55, 224, 151, 115, 116, 6, 56, 57, - 209, 17, 18, 27, 28, 30, 31, 29, 32, 33, - 34, 35, 19, 20, 105, 90, 105, 113, 115, 116, - 93, 168, 21, 22, 23, 24, 25, 26, 143, 124, - 143, 171, 102, 120, 102, 59, 118, 15, 16, 157, - 152, 155, 156, 153, 154, 108, 91, 125, 107, 126, - 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, - 137, 138, 139, 114, 142, 144, 142, 123, 146, 45, - 46, 49, 50, 47, 48, 39, 40, 41, 42, 43, - 44, 210, 158, 58, 122, 167, 212, 173, 169, 170, - 56, 57, 176, 36, 37, 38, 45, 46, 49, 50, - 47, 48, 39, 40, 41, 42, 43, 44, 177, 178, - 179, 37, 38, 45, 46, 49, 50, 47, 48, 39, - 40, 41, 42, 43, 44, 67, 197, 59, 201, 181, - 204, 90, 198, 207, 93, 200, 205, 208, 58, 68, - 58, 70, 71, 218, 213, 56, 57, 56, 57, 199, - 222, 210, 70, 71, 105, 163, 211, 11, 230, 217, - 105, 112, 90, 172, 221, 119, 223, 121, 54, 90, - 168, 182, 102, 232, 143, 11, 206, 229, 102, 228, - 163, 226, 59, 6, 59, 220, 231, 17, 18, 27, - 28, 30, 31, 29, 32, 33, 34, 35, 19, 20, - 74, 164, 110, 160, 186, 185, 162, 218, 21, 22, - 23, 24, 25, 26, 199, 109, 73, 165, 111, 215, - 216, 200, 58, 15, 16, 72, 58, 196, 195, 56, - 57, 161, 203, 56, 57, 159, 184, 183, 76, 77, - 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, - 88, 89, 168, 160, 3, 165, 168, 105, 225, 105, - 58, 61, 194, 193, 192, 191, 59, 56, 57, 159, - 59, 143, 214, 105, 150, 102, 180, 102, 190, 189, - 188, 187, 147, 145, 140, 106, 64, 219, 66, 175, - 168, 102, 66, 150, 148, 96, 98, 97, 95, 103, - 104, 202, 94, 53, 59, 100, 144, 142, 101, 96, - 98, 97, 92, 103, 104, 10, 9, 13, 8, 12, - 7, 63, 1, + 70, 169, 53, 151, 4, 143, 177, 100, 45, 52, + 207, 61, 234, 5, 76, 118, 63, 2, 56, 66, + 230, 14, 40, 41, 42, 43, 44, 45, 214, 11, + 42, 43, 44, 45, 114, 116, 117, 6, 153, 116, + 117, 17, 18, 28, 29, 31, 32, 30, 33, 34, 
+ 35, 36, 19, 20, 174, 91, 69, 106, 71, 72, + 109, 94, 21, 22, 23, 24, 25, 26, 27, 59, + 108, 145, 11, 92, 122, 103, 57, 58, 15, 16, + 121, 115, 120, 59, 159, 154, 157, 158, 155, 156, + 57, 58, 127, 126, 128, 129, 130, 131, 132, 133, + 134, 135, 136, 137, 138, 139, 140, 141, 144, 148, + 71, 72, 224, 171, 60, 125, 204, 215, 106, 215, + 160, 113, 217, 184, 216, 124, 185, 237, 60, 176, + 170, 227, 145, 179, 68, 172, 103, 173, 46, 47, + 50, 51, 48, 49, 40, 41, 42, 43, 44, 45, + 180, 181, 182, 38, 39, 46, 47, 50, 51, 48, + 49, 40, 41, 42, 43, 44, 45, 200, 146, 144, + 202, 175, 206, 91, 209, 212, 167, 94, 203, 59, + 213, 120, 210, 201, 239, 236, 57, 58, 218, 37, + 38, 39, 46, 47, 50, 51, 48, 49, 40, 41, + 42, 43, 44, 45, 165, 223, 91, 235, 226, 55, + 119, 165, 229, 91, 123, 106, 199, 198, 11, 162, + 189, 188, 11, 232, 60, 222, 121, 165, 233, 145, + 6, 238, 211, 103, 17, 18, 28, 29, 31, 32, + 30, 33, 34, 35, 36, 19, 20, 75, 166, 106, + 111, 161, 187, 186, 164, 21, 22, 23, 24, 25, + 26, 27, 224, 110, 204, 205, 112, 103, 74, 168, + 59, 15, 16, 73, 59, 197, 196, 57, 58, 163, + 228, 57, 58, 219, 208, 152, 77, 78, 79, 80, + 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, + 171, 205, 106, 231, 171, 168, 59, 106, 195, 194, + 59, 193, 192, 57, 58, 60, 145, 57, 58, 60, + 103, 183, 3, 106, 150, 103, 191, 190, 162, 62, + 220, 221, 161, 149, 147, 142, 171, 107, 225, 178, + 171, 103, 67, 97, 99, 98, 152, 104, 105, 207, + 96, 60, 146, 144, 65, 60, 67, 95, 54, 97, + 99, 98, 101, 104, 105, 102, 93, 10, 9, 13, + 8, 12, 7, 64, 1, } var exprPact = [...]int{ - 15, -1000, 80, -1000, -1000, 165, 15, -1000, -1000, -1000, - -1000, 324, 143, 157, -1000, 259, 250, -1000, -1000, -1000, + 15, -1000, 135, -1000, -1000, 166, 15, -1000, -1000, -1000, + -1000, 352, 112, 34, -1000, 267, 262, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, - -1000, -1000, -1000, -1000, -1000, -1000, -9, -9, -9, -9, - -9, -9, -9, -9, -9, -9, -9, -9, -9, -9, - -9, 165, -1000, 110, 309, 320, -1000, -1000, -1000, -1000, - 65, 62, 80, 240, 186, -1000, 46, 183, 201, 102, - 85, 47, -1000, -1000, 15, -1000, 15, 15, 15, 15, + -1000, -1000, -1000, -1000, -1000, -1000, -1000, -25, -25, -25, + -25, -25, -25, -25, -25, -25, -25, -25, -25, -25, + -25, -25, 166, -1000, 56, 319, 332, -1000, -1000, -1000, + -1000, 47, 37, 135, 248, 106, -1000, 23, 204, 208, + 103, 93, 71, -1000, -1000, 15, -1000, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, - -1000, 319, -1000, 52, -1000, -1000, -1000, -1000, 318, -1000, - -1000, -1000, 190, 317, 329, 23, -1000, -1000, -1000, -1000, - -1000, 328, -1000, 304, 288, 266, 241, 218, 287, 183, - 48, 185, 15, 325, 325, 97, 53, 53, -41, -41, - -47, -47, -47, -47, -48, -48, -48, -48, -48, -48, - -1000, 52, 190, 190, 190, -1000, 293, -1000, 151, -1000, - 200, 270, 238, 314, 312, 298, 296, 261, -1000, -1000, - -1000, -1000, -1000, -1000, -1000, 18, 246, 253, 295, 249, - 193, 168, 15, 17, 173, -1000, 103, 196, 52, 50, - -1000, 310, 255, -1000, -1000, -1000, -1000, -1000, -1000, -1000, - -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, 18, -1000, - -1000, 239, 323, 217, 181, 167, -1000, -1000, 10, -1000, - 294, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, - -15, 211, 209, 175, 168, -1000, -1000, -19, -1000, -1000, - 205, -1000, -1000, + 15, -1000, 330, -1000, 114, -1000, -1000, -1000, -1000, 329, + -1000, -1000, -1000, 245, 328, 342, 27, -1000, -1000, -1000, + -1000, -1000, 338, -1000, 327, 323, 274, 249, 225, 158, + 297, 58, 31, 153, 15, 335, 335, 98, 81, 81, + -35, -35, -60, 
-60, -60, -60, -41, -41, -41, -41, + -41, -41, -1000, 114, 245, 245, 245, -1000, 298, -1000, + 105, -1000, 115, 246, 214, 320, 305, 302, 269, 210, + -1000, -1000, -1000, -1000, -1000, -1000, 86, 58, 70, 256, + 293, 303, 261, 209, 86, 15, 5, 101, -1000, 99, + 211, 114, 53, -1000, 281, 326, -1000, -1000, -1000, -1000, + -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, + -1000, 202, -1000, 70, -1000, -1000, 254, 334, 200, 108, + 257, -1000, -1000, -3, -1000, 299, -1000, -1000, -1000, -1000, + -1000, -1000, 86, -1000, -1000, -1000, -31, 199, 177, 104, + 86, -1000, -1000, -1000, -36, -1000, -1000, 176, -1000, -1000, } var exprPgo = [...]int{ - 0, 362, 12, 32, 1, 7, 294, 4, 9, 10, - 361, 360, 359, 8, 358, 357, 356, 355, 240, 352, - 6, 3, 348, 345, 5, 343, 342, 338, 2, 334, - 0, + 0, 374, 16, 18, 0, 6, 322, 4, 15, 7, + 373, 372, 371, 13, 370, 369, 368, 367, 247, 366, + 9, 2, 365, 362, 5, 358, 357, 350, 3, 324, + 1, } var exprR1 = [...]int{ 0, 1, 2, 2, 7, 7, 7, 7, 7, 6, 6, 6, 8, 8, 8, 8, 8, 8, 8, 8, - 8, 8, 8, 8, 8, 8, 30, 11, 14, 14, - 14, 14, 14, 3, 3, 3, 3, 13, 13, 13, - 10, 10, 9, 9, 9, 9, 20, 20, 21, 21, - 21, 21, 21, 25, 25, 19, 19, 19, 26, 28, - 28, 29, 29, 29, 27, 24, 24, 24, 24, 24, - 24, 24, 24, 23, 23, 23, 23, 23, 23, 23, - 22, 22, 22, 22, 22, 22, 22, 16, 16, 16, + 8, 8, 8, 8, 8, 8, 30, 11, 11, 11, + 11, 14, 14, 14, 14, 14, 3, 3, 3, 3, + 13, 13, 13, 10, 10, 9, 9, 9, 9, 20, + 20, 21, 21, 21, 21, 21, 25, 25, 19, 19, + 19, 26, 28, 28, 29, 29, 29, 27, 24, 24, + 24, 24, 24, 24, 24, 24, 23, 23, 23, 23, + 23, 23, 23, 22, 22, 22, 22, 22, 22, 22, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, - 16, 16, 18, 18, 17, 17, 17, 15, 15, 15, - 15, 15, 15, 15, 15, 15, 12, 12, 12, 12, - 12, 12, 12, 12, 12, 12, 5, 5, 4, 4, + 16, 16, 16, 16, 16, 18, 18, 17, 17, 17, + 15, 15, 15, 15, 15, 15, 15, 15, 15, 12, + 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, + 5, 5, 4, 4, } var exprR2 = [...]int{ 0, 1, 1, 1, 1, 1, 1, 1, 3, 1, 2, 3, 2, 4, 3, 5, 3, 5, 3, 5, - 4, 6, 3, 4, 3, 2, 3, 4, 4, 5, - 5, 6, 7, 1, 1, 1, 1, 3, 3, 3, - 1, 3, 3, 3, 3, 3, 1, 2, 1, 2, - 2, 2, 2, 2, 3, 1, 1, 2, 2, 3, - 3, 1, 3, 3, 2, 1, 1, 1, 3, 2, + 4, 6, 3, 4, 3, 2, 3, 4, 6, 5, + 7, 4, 5, 5, 6, 7, 1, 1, 1, 1, + 3, 3, 3, 1, 3, 3, 3, 3, 3, 1, + 2, 1, 2, 2, 2, 2, 2, 3, 1, 1, + 2, 2, 3, 3, 1, 3, 3, 2, 1, 1, + 1, 3, 2, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, - 3, 3, 3, 3, 3, 3, 3, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, - 4, 4, 0, 1, 1, 2, 2, 1, 1, 1, + 4, 4, 4, 4, 4, 0, 1, 1, 2, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, - 1, 1, 1, 1, 1, 1, 1, 3, 4, 4, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 3, 4, 4, } var exprChk = [...]int{ -1000, -1, -2, -6, -7, -13, 22, -11, -14, -16, - -17, 14, -12, -15, 6, 62, 63, 26, 27, 37, - 38, 47, 48, 49, 50, 51, 52, 28, 29, 32, - 30, 31, 33, 34, 35, 36, 53, 54, 55, 62, - 63, 64, 65, 66, 67, 56, 57, 60, 61, 58, - 59, -20, -21, -25, 43, -3, 20, 21, 13, 57, - -7, -6, -2, -10, 2, -9, 4, 22, 22, -4, - 24, 25, 6, 6, -18, 39, -18, -18, -18, -18, + -17, 14, -12, -15, 6, 63, 64, 26, 27, 37, + 38, 47, 48, 49, 50, 51, 52, 53, 28, 29, + 32, 30, 31, 33, 34, 35, 36, 54, 55, 56, + 63, 64, 65, 66, 67, 68, 57, 58, 61, 62, + 59, 60, -20, -21, -25, 43, -3, 20, 21, 13, + 58, -7, -6, -2, -10, 2, -9, 4, 22, 22, + -4, 24, 25, 6, 6, -18, 39, -18, -18, -18, -18, -18, -18, -18, -18, -18, -18, -18, -18, -18, - -21, -3, -19, -24, -26, -27, 40, 42, 41, -9, - -23, -22, 22, 44, 45, 4, 5, 23, 23, 15, - 2, 18, 15, 11, 57, 12, 13, -8, -13, 22, - -7, 6, 22, 22, 22, -2, -2, -2, -2, -2, + -18, -21, -3, -19, -24, -26, -27, 40, 42, 41, + -9, -23, -22, 22, 
44, 45, 4, 5, 23, 23, + 15, 2, 18, 15, 11, 58, 12, 13, -8, 6, + -13, 22, -7, 6, 22, 22, 22, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, - 5, -24, 54, 18, 53, 5, -24, 5, -29, -28, - 4, 11, 57, 60, 61, 58, 59, 56, -9, 5, - 5, 5, 5, 2, 23, 8, -30, -20, 43, -13, - -8, 23, 18, -7, -5, 4, -5, -24, -24, -24, - 23, 18, 11, 7, 6, 7, 6, 7, 6, 7, - 6, 7, 6, 7, 6, 7, 6, -30, -20, 8, - 8, -30, 46, 23, -30, -20, 23, -4, -7, 23, - 18, 23, 23, -28, 2, 4, 5, -30, 8, 4, - 8, 23, 23, -30, 23, 4, -30, 43, 8, 8, - 23, -4, 8, + -2, -2, 5, -24, 55, 18, 54, 5, -24, 5, + -29, -28, 4, 11, 58, 61, 62, 59, 60, 57, + -9, 5, 5, 5, 5, 2, 23, 18, 8, -30, + -20, 43, -13, -8, 23, 18, -7, -5, 4, -5, + -24, -24, -24, 23, 18, 11, 7, 6, 7, 6, + 7, 6, 7, 6, 7, 6, 7, 6, 7, 6, + -4, -8, -30, -20, 8, 8, -30, 46, 23, -30, + -20, 23, -4, -7, 23, 18, 23, 23, -28, 2, + 4, 5, 23, -30, 8, 4, 8, 23, 23, -30, + 23, 4, -4, -30, 43, 8, 8, 23, -4, 8, } var exprDef = [...]int{ 0, -2, 1, 2, 3, 9, 0, 4, 5, 6, - 7, 0, 0, 0, 104, 0, 0, 116, 117, 118, - 119, 120, 121, 122, 123, 124, 125, 107, 108, 109, - 110, 111, 112, 113, 114, 115, 102, 102, 102, 102, - 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, - 102, 10, 46, 48, 0, 0, 33, 34, 35, 36, - 3, 2, 0, 0, 0, 40, 0, 0, 0, 0, - 0, 0, 105, 106, 0, 103, 0, 0, 0, 0, + 7, 0, 0, 0, 107, 0, 0, 119, 120, 121, + 122, 123, 124, 125, 126, 127, 128, 129, 110, 111, + 112, 113, 114, 115, 116, 117, 118, 105, 105, 105, + 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, + 105, 105, 10, 49, 51, 0, 0, 36, 37, 38, + 39, 3, 2, 0, 0, 0, 43, 0, 0, 0, + 0, 0, 0, 108, 109, 0, 106, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 47, 0, 49, 50, 51, 52, 55, 56, 0, 65, - 66, 67, 0, 0, 0, 0, 53, 8, 11, 37, - 38, 0, 39, 0, 0, 0, 0, 0, 0, 0, - 3, 104, 0, 0, 0, 87, 88, 89, 90, 91, - 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, - 54, 69, 0, 0, 0, 57, 0, 58, 64, 61, - 0, 0, 0, 0, 0, 0, 0, 0, 41, 42, - 43, 44, 45, 25, 27, 12, 0, 0, 0, 0, - 0, 28, 0, 3, 0, 126, 0, 70, 71, 72, - 68, 0, 0, 78, 85, 77, 84, 73, 80, 74, - 81, 75, 82, 76, 83, 79, 86, 14, 22, 16, - 18, 0, 0, 0, 0, 0, 24, 30, 3, 29, - 0, 128, 129, 62, 63, 59, 60, 23, 20, 26, - 13, 0, 0, 0, 31, 127, 15, 0, 17, 19, - 0, 32, 21, + 0, 50, 0, 52, 53, 54, 55, 58, 59, 0, + 68, 69, 70, 0, 0, 0, 0, 56, 8, 11, + 40, 41, 0, 42, 0, 0, 0, 0, 0, 0, + 0, 0, 3, 107, 0, 0, 0, 90, 91, 92, + 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, + 103, 104, 57, 72, 0, 0, 0, 60, 0, 61, + 67, 64, 0, 0, 0, 0, 0, 0, 0, 0, + 44, 45, 46, 47, 48, 25, 27, 0, 12, 0, + 0, 0, 0, 0, 31, 0, 3, 0, 130, 0, + 73, 74, 75, 71, 0, 0, 81, 88, 80, 87, + 76, 83, 77, 84, 78, 85, 79, 86, 82, 89, + 29, 0, 14, 22, 16, 18, 0, 0, 0, 0, + 0, 24, 33, 3, 32, 0, 132, 133, 65, 66, + 62, 63, 28, 23, 20, 26, 13, 0, 0, 0, + 34, 131, 30, 15, 0, 17, 19, 0, 35, 21, } var exprTok1 = [...]int{ @@ -375,7 +380,7 @@ var exprTok2 = [...]int{ 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, - 62, 63, 64, 65, 66, 67, + 62, 63, 64, 65, 66, 67, 68, } var exprTok3 = [...]int{ 0, @@ -872,610 +877,634 @@ exprdefault: exprDollar = exprS[exprpt-4 : exprpt+1] //line pkg/logql/expr.y:137 { - exprVAL.RangeAggregationExpr = newRangeAggregationExpr(exprDollar[3].LogRangeExpr, exprDollar[1].RangeOp) + exprVAL.RangeAggregationExpr = newRangeAggregationExpr(exprDollar[3].LogRangeExpr, exprDollar[1].RangeOp, nil, nil) } case 28: + exprDollar = exprS[exprpt-6 : exprpt+1] +//line pkg/logql/expr.y:138 + { + exprVAL.RangeAggregationExpr = newRangeAggregationExpr(exprDollar[5].LogRangeExpr, 
exprDollar[1].RangeOp, nil, &exprDollar[3].str) + } + case 29: + exprDollar = exprS[exprpt-5 : exprpt+1] +//line pkg/logql/expr.y:139 + { + exprVAL.RangeAggregationExpr = newRangeAggregationExpr(exprDollar[3].LogRangeExpr, exprDollar[1].RangeOp, exprDollar[5].Grouping, nil) + } + case 30: + exprDollar = exprS[exprpt-7 : exprpt+1] +//line pkg/logql/expr.y:140 + { + exprVAL.RangeAggregationExpr = newRangeAggregationExpr(exprDollar[5].LogRangeExpr, exprDollar[1].RangeOp, exprDollar[7].Grouping, &exprDollar[3].str) + } + case 31: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:141 +//line pkg/logql/expr.y:145 { exprVAL.VectorAggregationExpr = mustNewVectorAggregationExpr(exprDollar[3].MetricExpr, exprDollar[1].VectorOp, nil, nil) } - case 29: + case 32: exprDollar = exprS[exprpt-5 : exprpt+1] -//line pkg/logql/expr.y:142 +//line pkg/logql/expr.y:146 { exprVAL.VectorAggregationExpr = mustNewVectorAggregationExpr(exprDollar[4].MetricExpr, exprDollar[1].VectorOp, exprDollar[2].Grouping, nil) } - case 30: + case 33: exprDollar = exprS[exprpt-5 : exprpt+1] -//line pkg/logql/expr.y:143 +//line pkg/logql/expr.y:147 { exprVAL.VectorAggregationExpr = mustNewVectorAggregationExpr(exprDollar[3].MetricExpr, exprDollar[1].VectorOp, exprDollar[5].Grouping, nil) } - case 31: + case 34: exprDollar = exprS[exprpt-6 : exprpt+1] -//line pkg/logql/expr.y:145 +//line pkg/logql/expr.y:149 { exprVAL.VectorAggregationExpr = mustNewVectorAggregationExpr(exprDollar[5].MetricExpr, exprDollar[1].VectorOp, nil, &exprDollar[3].str) } - case 32: + case 35: exprDollar = exprS[exprpt-7 : exprpt+1] -//line pkg/logql/expr.y:146 +//line pkg/logql/expr.y:150 { exprVAL.VectorAggregationExpr = mustNewVectorAggregationExpr(exprDollar[5].MetricExpr, exprDollar[1].VectorOp, exprDollar[7].Grouping, &exprDollar[3].str) } - case 33: + case 36: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:150 +//line pkg/logql/expr.y:154 { exprVAL.Filter = labels.MatchRegexp } - case 34: + case 37: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:151 +//line pkg/logql/expr.y:155 { exprVAL.Filter = labels.MatchEqual } - case 35: + case 38: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:152 +//line pkg/logql/expr.y:156 { exprVAL.Filter = labels.MatchNotRegexp } - case 36: + case 39: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:153 +//line pkg/logql/expr.y:157 { exprVAL.Filter = labels.MatchNotEqual } - case 37: + case 40: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:157 +//line pkg/logql/expr.y:161 { exprVAL.Selector = exprDollar[2].Matchers } - case 38: + case 41: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:158 +//line pkg/logql/expr.y:162 { exprVAL.Selector = exprDollar[2].Matchers } - case 39: + case 42: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:159 +//line pkg/logql/expr.y:163 { } - case 40: + case 43: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:163 +//line pkg/logql/expr.y:167 { exprVAL.Matchers = []*labels.Matcher{exprDollar[1].Matcher} } - case 41: + case 44: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:164 +//line pkg/logql/expr.y:168 { exprVAL.Matchers = append(exprDollar[1].Matchers, exprDollar[3].Matcher) } - case 42: + case 45: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:168 +//line pkg/logql/expr.y:172 { exprVAL.Matcher = mustNewMatcher(labels.MatchEqual, exprDollar[1].str, exprDollar[3].str) } - case 43: + case 46: exprDollar = 
exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:169 +//line pkg/logql/expr.y:173 { exprVAL.Matcher = mustNewMatcher(labels.MatchNotEqual, exprDollar[1].str, exprDollar[3].str) } - case 44: + case 47: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:170 +//line pkg/logql/expr.y:174 { exprVAL.Matcher = mustNewMatcher(labels.MatchRegexp, exprDollar[1].str, exprDollar[3].str) } - case 45: + case 48: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:171 +//line pkg/logql/expr.y:175 { exprVAL.Matcher = mustNewMatcher(labels.MatchNotRegexp, exprDollar[1].str, exprDollar[3].str) } - case 46: + case 49: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:175 +//line pkg/logql/expr.y:179 { exprVAL.PipelineExpr = MultiPipelineExpr{exprDollar[1].PipelineStage} } - case 47: + case 50: exprDollar = exprS[exprpt-2 : exprpt+1] -//line pkg/logql/expr.y:176 +//line pkg/logql/expr.y:180 { exprVAL.PipelineExpr = append(exprDollar[1].PipelineExpr, exprDollar[2].PipelineStage) } - case 48: + case 51: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:180 +//line pkg/logql/expr.y:184 { exprVAL.PipelineStage = exprDollar[1].LineFilters } - case 49: + case 52: exprDollar = exprS[exprpt-2 : exprpt+1] -//line pkg/logql/expr.y:181 +//line pkg/logql/expr.y:185 { exprVAL.PipelineStage = exprDollar[2].LabelParser } - case 50: + case 53: exprDollar = exprS[exprpt-2 : exprpt+1] -//line pkg/logql/expr.y:182 +//line pkg/logql/expr.y:186 { exprVAL.PipelineStage = &labelFilterExpr{Filterer: exprDollar[2].LabelFilter} } - case 51: + case 54: exprDollar = exprS[exprpt-2 : exprpt+1] -//line pkg/logql/expr.y:183 +//line pkg/logql/expr.y:187 { exprVAL.PipelineStage = exprDollar[2].LineFormatExpr } - case 52: + case 55: exprDollar = exprS[exprpt-2 : exprpt+1] -//line pkg/logql/expr.y:184 +//line pkg/logql/expr.y:188 { exprVAL.PipelineStage = exprDollar[2].LabelFormatExpr } - case 53: + case 56: exprDollar = exprS[exprpt-2 : exprpt+1] -//line pkg/logql/expr.y:188 +//line pkg/logql/expr.y:192 { exprVAL.LineFilters = newLineFilterExpr(nil, exprDollar[1].Filter, exprDollar[2].str) } - case 54: + case 57: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:189 +//line pkg/logql/expr.y:193 { exprVAL.LineFilters = newLineFilterExpr(exprDollar[1].LineFilters, exprDollar[2].Filter, exprDollar[3].str) } - case 55: + case 58: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:192 +//line pkg/logql/expr.y:196 { exprVAL.LabelParser = newLabelParserExpr(OpParserTypeJSON, "") } - case 56: + case 59: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:193 +//line pkg/logql/expr.y:197 { exprVAL.LabelParser = newLabelParserExpr(OpParserTypeLogfmt, "") } - case 57: + case 60: exprDollar = exprS[exprpt-2 : exprpt+1] -//line pkg/logql/expr.y:194 +//line pkg/logql/expr.y:198 { exprVAL.LabelParser = newLabelParserExpr(OpParserTypeRegexp, exprDollar[2].str) } - case 58: + case 61: exprDollar = exprS[exprpt-2 : exprpt+1] -//line pkg/logql/expr.y:197 +//line pkg/logql/expr.y:201 { exprVAL.LineFormatExpr = newLineFmtExpr(exprDollar[2].str) } - case 59: + case 62: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:200 +//line pkg/logql/expr.y:204 { exprVAL.LabelFormat = newRenameLabelFmt(exprDollar[1].str, exprDollar[3].str) } - case 60: + case 63: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:201 +//line pkg/logql/expr.y:205 { exprVAL.LabelFormat = newTemplateLabelFmt(exprDollar[1].str, exprDollar[3].str) } - case 61: + case 
64: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:205 +//line pkg/logql/expr.y:209 { exprVAL.LabelsFormat = []labelFmt{exprDollar[1].LabelFormat} } - case 62: + case 65: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:206 +//line pkg/logql/expr.y:210 { exprVAL.LabelsFormat = append(exprDollar[1].LabelsFormat, exprDollar[3].LabelFormat) } - case 64: + case 67: exprDollar = exprS[exprpt-2 : exprpt+1] -//line pkg/logql/expr.y:210 +//line pkg/logql/expr.y:214 { exprVAL.LabelFormatExpr = newLabelFmtExpr(exprDollar[2].LabelsFormat) } - case 65: + case 68: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:213 +//line pkg/logql/expr.y:217 { exprVAL.LabelFilter = labelfilter.NewString(exprDollar[1].Matcher) } - case 66: + case 69: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:214 +//line pkg/logql/expr.y:218 { exprVAL.LabelFilter = exprDollar[1].DurationFilter } - case 67: + case 70: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:215 +//line pkg/logql/expr.y:219 { exprVAL.LabelFilter = exprDollar[1].NumberFilter } - case 68: + case 71: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:216 +//line pkg/logql/expr.y:220 { exprVAL.LabelFilter = exprDollar[2].LabelFilter } - case 69: + case 72: exprDollar = exprS[exprpt-2 : exprpt+1] -//line pkg/logql/expr.y:217 +//line pkg/logql/expr.y:221 { exprVAL.LabelFilter = labelfilter.NewAnd(exprDollar[1].LabelFilter, exprDollar[2].LabelFilter) } - case 70: + case 73: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:218 +//line pkg/logql/expr.y:222 { exprVAL.LabelFilter = labelfilter.NewAnd(exprDollar[1].LabelFilter, exprDollar[3].LabelFilter) } - case 71: + case 74: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:219 +//line pkg/logql/expr.y:223 { exprVAL.LabelFilter = labelfilter.NewAnd(exprDollar[1].LabelFilter, exprDollar[3].LabelFilter) } - case 72: + case 75: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:220 +//line pkg/logql/expr.y:224 { exprVAL.LabelFilter = labelfilter.NewOr(exprDollar[1].LabelFilter, exprDollar[3].LabelFilter) } - case 73: + case 76: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:224 +//line pkg/logql/expr.y:228 { exprVAL.DurationFilter = labelfilter.NewDuration(labelfilter.FilterGreaterThan, exprDollar[1].str, exprDollar[3].duration) } - case 74: + case 77: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:225 +//line pkg/logql/expr.y:229 { exprVAL.DurationFilter = labelfilter.NewDuration(labelfilter.FilterGreaterThanOrEqual, exprDollar[1].str, exprDollar[3].duration) } - case 75: + case 78: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:226 +//line pkg/logql/expr.y:230 { exprVAL.DurationFilter = labelfilter.NewDuration(labelfilter.FilterLesserThan, exprDollar[1].str, exprDollar[3].duration) } - case 76: + case 79: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:227 +//line pkg/logql/expr.y:231 { exprVAL.DurationFilter = labelfilter.NewDuration(labelfilter.FilterLesserThanOrEqual, exprDollar[1].str, exprDollar[3].duration) } - case 77: + case 80: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:228 +//line pkg/logql/expr.y:232 { exprVAL.DurationFilter = labelfilter.NewDuration(labelfilter.FilterNotEqual, exprDollar[1].str, exprDollar[3].duration) } - case 78: + case 81: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:229 +//line pkg/logql/expr.y:233 { exprVAL.DurationFilter = 
labelfilter.NewDuration(labelfilter.FilterEqual, exprDollar[1].str, exprDollar[3].duration) } - case 79: + case 82: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:230 +//line pkg/logql/expr.y:234 { exprVAL.DurationFilter = labelfilter.NewDuration(labelfilter.FilterEqual, exprDollar[1].str, exprDollar[3].duration) } - case 80: + case 83: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:234 +//line pkg/logql/expr.y:238 { exprVAL.NumberFilter = labelfilter.NewNumeric(labelfilter.FilterGreaterThan, exprDollar[1].str, mustNewFloat(exprDollar[3].str)) } - case 81: + case 84: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:235 +//line pkg/logql/expr.y:239 { exprVAL.NumberFilter = labelfilter.NewNumeric(labelfilter.FilterGreaterThanOrEqual, exprDollar[1].str, mustNewFloat(exprDollar[3].str)) } - case 82: + case 85: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:236 +//line pkg/logql/expr.y:240 { exprVAL.NumberFilter = labelfilter.NewNumeric(labelfilter.FilterLesserThan, exprDollar[1].str, mustNewFloat(exprDollar[3].str)) } - case 83: + case 86: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:237 +//line pkg/logql/expr.y:241 { exprVAL.NumberFilter = labelfilter.NewNumeric(labelfilter.FilterLesserThanOrEqual, exprDollar[1].str, mustNewFloat(exprDollar[3].str)) } - case 84: + case 87: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:238 +//line pkg/logql/expr.y:242 { exprVAL.NumberFilter = labelfilter.NewNumeric(labelfilter.FilterNotEqual, exprDollar[1].str, mustNewFloat(exprDollar[3].str)) } - case 85: + case 88: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:239 +//line pkg/logql/expr.y:243 { exprVAL.NumberFilter = labelfilter.NewNumeric(labelfilter.FilterEqual, exprDollar[1].str, mustNewFloat(exprDollar[3].str)) } - case 86: + case 89: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:240 +//line pkg/logql/expr.y:244 { exprVAL.NumberFilter = labelfilter.NewNumeric(labelfilter.FilterEqual, exprDollar[1].str, mustNewFloat(exprDollar[3].str)) } - case 87: + case 90: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:246 +//line pkg/logql/expr.y:250 { exprVAL.BinOpExpr = mustNewBinOpExpr("or", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } - case 88: + case 91: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:247 +//line pkg/logql/expr.y:251 { exprVAL.BinOpExpr = mustNewBinOpExpr("and", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } - case 89: + case 92: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:248 +//line pkg/logql/expr.y:252 { exprVAL.BinOpExpr = mustNewBinOpExpr("unless", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } - case 90: + case 93: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:249 +//line pkg/logql/expr.y:253 { exprVAL.BinOpExpr = mustNewBinOpExpr("+", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } - case 91: + case 94: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:250 +//line pkg/logql/expr.y:254 { exprVAL.BinOpExpr = mustNewBinOpExpr("-", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } - case 92: + case 95: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:251 +//line pkg/logql/expr.y:255 { exprVAL.BinOpExpr = mustNewBinOpExpr("*", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } - case 93: + case 96: exprDollar 
= exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:252 +//line pkg/logql/expr.y:256 { exprVAL.BinOpExpr = mustNewBinOpExpr("/", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } - case 94: + case 97: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:253 +//line pkg/logql/expr.y:257 { exprVAL.BinOpExpr = mustNewBinOpExpr("%", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } - case 95: + case 98: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:254 +//line pkg/logql/expr.y:258 { exprVAL.BinOpExpr = mustNewBinOpExpr("^", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } - case 96: + case 99: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:255 +//line pkg/logql/expr.y:259 { exprVAL.BinOpExpr = mustNewBinOpExpr("==", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } - case 97: + case 100: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:256 +//line pkg/logql/expr.y:260 { exprVAL.BinOpExpr = mustNewBinOpExpr("!=", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } - case 98: + case 101: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:257 +//line pkg/logql/expr.y:261 { exprVAL.BinOpExpr = mustNewBinOpExpr(">", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } - case 99: + case 102: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:258 +//line pkg/logql/expr.y:262 { exprVAL.BinOpExpr = mustNewBinOpExpr(">=", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } - case 100: + case 103: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:259 +//line pkg/logql/expr.y:263 { exprVAL.BinOpExpr = mustNewBinOpExpr("<", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } - case 101: + case 104: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:260 +//line pkg/logql/expr.y:264 { exprVAL.BinOpExpr = mustNewBinOpExpr("<=", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } - case 102: + case 105: exprDollar = exprS[exprpt-0 : exprpt+1] -//line pkg/logql/expr.y:264 +//line pkg/logql/expr.y:268 { exprVAL.BinOpModifier = BinOpOptions{} } - case 103: + case 106: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:265 +//line pkg/logql/expr.y:269 { exprVAL.BinOpModifier = BinOpOptions{ReturnBool: true} } - case 104: + case 107: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:269 +//line pkg/logql/expr.y:273 { exprVAL.LiteralExpr = mustNewLiteralExpr(exprDollar[1].str, false) } - case 105: + case 108: exprDollar = exprS[exprpt-2 : exprpt+1] -//line pkg/logql/expr.y:270 +//line pkg/logql/expr.y:274 { exprVAL.LiteralExpr = mustNewLiteralExpr(exprDollar[2].str, false) } - case 106: + case 109: exprDollar = exprS[exprpt-2 : exprpt+1] -//line pkg/logql/expr.y:271 +//line pkg/logql/expr.y:275 { exprVAL.LiteralExpr = mustNewLiteralExpr(exprDollar[2].str, true) } - case 107: + case 110: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:275 +//line pkg/logql/expr.y:279 { exprVAL.VectorOp = OpTypeSum } - case 108: + case 111: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:276 +//line pkg/logql/expr.y:280 { exprVAL.VectorOp = OpTypeAvg } - case 109: + case 112: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:277 +//line pkg/logql/expr.y:281 { exprVAL.VectorOp = OpTypeCount } - case 110: + case 113: exprDollar = exprS[exprpt-1 : exprpt+1] 
-//line pkg/logql/expr.y:278 +//line pkg/logql/expr.y:282 { exprVAL.VectorOp = OpTypeMax } - case 111: + case 114: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:279 +//line pkg/logql/expr.y:283 { exprVAL.VectorOp = OpTypeMin } - case 112: + case 115: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:280 +//line pkg/logql/expr.y:284 { exprVAL.VectorOp = OpTypeStddev } - case 113: + case 116: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:281 +//line pkg/logql/expr.y:285 { exprVAL.VectorOp = OpTypeStdvar } - case 114: + case 117: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:282 +//line pkg/logql/expr.y:286 { exprVAL.VectorOp = OpTypeBottomK } - case 115: + case 118: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:283 +//line pkg/logql/expr.y:287 { exprVAL.VectorOp = OpTypeTopK } - case 116: + case 119: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:287 +//line pkg/logql/expr.y:291 { exprVAL.RangeOp = OpRangeTypeCount } - case 117: + case 120: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:288 +//line pkg/logql/expr.y:292 { exprVAL.RangeOp = OpRangeTypeRate } - case 118: + case 121: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:289 +//line pkg/logql/expr.y:293 { exprVAL.RangeOp = OpRangeTypeBytes } - case 119: + case 122: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:290 +//line pkg/logql/expr.y:294 { exprVAL.RangeOp = OpRangeTypeBytesRate } - case 120: + case 123: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:291 +//line pkg/logql/expr.y:295 { exprVAL.RangeOp = OpRangeTypeAvg } - case 121: + case 124: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:292 +//line pkg/logql/expr.y:296 { exprVAL.RangeOp = OpRangeTypeSum } - case 122: + case 125: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:293 +//line pkg/logql/expr.y:297 { exprVAL.RangeOp = OpRangeTypeMin } - case 123: + case 126: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:294 +//line pkg/logql/expr.y:298 { exprVAL.RangeOp = OpRangeTypeMax } - case 124: + case 127: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:295 +//line pkg/logql/expr.y:299 { exprVAL.RangeOp = OpRangeTypeStdvar } - case 125: + case 128: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:296 +//line pkg/logql/expr.y:300 { exprVAL.RangeOp = OpRangeTypeStddev } - case 126: + case 129: exprDollar = exprS[exprpt-1 : exprpt+1] //line pkg/logql/expr.y:301 + { + exprVAL.RangeOp = OpRangeTypeQuantile + } + case 130: + exprDollar = exprS[exprpt-1 : exprpt+1] +//line pkg/logql/expr.y:306 { exprVAL.Labels = []string{exprDollar[1].str} } - case 127: + case 131: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:302 +//line pkg/logql/expr.y:307 { exprVAL.Labels = append(exprDollar[1].Labels, exprDollar[3].str) } - case 128: + case 132: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:306 +//line pkg/logql/expr.y:311 { exprVAL.Grouping = &grouping{without: false, groups: exprDollar[3].Labels} } - case 129: + case 133: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:307 +//line pkg/logql/expr.y:312 { exprVAL.Grouping = &grouping{without: true, groups: exprDollar[3].Labels} } diff --git a/pkg/logql/functions.go b/pkg/logql/functions.go index 7afdaa524dd6d..cc9086a0be3a0 100644 --- a/pkg/logql/functions.go +++ b/pkg/logql/functions.go @@ -3,6 +3,7 @@ package logql import ( "fmt" 
"math" + "sort" "time" "github.com/prometheus/prometheus/promql" @@ -47,6 +48,8 @@ func (r rangeAggregationExpr) aggregator() (RangeVectorAggregator, error) { return stddevOverTime, nil case OpRangeTypeStdvar: return stdvarOverTime, nil + case OpRangeTypeQuantile: + return quantileOverTime(*r.params), nil default: return nil, fmt.Errorf(unsupportedErr, r.operation) } @@ -146,3 +149,43 @@ func stddevOverTime(samples []promql.Point) float64 { } return math.Sqrt(aux / count) } + +func quantileOverTime(q float64) func(samples []promql.Point) float64 { + return func(samples []promql.Point) float64 { + values := make(vectorByValueHeap, 0, len(samples)) + for _, v := range samples { + values = append(values, promql.Sample{Point: promql.Point{V: v.V}}) + } + return quantile(q, values) + } +} + +// quantile calculates the given quantile of a vector of samples. +// +// The Vector will be sorted. +// If 'values' has zero elements, NaN is returned. +// If q<0, -Inf is returned. +// If q>1, +Inf is returned. +func quantile(q float64, values vectorByValueHeap) float64 { + if len(values) == 0 { + return math.NaN() + } + if q < 0 { + return math.Inf(-1) + } + if q > 1 { + return math.Inf(+1) + } + sort.Sort(values) + + n := float64(len(values)) + // When the quantile lies between two samples, + // we use a weighted average of the two samples. + rank := q * (n - 1) + + lowerIndex := math.Max(0, math.Floor(rank)) + upperIndex := math.Min(n-1, lowerIndex+1) + + weight := rank - math.Floor(rank) + return values[int(lowerIndex)].V*(1-weight) + values[int(upperIndex)].V*weight +} diff --git a/pkg/logql/lex.go b/pkg/logql/lex.go index a5d00977c5842..b2b47b79b2760 100644 --- a/pkg/logql/lex.go +++ b/pkg/logql/lex.go @@ -42,6 +42,7 @@ var tokens = map[string]int{ OpRangeTypeMax: MAX_OVER_TIME, OpRangeTypeStdvar: STDVAR_OVER_TIME, OpRangeTypeStddev: STDDEV_OVER_TIME, + OpRangeTypeQuantile: QUANTILE_OVER_TIME, // vec ops OpTypeSum: SUM, diff --git a/pkg/logql/parser_test.go b/pkg/logql/parser_test.go index 6579282e5859a..96848061b9718 100644 --- a/pkg/logql/parser_test.go +++ b/pkg/logql/parser_test.go @@ -359,7 +359,7 @@ func TestParse(t *testing.T) { }, ), interval: 5 * time.Minute, - }, OpRangeTypeCount), + }, OpRangeTypeCount, nil, nil), }, { in: `bytes_over_time(({foo="bar"} |= "baz" |~ "blip" != "flip" !~ "flap")[5m])`, @@ -378,7 +378,7 @@ func TestParse(t *testing.T) { }, ), interval: 5 * time.Minute, - }, OpRangeTypeBytes), + }, OpRangeTypeBytes, nil, nil), }, { in: `sum(count_over_time(({foo="bar"} |= "baz" |~ "blip" != "flip" !~ "flap")[5m])) by (foo)`, @@ -397,7 +397,7 @@ func TestParse(t *testing.T) { }, ), interval: 5 * time.Minute, - }, OpRangeTypeCount), + }, OpRangeTypeCount, nil, nil), "sum", &grouping{ without: false, @@ -422,7 +422,7 @@ func TestParse(t *testing.T) { }, ), interval: 5 * time.Minute, - }, OpRangeTypeBytesRate), + }, OpRangeTypeBytesRate, nil, nil), "sum", &grouping{ without: false, @@ -447,7 +447,7 @@ func TestParse(t *testing.T) { }, ), interval: 5 * time.Minute, - }, OpRangeTypeCount), + }, OpRangeTypeCount, nil, nil), "topk", &grouping{ without: true, @@ -474,7 +474,7 @@ func TestParse(t *testing.T) { }, ), interval: 5 * time.Minute, - }, OpRangeTypeRate), + }, OpRangeTypeRate, nil, nil), "sum", &grouping{ without: false, @@ -502,7 +502,7 @@ func TestParse(t *testing.T) { }, ), interval: 5 * time.Minute, - }, OpRangeTypeCount), + }, OpRangeTypeCount, nil, nil), }, { in: `sum(count_over_time({foo="bar"}[5m] |= "baz" |~ "blip" != "flip" !~ "flap")) by (foo)`, @@ -521,7 +521,7 @@ 
func TestParse(t *testing.T) { }, ), interval: 5 * time.Minute, - }, OpRangeTypeCount), + }, OpRangeTypeCount, nil, nil), "sum", &grouping{ without: false, @@ -546,7 +546,7 @@ func TestParse(t *testing.T) { }, ), interval: 5 * time.Minute, - }, OpRangeTypeCount), + }, OpRangeTypeCount, nil, nil), "topk", &grouping{ without: true, @@ -573,7 +573,7 @@ func TestParse(t *testing.T) { }, ), interval: 5 * time.Minute, - }, OpRangeTypeRate), + }, OpRangeTypeRate, nil, nil), "sum", &grouping{ without: false, @@ -647,7 +647,7 @@ func TestParse(t *testing.T) { }, }, interval: 5 * time.Minute, - }, OpRangeTypeCount), + }, OpRangeTypeCount, nil, nil), "sum", &grouping{ without: false, @@ -663,7 +663,7 @@ func TestParse(t *testing.T) { }, }, interval: 5 * time.Minute, - }, OpRangeTypeCount), + }, OpRangeTypeCount, nil, nil), "sum", &grouping{ without: false, @@ -680,7 +680,7 @@ func TestParse(t *testing.T) { }, }, interval: 5 * time.Minute, - }, OpRangeTypeCount), + }, OpRangeTypeCount, nil, nil), "sum", &grouping{ without: false, @@ -710,7 +710,7 @@ func TestParse(t *testing.T) { }, }, interval: 5 * time.Minute, - }, OpRangeTypeCount), + }, OpRangeTypeCount, nil, nil), "sum", &grouping{ without: false, @@ -726,7 +726,7 @@ func TestParse(t *testing.T) { }, }, interval: 5 * time.Minute, - }, OpRangeTypeCount), + }, OpRangeTypeCount, nil, nil), "sum", &grouping{ without: false, @@ -743,7 +743,7 @@ func TestParse(t *testing.T) { }, }, interval: 5 * time.Minute, - }, OpRangeTypeCount), + }, OpRangeTypeCount, nil, nil), "sum", &grouping{ without: false, @@ -771,7 +771,7 @@ func TestParse(t *testing.T) { }, }, interval: 5 * time.Minute, - }, OpRangeTypeCount), + }, OpRangeTypeCount, nil, nil), "sum", &grouping{ without: false, @@ -790,7 +790,7 @@ func TestParse(t *testing.T) { }, }, interval: 5 * time.Minute, - }, OpRangeTypeCount), + }, OpRangeTypeCount, nil, nil), "sum", &grouping{ without: false, @@ -806,7 +806,7 @@ func TestParse(t *testing.T) { }, }, interval: 5 * time.Minute, - }, OpRangeTypeCount), + }, OpRangeTypeCount, nil, nil), "sum", &grouping{ without: false, @@ -836,7 +836,7 @@ func TestParse(t *testing.T) { newLineFilterExpr(nil, labels.MatchEqual, "level=error"), }), interval: 5 * time.Minute, - }, OpRangeTypeCount), + }, OpRangeTypeCount, nil, nil), newRangeAggregationExpr( &logRange{ left: &matchersExpr{ @@ -845,7 +845,7 @@ func TestParse(t *testing.T) { }, }, interval: 5 * time.Minute, - }, OpRangeTypeCount)), OpTypeSum, &grouping{groups: []string{"job"}}, nil), + }, OpRangeTypeCount, nil, nil)), OpTypeSum, &grouping{groups: []string{"job"}}, nil), }, { in: `sum by (job) ( @@ -866,7 +866,7 @@ func TestParse(t *testing.T) { newLineFilterExpr(nil, labels.MatchEqual, "level=error"), }), interval: 5 * time.Minute, - }, OpRangeTypeCount), + }, OpRangeTypeCount, nil, nil), newRangeAggregationExpr( &logRange{ left: &matchersExpr{ @@ -875,7 +875,7 @@ func TestParse(t *testing.T) { }, }, interval: 5 * time.Minute, - }, OpRangeTypeCount)), OpTypeSum, &grouping{groups: []string{"job"}}, nil), + }, OpRangeTypeCount, nil, nil)), OpTypeSum, &grouping{groups: []string{"job"}}, nil), mustNewLiteralExpr("100", false), ), }, @@ -894,7 +894,7 @@ func TestParse(t *testing.T) { }, }, interval: 5 * time.Minute, - }, OpRangeTypeCount), + }, OpRangeTypeCount, nil, nil), "sum", &grouping{ without: false, @@ -1136,6 +1136,7 @@ func TestParse(t *testing.T) { 5*time.Minute, nil), OpRangeTypeCount, + nil, nil, ), }, { @@ -1176,7 +1177,7 @@ func TestParse(t *testing.T) { }, 5*time.Minute, newUnwrapExpr("foo")), - 
OpRangeTypeStdvar, + OpRangeTypeStdvar, nil, nil, ), }, { @@ -1197,7 +1198,7 @@ func TestParse(t *testing.T) { }, 5*time.Minute, newUnwrapExpr("latency")), - OpRangeTypeSum, + OpRangeTypeSum, nil, nil, ), }, { @@ -1218,7 +1219,7 @@ func TestParse(t *testing.T) { }, 5*time.Minute, newUnwrapExpr("latency")), - OpRangeTypeSum, + OpRangeTypeSum, nil, nil, ), }, { @@ -1232,7 +1233,7 @@ func TestParse(t *testing.T) { }, 5*time.Minute, newUnwrapExpr("bar")), - OpRangeTypeStddev, + OpRangeTypeStddev, nil, nil, ), }, { @@ -1242,7 +1243,7 @@ func TestParse(t *testing.T) { newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}), 5*time.Minute, newUnwrapExpr("bar")), - OpRangeTypeMin, + OpRangeTypeMin, nil, nil, ), }, { @@ -1272,7 +1273,222 @@ func TestParse(t *testing.T) { }, 5*time.Minute, newUnwrapExpr("foo")), - OpRangeTypeMax, + OpRangeTypeMax, nil, nil, + ), + }, + { + in: `quantile_over_time(0.99998,{app="foo"} |= "bar" | json | latency >= 250ms or ( status_code < 500 and status_code > 200) + | line_format "blip{{ .foo }}blop {{.status_code}}" | label_format foo=bar,status_code="buzz{{.bar}}" | unwrap foo [5m])`, + exp: newRangeAggregationExpr( + newLogRange(&pipelineExpr{ + left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}), + pipeline: MultiPipelineExpr{ + newLineFilterExpr(nil, labels.MatchEqual, "bar"), + newLabelParserExpr(OpParserTypeJSON, ""), + &labelFilterExpr{ + Filterer: labelfilter.NewOr( + labelfilter.NewDuration(labelfilter.FilterGreaterThanOrEqual, "latency", 250*time.Millisecond), + labelfilter.NewAnd( + labelfilter.NewNumeric(labelfilter.FilterLesserThan, "status_code", 500.0), + labelfilter.NewNumeric(labelfilter.FilterGreaterThan, "status_code", 200.0), + ), + ), + }, + newLineFmtExpr("blip{{ .foo }}blop {{.status_code}}"), + newLabelFmtExpr([]labelFmt{ + newRenameLabelFmt("foo", "bar"), + newTemplateLabelFmt("status_code", "buzz{{.bar}}"), + }), + }, + }, + 5*time.Minute, + newUnwrapExpr("foo")), + OpRangeTypeQuantile, nil, newString("0.99998"), + ), + }, + { + in: `quantile_over_time(0.99998,{app="foo"} |= "bar" | json | latency >= 250ms or ( status_code < 500 and status_code > 200) + | line_format "blip{{ .foo }}blop {{.status_code}}" | label_format foo=bar,status_code="buzz{{.bar}}" | unwrap foo [5m]) by (namespace,instance)`, + exp: newRangeAggregationExpr( + newLogRange(&pipelineExpr{ + left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}), + pipeline: MultiPipelineExpr{ + newLineFilterExpr(nil, labels.MatchEqual, "bar"), + newLabelParserExpr(OpParserTypeJSON, ""), + &labelFilterExpr{ + Filterer: labelfilter.NewOr( + labelfilter.NewDuration(labelfilter.FilterGreaterThanOrEqual, "latency", 250*time.Millisecond), + labelfilter.NewAnd( + labelfilter.NewNumeric(labelfilter.FilterLesserThan, "status_code", 500.0), + labelfilter.NewNumeric(labelfilter.FilterGreaterThan, "status_code", 200.0), + ), + ), + }, + newLineFmtExpr("blip{{ .foo }}blop {{.status_code}}"), + newLabelFmtExpr([]labelFmt{ + newRenameLabelFmt("foo", "bar"), + newTemplateLabelFmt("status_code", "buzz{{.bar}}"), + }), + }, + }, + 5*time.Minute, + newUnwrapExpr("foo")), + OpRangeTypeQuantile, &grouping{without: false, groups: []string{"namespace", "instance"}}, newString("0.99998"), + ), + }, + { + in: `sum without (foo) ( + quantile_over_time(0.99998,{app="foo"} |= "bar" | json | latency >= 250ms or ( status_code < 500 and status_code > 200) + | line_format "blip{{ .foo }}blop {{.status_code}}" 
| label_format foo=bar,status_code="buzz{{.bar}}" | unwrap foo [5m] + ) by (namespace,instance) + )`, + exp: mustNewVectorAggregationExpr( + newRangeAggregationExpr( + newLogRange(&pipelineExpr{ + left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}), + pipeline: MultiPipelineExpr{ + newLineFilterExpr(nil, labels.MatchEqual, "bar"), + newLabelParserExpr(OpParserTypeJSON, ""), + &labelFilterExpr{ + Filterer: labelfilter.NewOr( + labelfilter.NewDuration(labelfilter.FilterGreaterThanOrEqual, "latency", 250*time.Millisecond), + labelfilter.NewAnd( + labelfilter.NewNumeric(labelfilter.FilterLesserThan, "status_code", 500.0), + labelfilter.NewNumeric(labelfilter.FilterGreaterThan, "status_code", 200.0), + ), + ), + }, + newLineFmtExpr("blip{{ .foo }}blop {{.status_code}}"), + newLabelFmtExpr([]labelFmt{ + newRenameLabelFmt("foo", "bar"), + newTemplateLabelFmt("status_code", "buzz{{.bar}}"), + }), + }, + }, + 5*time.Minute, + newUnwrapExpr("foo")), + OpRangeTypeQuantile, &grouping{without: false, groups: []string{"namespace", "instance"}}, newString("0.99998"), + ), + OpTypeSum, + &grouping{without: true, groups: []string{"foo"}}, + nil, + ), + }, + { + in: `topk(10, + quantile_over_time(0.99998,{app="foo"} |= "bar" | json | latency >= 250ms or ( status_code < 500 and status_code > 200) + | line_format "blip{{ .foo }}blop {{.status_code}}" | label_format foo=bar,status_code="buzz{{.bar}}" | unwrap foo [5m] + ) by (namespace,instance) + )`, + exp: mustNewVectorAggregationExpr( + newRangeAggregationExpr( + newLogRange(&pipelineExpr{ + left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}), + pipeline: MultiPipelineExpr{ + newLineFilterExpr(nil, labels.MatchEqual, "bar"), + newLabelParserExpr(OpParserTypeJSON, ""), + &labelFilterExpr{ + Filterer: labelfilter.NewOr( + labelfilter.NewDuration(labelfilter.FilterGreaterThanOrEqual, "latency", 250*time.Millisecond), + labelfilter.NewAnd( + labelfilter.NewNumeric(labelfilter.FilterLesserThan, "status_code", 500.0), + labelfilter.NewNumeric(labelfilter.FilterGreaterThan, "status_code", 200.0), + ), + ), + }, + newLineFmtExpr("blip{{ .foo }}blop {{.status_code}}"), + newLabelFmtExpr([]labelFmt{ + newRenameLabelFmt("foo", "bar"), + newTemplateLabelFmt("status_code", "buzz{{.bar}}"), + }), + }, + }, + 5*time.Minute, + newUnwrapExpr("foo")), + OpRangeTypeQuantile, &grouping{without: false, groups: []string{"namespace", "instance"}}, newString("0.99998"), + ), + OpTypeTopK, + nil, + newString("10"), + ), + }, + { + in: ` + sum by (foo,bar) ( + quantile_over_time(0.99998,{app="foo"} |= "bar" | json | latency >= 250ms or ( status_code < 500 and status_code > 200) + | line_format "blip{{ .foo }}blop {{.status_code}}" | label_format foo=bar,status_code="buzz{{.bar}}" | unwrap foo [5m] + ) by (namespace,instance) + ) + + + avg( + avg_over_time({app="foo"} |= "bar" | json | latency >= 250ms or ( status_code < 500 and status_code > 200) + | line_format "blip{{ .foo }}blop {{.status_code}}" | label_format foo=bar,status_code="buzz{{.bar}}" | unwrap foo [5m] + ) by (namespace,instance) + ) by (foo,bar) + `, + exp: mustNewBinOpExpr(OpTypeAdd, BinOpOptions{ReturnBool: false}, + mustNewVectorAggregationExpr( + newRangeAggregationExpr( + newLogRange(&pipelineExpr{ + left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}), + pipeline: MultiPipelineExpr{ + newLineFilterExpr(nil, labels.MatchEqual, "bar"), + newLabelParserExpr(OpParserTypeJSON, ""), + 
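// For reference (not part of the patch): these fixtures exercise the new
// parameterized range aggregation. A hedged sketch of the round trip, using
// the same ParseExpr entry point that TestParse calls on each fixture — note
// that a grouping clause written after the closing parenthesis attaches to
// quantile_over_time itself, not to an outer vector aggregation:
//
//	expr, err := ParseExpr(
//		`quantile_over_time(0.99,{app="foo"} | json | unwrap latency [5m]) by (namespace,instance)`)
//	if err != nil {
//		panic(err)
//	}
//	fmt.Println(expr.String())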
&labelFilterExpr{ + Filterer: labelfilter.NewOr( + labelfilter.NewDuration(labelfilter.FilterGreaterThanOrEqual, "latency", 250*time.Millisecond), + labelfilter.NewAnd( + labelfilter.NewNumeric(labelfilter.FilterLesserThan, "status_code", 500.0), + labelfilter.NewNumeric(labelfilter.FilterGreaterThan, "status_code", 200.0), + ), + ), + }, + newLineFmtExpr("blip{{ .foo }}blop {{.status_code}}"), + newLabelFmtExpr([]labelFmt{ + newRenameLabelFmt("foo", "bar"), + newTemplateLabelFmt("status_code", "buzz{{.bar}}"), + }), + }, + }, + 5*time.Minute, + newUnwrapExpr("foo")), + OpRangeTypeQuantile, &grouping{without: false, groups: []string{"namespace", "instance"}}, newString("0.99998"), + ), + OpTypeSum, + &grouping{groups: []string{"foo", "bar"}}, + nil, + ), + mustNewVectorAggregationExpr( + newRangeAggregationExpr( + newLogRange(&pipelineExpr{ + left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}), + pipeline: MultiPipelineExpr{ + newLineFilterExpr(nil, labels.MatchEqual, "bar"), + newLabelParserExpr(OpParserTypeJSON, ""), + &labelFilterExpr{ + Filterer: labelfilter.NewOr( + labelfilter.NewDuration(labelfilter.FilterGreaterThanOrEqual, "latency", 250*time.Millisecond), + labelfilter.NewAnd( + labelfilter.NewNumeric(labelfilter.FilterLesserThan, "status_code", 500.0), + labelfilter.NewNumeric(labelfilter.FilterGreaterThan, "status_code", 200.0), + ), + ), + }, + newLineFmtExpr("blip{{ .foo }}blop {{.status_code}}"), + newLabelFmtExpr([]labelFmt{ + newRenameLabelFmt("foo", "bar"), + newTemplateLabelFmt("status_code", "buzz{{.bar}}"), + }), + }, + }, + 5*time.Minute, + newUnwrapExpr("foo")), + OpRangeTypeAvg, &grouping{without: false, groups: []string{"namespace", "instance"}}, nil, + ), + OpTypeAvg, + &grouping{groups: []string{"foo", "bar"}}, + nil, + ), ), }, { @@ -1447,6 +1663,22 @@ func TestParse(t *testing.T) { col: 1, }, }, + { + in: `sum_over_time({namespace="tns"} |= "level=error" | json |foo>=5,bar<25ms| unwrap latency [5m]) by (foo)`, + err: ParseError{msg: "grouping not allowed for sum_over_time aggregation"}, + }, + { + in: `sum_over_time(50,{namespace="tns"} |= "level=error" | json |foo>=5,bar<25ms| unwrap latency [5m])`, + err: ParseError{msg: "parameter 50 not supported for operation sum_over_time"}, + }, + { + in: `quantile_over_time({namespace="tns"} |= "level=error" | json |foo>=5,bar<25ms| unwrap latency [5m])`, + err: ParseError{msg: "parameter required for operation quantile_over_time"}, + }, + { + in: `quantile_over_time(foo,{namespace="tns"} |= "level=error" | json |foo>=5,bar<25ms| unwrap latency [5m])`, + err: ParseError{msg: "syntax error: unexpected IDENTIFIER, expecting NUMBER or { or (", line: 1, col: 20}, + }, } { t.Run(tc.in, func(t *testing.T) { ast, err := ParseExpr(tc.in) From 2ca6677fbb4d6d9fe6684e3456a9aaa26b1771d8 Mon Sep 17 00:00:00 2001 From: Cyril Tovena Date: Sat, 3 Oct 2020 10:31:26 +0200 Subject: [PATCH 23/45] Extract with grouping --- pkg/logql/functions.go | 2 +- pkg/logql/series_extractor.go | 17 +++++++------ pkg/logql/series_extractor_test.go | 38 ++++++++++++++++++++++++++++-- 3 files changed, 47 insertions(+), 10 deletions(-) diff --git a/pkg/logql/functions.go b/pkg/logql/functions.go index cc9086a0be3a0..8f11f03ba973f 100644 --- a/pkg/logql/functions.go +++ b/pkg/logql/functions.go @@ -16,7 +16,7 @@ func (r rangeAggregationExpr) Extractor() (SampleExtractor, error) { return nil, err } if r.left.unwrap != nil { - return newLabelSampleExtractor(r.left.unwrap.identifier), nil + return 
newLabelSampleExtractor(r.left.unwrap.identifier, r.grouping), nil } switch r.operation { case OpRangeTypeRate, OpRangeTypeCount: diff --git a/pkg/logql/series_extractor.go b/pkg/logql/series_extractor.go index b04edc10e22f6..d9ccef7ba86df 100644 --- a/pkg/logql/series_extractor.go +++ b/pkg/logql/series_extractor.go @@ -31,8 +31,7 @@ func (bytesSampleExtractor) Extract(line []byte, lbs labels.Labels) (float64, la type labelSampleExtractor struct { labelName string - - builder *labels.Builder + gr *grouping } func (l *labelSampleExtractor) Extract(_ []byte, lbs labels.Labels) (float64, labels.Labels) { @@ -46,14 +45,18 @@ func (l *labelSampleExtractor) Extract(_ []byte, lbs labels.Labels) (float64, la // todo(cyriltovena) handle errors. return 0, lbs } - l.builder.Reset(lbs) - l.builder.Del(l.labelName) - return f, l.builder.Labels() + if l.gr != nil { + if l.gr.without { + return f, lbs.WithoutLabels(append(l.gr.groups, l.labelName)...) + } + return f, lbs.WithLabels(l.gr.groups...) + } + return f, lbs.WithoutLabels(l.labelName) } -func newLabelSampleExtractor(labelName string) *labelSampleExtractor { +func newLabelSampleExtractor(labelName string, gr *grouping) *labelSampleExtractor { return &labelSampleExtractor{ labelName: labelName, - builder: labels.NewBuilder(nil), + gr: gr, } } diff --git a/pkg/logql/series_extractor_test.go b/pkg/logql/series_extractor_test.go index 3f5dd86adebee..9e88ab66a2b2d 100644 --- a/pkg/logql/series_extractor_test.go +++ b/pkg/logql/series_extractor_test.go @@ -2,6 +2,7 @@ package logql import ( "reflect" + "sort" "testing" "github.com/prometheus/prometheus/pkg/labels" @@ -17,15 +18,48 @@ func Test_labelSampleExtractor_Extract(t *testing.T) { }{ { "convert float", - newLabelSampleExtractor("foo"), + newLabelSampleExtractor("foo", nil), labels.Labels{labels.Label{Name: "foo", Value: "15.0"}}, 15, labels.Labels{}, }, + { + "convert float without", + newLabelSampleExtractor("foo", + &grouping{without: true, groups: []string{"bar", "buzz"}}, + ), + labels.Labels{ + {Name: "foo", Value: "10"}, + {Name: "bar", Value: "foo"}, + {Name: "buzz", Value: "blip"}, + {Name: "namespace", Value: "dev"}, + }, + 10, + labels.Labels{ + {Name: "namespace", Value: "dev"}, + }, + }, + { + "convert float with", + newLabelSampleExtractor("foo", + &grouping{without: false, groups: []string{"bar", "buzz"}}, + ), + labels.Labels{ + {Name: "foo", Value: "0.6"}, + {Name: "bar", Value: "foo"}, + {Name: "buzz", Value: "blip"}, + {Name: "namespace", Value: "dev"}, + }, + 0.6, + labels.Labels{ + {Name: "bar", Value: "foo"}, + {Name: "buzz", Value: "blip"}, + }, + }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - + sort.Sort(tt.in) outval, outlbs := tt.ex.Extract([]byte(""), tt.in) if outval != tt.want { t.Errorf("labelSampleExtractor.Extract() val = %v, want %v", outval, tt.want) From 4effb67e4a05a40f519fb256b630f4dcea23c974 Mon Sep 17 00:00:00 2001 From: Cyril Tovena Date: Mon, 5 Oct 2020 14:18:23 +0200 Subject: [PATCH 24/45] Adds parsing duration on unwrap --- pkg/logql/ast.go | 17 +- pkg/logql/expr.y | 6 +- pkg/logql/expr.y.go | 692 ++++++++++++++++++++------------------- pkg/logql/lex.go | 3 + pkg/logql/parser_test.go | 24 +- 5 files changed, 385 insertions(+), 357 deletions(-) diff --git a/pkg/logql/ast.go b/pkg/logql/ast.go index baa4d3e8e6ed7..4982f7cbc156f 100644 --- a/pkg/logql/ast.go +++ b/pkg/logql/ast.go @@ -472,10 +472,18 @@ func mustNewFloat(s string) float64 { type unwrapExpr struct { identifier string + operation string } -func newUnwrapExpr(id string) 
*unwrapExpr { - return &unwrapExpr{identifier: id} +func (u unwrapExpr) String() string { + if u.operation != "" { + return fmt.Sprintf("%s %s %s(%s)", OpPipe, OpUnwrap, u.operation, u.identifier) + } + return fmt.Sprintf("%s %s %s", OpPipe, OpUnwrap, u.identifier) +} + +func newUnwrapExpr(id string, operation string) *unwrapExpr { + return &unwrapExpr{identifier: id, operation: operation} } type logRange struct { @@ -490,7 +498,7 @@ func (r logRange) String() string { var sb strings.Builder sb.WriteString(r.left.String()) if r.unwrap != nil { - sb.WriteString(fmt.Sprintf("%s %s %s", OpPipe, OpUnwrap, r.unwrap.identifier)) + sb.WriteString(r.unwrap.String()) } sb.WriteString(fmt.Sprintf("[%v]", model.Duration(r.interval))) return sb.String() @@ -560,6 +568,9 @@ const ( OpPipe = "|" OpUnwrap = "unwrap" + + // conversion Op + OpConvDuration = "duration_format" ) func IsComparisonOperator(op string) bool { diff --git a/pkg/logql/expr.y b/pkg/logql/expr.y index ba7418bea33c8..d52e5bc93d66b 100644 --- a/pkg/logql/expr.y +++ b/pkg/logql/expr.y @@ -80,7 +80,7 @@ import ( %token MATCHERS LABELS EQ RE NRE OPEN_BRACE CLOSE_BRACE OPEN_BRACKET CLOSE_BRACKET COMMA DOT PIPE_MATCH PIPE_EXACT OPEN_PARENTHESIS CLOSE_PARENTHESIS BY WITHOUT COUNT_OVER_TIME RATE SUM AVG MAX MIN COUNT STDDEV STDVAR BOTTOMK TOPK BYTES_OVER_TIME BYTES_RATE BOOL JSON REGEXP LOGFMT PIPE LINE_FMT LABEL_FMT UNWRAP AVG_OVER_TIME SUM_OVER_TIME MIN_OVER_TIME - MAX_OVER_TIME STDVAR_OVER_TIME STDDEV_OVER_TIME QUANTILE_OVER_TIME + MAX_OVER_TIME STDVAR_OVER_TIME STDDEV_OVER_TIME QUANTILE_OVER_TIME DURATION_CONV // Operators are listed with increasing precedence. %left OR @@ -131,7 +131,9 @@ logRangeExpr: ; unwrapExpr: - PIPE UNWRAP IDENTIFIER { $$ = newUnwrapExpr($3)}; + PIPE UNWRAP IDENTIFIER { $$ = newUnwrapExpr($3, "")} + | PIPE UNWRAP DURATION_CONV OPEN_PARENTHESIS IDENTIFIER CLOSE_PARENTHESIS { $$ = newUnwrapExpr($5, OpConvDuration)} + ; rangeAggregationExpr: rangeOp OPEN_PARENTHESIS logRangeExpr CLOSE_PARENTHESIS { $$ = newRangeAggregationExpr($3, $1, nil, nil) } diff --git a/pkg/logql/expr.y.go b/pkg/logql/expr.y.go index bd0e33c273d30..b580303666086 100644 --- a/pkg/logql/expr.y.go +++ b/pkg/logql/expr.y.go @@ -100,21 +100,22 @@ const MAX_OVER_TIME = 57392 const STDVAR_OVER_TIME = 57393 const STDDEV_OVER_TIME = 57394 const QUANTILE_OVER_TIME = 57395 -const OR = 57396 -const AND = 57397 -const UNLESS = 57398 -const CMP_EQ = 57399 -const NEQ = 57400 -const LT = 57401 -const LTE = 57402 -const GT = 57403 -const GTE = 57404 -const ADD = 57405 -const SUB = 57406 -const MUL = 57407 -const DIV = 57408 -const MOD = 57409 -const POW = 57410 +const DURATION_CONV = 57396 +const OR = 57397 +const AND = 57398 +const UNLESS = 57399 +const CMP_EQ = 57400 +const NEQ = 57401 +const LT = 57402 +const LTE = 57403 +const GT = 57404 +const GTE = 57405 +const ADD = 57406 +const SUB = 57407 +const MUL = 57408 +const DIV = 57409 +const MOD = 57410 +const POW = 57411 var exprToknames = [...]string{ "$end", @@ -170,6 +171,7 @@ var exprToknames = [...]string{ "STDVAR_OVER_TIME", "STDDEV_OVER_TIME", "QUANTILE_OVER_TIME", + "DURATION_CONV", "OR", "AND", "UNLESS", @@ -192,7 +194,7 @@ const exprEofCode = 1 const exprErrCode = 2 const exprInitialStackSize = 16 -//line pkg/logql/expr.y:314 +//line pkg/logql/expr.y:316 //line yacctab:1 var exprExca = [...]int{ @@ -203,170 +205,174 @@ var exprExca = [...]int{ const exprPrivate = 57344 -const exprLast = 375 +const exprLast = 384 var exprAct = [...]int{ 70, 169, 53, 151, 4, 143, 177, 100, 45, 52, - 207, 61, 234, 5, 
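// A short sketch (not part of the patch) of the unwrap conversion added in
// pkg/logql/ast.go above. When an operation is set, unwrapExpr.String()
// renders it as a call wrapped around the identifier:
//
//	newUnwrapExpr("latency", OpConvDuration).String()
//	// => `| unwrap duration_format(latency)`
//
// while the plain form is unchanged:
//
//	newUnwrapExpr("latency", "").String()
//	// => `| unwrap latency`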
76, 118, 63, 2, 56, 66, - 230, 14, 40, 41, 42, 43, 44, 45, 214, 11, - 42, 43, 44, 45, 114, 116, 117, 6, 153, 116, + 225, 61, 207, 5, 236, 118, 63, 2, 76, 66, + 56, 14, 40, 41, 42, 43, 44, 45, 243, 11, + 42, 43, 44, 45, 71, 72, 231, 6, 153, 116, 117, 17, 18, 28, 29, 31, 32, 30, 33, 34, - 35, 36, 19, 20, 174, 91, 69, 106, 71, 72, - 109, 94, 21, 22, 23, 24, 25, 26, 27, 59, - 108, 145, 11, 92, 122, 103, 57, 58, 15, 16, - 121, 115, 120, 59, 159, 154, 157, 158, 155, 156, - 57, 58, 127, 126, 128, 129, 130, 131, 132, 133, - 134, 135, 136, 137, 138, 139, 140, 141, 144, 148, - 71, 72, 224, 171, 60, 125, 204, 215, 106, 215, - 160, 113, 217, 184, 216, 124, 185, 237, 60, 176, - 170, 227, 145, 179, 68, 172, 103, 173, 46, 47, + 35, 36, 19, 20, 69, 91, 71, 72, 214, 106, + 226, 94, 21, 22, 23, 24, 25, 26, 27, 114, + 116, 117, 174, 145, 122, 92, 59, 103, 183, 15, + 16, 234, 120, 57, 58, 159, 154, 157, 158, 155, + 156, 109, 127, 224, 128, 129, 130, 131, 132, 133, + 134, 135, 136, 137, 138, 139, 140, 141, 239, 148, + 146, 144, 204, 165, 106, 165, 215, 115, 215, 108, + 160, 217, 60, 216, 126, 125, 184, 228, 145, 176, + 170, 124, 103, 179, 222, 172, 211, 173, 46, 47, 50, 51, 48, 49, 40, 41, 42, 43, 44, 45, 180, 181, 182, 38, 39, 46, 47, 50, 51, 48, - 49, 40, 41, 42, 43, 44, 45, 200, 146, 144, - 202, 175, 206, 91, 209, 212, 167, 94, 203, 59, - 213, 120, 210, 201, 239, 236, 57, 58, 218, 37, + 49, 40, 41, 42, 43, 44, 45, 200, 119, 68, + 202, 175, 206, 91, 209, 212, 11, 94, 203, 59, + 213, 120, 210, 201, 121, 167, 57, 58, 218, 37, 38, 39, 46, 47, 50, 51, 48, 49, 40, 41, - 42, 43, 44, 45, 165, 223, 91, 235, 226, 55, - 119, 165, 229, 91, 123, 106, 199, 198, 11, 162, - 189, 188, 11, 232, 60, 222, 121, 165, 233, 145, - 6, 238, 211, 103, 17, 18, 28, 29, 31, 32, - 30, 33, 34, 35, 36, 19, 20, 75, 166, 106, - 111, 161, 187, 186, 164, 21, 22, 23, 24, 25, - 26, 27, 224, 110, 204, 205, 112, 103, 74, 168, - 59, 15, 16, 73, 59, 197, 196, 57, 58, 163, - 228, 57, 58, 219, 208, 152, 77, 78, 79, 80, - 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, - 171, 205, 106, 231, 171, 168, 59, 106, 195, 194, - 59, 193, 192, 57, 58, 60, 145, 57, 58, 60, - 103, 183, 3, 106, 150, 103, 191, 190, 162, 62, - 220, 221, 161, 149, 147, 142, 171, 107, 225, 178, - 171, 103, 67, 97, 99, 98, 152, 104, 105, 207, - 96, 60, 146, 144, 65, 60, 67, 95, 54, 97, - 99, 98, 101, 104, 105, 102, 93, 10, 9, 13, - 8, 12, 7, 64, 1, + 42, 43, 44, 45, 111, 223, 91, 165, 11, 171, + 113, 185, 230, 91, 123, 242, 121, 110, 238, 59, + 112, 237, 11, 233, 227, 60, 57, 58, 166, 235, + 6, 224, 240, 106, 17, 18, 28, 29, 31, 32, + 30, 33, 34, 35, 36, 19, 20, 145, 204, 55, + 106, 103, 106, 199, 198, 21, 22, 23, 24, 25, + 26, 27, 205, 74, 145, 60, 168, 59, 103, 3, + 103, 59, 15, 16, 57, 58, 62, 229, 57, 58, + 75, 208, 73, 205, 146, 144, 197, 196, 59, 162, + 189, 188, 161, 187, 186, 57, 58, 171, 168, 195, + 194, 171, 144, 59, 193, 192, 191, 190, 220, 221, + 57, 58, 219, 60, 152, 150, 164, 60, 171, 77, + 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, + 88, 89, 90, 171, 60, 106, 96, 163, 162, 161, + 149, 147, 106, 95, 142, 107, 65, 241, 67, 60, + 232, 178, 67, 103, 152, 54, 101, 102, 93, 10, + 103, 9, 13, 8, 12, 7, 64, 1, 0, 0, + 0, 97, 99, 98, 0, 104, 105, 207, 97, 99, + 98, 0, 104, 105, } var exprPact = [...]int{ - 15, -1000, 135, -1000, -1000, 166, 15, -1000, -1000, -1000, - -1000, 352, 112, 34, -1000, 267, 262, -1000, -1000, -1000, + 15, -1000, 134, -1000, -1000, 206, 15, -1000, -1000, -1000, + -1000, 344, 147, 32, -1000, 276, 257, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, 
-1000, -1000, -1000, -1000, - -1000, -1000, -1000, -1000, -1000, -1000, -1000, -25, -25, -25, - -25, -25, -25, -25, -25, -25, -25, -25, -25, -25, - -25, -25, 166, -1000, 56, 319, 332, -1000, -1000, -1000, - -1000, 47, 37, 135, 248, 106, -1000, 23, 204, 208, - 103, 93, 71, -1000, -1000, 15, -1000, 15, 15, 15, + -1000, -1000, -1000, -1000, -1000, -1000, -1000, -21, -21, -21, + -21, -21, -21, -21, -21, -21, -21, -21, -21, -21, + -21, -21, 206, -1000, 63, 338, 340, -1000, -1000, -1000, + -1000, 96, 68, 134, 202, 195, -1000, 58, 162, 208, + 109, 103, 102, -1000, -1000, 15, -1000, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, - 15, -1000, 330, -1000, 114, -1000, -1000, -1000, -1000, 329, - -1000, -1000, -1000, 245, 328, 342, 27, -1000, -1000, -1000, - -1000, -1000, 338, -1000, 327, 323, 274, 249, 225, 158, - 297, 58, 31, 153, 15, 335, 335, 98, 81, 81, - -35, -35, -60, -60, -60, -60, -41, -41, -41, -41, - -41, -41, -1000, 114, 245, 245, 245, -1000, 298, -1000, - 105, -1000, 115, 246, 214, 320, 305, 302, 269, 210, - -1000, -1000, -1000, -1000, -1000, -1000, 86, 58, 70, 256, - 293, 303, 261, 209, 86, 15, 5, 101, -1000, 99, - 211, 114, 53, -1000, 281, 326, -1000, -1000, -1000, -1000, + 15, -1000, 339, -1000, 229, -1000, -1000, -1000, -1000, 336, + -1000, -1000, -1000, 248, 335, 350, 27, -1000, -1000, -1000, + -1000, -1000, 348, -1000, 334, 333, 332, 311, 205, 167, + 290, 194, 49, 153, 15, 347, 347, 97, 80, 80, + -36, -36, -61, -61, -61, -61, -42, -42, -42, -42, + -42, -42, -1000, 229, 248, 248, 248, -1000, 55, -1000, + 108, -1000, 200, 287, 284, 300, 298, 293, 280, 247, + -1000, -1000, -1000, -1000, -1000, -1000, 10, 194, 166, 240, + 275, 331, 258, 113, 10, 15, 35, 100, -1000, 98, + 110, 229, 246, -1000, 310, 304, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, - -1000, 202, -1000, 70, -1000, -1000, 254, 334, 200, 108, - 257, -1000, -1000, -3, -1000, 299, -1000, -1000, -1000, -1000, - -1000, -1000, 86, -1000, -1000, -1000, -31, 199, 177, 104, - 86, -1000, -1000, -1000, -36, -1000, -1000, 176, -1000, -1000, + -1000, 111, -1000, 166, -1000, -1000, 223, 6, 216, 104, + 254, -1000, -1000, 13, -1000, 346, -1000, -1000, -1000, -1000, + -1000, -1000, 10, -1000, -1000, -1000, 59, -29, 213, 210, + 85, 10, -1000, -1000, 343, -1000, -34, -1000, -1000, 207, + -1000, 5, -1000, -1000, } var exprPgo = [...]int{ - 0, 374, 16, 18, 0, 6, 322, 4, 15, 7, - 373, 372, 371, 13, 370, 369, 368, 367, 247, 366, - 9, 2, 365, 362, 5, 358, 357, 350, 3, 324, + 0, 367, 16, 20, 0, 6, 269, 4, 15, 7, + 366, 365, 364, 13, 363, 362, 361, 359, 280, 358, + 9, 2, 357, 356, 5, 355, 343, 336, 3, 315, 1, } var exprR1 = [...]int{ 0, 1, 2, 2, 7, 7, 7, 7, 7, 6, 6, 6, 8, 8, 8, 8, 8, 8, 8, 8, - 8, 8, 8, 8, 8, 8, 30, 11, 11, 11, - 11, 14, 14, 14, 14, 14, 3, 3, 3, 3, - 13, 13, 13, 10, 10, 9, 9, 9, 9, 20, - 20, 21, 21, 21, 21, 21, 25, 25, 19, 19, - 19, 26, 28, 28, 29, 29, 29, 27, 24, 24, - 24, 24, 24, 24, 24, 24, 23, 23, 23, 23, - 23, 23, 23, 22, 22, 22, 22, 22, 22, 22, - 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, - 16, 16, 16, 16, 16, 18, 18, 17, 17, 17, - 15, 15, 15, 15, 15, 15, 15, 15, 15, 12, + 8, 8, 8, 8, 8, 8, 30, 30, 11, 11, + 11, 11, 14, 14, 14, 14, 14, 3, 3, 3, + 3, 13, 13, 13, 10, 10, 9, 9, 9, 9, + 20, 20, 21, 21, 21, 21, 21, 25, 25, 19, + 19, 19, 26, 28, 28, 29, 29, 29, 27, 24, + 24, 24, 24, 24, 24, 24, 24, 23, 23, 23, + 23, 23, 23, 23, 22, 22, 22, 22, 22, 22, + 22, 16, 16, 16, 16, 16, 16, 16, 16, 16, + 16, 16, 16, 16, 16, 16, 18, 18, 17, 17, + 17, 15, 15, 15, 15, 15, 15, 15, 15, 15, 12, 
12, 12, 12, 12, 12, 12, 12, 12, 12, - 5, 5, 4, 4, + 12, 5, 5, 4, 4, } var exprR2 = [...]int{ 0, 1, 1, 1, 1, 1, 1, 1, 3, 1, 2, 3, 2, 4, 3, 5, 3, 5, 3, 5, - 4, 6, 3, 4, 3, 2, 3, 4, 6, 5, - 7, 4, 5, 5, 6, 7, 1, 1, 1, 1, - 3, 3, 3, 1, 3, 3, 3, 3, 3, 1, - 2, 1, 2, 2, 2, 2, 2, 3, 1, 1, - 2, 2, 3, 3, 1, 3, 3, 2, 1, 1, - 1, 3, 2, 3, 3, 3, 3, 3, 3, 3, + 4, 6, 3, 4, 3, 2, 3, 6, 4, 6, + 5, 7, 4, 5, 5, 6, 7, 1, 1, 1, + 1, 3, 3, 3, 1, 3, 3, 3, 3, 3, + 1, 2, 1, 2, 2, 2, 2, 2, 3, 1, + 1, 2, 2, 3, 3, 1, 3, 3, 2, 1, + 1, 1, 3, 2, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, - 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, - 4, 4, 4, 4, 4, 0, 1, 1, 2, 2, - 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 3, 4, 4, 4, 4, 4, 4, 4, 4, 4, + 4, 4, 4, 4, 4, 4, 0, 1, 1, 2, + 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, - 1, 3, 4, 4, + 1, 1, 3, 4, 4, } var exprChk = [...]int{ -1000, -1, -2, -6, -7, -13, 22, -11, -14, -16, - -17, 14, -12, -15, 6, 63, 64, 26, 27, 37, + -17, 14, -12, -15, 6, 64, 65, 26, 27, 37, 38, 47, 48, 49, 50, 51, 52, 53, 28, 29, - 32, 30, 31, 33, 34, 35, 36, 54, 55, 56, - 63, 64, 65, 66, 67, 68, 57, 58, 61, 62, - 59, 60, -20, -21, -25, 43, -3, 20, 21, 13, - 58, -7, -6, -2, -10, 2, -9, 4, 22, 22, + 32, 30, 31, 33, 34, 35, 36, 55, 56, 57, + 64, 65, 66, 67, 68, 69, 58, 59, 62, 63, + 60, 61, -20, -21, -25, 43, -3, 20, 21, 13, + 59, -7, -6, -2, -10, 2, -9, 4, 22, 22, -4, 24, 25, 6, 6, -18, 39, -18, -18, -18, -18, -18, -18, -18, -18, -18, -18, -18, -18, -18, -18, -21, -3, -19, -24, -26, -27, 40, 42, 41, -9, -23, -22, 22, 44, 45, 4, 5, 23, 23, - 15, 2, 18, 15, 11, 58, 12, 13, -8, 6, + 15, 2, 18, 15, 11, 59, 12, 13, -8, 6, -13, 22, -7, 6, 22, 22, 22, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, - -2, -2, 5, -24, 55, 18, 54, 5, -24, 5, - -29, -28, 4, 11, 58, 61, 62, 59, 60, 57, + -2, -2, 5, -24, 56, 18, 55, 5, -24, 5, + -29, -28, 4, 11, 59, 62, 63, 60, 61, 58, -9, 5, 5, 5, 5, 2, 23, 18, 8, -30, -20, 43, -13, -8, 23, 18, -7, -5, 4, -5, -24, -24, -24, 23, 18, 11, 7, 6, 7, 6, 7, 6, 7, 6, 7, 6, 7, 6, 7, 6, -4, -8, -30, -20, 8, 8, -30, 46, 23, -30, -20, 23, -4, -7, 23, 18, 23, 23, -28, 2, - 4, 5, 23, -30, 8, 4, 8, 23, 23, -30, - 23, 4, -4, -30, 43, 8, 8, 23, -4, 8, + 4, 5, 23, -30, 8, 4, 54, 8, 23, 23, + -30, 23, 4, -4, 22, -30, 43, 8, 8, 23, + -4, 4, 8, 23, } var exprDef = [...]int{ 0, -2, 1, 2, 3, 9, 0, 4, 5, 6, - 7, 0, 0, 0, 107, 0, 0, 119, 120, 121, - 122, 123, 124, 125, 126, 127, 128, 129, 110, 111, - 112, 113, 114, 115, 116, 117, 118, 105, 105, 105, - 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, - 105, 105, 10, 49, 51, 0, 0, 36, 37, 38, - 39, 3, 2, 0, 0, 0, 43, 0, 0, 0, - 0, 0, 0, 108, 109, 0, 106, 0, 0, 0, + 7, 0, 0, 0, 108, 0, 0, 120, 121, 122, + 123, 124, 125, 126, 127, 128, 129, 130, 111, 112, + 113, 114, 115, 116, 117, 118, 119, 106, 106, 106, + 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, + 106, 106, 10, 50, 52, 0, 0, 37, 38, 39, + 40, 3, 2, 0, 0, 0, 44, 0, 0, 0, + 0, 0, 0, 109, 110, 0, 107, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 50, 0, 52, 53, 54, 55, 58, 59, 0, - 68, 69, 70, 0, 0, 0, 0, 56, 8, 11, - 40, 41, 0, 42, 0, 0, 0, 0, 0, 0, - 0, 0, 3, 107, 0, 0, 0, 90, 91, 92, - 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, - 103, 104, 57, 72, 0, 0, 0, 60, 0, 61, - 67, 64, 0, 0, 0, 0, 0, 0, 0, 0, - 44, 45, 46, 47, 48, 25, 27, 0, 12, 0, - 0, 0, 0, 0, 31, 0, 3, 0, 130, 0, - 73, 74, 75, 71, 0, 0, 81, 88, 80, 87, - 76, 83, 77, 84, 78, 85, 79, 86, 82, 89, - 29, 0, 14, 22, 16, 18, 0, 0, 0, 0, - 0, 24, 33, 3, 32, 0, 132, 133, 65, 66, - 62, 63, 28, 23, 20, 26, 13, 0, 0, 0, - 34, 131, 30, 15, 0, 17, 19, 0, 35, 
21, + 0, 51, 0, 53, 54, 55, 56, 59, 60, 0, + 69, 70, 71, 0, 0, 0, 0, 57, 8, 11, + 41, 42, 0, 43, 0, 0, 0, 0, 0, 0, + 0, 0, 3, 108, 0, 0, 0, 91, 92, 93, + 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, + 104, 105, 58, 73, 0, 0, 0, 61, 0, 62, + 68, 65, 0, 0, 0, 0, 0, 0, 0, 0, + 45, 46, 47, 48, 49, 25, 28, 0, 12, 0, + 0, 0, 0, 0, 32, 0, 3, 0, 131, 0, + 74, 75, 76, 72, 0, 0, 82, 89, 81, 88, + 77, 84, 78, 85, 79, 86, 80, 87, 83, 90, + 30, 0, 14, 22, 16, 18, 0, 0, 0, 0, + 0, 24, 34, 3, 33, 0, 133, 134, 66, 67, + 63, 64, 29, 23, 20, 26, 0, 13, 0, 0, + 0, 35, 132, 31, 0, 15, 0, 17, 19, 0, + 36, 0, 21, 27, } var exprTok1 = [...]int{ @@ -380,7 +386,7 @@ var exprTok2 = [...]int{ 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, - 62, 63, 64, 65, 66, 67, 68, + 62, 63, 64, 65, 66, 67, 68, 69, } var exprTok3 = [...]int{ 0, @@ -871,640 +877,646 @@ exprdefault: exprDollar = exprS[exprpt-3 : exprpt+1] //line pkg/logql/expr.y:134 { - exprVAL.UnwrapExpr = newUnwrapExpr(exprDollar[3].str) + exprVAL.UnwrapExpr = newUnwrapExpr(exprDollar[3].str, "") } case 27: + exprDollar = exprS[exprpt-6 : exprpt+1] +//line pkg/logql/expr.y:135 + { + exprVAL.UnwrapExpr = newUnwrapExpr(exprDollar[5].str, OpConvDuration) + } + case 28: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:137 +//line pkg/logql/expr.y:139 { exprVAL.RangeAggregationExpr = newRangeAggregationExpr(exprDollar[3].LogRangeExpr, exprDollar[1].RangeOp, nil, nil) } - case 28: + case 29: exprDollar = exprS[exprpt-6 : exprpt+1] -//line pkg/logql/expr.y:138 +//line pkg/logql/expr.y:140 { exprVAL.RangeAggregationExpr = newRangeAggregationExpr(exprDollar[5].LogRangeExpr, exprDollar[1].RangeOp, nil, &exprDollar[3].str) } - case 29: + case 30: exprDollar = exprS[exprpt-5 : exprpt+1] -//line pkg/logql/expr.y:139 +//line pkg/logql/expr.y:141 { exprVAL.RangeAggregationExpr = newRangeAggregationExpr(exprDollar[3].LogRangeExpr, exprDollar[1].RangeOp, exprDollar[5].Grouping, nil) } - case 30: + case 31: exprDollar = exprS[exprpt-7 : exprpt+1] -//line pkg/logql/expr.y:140 +//line pkg/logql/expr.y:142 { exprVAL.RangeAggregationExpr = newRangeAggregationExpr(exprDollar[5].LogRangeExpr, exprDollar[1].RangeOp, exprDollar[7].Grouping, &exprDollar[3].str) } - case 31: + case 32: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:145 +//line pkg/logql/expr.y:147 { exprVAL.VectorAggregationExpr = mustNewVectorAggregationExpr(exprDollar[3].MetricExpr, exprDollar[1].VectorOp, nil, nil) } - case 32: + case 33: exprDollar = exprS[exprpt-5 : exprpt+1] -//line pkg/logql/expr.y:146 +//line pkg/logql/expr.y:148 { exprVAL.VectorAggregationExpr = mustNewVectorAggregationExpr(exprDollar[4].MetricExpr, exprDollar[1].VectorOp, exprDollar[2].Grouping, nil) } - case 33: + case 34: exprDollar = exprS[exprpt-5 : exprpt+1] -//line pkg/logql/expr.y:147 +//line pkg/logql/expr.y:149 { exprVAL.VectorAggregationExpr = mustNewVectorAggregationExpr(exprDollar[3].MetricExpr, exprDollar[1].VectorOp, exprDollar[5].Grouping, nil) } - case 34: + case 35: exprDollar = exprS[exprpt-6 : exprpt+1] -//line pkg/logql/expr.y:149 +//line pkg/logql/expr.y:151 { exprVAL.VectorAggregationExpr = mustNewVectorAggregationExpr(exprDollar[5].MetricExpr, exprDollar[1].VectorOp, nil, &exprDollar[3].str) } - case 35: + case 36: exprDollar = exprS[exprpt-7 : exprpt+1] -//line pkg/logql/expr.y:150 +//line pkg/logql/expr.y:152 { exprVAL.VectorAggregationExpr = mustNewVectorAggregationExpr(exprDollar[5].MetricExpr, 
exprDollar[1].VectorOp, exprDollar[7].Grouping, &exprDollar[3].str) } - case 36: + case 37: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:154 +//line pkg/logql/expr.y:156 { exprVAL.Filter = labels.MatchRegexp } - case 37: + case 38: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:155 +//line pkg/logql/expr.y:157 { exprVAL.Filter = labels.MatchEqual } - case 38: + case 39: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:156 +//line pkg/logql/expr.y:158 { exprVAL.Filter = labels.MatchNotRegexp } - case 39: + case 40: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:157 +//line pkg/logql/expr.y:159 { exprVAL.Filter = labels.MatchNotEqual } - case 40: + case 41: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:161 +//line pkg/logql/expr.y:163 { exprVAL.Selector = exprDollar[2].Matchers } - case 41: + case 42: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:162 +//line pkg/logql/expr.y:164 { exprVAL.Selector = exprDollar[2].Matchers } - case 42: + case 43: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:163 +//line pkg/logql/expr.y:165 { } - case 43: + case 44: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:167 +//line pkg/logql/expr.y:169 { exprVAL.Matchers = []*labels.Matcher{exprDollar[1].Matcher} } - case 44: + case 45: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:168 +//line pkg/logql/expr.y:170 { exprVAL.Matchers = append(exprDollar[1].Matchers, exprDollar[3].Matcher) } - case 45: + case 46: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:172 +//line pkg/logql/expr.y:174 { exprVAL.Matcher = mustNewMatcher(labels.MatchEqual, exprDollar[1].str, exprDollar[3].str) } - case 46: + case 47: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:173 +//line pkg/logql/expr.y:175 { exprVAL.Matcher = mustNewMatcher(labels.MatchNotEqual, exprDollar[1].str, exprDollar[3].str) } - case 47: + case 48: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:174 +//line pkg/logql/expr.y:176 { exprVAL.Matcher = mustNewMatcher(labels.MatchRegexp, exprDollar[1].str, exprDollar[3].str) } - case 48: + case 49: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:175 +//line pkg/logql/expr.y:177 { exprVAL.Matcher = mustNewMatcher(labels.MatchNotRegexp, exprDollar[1].str, exprDollar[3].str) } - case 49: + case 50: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:179 +//line pkg/logql/expr.y:181 { exprVAL.PipelineExpr = MultiPipelineExpr{exprDollar[1].PipelineStage} } - case 50: + case 51: exprDollar = exprS[exprpt-2 : exprpt+1] -//line pkg/logql/expr.y:180 +//line pkg/logql/expr.y:182 { exprVAL.PipelineExpr = append(exprDollar[1].PipelineExpr, exprDollar[2].PipelineStage) } - case 51: + case 52: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:184 +//line pkg/logql/expr.y:186 { exprVAL.PipelineStage = exprDollar[1].LineFilters } - case 52: + case 53: exprDollar = exprS[exprpt-2 : exprpt+1] -//line pkg/logql/expr.y:185 +//line pkg/logql/expr.y:187 { exprVAL.PipelineStage = exprDollar[2].LabelParser } - case 53: + case 54: exprDollar = exprS[exprpt-2 : exprpt+1] -//line pkg/logql/expr.y:186 +//line pkg/logql/expr.y:188 { exprVAL.PipelineStage = &labelFilterExpr{Filterer: exprDollar[2].LabelFilter} } - case 54: + case 55: exprDollar = exprS[exprpt-2 : exprpt+1] -//line pkg/logql/expr.y:187 +//line pkg/logql/expr.y:189 { exprVAL.PipelineStage = exprDollar[2].LineFormatExpr } - case 
55: + case 56: exprDollar = exprS[exprpt-2 : exprpt+1] -//line pkg/logql/expr.y:188 +//line pkg/logql/expr.y:190 { exprVAL.PipelineStage = exprDollar[2].LabelFormatExpr } - case 56: + case 57: exprDollar = exprS[exprpt-2 : exprpt+1] -//line pkg/logql/expr.y:192 +//line pkg/logql/expr.y:194 { exprVAL.LineFilters = newLineFilterExpr(nil, exprDollar[1].Filter, exprDollar[2].str) } - case 57: + case 58: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:193 +//line pkg/logql/expr.y:195 { exprVAL.LineFilters = newLineFilterExpr(exprDollar[1].LineFilters, exprDollar[2].Filter, exprDollar[3].str) } - case 58: + case 59: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:196 +//line pkg/logql/expr.y:198 { exprVAL.LabelParser = newLabelParserExpr(OpParserTypeJSON, "") } - case 59: + case 60: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:197 +//line pkg/logql/expr.y:199 { exprVAL.LabelParser = newLabelParserExpr(OpParserTypeLogfmt, "") } - case 60: + case 61: exprDollar = exprS[exprpt-2 : exprpt+1] -//line pkg/logql/expr.y:198 +//line pkg/logql/expr.y:200 { exprVAL.LabelParser = newLabelParserExpr(OpParserTypeRegexp, exprDollar[2].str) } - case 61: + case 62: exprDollar = exprS[exprpt-2 : exprpt+1] -//line pkg/logql/expr.y:201 +//line pkg/logql/expr.y:203 { exprVAL.LineFormatExpr = newLineFmtExpr(exprDollar[2].str) } - case 62: + case 63: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:204 +//line pkg/logql/expr.y:206 { exprVAL.LabelFormat = newRenameLabelFmt(exprDollar[1].str, exprDollar[3].str) } - case 63: + case 64: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:205 +//line pkg/logql/expr.y:207 { exprVAL.LabelFormat = newTemplateLabelFmt(exprDollar[1].str, exprDollar[3].str) } - case 64: + case 65: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:209 +//line pkg/logql/expr.y:211 { exprVAL.LabelsFormat = []labelFmt{exprDollar[1].LabelFormat} } - case 65: + case 66: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:210 +//line pkg/logql/expr.y:212 { exprVAL.LabelsFormat = append(exprDollar[1].LabelsFormat, exprDollar[3].LabelFormat) } - case 67: + case 68: exprDollar = exprS[exprpt-2 : exprpt+1] -//line pkg/logql/expr.y:214 +//line pkg/logql/expr.y:216 { exprVAL.LabelFormatExpr = newLabelFmtExpr(exprDollar[2].LabelsFormat) } - case 68: + case 69: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:217 +//line pkg/logql/expr.y:219 { exprVAL.LabelFilter = labelfilter.NewString(exprDollar[1].Matcher) } - case 69: + case 70: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:218 +//line pkg/logql/expr.y:220 { exprVAL.LabelFilter = exprDollar[1].DurationFilter } - case 70: + case 71: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:219 +//line pkg/logql/expr.y:221 { exprVAL.LabelFilter = exprDollar[1].NumberFilter } - case 71: + case 72: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:220 +//line pkg/logql/expr.y:222 { exprVAL.LabelFilter = exprDollar[2].LabelFilter } - case 72: + case 73: exprDollar = exprS[exprpt-2 : exprpt+1] -//line pkg/logql/expr.y:221 +//line pkg/logql/expr.y:223 { exprVAL.LabelFilter = labelfilter.NewAnd(exprDollar[1].LabelFilter, exprDollar[2].LabelFilter) } - case 73: - exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:222 - { - exprVAL.LabelFilter = labelfilter.NewAnd(exprDollar[1].LabelFilter, exprDollar[3].LabelFilter) - } case 74: exprDollar = exprS[exprpt-3 : exprpt+1] -//line 
pkg/logql/expr.y:223 +//line pkg/logql/expr.y:224 { exprVAL.LabelFilter = labelfilter.NewAnd(exprDollar[1].LabelFilter, exprDollar[3].LabelFilter) } case 75: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:224 +//line pkg/logql/expr.y:225 { - exprVAL.LabelFilter = labelfilter.NewOr(exprDollar[1].LabelFilter, exprDollar[3].LabelFilter) + exprVAL.LabelFilter = labelfilter.NewAnd(exprDollar[1].LabelFilter, exprDollar[3].LabelFilter) } case 76: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:228 +//line pkg/logql/expr.y:226 { - exprVAL.DurationFilter = labelfilter.NewDuration(labelfilter.FilterGreaterThan, exprDollar[1].str, exprDollar[3].duration) + exprVAL.LabelFilter = labelfilter.NewOr(exprDollar[1].LabelFilter, exprDollar[3].LabelFilter) } case 77: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:229 +//line pkg/logql/expr.y:230 { - exprVAL.DurationFilter = labelfilter.NewDuration(labelfilter.FilterGreaterThanOrEqual, exprDollar[1].str, exprDollar[3].duration) + exprVAL.DurationFilter = labelfilter.NewDuration(labelfilter.FilterGreaterThan, exprDollar[1].str, exprDollar[3].duration) } case 78: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:230 +//line pkg/logql/expr.y:231 { - exprVAL.DurationFilter = labelfilter.NewDuration(labelfilter.FilterLesserThan, exprDollar[1].str, exprDollar[3].duration) + exprVAL.DurationFilter = labelfilter.NewDuration(labelfilter.FilterGreaterThanOrEqual, exprDollar[1].str, exprDollar[3].duration) } case 79: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:231 +//line pkg/logql/expr.y:232 { - exprVAL.DurationFilter = labelfilter.NewDuration(labelfilter.FilterLesserThanOrEqual, exprDollar[1].str, exprDollar[3].duration) + exprVAL.DurationFilter = labelfilter.NewDuration(labelfilter.FilterLesserThan, exprDollar[1].str, exprDollar[3].duration) } case 80: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:232 +//line pkg/logql/expr.y:233 { - exprVAL.DurationFilter = labelfilter.NewDuration(labelfilter.FilterNotEqual, exprDollar[1].str, exprDollar[3].duration) + exprVAL.DurationFilter = labelfilter.NewDuration(labelfilter.FilterLesserThanOrEqual, exprDollar[1].str, exprDollar[3].duration) } case 81: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:233 +//line pkg/logql/expr.y:234 { - exprVAL.DurationFilter = labelfilter.NewDuration(labelfilter.FilterEqual, exprDollar[1].str, exprDollar[3].duration) + exprVAL.DurationFilter = labelfilter.NewDuration(labelfilter.FilterNotEqual, exprDollar[1].str, exprDollar[3].duration) } case 82: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:234 +//line pkg/logql/expr.y:235 { exprVAL.DurationFilter = labelfilter.NewDuration(labelfilter.FilterEqual, exprDollar[1].str, exprDollar[3].duration) } case 83: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:238 +//line pkg/logql/expr.y:236 { - exprVAL.NumberFilter = labelfilter.NewNumeric(labelfilter.FilterGreaterThan, exprDollar[1].str, mustNewFloat(exprDollar[3].str)) + exprVAL.DurationFilter = labelfilter.NewDuration(labelfilter.FilterEqual, exprDollar[1].str, exprDollar[3].duration) } case 84: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:239 +//line pkg/logql/expr.y:240 { - exprVAL.NumberFilter = labelfilter.NewNumeric(labelfilter.FilterGreaterThanOrEqual, exprDollar[1].str, mustNewFloat(exprDollar[3].str)) + exprVAL.NumberFilter = labelfilter.NewNumeric(labelfilter.FilterGreaterThan, exprDollar[1].str, 
mustNewFloat(exprDollar[3].str)) } case 85: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:240 +//line pkg/logql/expr.y:241 { - exprVAL.NumberFilter = labelfilter.NewNumeric(labelfilter.FilterLesserThan, exprDollar[1].str, mustNewFloat(exprDollar[3].str)) + exprVAL.NumberFilter = labelfilter.NewNumeric(labelfilter.FilterGreaterThanOrEqual, exprDollar[1].str, mustNewFloat(exprDollar[3].str)) } case 86: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:241 +//line pkg/logql/expr.y:242 { - exprVAL.NumberFilter = labelfilter.NewNumeric(labelfilter.FilterLesserThanOrEqual, exprDollar[1].str, mustNewFloat(exprDollar[3].str)) + exprVAL.NumberFilter = labelfilter.NewNumeric(labelfilter.FilterLesserThan, exprDollar[1].str, mustNewFloat(exprDollar[3].str)) } case 87: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:242 +//line pkg/logql/expr.y:243 { - exprVAL.NumberFilter = labelfilter.NewNumeric(labelfilter.FilterNotEqual, exprDollar[1].str, mustNewFloat(exprDollar[3].str)) + exprVAL.NumberFilter = labelfilter.NewNumeric(labelfilter.FilterLesserThanOrEqual, exprDollar[1].str, mustNewFloat(exprDollar[3].str)) } case 88: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:243 +//line pkg/logql/expr.y:244 { - exprVAL.NumberFilter = labelfilter.NewNumeric(labelfilter.FilterEqual, exprDollar[1].str, mustNewFloat(exprDollar[3].str)) + exprVAL.NumberFilter = labelfilter.NewNumeric(labelfilter.FilterNotEqual, exprDollar[1].str, mustNewFloat(exprDollar[3].str)) } case 89: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:244 +//line pkg/logql/expr.y:245 { exprVAL.NumberFilter = labelfilter.NewNumeric(labelfilter.FilterEqual, exprDollar[1].str, mustNewFloat(exprDollar[3].str)) } case 90: + exprDollar = exprS[exprpt-3 : exprpt+1] +//line pkg/logql/expr.y:246 + { + exprVAL.NumberFilter = labelfilter.NewNumeric(labelfilter.FilterEqual, exprDollar[1].str, mustNewFloat(exprDollar[3].str)) + } + case 91: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:250 +//line pkg/logql/expr.y:252 { exprVAL.BinOpExpr = mustNewBinOpExpr("or", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } - case 91: + case 92: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:251 +//line pkg/logql/expr.y:253 { exprVAL.BinOpExpr = mustNewBinOpExpr("and", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } - case 92: + case 93: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:252 +//line pkg/logql/expr.y:254 { exprVAL.BinOpExpr = mustNewBinOpExpr("unless", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } - case 93: + case 94: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:253 +//line pkg/logql/expr.y:255 { exprVAL.BinOpExpr = mustNewBinOpExpr("+", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } - case 94: + case 95: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:254 +//line pkg/logql/expr.y:256 { exprVAL.BinOpExpr = mustNewBinOpExpr("-", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } - case 95: + case 96: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:255 +//line pkg/logql/expr.y:257 { exprVAL.BinOpExpr = mustNewBinOpExpr("*", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } - case 96: + case 97: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:256 +//line pkg/logql/expr.y:258 { exprVAL.BinOpExpr = mustNewBinOpExpr("/", 
exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } - case 97: + case 98: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:257 +//line pkg/logql/expr.y:259 { exprVAL.BinOpExpr = mustNewBinOpExpr("%", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } - case 98: + case 99: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:258 +//line pkg/logql/expr.y:260 { exprVAL.BinOpExpr = mustNewBinOpExpr("^", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } - case 99: + case 100: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:259 +//line pkg/logql/expr.y:261 { exprVAL.BinOpExpr = mustNewBinOpExpr("==", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } - case 100: + case 101: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:260 +//line pkg/logql/expr.y:262 { exprVAL.BinOpExpr = mustNewBinOpExpr("!=", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } - case 101: + case 102: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:261 +//line pkg/logql/expr.y:263 { exprVAL.BinOpExpr = mustNewBinOpExpr(">", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } - case 102: + case 103: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:262 +//line pkg/logql/expr.y:264 { exprVAL.BinOpExpr = mustNewBinOpExpr(">=", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } - case 103: + case 104: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:263 +//line pkg/logql/expr.y:265 { exprVAL.BinOpExpr = mustNewBinOpExpr("<", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } - case 104: + case 105: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:264 +//line pkg/logql/expr.y:266 { exprVAL.BinOpExpr = mustNewBinOpExpr("<=", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } - case 105: + case 106: exprDollar = exprS[exprpt-0 : exprpt+1] -//line pkg/logql/expr.y:268 +//line pkg/logql/expr.y:270 { exprVAL.BinOpModifier = BinOpOptions{} } - case 106: + case 107: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:269 +//line pkg/logql/expr.y:271 { exprVAL.BinOpModifier = BinOpOptions{ReturnBool: true} } - case 107: + case 108: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:273 +//line pkg/logql/expr.y:275 { exprVAL.LiteralExpr = mustNewLiteralExpr(exprDollar[1].str, false) } - case 108: + case 109: exprDollar = exprS[exprpt-2 : exprpt+1] -//line pkg/logql/expr.y:274 +//line pkg/logql/expr.y:276 { exprVAL.LiteralExpr = mustNewLiteralExpr(exprDollar[2].str, false) } - case 109: + case 110: exprDollar = exprS[exprpt-2 : exprpt+1] -//line pkg/logql/expr.y:275 +//line pkg/logql/expr.y:277 { exprVAL.LiteralExpr = mustNewLiteralExpr(exprDollar[2].str, true) } - case 110: + case 111: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:279 +//line pkg/logql/expr.y:281 { exprVAL.VectorOp = OpTypeSum } - case 111: + case 112: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:280 +//line pkg/logql/expr.y:282 { exprVAL.VectorOp = OpTypeAvg } - case 112: + case 113: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:281 +//line pkg/logql/expr.y:283 { exprVAL.VectorOp = OpTypeCount } - case 113: + case 114: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:282 +//line pkg/logql/expr.y:284 { exprVAL.VectorOp = OpTypeMax } - case 114: + case 115: exprDollar = 
exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:283 +//line pkg/logql/expr.y:285 { exprVAL.VectorOp = OpTypeMin } - case 115: + case 116: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:284 +//line pkg/logql/expr.y:286 { exprVAL.VectorOp = OpTypeStddev } - case 116: + case 117: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:285 +//line pkg/logql/expr.y:287 { exprVAL.VectorOp = OpTypeStdvar } - case 117: + case 118: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:286 +//line pkg/logql/expr.y:288 { exprVAL.VectorOp = OpTypeBottomK } - case 118: + case 119: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:287 +//line pkg/logql/expr.y:289 { exprVAL.VectorOp = OpTypeTopK } - case 119: + case 120: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:291 +//line pkg/logql/expr.y:293 { exprVAL.RangeOp = OpRangeTypeCount } - case 120: + case 121: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:292 +//line pkg/logql/expr.y:294 { exprVAL.RangeOp = OpRangeTypeRate } - case 121: + case 122: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:293 +//line pkg/logql/expr.y:295 { exprVAL.RangeOp = OpRangeTypeBytes } - case 122: + case 123: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:294 +//line pkg/logql/expr.y:296 { exprVAL.RangeOp = OpRangeTypeBytesRate } - case 123: + case 124: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:295 +//line pkg/logql/expr.y:297 { exprVAL.RangeOp = OpRangeTypeAvg } - case 124: + case 125: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:296 +//line pkg/logql/expr.y:298 { exprVAL.RangeOp = OpRangeTypeSum } - case 125: + case 126: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:297 +//line pkg/logql/expr.y:299 { exprVAL.RangeOp = OpRangeTypeMin } - case 126: + case 127: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:298 +//line pkg/logql/expr.y:300 { exprVAL.RangeOp = OpRangeTypeMax } - case 127: + case 128: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:299 +//line pkg/logql/expr.y:301 { exprVAL.RangeOp = OpRangeTypeStdvar } - case 128: + case 129: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:300 +//line pkg/logql/expr.y:302 { exprVAL.RangeOp = OpRangeTypeStddev } - case 129: + case 130: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:301 +//line pkg/logql/expr.y:303 { exprVAL.RangeOp = OpRangeTypeQuantile } - case 130: + case 131: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:306 +//line pkg/logql/expr.y:308 { exprVAL.Labels = []string{exprDollar[1].str} } - case 131: + case 132: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:307 +//line pkg/logql/expr.y:309 { exprVAL.Labels = append(exprDollar[1].Labels, exprDollar[3].str) } - case 132: + case 133: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:311 +//line pkg/logql/expr.y:313 { exprVAL.Grouping = &grouping{without: false, groups: exprDollar[3].Labels} } - case 133: + case 134: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:312 +//line pkg/logql/expr.y:314 { exprVAL.Grouping = &grouping{without: true, groups: exprDollar[3].Labels} } diff --git a/pkg/logql/lex.go b/pkg/logql/lex.go index b2b47b79b2760..2fcd7042fe1ca 100644 --- a/pkg/logql/lex.go +++ b/pkg/logql/lex.go @@ -80,6 +80,9 @@ var tokens = map[string]int{ // fmt OpFmtLabel: LABEL_FMT, OpFmtLine: LINE_FMT, + + // conversion Op + 
OpConvDuration: DURATION_CONV, } type lexer struct { diff --git a/pkg/logql/parser_test.go b/pkg/logql/parser_test.go index 96848061b9718..69dbcc2df597b 100644 --- a/pkg/logql/parser_test.go +++ b/pkg/logql/parser_test.go @@ -1176,7 +1176,7 @@ func TestParse(t *testing.T) { }, }, 5*time.Minute, - newUnwrapExpr("foo")), + newUnwrapExpr("foo", "")), OpRangeTypeStdvar, nil, nil, ), }, @@ -1197,7 +1197,7 @@ func TestParse(t *testing.T) { }, }, 5*time.Minute, - newUnwrapExpr("latency")), + newUnwrapExpr("latency", "")), OpRangeTypeSum, nil, nil, ), }, @@ -1218,7 +1218,7 @@ func TestParse(t *testing.T) { }, }, 5*time.Minute, - newUnwrapExpr("latency")), + newUnwrapExpr("latency", "")), OpRangeTypeSum, nil, nil, ), }, @@ -1232,7 +1232,7 @@ func TestParse(t *testing.T) { }, }, 5*time.Minute, - newUnwrapExpr("bar")), + newUnwrapExpr("bar", "")), OpRangeTypeStddev, nil, nil, ), }, @@ -1242,7 +1242,7 @@ func TestParse(t *testing.T) { newLogRange( newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}), 5*time.Minute, - newUnwrapExpr("bar")), + newUnwrapExpr("bar", "")), OpRangeTypeMin, nil, nil, ), }, @@ -1272,7 +1272,7 @@ func TestParse(t *testing.T) { }, }, 5*time.Minute, - newUnwrapExpr("foo")), + newUnwrapExpr("foo", "")), OpRangeTypeMax, nil, nil, ), }, @@ -1302,7 +1302,7 @@ func TestParse(t *testing.T) { }, }, 5*time.Minute, - newUnwrapExpr("foo")), + newUnwrapExpr("foo", "")), OpRangeTypeQuantile, nil, newString("0.99998"), ), }, @@ -1332,7 +1332,7 @@ func TestParse(t *testing.T) { }, }, 5*time.Minute, - newUnwrapExpr("foo")), + newUnwrapExpr("foo", "")), OpRangeTypeQuantile, &grouping{without: false, groups: []string{"namespace", "instance"}}, newString("0.99998"), ), }, @@ -1366,7 +1366,7 @@ func TestParse(t *testing.T) { }, }, 5*time.Minute, - newUnwrapExpr("foo")), + newUnwrapExpr("foo", "")), OpRangeTypeQuantile, &grouping{without: false, groups: []string{"namespace", "instance"}}, newString("0.99998"), ), OpTypeSum, @@ -1404,7 +1404,7 @@ func TestParse(t *testing.T) { }, }, 5*time.Minute, - newUnwrapExpr("foo")), + newUnwrapExpr("foo", "")), OpRangeTypeQuantile, &grouping{without: false, groups: []string{"namespace", "instance"}}, newString("0.99998"), ), OpTypeTopK, @@ -1451,7 +1451,7 @@ func TestParse(t *testing.T) { }, }, 5*time.Minute, - newUnwrapExpr("foo")), + newUnwrapExpr("foo", "")), OpRangeTypeQuantile, &grouping{without: false, groups: []string{"namespace", "instance"}}, newString("0.99998"), ), OpTypeSum, @@ -1482,7 +1482,7 @@ func TestParse(t *testing.T) { }, }, 5*time.Minute, - newUnwrapExpr("foo")), + newUnwrapExpr("foo", "")), OpRangeTypeAvg, &grouping{without: false, groups: []string{"namespace", "instance"}}, nil, ), OpTypeAvg, From 832a977cbb02cfb2346df96a3a48acfafbbc71dc Mon Sep 17 00:00:00 2001 From: Cyril Tovena Date: Tue, 6 Oct 2020 15:44:21 +0200 Subject: [PATCH 25/45] Improve the lexer to support more common identifiers as functions. Also add duration conversion for unwrap.
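For example, the new ast_test.go case in this patch exercises the added grammar:

    sum_over_time({namespace="tns"} |= "level=error" | json | avg=5 and bar<25ms | unwrap duration(latency) [5m])

The unwrapped label value is converted with duration(...) rather than parsed as a plain float, and the new convOp rule accepts duration_seconds(...) in the same position.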
Signed-off-by: Cyril Tovena --- pkg/logql/ast.go | 3 +- pkg/logql/ast_test.go | 7 + pkg/logql/expr.y | 49 +- pkg/logql/expr.y.go | 782 +++++++++++++++-------------- pkg/logql/functions.go | 2 +- pkg/logql/lex.go | 97 +++- pkg/logql/lex_test.go | 45 ++ pkg/logql/parser_test.go | 143 ++++++ pkg/logql/series_extractor.go | 31 +- pkg/logql/series_extractor_test.go | 40 +- 10 files changed, 763 insertions(+), 436 deletions(-) diff --git a/pkg/logql/ast.go b/pkg/logql/ast.go index 4982f7cbc156f..2e8ffa3a1f497 100644 --- a/pkg/logql/ast.go +++ b/pkg/logql/ast.go @@ -570,7 +570,8 @@ const ( OpUnwrap = "unwrap" // conversion Op - OpConvDuration = "duration_format" + OpConvDuration = "duration" + OpConvDurationSeconds = "duration_seconds" ) func IsComparisonOperator(op string) bool { diff --git a/pkg/logql/ast_test.go b/pkg/logql/ast_test.go index 8282ca479c7f0..e3250b0d4fdb6 100644 --- a/pkg/logql/ast_test.go +++ b/pkg/logql/ast_test.go @@ -83,6 +83,13 @@ func Test_SampleExpr_String(t *testing.T) { / count_over_time({namespace="tns"} | logfmt | label_format foo=bar[5m]) )`, + `sum by (job) ( + sum_over_time( + {namespace="tns"} |= "level=error" | json | avg=5 and bar<25ms | unwrap duration(latency) [5m] + ) + / + count_over_time({namespace="tns"} | logfmt | label_format foo=bar[5m]) + )`, } { t.Run(tc, func(t *testing.T) { expr, err := ParseExpr(tc) diff --git a/pkg/logql/expr.y b/pkg/logql/expr.y index d52e5bc93d66b..5e885e0ebde3e 100644 --- a/pkg/logql/expr.y +++ b/pkg/logql/expr.y @@ -19,6 +19,7 @@ import ( Matchers []*labels.Matcher RangeAggregationExpr SampleExpr RangeOp string + ConvOp string Selector []*labels.Matcher VectorAggregationExpr SampleExpr MetricExpr SampleExpr @@ -56,6 +57,7 @@ import ( %type matchers %type rangeAggregationExpr %type rangeOp +%type convOp %type selector %type vectorAggregationExpr %type vectorOp @@ -80,7 +82,7 @@ import ( %token MATCHERS LABELS EQ RE NRE OPEN_BRACE CLOSE_BRACE OPEN_BRACKET CLOSE_BRACKET COMMA DOT PIPE_MATCH PIPE_EXACT OPEN_PARENTHESIS CLOSE_PARENTHESIS BY WITHOUT COUNT_OVER_TIME RATE SUM AVG MAX MIN COUNT STDDEV STDVAR BOTTOMK TOPK BYTES_OVER_TIME BYTES_RATE BOOL JSON REGEXP LOGFMT PIPE LINE_FMT LABEL_FMT UNWRAP AVG_OVER_TIME SUM_OVER_TIME MIN_OVER_TIME - MAX_OVER_TIME STDVAR_OVER_TIME STDDEV_OVER_TIME QUANTILE_OVER_TIME DURATION_CONV + MAX_OVER_TIME STDVAR_OVER_TIME STDDEV_OVER_TIME QUANTILE_OVER_TIME DURATION_CONV DURATION_SECONDS_CONV // Operators are listed with increasing precedence. 
%left OR @@ -114,32 +116,37 @@ logExpr: ; logRangeExpr: - selector RANGE { $$ = newLogRange(newMatcherExpr($1), $2, nil) } - | OPEN_PARENTHESIS selector CLOSE_PARENTHESIS RANGE { $$ = newLogRange(newMatcherExpr($2), $4, nil) } - | selector RANGE unwrapExpr { $$ = newLogRange(newMatcherExpr($1), $2 , $3) } - | OPEN_PARENTHESIS selector CLOSE_PARENTHESIS RANGE unwrapExpr { $$ = newLogRange(newMatcherExpr($2), $4 , $5) } - | selector unwrapExpr RANGE { $$ = newLogRange(newMatcherExpr($1), $3, $2 ) } - | OPEN_PARENTHESIS selector unwrapExpr CLOSE_PARENTHESIS RANGE { $$ = newLogRange(newMatcherExpr($2), $5, $3 ) } - | selector pipelineExpr RANGE { $$ = newLogRange(newPipelineExpr(newMatcherExpr($1), $2), $3, nil ) } - | OPEN_PARENTHESIS selector pipelineExpr CLOSE_PARENTHESIS RANGE { $$ = newLogRange(newPipelineExpr(newMatcherExpr($2), $3), $5, nil ) } - | selector pipelineExpr unwrapExpr RANGE { $$ = newLogRange(newPipelineExpr(newMatcherExpr($1), $2), $4, $3) } - | OPEN_PARENTHESIS selector pipelineExpr unwrapExpr CLOSE_PARENTHESIS RANGE { $$ = newLogRange(newPipelineExpr(newMatcherExpr($2), $3), $6, $4) } - | selector RANGE pipelineExpr { $$ = newLogRange(newPipelineExpr(newMatcherExpr($1), $3), $2, nil) } - | selector RANGE pipelineExpr unwrapExpr { $$ = newLogRange(newPipelineExpr(newMatcherExpr($1), $3), $2, $4 ) } - | OPEN_PARENTHESIS logRangeExpr CLOSE_PARENTHESIS { $$ = $2 } + selector RANGE { $$ = newLogRange(newMatcherExpr($1), $2, nil) } + | OPEN_PARENTHESIS selector CLOSE_PARENTHESIS RANGE { $$ = newLogRange(newMatcherExpr($2), $4, nil) } + | selector RANGE unwrapExpr { $$ = newLogRange(newMatcherExpr($1), $2 , $3) } + | OPEN_PARENTHESIS selector CLOSE_PARENTHESIS RANGE unwrapExpr { $$ = newLogRange(newMatcherExpr($2), $4 , $5) } + | selector unwrapExpr RANGE { $$ = newLogRange(newMatcherExpr($1), $3, $2 ) } + | OPEN_PARENTHESIS selector unwrapExpr CLOSE_PARENTHESIS RANGE { $$ = newLogRange(newMatcherExpr($2), $5, $3 ) } + | selector pipelineExpr RANGE { $$ = newLogRange(newPipelineExpr(newMatcherExpr($1), $2), $3, nil ) } + | OPEN_PARENTHESIS selector pipelineExpr CLOSE_PARENTHESIS RANGE { $$ = newLogRange(newPipelineExpr(newMatcherExpr($2), $3), $5, nil ) } + | selector pipelineExpr unwrapExpr RANGE { $$ = newLogRange(newPipelineExpr(newMatcherExpr($1), $2), $4, $3) } + | OPEN_PARENTHESIS selector pipelineExpr unwrapExpr CLOSE_PARENTHESIS RANGE { $$ = newLogRange(newPipelineExpr(newMatcherExpr($2), $3), $6, $4) } + | selector RANGE pipelineExpr { $$ = newLogRange(newPipelineExpr(newMatcherExpr($1), $3), $2, nil) } + | selector RANGE pipelineExpr unwrapExpr { $$ = newLogRange(newPipelineExpr(newMatcherExpr($1), $3), $2, $4 ) } + | OPEN_PARENTHESIS logRangeExpr CLOSE_PARENTHESIS { $$ = $2 } | logRangeExpr error ; unwrapExpr: - PIPE UNWRAP IDENTIFIER { $$ = newUnwrapExpr($3, "")} - | PIPE UNWRAP DURATION_CONV OPEN_PARENTHESIS IDENTIFIER CLOSE_PARENTHESIS { $$ = newUnwrapExpr($5, OpConvDuration)} + PIPE UNWRAP IDENTIFIER { $$ = newUnwrapExpr($3, "")} + | PIPE UNWRAP convOp OPEN_PARENTHESIS IDENTIFIER CLOSE_PARENTHESIS { $$ = newUnwrapExpr($5, $3)} ; -rangeAggregationExpr: - rangeOp OPEN_PARENTHESIS logRangeExpr CLOSE_PARENTHESIS { $$ = newRangeAggregationExpr($3, $1, nil, nil) } - | rangeOp OPEN_PARENTHESIS NUMBER COMMA logRangeExpr CLOSE_PARENTHESIS { $$ = newRangeAggregationExpr($5, $1, nil, &$3) } - | rangeOp OPEN_PARENTHESIS logRangeExpr CLOSE_PARENTHESIS grouping { $$ = newRangeAggregationExpr($3, $1, $5, nil) } - | rangeOp OPEN_PARENTHESIS NUMBER COMMA logRangeExpr 
CLOSE_PARENTHESIS grouping { $$ = newRangeAggregationExpr($5, $1, $7, &$3) } +convOp: + DURATION_CONV { $$ = OpConvDuration } + | DURATION_SECONDS_CONV { $$ = OpConvDurationSeconds } + ; + +rangeAggregationExpr: + rangeOp OPEN_PARENTHESIS logRangeExpr CLOSE_PARENTHESIS { $$ = newRangeAggregationExpr($3, $1, nil, nil) } + | rangeOp OPEN_PARENTHESIS NUMBER COMMA logRangeExpr CLOSE_PARENTHESIS { $$ = newRangeAggregationExpr($5, $1, nil, &$3) } + | rangeOp OPEN_PARENTHESIS logRangeExpr CLOSE_PARENTHESIS grouping { $$ = newRangeAggregationExpr($3, $1, $5, nil) } + | rangeOp OPEN_PARENTHESIS NUMBER COMMA logRangeExpr CLOSE_PARENTHESIS grouping { $$ = newRangeAggregationExpr($5, $1, $7, &$3) } ; vectorAggregationExpr: diff --git a/pkg/logql/expr.y.go b/pkg/logql/expr.y.go index b580303666086..88dab45cc41ad 100644 --- a/pkg/logql/expr.y.go +++ b/pkg/logql/expr.y.go @@ -26,6 +26,7 @@ type exprSymType struct { Matchers []*labels.Matcher RangeAggregationExpr SampleExpr RangeOp string + ConvOp string Selector []*labels.Matcher VectorAggregationExpr SampleExpr MetricExpr SampleExpr @@ -101,21 +102,22 @@ const STDVAR_OVER_TIME = 57393 const STDDEV_OVER_TIME = 57394 const QUANTILE_OVER_TIME = 57395 const DURATION_CONV = 57396 -const OR = 57397 -const AND = 57398 -const UNLESS = 57399 -const CMP_EQ = 57400 -const NEQ = 57401 -const LT = 57402 -const LTE = 57403 -const GT = 57404 -const GTE = 57405 -const ADD = 57406 -const SUB = 57407 -const MUL = 57408 -const DIV = 57409 -const MOD = 57410 -const POW = 57411 +const DURATION_SECONDS_CONV = 57397 +const OR = 57398 +const AND = 57399 +const UNLESS = 57400 +const CMP_EQ = 57401 +const NEQ = 57402 +const LT = 57403 +const LTE = 57404 +const GT = 57405 +const GTE = 57406 +const ADD = 57407 +const SUB = 57408 +const MUL = 57409 +const DIV = 57410 +const MOD = 57411 +const POW = 57412 var exprToknames = [...]string{ "$end", @@ -172,6 +174,7 @@ var exprToknames = [...]string{ "STDDEV_OVER_TIME", "QUANTILE_OVER_TIME", "DURATION_CONV", + "DURATION_SECONDS_CONV", "OR", "AND", "UNLESS", @@ -194,7 +197,7 @@ const exprEofCode = 1 const exprErrCode = 2 const exprInitialStackSize = 16 -//line pkg/logql/expr.y:316 +//line pkg/logql/expr.y:323 //line yacctab:1 var exprExca = [...]int{ @@ -205,174 +208,175 @@ var exprExca = [...]int{ const exprPrivate = 57344 -const exprLast = 384 +const exprLast = 392 var exprAct = [...]int{ 70, 169, 53, 151, 4, 143, 177, 100, 45, 52, - 225, 61, 207, 5, 236, 118, 63, 2, 76, 66, - 56, 14, 40, 41, 42, 43, 44, 45, 243, 11, - 42, 43, 44, 45, 71, 72, 231, 6, 153, 116, + 207, 61, 238, 5, 76, 118, 63, 2, 56, 66, + 245, 14, 40, 41, 42, 43, 44, 45, 225, 11, + 42, 43, 44, 45, 114, 116, 117, 6, 153, 116, 117, 17, 18, 28, 29, 31, 32, 30, 33, 34, - 35, 36, 19, 20, 69, 91, 71, 72, 214, 106, - 226, 94, 21, 22, 23, 24, 25, 26, 27, 114, - 116, 117, 174, 145, 122, 92, 59, 103, 183, 15, - 16, 234, 120, 57, 58, 159, 154, 157, 158, 155, - 156, 109, 127, 224, 128, 129, 130, 131, 132, 133, - 134, 135, 136, 137, 138, 139, 140, 141, 239, 148, - 146, 144, 204, 165, 106, 165, 215, 115, 215, 108, - 160, 217, 60, 216, 126, 125, 184, 228, 145, 176, - 170, 124, 103, 179, 222, 172, 211, 173, 46, 47, + 35, 36, 19, 20, 233, 91, 69, 106, 71, 72, + 214, 94, 21, 22, 23, 24, 25, 26, 27, 59, + 174, 145, 109, 92, 122, 103, 57, 58, 227, 228, + 15, 16, 120, 115, 71, 72, 159, 154, 157, 158, + 155, 156, 127, 224, 128, 129, 130, 131, 132, 133, + 134, 135, 136, 137, 138, 139, 140, 141, 241, 148, + 144, 204, 215, 165, 106, 165, 60, 217, 215, 11, + 160, 108, 184, 216, 236, 175, 230, 
121, 145, 176, + 170, 126, 103, 179, 222, 172, 211, 173, 46, 47, 50, 51, 48, 49, 40, 41, 42, 43, 44, 45, 180, 181, 182, 38, 39, 46, 47, 50, 51, 48, - 49, 40, 41, 42, 43, 44, 45, 200, 119, 68, - 202, 175, 206, 91, 209, 212, 11, 94, 203, 59, - 213, 120, 210, 201, 121, 167, 57, 58, 218, 37, + 49, 40, 41, 42, 43, 44, 45, 200, 119, 106, + 202, 125, 206, 91, 209, 212, 11, 94, 203, 124, + 213, 120, 210, 201, 121, 68, 165, 103, 218, 37, 38, 39, 46, 47, 50, 51, 48, 49, 40, 41, - 42, 43, 44, 45, 111, 223, 91, 165, 11, 171, - 113, 185, 230, 91, 123, 242, 121, 110, 238, 59, - 112, 237, 11, 233, 227, 60, 57, 58, 166, 235, - 6, 224, 240, 106, 17, 18, 28, 29, 31, 32, - 30, 33, 34, 35, 36, 19, 20, 145, 204, 55, - 106, 103, 106, 199, 198, 21, 22, 23, 24, 25, - 26, 27, 205, 74, 145, 60, 168, 59, 103, 3, - 103, 59, 15, 16, 57, 58, 62, 229, 57, 58, - 75, 208, 73, 205, 146, 144, 197, 196, 59, 162, - 189, 188, 161, 187, 186, 57, 58, 171, 168, 195, - 194, 171, 144, 59, 193, 192, 191, 190, 220, 221, - 57, 58, 219, 60, 152, 150, 164, 60, 171, 77, - 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, - 88, 89, 90, 171, 60, 106, 96, 163, 162, 161, - 149, 147, 106, 95, 142, 107, 65, 241, 67, 60, - 232, 178, 67, 103, 152, 54, 101, 102, 93, 10, - 103, 9, 13, 8, 12, 7, 64, 1, 0, 0, - 0, 97, 99, 98, 0, 104, 105, 207, 97, 99, - 98, 0, 104, 105, + 42, 43, 44, 45, 111, 223, 91, 166, 167, 113, + 185, 244, 232, 91, 205, 106, 123, 110, 74, 59, + 112, 240, 239, 235, 11, 229, 57, 58, 224, 231, + 204, 237, 6, 103, 242, 73, 17, 18, 28, 29, + 31, 32, 30, 33, 34, 35, 36, 19, 20, 171, + 164, 97, 99, 98, 163, 104, 105, 21, 22, 23, + 24, 25, 26, 27, 59, 168, 60, 220, 221, 205, + 59, 57, 58, 162, 59, 15, 16, 57, 58, 168, + 208, 57, 58, 161, 59, 162, 189, 188, 161, 187, + 186, 57, 58, 75, 171, 106, 199, 198, 3, 59, + 171, 106, 197, 196, 171, 62, 57, 58, 219, 145, + 152, 60, 149, 103, 171, 145, 147, 60, 142, 103, + 183, 60, 195, 194, 193, 192, 191, 190, 107, 55, + 243, 60, 77, 78, 79, 80, 81, 82, 83, 84, + 85, 86, 87, 88, 89, 90, 60, 146, 144, 106, + 65, 234, 67, 146, 144, 178, 67, 152, 150, 96, + 95, 54, 101, 102, 93, 10, 9, 103, 13, 8, + 226, 12, 7, 64, 1, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 97, 99, 98, 0, 104, + 105, 207, } var exprPact = [...]int{ - 15, -1000, 134, -1000, -1000, 206, 15, -1000, -1000, -1000, - -1000, 344, 147, 32, -1000, 276, 257, -1000, -1000, -1000, + 15, -1000, 133, -1000, -1000, 286, 15, -1000, -1000, -1000, + -1000, 348, 163, 34, -1000, 229, 212, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, - -1000, -1000, -1000, -1000, -1000, -1000, -1000, -21, -21, -21, - -21, -21, -21, -21, -21, -21, -21, -21, -21, -21, - -21, -21, 206, -1000, 63, 338, 340, -1000, -1000, -1000, - -1000, 96, 68, 134, 202, 195, -1000, 58, 162, 208, - 109, 103, 102, -1000, -1000, 15, -1000, 15, 15, 15, + -1000, -1000, -1000, -1000, -1000, -1000, -1000, -25, -25, -25, + -25, -25, -25, -25, -25, -25, -25, -25, -25, -25, + -25, -25, 286, -1000, 56, 211, 323, -1000, -1000, -1000, + -1000, 98, 49, 133, 202, 194, -1000, 23, 162, 210, + 157, 149, 109, -1000, -1000, 15, -1000, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, - 15, -1000, 339, -1000, 229, -1000, -1000, -1000, -1000, 336, - -1000, -1000, -1000, 248, 335, 350, 27, -1000, -1000, -1000, - -1000, -1000, 348, -1000, 334, 333, 332, 311, 205, 167, - 290, 194, 49, 153, 15, 347, 347, 97, 80, 80, - -36, -36, -61, -61, -61, -61, -42, -42, -42, -42, - -42, -42, -1000, 229, 248, 248, 248, -1000, 55, -1000, - 108, -1000, 200, 287, 284, 300, 298, 293, 280, 247, - -1000, 
-1000, -1000, -1000, -1000, -1000, 10, 194, 166, 240, - 275, 331, 258, 113, 10, 15, 35, 100, -1000, 98, - 110, 229, 246, -1000, 310, 304, -1000, -1000, -1000, -1000, + 15, -1000, 313, -1000, 291, -1000, -1000, -1000, -1000, 311, + -1000, -1000, -1000, 165, 307, 353, 27, -1000, -1000, -1000, + -1000, -1000, 352, -1000, 278, 268, 249, 245, 184, 190, + 271, 105, 47, 107, 15, 351, 351, 96, 79, 79, + -37, -37, -62, -62, -62, -62, -43, -43, -43, -43, + -43, -43, -1000, 291, 165, 165, 165, -1000, 297, -1000, + 104, -1000, 199, 283, 280, 320, 318, 316, 296, 290, + -1000, -1000, -1000, -1000, -1000, -1000, 60, 105, 251, 222, + 261, 345, 257, 113, 60, 15, 37, 100, -1000, 94, + 110, 291, 53, -1000, 306, 263, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, - -1000, 111, -1000, 166, -1000, -1000, 223, 6, 216, 104, - 254, -1000, -1000, 13, -1000, 346, -1000, -1000, -1000, -1000, - -1000, -1000, 10, -1000, -1000, -1000, 59, -29, 213, 210, - 85, 10, -1000, -1000, 343, -1000, -34, -1000, -1000, 207, - -1000, 5, -1000, -1000, + -1000, 111, -1000, 251, -1000, -1000, 220, 24, 217, 103, + 206, -1000, -1000, 31, -1000, 347, -1000, -1000, -1000, -1000, + -1000, -1000, 60, -1000, -1000, -1000, 102, -1000, -1000, -31, + 214, 213, 85, 60, -1000, -1000, 326, -1000, -36, -1000, + -1000, 203, -1000, -3, -1000, -1000, } var exprPgo = [...]int{ - 0, 367, 16, 20, 0, 6, 269, 4, 15, 7, - 366, 365, 364, 13, 363, 362, 361, 359, 280, 358, - 9, 2, 357, 356, 5, 355, 343, 336, 3, 315, - 1, + 0, 374, 16, 18, 0, 6, 298, 4, 15, 7, + 373, 372, 371, 370, 13, 369, 368, 366, 365, 293, + 364, 9, 2, 363, 362, 5, 361, 360, 359, 3, + 358, 1, } var exprR1 = [...]int{ 0, 1, 2, 2, 7, 7, 7, 7, 7, 6, 6, 6, 8, 8, 8, 8, 8, 8, 8, 8, - 8, 8, 8, 8, 8, 8, 30, 30, 11, 11, - 11, 11, 14, 14, 14, 14, 14, 3, 3, 3, - 3, 13, 13, 13, 10, 10, 9, 9, 9, 9, - 20, 20, 21, 21, 21, 21, 21, 25, 25, 19, - 19, 19, 26, 28, 28, 29, 29, 29, 27, 24, - 24, 24, 24, 24, 24, 24, 24, 23, 23, 23, - 23, 23, 23, 23, 22, 22, 22, 22, 22, 22, - 22, 16, 16, 16, 16, 16, 16, 16, 16, 16, - 16, 16, 16, 16, 16, 16, 18, 18, 17, 17, - 17, 15, 15, 15, 15, 15, 15, 15, 15, 15, - 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, - 12, 5, 5, 4, 4, + 8, 8, 8, 8, 8, 8, 31, 31, 13, 13, + 11, 11, 11, 11, 15, 15, 15, 15, 15, 3, + 3, 3, 3, 14, 14, 14, 10, 10, 9, 9, + 9, 9, 21, 21, 22, 22, 22, 22, 22, 26, + 26, 20, 20, 20, 27, 29, 29, 30, 30, 30, + 28, 25, 25, 25, 25, 25, 25, 25, 25, 24, + 24, 24, 24, 24, 24, 24, 23, 23, 23, 23, + 23, 23, 23, 17, 17, 17, 17, 17, 17, 17, + 17, 17, 17, 17, 17, 17, 17, 17, 19, 19, + 18, 18, 18, 16, 16, 16, 16, 16, 16, 16, + 16, 16, 12, 12, 12, 12, 12, 12, 12, 12, + 12, 12, 12, 5, 5, 4, 4, } var exprR2 = [...]int{ 0, 1, 1, 1, 1, 1, 1, 1, 3, 1, 2, 3, 2, 4, 3, 5, 3, 5, 3, 5, - 4, 6, 3, 4, 3, 2, 3, 6, 4, 6, - 5, 7, 4, 5, 5, 6, 7, 1, 1, 1, - 1, 3, 3, 3, 1, 3, 3, 3, 3, 3, - 1, 2, 1, 2, 2, 2, 2, 2, 3, 1, - 1, 2, 2, 3, 3, 1, 3, 3, 2, 1, - 1, 1, 3, 2, 3, 3, 3, 3, 3, 3, + 4, 6, 3, 4, 3, 2, 3, 6, 1, 1, + 4, 6, 5, 7, 4, 5, 5, 6, 7, 1, + 1, 1, 1, 3, 3, 3, 1, 3, 3, 3, + 3, 3, 1, 2, 1, 2, 2, 2, 2, 2, + 3, 1, 1, 2, 2, 3, 3, 1, 3, 3, + 2, 1, 1, 1, 3, 2, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, - 3, 4, 4, 4, 4, 4, 4, 4, 4, 4, - 4, 4, 4, 4, 4, 4, 0, 1, 1, 2, - 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 3, 3, 3, 4, 4, 4, 4, 4, 4, 4, + 4, 4, 4, 4, 4, 4, 4, 4, 0, 1, + 1, 2, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, - 1, 1, 3, 4, 4, + 1, 1, 1, 1, 3, 4, 4, } var exprChk = [...]int{ - -1000, -1, -2, -6, -7, -13, 22, -11, -14, -16, - -17, 14, -12, -15, 6, 
64, 65, 26, 27, 37, + -1000, -1, -2, -6, -7, -14, 22, -11, -15, -17, + -18, 14, -12, -16, 6, 65, 66, 26, 27, 37, 38, 47, 48, 49, 50, 51, 52, 53, 28, 29, - 32, 30, 31, 33, 34, 35, 36, 55, 56, 57, - 64, 65, 66, 67, 68, 69, 58, 59, 62, 63, - 60, 61, -20, -21, -25, 43, -3, 20, 21, 13, - 59, -7, -6, -2, -10, 2, -9, 4, 22, 22, - -4, 24, 25, 6, 6, -18, 39, -18, -18, -18, - -18, -18, -18, -18, -18, -18, -18, -18, -18, -18, - -18, -21, -3, -19, -24, -26, -27, 40, 42, 41, - -9, -23, -22, 22, 44, 45, 4, 5, 23, 23, - 15, 2, 18, 15, 11, 59, 12, 13, -8, 6, - -13, 22, -7, 6, 22, 22, 22, -2, -2, -2, + 32, 30, 31, 33, 34, 35, 36, 56, 57, 58, + 65, 66, 67, 68, 69, 70, 59, 60, 63, 64, + 61, 62, -21, -22, -26, 43, -3, 20, 21, 13, + 60, -7, -6, -2, -10, 2, -9, 4, 22, 22, + -4, 24, 25, 6, 6, -19, 39, -19, -19, -19, + -19, -19, -19, -19, -19, -19, -19, -19, -19, -19, + -19, -22, -3, -20, -25, -27, -28, 40, 42, 41, + -9, -24, -23, 22, 44, 45, 4, 5, 23, 23, + 15, 2, 18, 15, 11, 60, 12, 13, -8, 6, + -14, 22, -7, 6, 22, 22, 22, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, - -2, -2, 5, -24, 56, 18, 55, 5, -24, 5, - -29, -28, 4, 11, 59, 62, 63, 60, 61, 58, - -9, 5, 5, 5, 5, 2, 23, 18, 8, -30, - -20, 43, -13, -8, 23, 18, -7, -5, 4, -5, - -24, -24, -24, 23, 18, 11, 7, 6, 7, 6, + -2, -2, 5, -25, 57, 18, 56, 5, -25, 5, + -30, -29, 4, 11, 60, 63, 64, 61, 62, 59, + -9, 5, 5, 5, 5, 2, 23, 18, 8, -31, + -21, 43, -14, -8, 23, 18, -7, -5, 4, -5, + -25, -25, -25, 23, 18, 11, 7, 6, 7, 6, 7, 6, 7, 6, 7, 6, 7, 6, 7, 6, - -4, -8, -30, -20, 8, 8, -30, 46, 23, -30, - -20, 23, -4, -7, 23, 18, 23, 23, -28, 2, - 4, 5, 23, -30, 8, 4, 54, 8, 23, 23, - -30, 23, 4, -4, 22, -30, 43, 8, 8, 23, - -4, 4, 8, 23, + -4, -8, -31, -21, 8, 8, -31, 46, 23, -31, + -21, 23, -4, -7, 23, 18, 23, 23, -29, 2, + 4, 5, 23, -31, 8, 4, -13, 54, 55, 8, + 23, 23, -31, 23, 4, -4, 22, -31, 43, 8, + 8, 23, -4, 4, 8, 23, } var exprDef = [...]int{ 0, -2, 1, 2, 3, 9, 0, 4, 5, 6, - 7, 0, 0, 0, 108, 0, 0, 120, 121, 122, - 123, 124, 125, 126, 127, 128, 129, 130, 111, 112, - 113, 114, 115, 116, 117, 118, 119, 106, 106, 106, - 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, - 106, 106, 10, 50, 52, 0, 0, 37, 38, 39, - 40, 3, 2, 0, 0, 0, 44, 0, 0, 0, - 0, 0, 0, 109, 110, 0, 107, 0, 0, 0, + 7, 0, 0, 0, 110, 0, 0, 122, 123, 124, + 125, 126, 127, 128, 129, 130, 131, 132, 113, 114, + 115, 116, 117, 118, 119, 120, 121, 108, 108, 108, + 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, + 108, 108, 10, 52, 54, 0, 0, 39, 40, 41, + 42, 3, 2, 0, 0, 0, 46, 0, 0, 0, + 0, 0, 0, 111, 112, 0, 109, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 51, 0, 53, 54, 55, 56, 59, 60, 0, - 69, 70, 71, 0, 0, 0, 0, 57, 8, 11, - 41, 42, 0, 43, 0, 0, 0, 0, 0, 0, - 0, 0, 3, 108, 0, 0, 0, 91, 92, 93, - 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, - 104, 105, 58, 73, 0, 0, 0, 61, 0, 62, - 68, 65, 0, 0, 0, 0, 0, 0, 0, 0, - 45, 46, 47, 48, 49, 25, 28, 0, 12, 0, - 0, 0, 0, 0, 32, 0, 3, 0, 131, 0, - 74, 75, 76, 72, 0, 0, 82, 89, 81, 88, - 77, 84, 78, 85, 79, 86, 80, 87, 83, 90, - 30, 0, 14, 22, 16, 18, 0, 0, 0, 0, - 0, 24, 34, 3, 33, 0, 133, 134, 66, 67, - 63, 64, 29, 23, 20, 26, 0, 13, 0, 0, - 0, 35, 132, 31, 0, 15, 0, 17, 19, 0, - 36, 0, 21, 27, + 0, 53, 0, 55, 56, 57, 58, 61, 62, 0, + 71, 72, 73, 0, 0, 0, 0, 59, 8, 11, + 43, 44, 0, 45, 0, 0, 0, 0, 0, 0, + 0, 0, 3, 110, 0, 0, 0, 93, 94, 95, + 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, + 106, 107, 60, 75, 0, 0, 0, 63, 0, 64, + 70, 67, 0, 0, 0, 0, 0, 0, 0, 0, + 47, 48, 49, 50, 51, 25, 30, 0, 12, 0, + 0, 0, 0, 0, 34, 0, 3, 0, 133, 0, + 76, 77, 78, 74, 0, 
0, 84, 91, 83, 90, + 79, 86, 80, 87, 81, 88, 82, 89, 85, 92, + 32, 0, 14, 22, 16, 18, 0, 0, 0, 0, + 0, 24, 36, 3, 35, 0, 135, 136, 68, 69, + 65, 66, 31, 23, 20, 26, 0, 28, 29, 13, + 0, 0, 0, 37, 134, 33, 0, 15, 0, 17, + 19, 0, 38, 0, 21, 27, } var exprTok1 = [...]int{ @@ -386,7 +390,7 @@ var exprTok2 = [...]int{ 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, - 62, 63, 64, 65, 66, 67, 68, 69, + 62, 63, 64, 65, 66, 67, 68, 69, 70, } var exprTok3 = [...]int{ 0, @@ -731,792 +735,804 @@ exprdefault: case 1: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:95 +//line pkg/logql/expr.y:97 { exprlex.(*lexer).expr = exprDollar[1].Expr } case 2: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:98 +//line pkg/logql/expr.y:100 { exprVAL.Expr = exprDollar[1].LogExpr } case 3: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:99 +//line pkg/logql/expr.y:101 { exprVAL.Expr = exprDollar[1].MetricExpr } case 4: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:103 +//line pkg/logql/expr.y:105 { exprVAL.MetricExpr = exprDollar[1].RangeAggregationExpr } case 5: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:104 +//line pkg/logql/expr.y:106 { exprVAL.MetricExpr = exprDollar[1].VectorAggregationExpr } case 6: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:105 +//line pkg/logql/expr.y:107 { exprVAL.MetricExpr = exprDollar[1].BinOpExpr } case 7: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:106 +//line pkg/logql/expr.y:108 { exprVAL.MetricExpr = exprDollar[1].LiteralExpr } case 8: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:107 +//line pkg/logql/expr.y:109 { exprVAL.MetricExpr = exprDollar[2].MetricExpr } case 9: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:111 +//line pkg/logql/expr.y:113 { exprVAL.LogExpr = newMatcherExpr(exprDollar[1].Selector) } case 10: exprDollar = exprS[exprpt-2 : exprpt+1] -//line pkg/logql/expr.y:112 +//line pkg/logql/expr.y:114 { exprVAL.LogExpr = newPipelineExpr(newMatcherExpr(exprDollar[1].Selector), exprDollar[2].PipelineExpr) } case 11: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:113 +//line pkg/logql/expr.y:115 { exprVAL.LogExpr = exprDollar[2].LogExpr } case 12: exprDollar = exprS[exprpt-2 : exprpt+1] -//line pkg/logql/expr.y:117 +//line pkg/logql/expr.y:119 { exprVAL.LogRangeExpr = newLogRange(newMatcherExpr(exprDollar[1].Selector), exprDollar[2].duration, nil) } case 13: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:118 +//line pkg/logql/expr.y:120 { exprVAL.LogRangeExpr = newLogRange(newMatcherExpr(exprDollar[2].Selector), exprDollar[4].duration, nil) } case 14: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:119 +//line pkg/logql/expr.y:121 { exprVAL.LogRangeExpr = newLogRange(newMatcherExpr(exprDollar[1].Selector), exprDollar[2].duration, exprDollar[3].UnwrapExpr) } case 15: exprDollar = exprS[exprpt-5 : exprpt+1] -//line pkg/logql/expr.y:120 +//line pkg/logql/expr.y:122 { exprVAL.LogRangeExpr = newLogRange(newMatcherExpr(exprDollar[2].Selector), exprDollar[4].duration, exprDollar[5].UnwrapExpr) } case 16: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:121 +//line pkg/logql/expr.y:123 { exprVAL.LogRangeExpr = newLogRange(newMatcherExpr(exprDollar[1].Selector), exprDollar[3].duration, exprDollar[2].UnwrapExpr) } case 17: exprDollar = exprS[exprpt-5 : exprpt+1] -//line 
pkg/logql/expr.y:122 +//line pkg/logql/expr.y:124 { exprVAL.LogRangeExpr = newLogRange(newMatcherExpr(exprDollar[2].Selector), exprDollar[5].duration, exprDollar[3].UnwrapExpr) } case 18: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:123 +//line pkg/logql/expr.y:125 { exprVAL.LogRangeExpr = newLogRange(newPipelineExpr(newMatcherExpr(exprDollar[1].Selector), exprDollar[2].PipelineExpr), exprDollar[3].duration, nil) } case 19: exprDollar = exprS[exprpt-5 : exprpt+1] -//line pkg/logql/expr.y:124 +//line pkg/logql/expr.y:126 { exprVAL.LogRangeExpr = newLogRange(newPipelineExpr(newMatcherExpr(exprDollar[2].Selector), exprDollar[3].PipelineExpr), exprDollar[5].duration, nil) } case 20: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:125 +//line pkg/logql/expr.y:127 { exprVAL.LogRangeExpr = newLogRange(newPipelineExpr(newMatcherExpr(exprDollar[1].Selector), exprDollar[2].PipelineExpr), exprDollar[4].duration, exprDollar[3].UnwrapExpr) } case 21: exprDollar = exprS[exprpt-6 : exprpt+1] -//line pkg/logql/expr.y:126 +//line pkg/logql/expr.y:128 { exprVAL.LogRangeExpr = newLogRange(newPipelineExpr(newMatcherExpr(exprDollar[2].Selector), exprDollar[3].PipelineExpr), exprDollar[6].duration, exprDollar[4].UnwrapExpr) } case 22: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:127 +//line pkg/logql/expr.y:129 { exprVAL.LogRangeExpr = newLogRange(newPipelineExpr(newMatcherExpr(exprDollar[1].Selector), exprDollar[3].PipelineExpr), exprDollar[2].duration, nil) } case 23: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:128 +//line pkg/logql/expr.y:130 { exprVAL.LogRangeExpr = newLogRange(newPipelineExpr(newMatcherExpr(exprDollar[1].Selector), exprDollar[3].PipelineExpr), exprDollar[2].duration, exprDollar[4].UnwrapExpr) } case 24: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:129 +//line pkg/logql/expr.y:131 { exprVAL.LogRangeExpr = exprDollar[2].LogRangeExpr } case 26: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:134 +//line pkg/logql/expr.y:136 { exprVAL.UnwrapExpr = newUnwrapExpr(exprDollar[3].str, "") } case 27: exprDollar = exprS[exprpt-6 : exprpt+1] -//line pkg/logql/expr.y:135 +//line pkg/logql/expr.y:137 { - exprVAL.UnwrapExpr = newUnwrapExpr(exprDollar[5].str, OpConvDuration) + exprVAL.UnwrapExpr = newUnwrapExpr(exprDollar[5].str, exprDollar[3].ConvOp) } case 28: + exprDollar = exprS[exprpt-1 : exprpt+1] +//line pkg/logql/expr.y:141 + { + exprVAL.ConvOp = OpConvDuration + } + case 29: + exprDollar = exprS[exprpt-1 : exprpt+1] +//line pkg/logql/expr.y:142 + { + exprVAL.ConvOp = OpConvDurationSeconds + } + case 30: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:139 +//line pkg/logql/expr.y:146 { exprVAL.RangeAggregationExpr = newRangeAggregationExpr(exprDollar[3].LogRangeExpr, exprDollar[1].RangeOp, nil, nil) } - case 29: + case 31: exprDollar = exprS[exprpt-6 : exprpt+1] -//line pkg/logql/expr.y:140 +//line pkg/logql/expr.y:147 { exprVAL.RangeAggregationExpr = newRangeAggregationExpr(exprDollar[5].LogRangeExpr, exprDollar[1].RangeOp, nil, &exprDollar[3].str) } - case 30: + case 32: exprDollar = exprS[exprpt-5 : exprpt+1] -//line pkg/logql/expr.y:141 +//line pkg/logql/expr.y:148 { exprVAL.RangeAggregationExpr = newRangeAggregationExpr(exprDollar[3].LogRangeExpr, exprDollar[1].RangeOp, exprDollar[5].Grouping, nil) } - case 31: + case 33: exprDollar = exprS[exprpt-7 : exprpt+1] -//line pkg/logql/expr.y:142 +//line pkg/logql/expr.y:149 { exprVAL.RangeAggregationExpr = 
newRangeAggregationExpr(exprDollar[5].LogRangeExpr, exprDollar[1].RangeOp, exprDollar[7].Grouping, &exprDollar[3].str) } - case 32: + case 34: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:147 +//line pkg/logql/expr.y:154 { exprVAL.VectorAggregationExpr = mustNewVectorAggregationExpr(exprDollar[3].MetricExpr, exprDollar[1].VectorOp, nil, nil) } - case 33: + case 35: exprDollar = exprS[exprpt-5 : exprpt+1] -//line pkg/logql/expr.y:148 +//line pkg/logql/expr.y:155 { exprVAL.VectorAggregationExpr = mustNewVectorAggregationExpr(exprDollar[4].MetricExpr, exprDollar[1].VectorOp, exprDollar[2].Grouping, nil) } - case 34: + case 36: exprDollar = exprS[exprpt-5 : exprpt+1] -//line pkg/logql/expr.y:149 +//line pkg/logql/expr.y:156 { exprVAL.VectorAggregationExpr = mustNewVectorAggregationExpr(exprDollar[3].MetricExpr, exprDollar[1].VectorOp, exprDollar[5].Grouping, nil) } - case 35: + case 37: exprDollar = exprS[exprpt-6 : exprpt+1] -//line pkg/logql/expr.y:151 +//line pkg/logql/expr.y:158 { exprVAL.VectorAggregationExpr = mustNewVectorAggregationExpr(exprDollar[5].MetricExpr, exprDollar[1].VectorOp, nil, &exprDollar[3].str) } - case 36: + case 38: exprDollar = exprS[exprpt-7 : exprpt+1] -//line pkg/logql/expr.y:152 +//line pkg/logql/expr.y:159 { exprVAL.VectorAggregationExpr = mustNewVectorAggregationExpr(exprDollar[5].MetricExpr, exprDollar[1].VectorOp, exprDollar[7].Grouping, &exprDollar[3].str) } - case 37: + case 39: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:156 +//line pkg/logql/expr.y:163 { exprVAL.Filter = labels.MatchRegexp } - case 38: + case 40: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:157 +//line pkg/logql/expr.y:164 { exprVAL.Filter = labels.MatchEqual } - case 39: + case 41: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:158 +//line pkg/logql/expr.y:165 { exprVAL.Filter = labels.MatchNotRegexp } - case 40: + case 42: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:159 +//line pkg/logql/expr.y:166 { exprVAL.Filter = labels.MatchNotEqual } - case 41: + case 43: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:163 +//line pkg/logql/expr.y:170 { exprVAL.Selector = exprDollar[2].Matchers } - case 42: + case 44: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:164 +//line pkg/logql/expr.y:171 { exprVAL.Selector = exprDollar[2].Matchers } - case 43: + case 45: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:165 +//line pkg/logql/expr.y:172 { } - case 44: + case 46: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:169 +//line pkg/logql/expr.y:176 { exprVAL.Matchers = []*labels.Matcher{exprDollar[1].Matcher} } - case 45: + case 47: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:170 +//line pkg/logql/expr.y:177 { exprVAL.Matchers = append(exprDollar[1].Matchers, exprDollar[3].Matcher) } - case 46: + case 48: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:174 +//line pkg/logql/expr.y:181 { exprVAL.Matcher = mustNewMatcher(labels.MatchEqual, exprDollar[1].str, exprDollar[3].str) } - case 47: + case 49: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:175 +//line pkg/logql/expr.y:182 { exprVAL.Matcher = mustNewMatcher(labels.MatchNotEqual, exprDollar[1].str, exprDollar[3].str) } - case 48: + case 50: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:176 +//line pkg/logql/expr.y:183 { exprVAL.Matcher = mustNewMatcher(labels.MatchRegexp, exprDollar[1].str, 
exprDollar[3].str) } - case 49: + case 51: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:177 +//line pkg/logql/expr.y:184 { exprVAL.Matcher = mustNewMatcher(labels.MatchNotRegexp, exprDollar[1].str, exprDollar[3].str) } - case 50: + case 52: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:181 +//line pkg/logql/expr.y:188 { exprVAL.PipelineExpr = MultiPipelineExpr{exprDollar[1].PipelineStage} } - case 51: + case 53: exprDollar = exprS[exprpt-2 : exprpt+1] -//line pkg/logql/expr.y:182 +//line pkg/logql/expr.y:189 { exprVAL.PipelineExpr = append(exprDollar[1].PipelineExpr, exprDollar[2].PipelineStage) } - case 52: + case 54: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:186 +//line pkg/logql/expr.y:193 { exprVAL.PipelineStage = exprDollar[1].LineFilters } - case 53: + case 55: exprDollar = exprS[exprpt-2 : exprpt+1] -//line pkg/logql/expr.y:187 +//line pkg/logql/expr.y:194 { exprVAL.PipelineStage = exprDollar[2].LabelParser } - case 54: + case 56: exprDollar = exprS[exprpt-2 : exprpt+1] -//line pkg/logql/expr.y:188 +//line pkg/logql/expr.y:195 { exprVAL.PipelineStage = &labelFilterExpr{Filterer: exprDollar[2].LabelFilter} } - case 55: + case 57: exprDollar = exprS[exprpt-2 : exprpt+1] -//line pkg/logql/expr.y:189 +//line pkg/logql/expr.y:196 { exprVAL.PipelineStage = exprDollar[2].LineFormatExpr } - case 56: + case 58: exprDollar = exprS[exprpt-2 : exprpt+1] -//line pkg/logql/expr.y:190 +//line pkg/logql/expr.y:197 { exprVAL.PipelineStage = exprDollar[2].LabelFormatExpr } - case 57: + case 59: exprDollar = exprS[exprpt-2 : exprpt+1] -//line pkg/logql/expr.y:194 +//line pkg/logql/expr.y:201 { exprVAL.LineFilters = newLineFilterExpr(nil, exprDollar[1].Filter, exprDollar[2].str) } - case 58: + case 60: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:195 +//line pkg/logql/expr.y:202 { exprVAL.LineFilters = newLineFilterExpr(exprDollar[1].LineFilters, exprDollar[2].Filter, exprDollar[3].str) } - case 59: + case 61: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:198 +//line pkg/logql/expr.y:205 { exprVAL.LabelParser = newLabelParserExpr(OpParserTypeJSON, "") } - case 60: + case 62: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:199 +//line pkg/logql/expr.y:206 { exprVAL.LabelParser = newLabelParserExpr(OpParserTypeLogfmt, "") } - case 61: + case 63: exprDollar = exprS[exprpt-2 : exprpt+1] -//line pkg/logql/expr.y:200 +//line pkg/logql/expr.y:207 { exprVAL.LabelParser = newLabelParserExpr(OpParserTypeRegexp, exprDollar[2].str) } - case 62: + case 64: exprDollar = exprS[exprpt-2 : exprpt+1] -//line pkg/logql/expr.y:203 +//line pkg/logql/expr.y:210 { exprVAL.LineFormatExpr = newLineFmtExpr(exprDollar[2].str) } - case 63: + case 65: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:206 +//line pkg/logql/expr.y:213 { exprVAL.LabelFormat = newRenameLabelFmt(exprDollar[1].str, exprDollar[3].str) } - case 64: + case 66: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:207 +//line pkg/logql/expr.y:214 { exprVAL.LabelFormat = newTemplateLabelFmt(exprDollar[1].str, exprDollar[3].str) } - case 65: + case 67: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:211 +//line pkg/logql/expr.y:218 { exprVAL.LabelsFormat = []labelFmt{exprDollar[1].LabelFormat} } - case 66: + case 68: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:212 +//line pkg/logql/expr.y:219 { exprVAL.LabelsFormat = append(exprDollar[1].LabelsFormat, exprDollar[3].LabelFormat) } 
- case 68: + case 70: exprDollar = exprS[exprpt-2 : exprpt+1] -//line pkg/logql/expr.y:216 +//line pkg/logql/expr.y:223 { exprVAL.LabelFormatExpr = newLabelFmtExpr(exprDollar[2].LabelsFormat) } - case 69: + case 71: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:219 +//line pkg/logql/expr.y:226 { exprVAL.LabelFilter = labelfilter.NewString(exprDollar[1].Matcher) } - case 70: + case 72: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:220 +//line pkg/logql/expr.y:227 { exprVAL.LabelFilter = exprDollar[1].DurationFilter } - case 71: + case 73: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:221 +//line pkg/logql/expr.y:228 { exprVAL.LabelFilter = exprDollar[1].NumberFilter } - case 72: + case 74: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:222 +//line pkg/logql/expr.y:229 { exprVAL.LabelFilter = exprDollar[2].LabelFilter } - case 73: + case 75: exprDollar = exprS[exprpt-2 : exprpt+1] -//line pkg/logql/expr.y:223 +//line pkg/logql/expr.y:230 { exprVAL.LabelFilter = labelfilter.NewAnd(exprDollar[1].LabelFilter, exprDollar[2].LabelFilter) } - case 74: + case 76: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:224 +//line pkg/logql/expr.y:231 { exprVAL.LabelFilter = labelfilter.NewAnd(exprDollar[1].LabelFilter, exprDollar[3].LabelFilter) } - case 75: + case 77: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:225 +//line pkg/logql/expr.y:232 { exprVAL.LabelFilter = labelfilter.NewAnd(exprDollar[1].LabelFilter, exprDollar[3].LabelFilter) } - case 76: + case 78: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:226 +//line pkg/logql/expr.y:233 { exprVAL.LabelFilter = labelfilter.NewOr(exprDollar[1].LabelFilter, exprDollar[3].LabelFilter) } - case 77: + case 79: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:230 +//line pkg/logql/expr.y:237 { exprVAL.DurationFilter = labelfilter.NewDuration(labelfilter.FilterGreaterThan, exprDollar[1].str, exprDollar[3].duration) } - case 78: + case 80: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:231 +//line pkg/logql/expr.y:238 { exprVAL.DurationFilter = labelfilter.NewDuration(labelfilter.FilterGreaterThanOrEqual, exprDollar[1].str, exprDollar[3].duration) } - case 79: + case 81: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:232 +//line pkg/logql/expr.y:239 { exprVAL.DurationFilter = labelfilter.NewDuration(labelfilter.FilterLesserThan, exprDollar[1].str, exprDollar[3].duration) } - case 80: + case 82: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:233 +//line pkg/logql/expr.y:240 { exprVAL.DurationFilter = labelfilter.NewDuration(labelfilter.FilterLesserThanOrEqual, exprDollar[1].str, exprDollar[3].duration) } - case 81: + case 83: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:234 +//line pkg/logql/expr.y:241 { exprVAL.DurationFilter = labelfilter.NewDuration(labelfilter.FilterNotEqual, exprDollar[1].str, exprDollar[3].duration) } - case 82: + case 84: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:235 +//line pkg/logql/expr.y:242 { exprVAL.DurationFilter = labelfilter.NewDuration(labelfilter.FilterEqual, exprDollar[1].str, exprDollar[3].duration) } - case 83: + case 85: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:236 +//line pkg/logql/expr.y:243 { exprVAL.DurationFilter = labelfilter.NewDuration(labelfilter.FilterEqual, exprDollar[1].str, exprDollar[3].duration) } - case 84: + case 86: exprDollar = 
exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:240 +//line pkg/logql/expr.y:247 { exprVAL.NumberFilter = labelfilter.NewNumeric(labelfilter.FilterGreaterThan, exprDollar[1].str, mustNewFloat(exprDollar[3].str)) } - case 85: + case 87: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:241 +//line pkg/logql/expr.y:248 { exprVAL.NumberFilter = labelfilter.NewNumeric(labelfilter.FilterGreaterThanOrEqual, exprDollar[1].str, mustNewFloat(exprDollar[3].str)) } - case 86: + case 88: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:242 +//line pkg/logql/expr.y:249 { exprVAL.NumberFilter = labelfilter.NewNumeric(labelfilter.FilterLesserThan, exprDollar[1].str, mustNewFloat(exprDollar[3].str)) } - case 87: + case 89: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:243 +//line pkg/logql/expr.y:250 { exprVAL.NumberFilter = labelfilter.NewNumeric(labelfilter.FilterLesserThanOrEqual, exprDollar[1].str, mustNewFloat(exprDollar[3].str)) } - case 88: + case 90: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:244 +//line pkg/logql/expr.y:251 { exprVAL.NumberFilter = labelfilter.NewNumeric(labelfilter.FilterNotEqual, exprDollar[1].str, mustNewFloat(exprDollar[3].str)) } - case 89: + case 91: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:245 +//line pkg/logql/expr.y:252 { exprVAL.NumberFilter = labelfilter.NewNumeric(labelfilter.FilterEqual, exprDollar[1].str, mustNewFloat(exprDollar[3].str)) } - case 90: + case 92: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:246 +//line pkg/logql/expr.y:253 { exprVAL.NumberFilter = labelfilter.NewNumeric(labelfilter.FilterEqual, exprDollar[1].str, mustNewFloat(exprDollar[3].str)) } - case 91: + case 93: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:252 +//line pkg/logql/expr.y:259 { exprVAL.BinOpExpr = mustNewBinOpExpr("or", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } - case 92: + case 94: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:253 +//line pkg/logql/expr.y:260 { exprVAL.BinOpExpr = mustNewBinOpExpr("and", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } - case 93: + case 95: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:254 +//line pkg/logql/expr.y:261 { exprVAL.BinOpExpr = mustNewBinOpExpr("unless", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } - case 94: + case 96: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:255 +//line pkg/logql/expr.y:262 { exprVAL.BinOpExpr = mustNewBinOpExpr("+", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } - case 95: + case 97: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:256 +//line pkg/logql/expr.y:263 { exprVAL.BinOpExpr = mustNewBinOpExpr("-", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } - case 96: + case 98: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:257 +//line pkg/logql/expr.y:264 { exprVAL.BinOpExpr = mustNewBinOpExpr("*", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } - case 97: + case 99: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:258 +//line pkg/logql/expr.y:265 { exprVAL.BinOpExpr = mustNewBinOpExpr("/", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } - case 98: + case 100: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:259 +//line pkg/logql/expr.y:266 { exprVAL.BinOpExpr = mustNewBinOpExpr("%", 
exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } - case 99: + case 101: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:260 +//line pkg/logql/expr.y:267 { exprVAL.BinOpExpr = mustNewBinOpExpr("^", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } - case 100: + case 102: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:261 +//line pkg/logql/expr.y:268 { exprVAL.BinOpExpr = mustNewBinOpExpr("==", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } - case 101: + case 103: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:262 +//line pkg/logql/expr.y:269 { exprVAL.BinOpExpr = mustNewBinOpExpr("!=", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } - case 102: + case 104: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:263 +//line pkg/logql/expr.y:270 { exprVAL.BinOpExpr = mustNewBinOpExpr(">", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } - case 103: + case 105: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:264 +//line pkg/logql/expr.y:271 { exprVAL.BinOpExpr = mustNewBinOpExpr(">=", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } - case 104: + case 106: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:265 +//line pkg/logql/expr.y:272 { exprVAL.BinOpExpr = mustNewBinOpExpr("<", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } - case 105: + case 107: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:266 +//line pkg/logql/expr.y:273 { exprVAL.BinOpExpr = mustNewBinOpExpr("<=", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } - case 106: + case 108: exprDollar = exprS[exprpt-0 : exprpt+1] -//line pkg/logql/expr.y:270 +//line pkg/logql/expr.y:277 { exprVAL.BinOpModifier = BinOpOptions{} } - case 107: + case 109: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:271 +//line pkg/logql/expr.y:278 { exprVAL.BinOpModifier = BinOpOptions{ReturnBool: true} } - case 108: + case 110: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:275 +//line pkg/logql/expr.y:282 { exprVAL.LiteralExpr = mustNewLiteralExpr(exprDollar[1].str, false) } - case 109: + case 111: exprDollar = exprS[exprpt-2 : exprpt+1] -//line pkg/logql/expr.y:276 +//line pkg/logql/expr.y:283 { exprVAL.LiteralExpr = mustNewLiteralExpr(exprDollar[2].str, false) } - case 110: + case 112: exprDollar = exprS[exprpt-2 : exprpt+1] -//line pkg/logql/expr.y:277 +//line pkg/logql/expr.y:284 { exprVAL.LiteralExpr = mustNewLiteralExpr(exprDollar[2].str, true) } - case 111: + case 113: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:281 +//line pkg/logql/expr.y:288 { exprVAL.VectorOp = OpTypeSum } - case 112: + case 114: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:282 +//line pkg/logql/expr.y:289 { exprVAL.VectorOp = OpTypeAvg } - case 113: + case 115: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:283 +//line pkg/logql/expr.y:290 { exprVAL.VectorOp = OpTypeCount } - case 114: + case 116: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:284 +//line pkg/logql/expr.y:291 { exprVAL.VectorOp = OpTypeMax } - case 115: + case 117: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:285 +//line pkg/logql/expr.y:292 { exprVAL.VectorOp = OpTypeMin } - case 116: + case 118: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:286 +//line 
pkg/logql/expr.y:293 { exprVAL.VectorOp = OpTypeStddev } - case 117: + case 119: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:287 +//line pkg/logql/expr.y:294 { exprVAL.VectorOp = OpTypeStdvar } - case 118: + case 120: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:288 +//line pkg/logql/expr.y:295 { exprVAL.VectorOp = OpTypeBottomK } - case 119: + case 121: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:289 +//line pkg/logql/expr.y:296 { exprVAL.VectorOp = OpTypeTopK } - case 120: + case 122: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:293 +//line pkg/logql/expr.y:300 { exprVAL.RangeOp = OpRangeTypeCount } - case 121: + case 123: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:294 +//line pkg/logql/expr.y:301 { exprVAL.RangeOp = OpRangeTypeRate } - case 122: + case 124: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:295 +//line pkg/logql/expr.y:302 { exprVAL.RangeOp = OpRangeTypeBytes } - case 123: + case 125: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:296 +//line pkg/logql/expr.y:303 { exprVAL.RangeOp = OpRangeTypeBytesRate } - case 124: + case 126: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:297 +//line pkg/logql/expr.y:304 { exprVAL.RangeOp = OpRangeTypeAvg } - case 125: + case 127: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:298 +//line pkg/logql/expr.y:305 { exprVAL.RangeOp = OpRangeTypeSum } - case 126: + case 128: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:299 +//line pkg/logql/expr.y:306 { exprVAL.RangeOp = OpRangeTypeMin } - case 127: + case 129: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:300 +//line pkg/logql/expr.y:307 { exprVAL.RangeOp = OpRangeTypeMax } - case 128: + case 130: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:301 +//line pkg/logql/expr.y:308 { exprVAL.RangeOp = OpRangeTypeStdvar } - case 129: + case 131: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:302 +//line pkg/logql/expr.y:309 { exprVAL.RangeOp = OpRangeTypeStddev } - case 130: + case 132: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:303 +//line pkg/logql/expr.y:310 { exprVAL.RangeOp = OpRangeTypeQuantile } - case 131: + case 133: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:308 +//line pkg/logql/expr.y:315 { exprVAL.Labels = []string{exprDollar[1].str} } - case 132: + case 134: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:309 +//line pkg/logql/expr.y:316 { exprVAL.Labels = append(exprDollar[1].Labels, exprDollar[3].str) } - case 133: + case 135: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:313 +//line pkg/logql/expr.y:320 { exprVAL.Grouping = &grouping{without: false, groups: exprDollar[3].Labels} } - case 134: + case 136: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:314 +//line pkg/logql/expr.y:321 { exprVAL.Grouping = &grouping{without: true, groups: exprDollar[3].Labels} } diff --git a/pkg/logql/functions.go b/pkg/logql/functions.go index 8f11f03ba973f..ca2620317980d 100644 --- a/pkg/logql/functions.go +++ b/pkg/logql/functions.go @@ -16,7 +16,7 @@ func (r rangeAggregationExpr) Extractor() (SampleExtractor, error) { return nil, err } if r.left.unwrap != nil { - return newLabelSampleExtractor(r.left.unwrap.identifier, r.grouping), nil + return newLabelSampleExtractor(r.left.unwrap.identifier, r.left.unwrap.operation, r.grouping), nil } switch r.operation 
{ case OpRangeTypeRate, OpRangeTypeCount: diff --git a/pkg/logql/lex.go b/pkg/logql/lex.go index 2fcd7042fe1ca..2bd897635c7f6 100644 --- a/pkg/logql/lex.go +++ b/pkg/logql/lex.go @@ -31,30 +31,6 @@ var tokens = map[string]int{ "[": OPEN_BRACKET, "]": CLOSE_BRACKET, - // range vec ops - OpRangeTypeRate: RATE, - OpRangeTypeCount: COUNT_OVER_TIME, - OpRangeTypeBytesRate: BYTES_RATE, - OpRangeTypeBytes: BYTES_OVER_TIME, - OpRangeTypeAvg: AVG_OVER_TIME, - OpRangeTypeSum: SUM_OVER_TIME, - OpRangeTypeMin: MIN_OVER_TIME, - OpRangeTypeMax: MAX_OVER_TIME, - OpRangeTypeStdvar: STDVAR_OVER_TIME, - OpRangeTypeStddev: STDDEV_OVER_TIME, - OpRangeTypeQuantile: QUANTILE_OVER_TIME, - - // vec ops - OpTypeSum: SUM, - OpTypeAvg: AVG, - OpTypeMax: MAX, - OpTypeMin: MIN, - OpTypeCount: COUNT, - OpTypeStddev: STDDEV, - OpTypeStdvar: STDVAR, - OpTypeBottomK: BOTTOMK, - OpTypeTopK: TOPK, - // binops OpTypeOr: OR, OpTypeAnd: AND, @@ -80,9 +56,37 @@ var tokens = map[string]int{ // fmt OpFmtLabel: LABEL_FMT, OpFmtLine: LINE_FMT, +} + +// functionTokens are tokens that need to be suffixed with parentheses +var functionTokens = map[string]int{ + // range vec ops + OpRangeTypeRate: RATE, + OpRangeTypeCount: COUNT_OVER_TIME, + OpRangeTypeBytesRate: BYTES_RATE, + OpRangeTypeBytes: BYTES_OVER_TIME, + OpRangeTypeAvg: AVG_OVER_TIME, + OpRangeTypeSum: SUM_OVER_TIME, + OpRangeTypeMin: MIN_OVER_TIME, + OpRangeTypeMax: MAX_OVER_TIME, + OpRangeTypeStdvar: STDVAR_OVER_TIME, + OpRangeTypeStddev: STDDEV_OVER_TIME, + OpRangeTypeQuantile: QUANTILE_OVER_TIME, + + // vec ops + OpTypeSum: SUM, + OpTypeAvg: AVG, + OpTypeMax: MAX, + OpTypeMin: MIN, + OpTypeCount: COUNT, + OpTypeStddev: STDDEV, + OpTypeStdvar: STDVAR, + OpTypeBottomK: BOTTOMK, + OpTypeTopK: TOPK, // conversion Op - OpConvDuration: DURATION_CONV, + OpConvDuration: DURATION_CONV, + OpConvDurationSeconds: DURATION_SECONDS_CONV, } type lexer struct { @@ -137,6 +141,20 @@ func (l *lexer) Lex(lval *exprSymType) int { return 0 } + if tok, ok := functionTokens[l.TokenText()+string(l.Peek())]; ok { + // create a copy of the scanner to advance past the entire token and test its suffix + sc := l.Scanner + sc.Next() + if isFunction(sc) { + l.Next() + return tok + } + } + + if tok, ok := functionTokens[l.TokenText()]; ok && isFunction(l.Scanner) { + return tok + } + if tok, ok := tokens[l.TokenText()+string(l.Peek())]; ok { l.Next() return tok @@ -193,3 +211,32 @@ func isDurationRune(r rune) bool { return false } } + +// isFunction checks if the next runes are either an open parenthesis +// or a by/without token. This allows functions and identifiers to be distinguished correctly.
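+// For example, in `count_over_time({app="foo"}[5m])` the word count_over_time is
+// followed by an open parenthesis and lexes as a function token, and `sum by (app)`
+// also lexes sum as a function because `by` is followed by `(`; in `| duration > 1h`,
+// however, duration is followed by `>`, so it lexes as a plain IDENTIFIER.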
+func isFunction(sc scanner.Scanner) bool { + var sb strings.Builder + sc = trimSpace(sc) + for r := sc.Next(); r != scanner.EOF; r = sc.Next() { + sb.WriteRune(r) + switch sb.String() { + case "(": + return true + case "by", "without": + sc = trimSpace(sc) + return sc.Next() == '(' + } + } + return false +} + +func trimSpace(l scanner.Scanner) scanner.Scanner { + for n := l.Peek(); n != scanner.EOF; n = l.Peek() { + if unicode.IsSpace(n) { + l.Next() + continue + } + return l + } + return l +} diff --git a/pkg/logql/lex_test.go b/pkg/logql/lex_test.go index c347e3b51eb86..205c18f274cd9 100644 --- a/pkg/logql/lex_test.go +++ b/pkg/logql/lex_test.go @@ -23,6 +23,9 @@ func TestLex(t *testing.T) { {`{foo="bar"} |~ "\\w+" | latency > 1h0.0m0s or foo == 4.00 and bar ="foo"`, []int{OPEN_BRACE, IDENTIFIER, EQ, STRING, CLOSE_BRACE, PIPE_MATCH, STRING, PIPE, IDENTIFIER, GT, DURATION, OR, IDENTIFIER, CMP_EQ, NUMBER, AND, IDENTIFIER, EQ, STRING}}, + {`{foo="bar"} |~ "\\w+" | duration > 1h0.0m0s or avg == 4.00 and bar ="foo"`, + []int{OPEN_BRACE, IDENTIFIER, EQ, STRING, CLOSE_BRACE, PIPE_MATCH, STRING, + PIPE, IDENTIFIER, GT, DURATION, OR, IDENTIFIER, CMP_EQ, NUMBER, AND, IDENTIFIER, EQ, STRING}}, {`{foo="bar"} |~ "\\w+" | latency > 1h0.0m0s or foo == 4.00 and bar ="foo" | unwrap foo`, []int{OPEN_BRACE, IDENTIFIER, EQ, STRING, CLOSE_BRACE, PIPE_MATCH, STRING, PIPE, IDENTIFIER, GT, DURATION, OR, IDENTIFIER, CMP_EQ, NUMBER, AND, IDENTIFIER, EQ, STRING, PIPE, UNWRAP, IDENTIFIER}}, @@ -61,3 +64,45 @@ func TestLex(t *testing.T) { }) } } + +func Test_isFunction(t *testing.T) { + tests := []struct { + next string + want bool + }{ + {" (", true}, + {"(", true}, + {"by (", true}, + {"by(", true}, + {"by (", true}, + {" by (", true}, + {" by(", true}, + {"by (", true}, + {"without (", true}, + {"without(", true}, + {"without (", true}, + {" without (", true}, + {" without(", true}, + {"without (", true}, + {" ( whatever is this", true}, + {" (foo,bar)", true}, + {"\r\n \t\t\r\n \n (foo,bar)", true}, + + {" foo (", false}, + {"123", false}, + {"", false}, + {" ", false}, + {" )(", false}, + {"byfoo", false}, + {"without foo", false}, + } + for _, tt := range tests { + t.Run(tt.next, func(t *testing.T) { + sc := scanner.Scanner{} + sc.Init(strings.NewReader(tt.next)) + if got := isFunction(sc); got != tt.want { + t.Errorf("isFunction() = %v, want %v", got, tt.want) + } + }) + } +} diff --git a/pkg/logql/parser_test.go b/pkg/logql/parser_test.go index 69dbcc2df597b..d3a20baad0ee4 100644 --- a/pkg/logql/parser_test.go +++ b/pkg/logql/parser_test.go @@ -1179,6 +1179,35 @@ func TestParse(t *testing.T) { newUnwrapExpr("foo", "")), OpRangeTypeStdvar, nil, nil, ), + }, { + in: `stdvar_over_time({app="foo"} |= "bar" | json | latency >= 250ms or ( status_code < 500 and status_code > 200) + | line_format "blip{{ .foo }}blop {{.status_code}}" | label_format foo=bar,status_code="buzz{{.bar}}" | unwrap duration(foo) [5m])`, + exp: newRangeAggregationExpr( + newLogRange(&pipelineExpr{ + left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}), + pipeline: MultiPipelineExpr{ + newLineFilterExpr(nil, labels.MatchEqual, "bar"), + newLabelParserExpr(OpParserTypeJSON, ""), + &labelFilterExpr{ + Filterer: labelfilter.NewOr( + labelfilter.NewDuration(labelfilter.FilterGreaterThanOrEqual, "latency", 250*time.Millisecond), + labelfilter.NewAnd( + labelfilter.NewNumeric(labelfilter.FilterLesserThan, "status_code", 500.0), + labelfilter.NewNumeric(labelfilter.FilterGreaterThan, "status_code", 200.0), 
+ ), + ), + }, + newLineFmtExpr("blip{{ .foo }}blop {{.status_code}}"), + newLabelFmtExpr([]labelFmt{ + newRenameLabelFmt("foo", "bar"), + newTemplateLabelFmt("status_code", "buzz{{.bar}}"), + }), + }, + }, + 5*time.Minute, + newUnwrapExpr("foo", OpConvDuration)), + OpRangeTypeStdvar, nil, nil, + ), }, { in: `sum_over_time({namespace="tns"} |= "level=error" | json |foo>=5,bar<25ms| unwrap latency [5m])`, @@ -1374,6 +1403,120 @@ func TestParse(t *testing.T) { nil, ), }, + { + in: `sum without (foo) ( + quantile_over_time(0.99998,{app="foo"} |= "bar" | json | latency >= 250ms or ( status_code < 500 and status_code > 200) + | line_format "blip{{ .foo }}blop {{.status_code}}" | label_format foo=bar,status_code="buzz{{.bar}}" | unwrap duration(foo) [5m] + ) by (namespace,instance) + )`, + exp: mustNewVectorAggregationExpr( + newRangeAggregationExpr( + newLogRange(&pipelineExpr{ + left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}), + pipeline: MultiPipelineExpr{ + newLineFilterExpr(nil, labels.MatchEqual, "bar"), + newLabelParserExpr(OpParserTypeJSON, ""), + &labelFilterExpr{ + Filterer: labelfilter.NewOr( + labelfilter.NewDuration(labelfilter.FilterGreaterThanOrEqual, "latency", 250*time.Millisecond), + labelfilter.NewAnd( + labelfilter.NewNumeric(labelfilter.FilterLesserThan, "status_code", 500.0), + labelfilter.NewNumeric(labelfilter.FilterGreaterThan, "status_code", 200.0), + ), + ), + }, + newLineFmtExpr("blip{{ .foo }}blop {{.status_code}}"), + newLabelFmtExpr([]labelFmt{ + newRenameLabelFmt("foo", "bar"), + newTemplateLabelFmt("status_code", "buzz{{.bar}}"), + }), + }, + }, + 5*time.Minute, + newUnwrapExpr("foo", OpConvDuration)), + OpRangeTypeQuantile, &grouping{without: false, groups: []string{"namespace", "instance"}}, newString("0.99998"), + ), + OpTypeSum, + &grouping{without: true, groups: []string{"foo"}}, + nil, + ), + }, + { + in: `sum without (foo) ( + quantile_over_time(.99998,{app="foo"} |= "bar" | json | latency >= 250ms or ( status_code < 500 and status_code > 200) + | line_format "blip{{ .foo }}blop {{.status_code}}" | label_format foo=bar,status_code="buzz{{.bar}}" | unwrap duration(foo) [5m] + ) by (namespace,instance) + )`, + exp: mustNewVectorAggregationExpr( + newRangeAggregationExpr( + newLogRange(&pipelineExpr{ + left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}), + pipeline: MultiPipelineExpr{ + newLineFilterExpr(nil, labels.MatchEqual, "bar"), + newLabelParserExpr(OpParserTypeJSON, ""), + &labelFilterExpr{ + Filterer: labelfilter.NewOr( + labelfilter.NewDuration(labelfilter.FilterGreaterThanOrEqual, "latency", 250*time.Millisecond), + labelfilter.NewAnd( + labelfilter.NewNumeric(labelfilter.FilterLesserThan, "status_code", 500.0), + labelfilter.NewNumeric(labelfilter.FilterGreaterThan, "status_code", 200.0), + ), + ), + }, + newLineFmtExpr("blip{{ .foo }}blop {{.status_code}}"), + newLabelFmtExpr([]labelFmt{ + newRenameLabelFmt("foo", "bar"), + newTemplateLabelFmt("status_code", "buzz{{.bar}}"), + }), + }, + }, + 5*time.Minute, + newUnwrapExpr("foo", OpConvDuration)), + OpRangeTypeQuantile, &grouping{without: false, groups: []string{"namespace", "instance"}}, newString(".99998"), + ), + OpTypeSum, + &grouping{without: true, groups: []string{"foo"}}, + nil, + ), + }, + { + in: `sum without (foo) ( + quantile_over_time(.99998,{app="foo"} |= "bar" | json | latency >= 250ms or ( status_code < 500 and status_code > 200) + | line_format "blip{{ .foo }}blop {{.status_code}}" | 
label_format foo=bar,status_code="buzz{{.bar}}" | unwrap duration_seconds(foo) [5m] + ) by (namespace,instance) + )`, + exp: mustNewVectorAggregationExpr( + newRangeAggregationExpr( + newLogRange(&pipelineExpr{ + left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}), + pipeline: MultiPipelineExpr{ + newLineFilterExpr(nil, labels.MatchEqual, "bar"), + newLabelParserExpr(OpParserTypeJSON, ""), + &labelFilterExpr{ + Filterer: labelfilter.NewOr( + labelfilter.NewDuration(labelfilter.FilterGreaterThanOrEqual, "latency", 250*time.Millisecond), + labelfilter.NewAnd( + labelfilter.NewNumeric(labelfilter.FilterLesserThan, "status_code", 500.0), + labelfilter.NewNumeric(labelfilter.FilterGreaterThan, "status_code", 200.0), + ), + ), + }, + newLineFmtExpr("blip{{ .foo }}blop {{.status_code}}"), + newLabelFmtExpr([]labelFmt{ + newRenameLabelFmt("foo", "bar"), + newTemplateLabelFmt("status_code", "buzz{{.bar}}"), + }), + }, + }, + 5*time.Minute, + newUnwrapExpr("foo", OpConvDurationSeconds)), + OpRangeTypeQuantile, &grouping{without: false, groups: []string{"namespace", "instance"}}, newString(".99998"), + ), + OpTypeSum, + &grouping{without: true, groups: []string{"foo"}}, + nil, + ), + }, { in: `topk(10, quantile_over_time(0.99998,{app="foo"} |= "bar" | json | latency >= 250ms or ( status_code < 500 and status_code > 200) diff --git a/pkg/logql/series_extractor.go b/pkg/logql/series_extractor.go index d9ccef7ba86df..83f7d9608a927 100644 --- a/pkg/logql/series_extractor.go +++ b/pkg/logql/series_extractor.go @@ -2,6 +2,7 @@ package logql import ( "strconv" + "time" "github.com/prometheus/prometheus/pkg/labels" ) @@ -32,6 +33,8 @@ func (bytesSampleExtractor) Extract(line []byte, lbs labels.Labels) (float64, la type labelSampleExtractor struct { labelName string gr *grouping + + conversion string } func (l *labelSampleExtractor) Extract(_ []byte, lbs labels.Labels) (float64, labels.Labels) { @@ -40,7 +43,14 @@ func (l *labelSampleExtractor) Extract(_ []byte, lbs labels.Labels) (float64, la // todo(cyriltovena) handle errors. return 0, lbs } - f, err := strconv.ParseFloat(stringValue, 64) + var f float64 + var err error + switch l.conversion { + case OpConvDuration, OpConvDurationSeconds: + f, err = convertDuration(stringValue) + default: + f, err = convertFloat(stringValue) + } if err != nil { // todo(cyriltovena) handle errors. 
return 0, lbs @@ -54,9 +64,22 @@ func (l *labelSampleExtractor) Extract(_ []byte, lbs labels.Labels) (float64, la return f, lbs.WithoutLabels(l.labelName) } -func newLabelSampleExtractor(labelName string, gr *grouping) *labelSampleExtractor { +func newLabelSampleExtractor(labelName, conversion string, gr *grouping) *labelSampleExtractor { return &labelSampleExtractor{ - labelName: labelName, - gr: gr, + labelName: labelName, + conversion: conversion, + gr: gr, + } +} + +func convertFloat(v string) (float64, error) { + return strconv.ParseFloat(v, 64) +} + +func convertDuration(v string) (float64, error) { + d, err := time.ParseDuration(v) + if err != nil { + return 0, err } + return d.Seconds(), nil } diff --git a/pkg/logql/series_extractor_test.go b/pkg/logql/series_extractor_test.go index 9e88ab66a2b2d..4920a438c4f87 100644 --- a/pkg/logql/series_extractor_test.go +++ b/pkg/logql/series_extractor_test.go @@ -18,7 +18,7 @@ func Test_labelSampleExtractor_Extract(t *testing.T) { }{ { "convert float", - newLabelSampleExtractor("foo", nil), + newLabelSampleExtractor("foo", "", nil), labels.Labels{labels.Label{Name: "foo", Value: "15.0"}}, 15, labels.Labels{}, @@ -26,6 +26,7 @@ func Test_labelSampleExtractor_Extract(t *testing.T) { { "convert float without", newLabelSampleExtractor("foo", + "", &grouping{without: true, groups: []string{"bar", "buzz"}}, ), labels.Labels{ @@ -42,6 +43,7 @@ func Test_labelSampleExtractor_Extract(t *testing.T) { { "convert float with", newLabelSampleExtractor("foo", + "", &grouping{without: false, groups: []string{"bar", "buzz"}}, ), labels.Labels{ @@ -56,6 +58,42 @@ func Test_labelSampleExtractor_Extract(t *testing.T) { {Name: "buzz", Value: "blip"}, }, }, + { + "convert duration with", + newLabelSampleExtractor("foo", + OpConvDuration, + &grouping{without: false, groups: []string{"bar", "buzz"}}, + ), + labels.Labels{ + {Name: "foo", Value: "500ms"}, + {Name: "bar", Value: "foo"}, + {Name: "buzz", Value: "blip"}, + {Name: "namespace", Value: "dev"}, + }, + 0.5, + labels.Labels{ + {Name: "bar", Value: "foo"}, + {Name: "buzz", Value: "blip"}, + }, + }, + { + "convert duration_seconds with", + newLabelSampleExtractor("foo", + OpConvDurationSeconds, + &grouping{without: false, groups: []string{"bar", "buzz"}}, + ), + labels.Labels{ + {Name: "foo", Value: "250ms"}, + {Name: "bar", Value: "foo"}, + {Name: "buzz", Value: "blip"}, + {Name: "namespace", Value: "dev"}, + }, + 0.25, + labels.Labels{ + {Name: "bar", Value: "foo"}, + {Name: "buzz", Value: "blip"}, + }, + }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { From 6563d6e914d721284a4324d52f50807c18f5d378 Mon Sep 17 00:00:00 2001 From: Cyril Tovena Date: Tue, 6 Oct 2020 22:59:31 +0200 Subject: [PATCH 26/45] Fixes the frontend logs to include org_id. The auth middleware was happening after the stats one and so org_id was not set :facepalm:. 
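For context, a minimal sketch of why the order matters (assuming middleware.Merge wraps handlers in the order given, with the first argument outermost, which is what the one-line swap below relies on; `handler` and `next` are illustrative names, not from the diff):

    // Merge(m1, m2).Wrap(h) behaves like m1.Wrap(m2.Wrap(h)), so m1 runs first.
    // The auth middleware injects the org ID into the request context, so it must
    // be listed before the stats middleware that reads org_id when logging.
    handler := middleware.Merge(
        t.httpAuthMiddleware,           // sets org_id on the request context
        queryrange.StatsHTTPMiddleware, // logs query stats, including org_id
    ).Wrap(next)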
Signed-off-by: Cyril Tovena --- pkg/loki/modules.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pkg/loki/modules.go b/pkg/loki/modules.go index 9ab99376677bb..ce6c0df458ddb 100644 --- a/pkg/loki/modules.go +++ b/pkg/loki/modules.go @@ -354,8 +354,8 @@ func (t *Loki) initQueryFrontend() (_ services.Service, err error) { frontendHandler := middleware.Merge( serverutil.RecoveryHTTPMiddleware, - queryrange.StatsHTTPMiddleware, t.httpAuthMiddleware, + queryrange.StatsHTTPMiddleware, serverutil.NewPrepopulateMiddleware(), serverutil.ResponseJSONMiddleware(), ).Wrap(t.frontend.Handler()) From 5578dbb107f91a28f3091d48d869f38ef152a46e Mon Sep 17 00:00:00 2001 From: Karsten Jeschkies Date: Fri, 9 Oct 2020 16:44:40 +0200 Subject: [PATCH 27/45] Support byte sizes in label filters. This patch extends the duration label filter with support for byte sizes such as `1kB` and `42MiB`. --- pkg/logql/expr.y | 24 +- pkg/logql/expr.y.go | 866 ++++++++++++++------------- pkg/logql/labelfilter/bytes.go | 55 ++ pkg/logql/labelfilter/filter_test.go | 6 + pkg/logql/lex.go | 60 +- pkg/logql/lex_test.go | 3 + 6 files changed, 606 insertions(+), 408 deletions(-) create mode 100644 pkg/logql/labelfilter/bytes.go diff --git a/pkg/logql/expr.y b/pkg/logql/expr.y index 5e885e0ebde3e..fcafd3cf4ed21 100644 --- a/pkg/logql/expr.y +++ b/pkg/logql/expr.y @@ -26,6 +26,7 @@ import ( VectorOp string BinOpExpr SampleExpr binOp string + bytes uint64 str string duration time.Duration LiteralExpr *literalExpr @@ -34,9 +35,11 @@ import ( LineFilters *lineFilterExpr PipelineExpr MultiPipelineExpr PipelineStage PipelineExpr + BytesFilter labelfilter.Filterer NumberFilter labelfilter.Filterer DurationFilter labelfilter.Filterer LabelFilter labelfilter.Filterer + UnitFilter labelfilter.Filterer LineFormatExpr *lineFmtExpr LabelFormatExpr *labelFmtExpr LabelFormat labelFmt @@ -67,6 +70,7 @@ import ( %type labelParser %type pipelineExpr %type pipelineStage +%type bytesFilter %type numberFilter %type durationFilter %type labelFilter @@ -76,7 +80,9 @@ import ( %type labelFormat %type labelsFormat %type unwrapExpr +%type unitFilter +%token BYTES %token IDENTIFIER STRING NUMBER %token DURATION RANGE %token MATCHERS LABELS EQ RE NRE OPEN_BRACE CLOSE_BRACE OPEN_BRACKET CLOSE_BRACKET COMMA DOT PIPE_MATCH PIPE_EXACT @@ -224,7 +230,7 @@ labelFormatExpr: LABEL_FMT labelsFormat { $$ = newLabelFmtExpr($2) }; labelFilter: matcher { $$ = labelfilter.NewString($1) } - | durationFilter { $$ = $1 } + | unitFilter { $$ = $1 } | numberFilter { $$ = $1 } | OPEN_PARENTHESIS labelFilter CLOSE_PARENTHESIS { $$ = $2 } | labelFilter labelFilter { $$ = labelfilter.NewAnd($1, $2 ) } @@ -233,8 +239,12 @@ labelFilter: | labelFilter OR labelFilter { $$ = labelfilter.NewOr($1, $3 ) } ; +unitFilter: + durationFilter { $$ = $1 } + | bytesFilter { $$ = $1 } + durationFilter: - IDENTIFIER GT DURATION { $$ = labelfilter.NewDuration(labelfilter.FilterGreaterThan, $1, $3) } + IDENTIFIER GT DURATION { $$ = labelfilter.NewDuration(labelfilter.FilterGreaterThan, $1, $3) } | IDENTIFIER GTE DURATION { $$ = labelfilter.NewDuration(labelfilter.FilterGreaterThanOrEqual, $1, $3) } | IDENTIFIER LT DURATION { $$ = labelfilter.NewDuration(labelfilter.FilterLesserThan, $1, $3) } | IDENTIFIER LTE DURATION { $$ = labelfilter.NewDuration(labelfilter.FilterLesserThanOrEqual, $1, $3) } @@ -243,6 +253,16 @@ durationFilter: | IDENTIFIER CMP_EQ DURATION { $$ = labelfilter.NewDuration(labelfilter.FilterEqual, $1, $3) } ; +bytesFilter: + IDENTIFIER GT BYTES { $$ = 
labelfilter.NewBytes(labelfilter.FilterGreaterThan, $1, $3) } + | IDENTIFIER GTE BYTES { $$ = labelfilter.NewBytes(labelfilter.FilterGreaterThanOrEqual, $1, $3) } + | IDENTIFIER LT BYTES { $$ = labelfilter.NewBytes(labelfilter.FilterLesserThan, $1, $3) } + | IDENTIFIER LTE BYTES { $$ = labelfilter.NewBytes(labelfilter.FilterLesserThanOrEqual, $1, $3) } + | IDENTIFIER NEQ BYTES { $$ = labelfilter.NewBytes(labelfilter.FilterNotEqual, $1, $3) } + | IDENTIFIER EQ BYTES { $$ = labelfilter.NewBytes(labelfilter.FilterEqual, $1, $3) } + | IDENTIFIER CMP_EQ BYTES { $$ = labelfilter.NewBytes(labelfilter.FilterEqual, $1, $3) } + ; + numberFilter: IDENTIFIER GT NUMBER { $$ = labelfilter.NewNumeric(labelfilter.FilterGreaterThan, $1, mustNewFloat($3))} | IDENTIFIER GTE NUMBER { $$ = labelfilter.NewNumeric(labelfilter.FilterGreaterThanOrEqual, $1, mustNewFloat($3))} diff --git a/pkg/logql/expr.y.go b/pkg/logql/expr.y.go index 88dab45cc41ad..2005118255f92 100644 --- a/pkg/logql/expr.y.go +++ b/pkg/logql/expr.y.go @@ -1,11 +1,11 @@ -// Code generated by goyacc -p expr -o pkg/logql/expr.y.go pkg/logql/expr.y. DO NOT EDIT. +// Code generated by goyacc -p expr -o expr.y.go expr.y. DO NOT EDIT. -//line pkg/logql/expr.y:2 +//line expr.y:2 package logql import __yyfmt__ "fmt" -//line pkg/logql/expr.y:2 +//line expr.y:2 import ( "github.com/grafana/loki/pkg/logql/labelfilter" @@ -13,7 +13,7 @@ import ( "time" ) -//line pkg/logql/expr.y:11 +//line expr.y:11 type exprSymType struct { yys int Expr Expr @@ -33,6 +33,7 @@ type exprSymType struct { VectorOp string BinOpExpr SampleExpr binOp string + bytes uint64 str string duration time.Duration LiteralExpr *literalExpr @@ -41,9 +42,11 @@ type exprSymType struct { LineFilters *lineFilterExpr PipelineExpr MultiPipelineExpr PipelineStage PipelineExpr + BytesFilter labelfilter.Filterer NumberFilter labelfilter.Filterer DurationFilter labelfilter.Filterer LabelFilter labelfilter.Filterer + UnitFilter labelfilter.Filterer LineFormatExpr *lineFmtExpr LabelFormatExpr *labelFmtExpr LabelFormat labelFmt @@ -51,78 +54,80 @@ type exprSymType struct { UnwrapExpr *unwrapExpr } -const IDENTIFIER = 57346 -const STRING = 57347 -const NUMBER = 57348 -const DURATION = 57349 -const RANGE = 57350 -const MATCHERS = 57351 -const LABELS = 57352 -const EQ = 57353 -const RE = 57354 -const NRE = 57355 -const OPEN_BRACE = 57356 -const CLOSE_BRACE = 57357 -const OPEN_BRACKET = 57358 -const CLOSE_BRACKET = 57359 -const COMMA = 57360 -const DOT = 57361 -const PIPE_MATCH = 57362 -const PIPE_EXACT = 57363 -const OPEN_PARENTHESIS = 57364 -const CLOSE_PARENTHESIS = 57365 -const BY = 57366 -const WITHOUT = 57367 -const COUNT_OVER_TIME = 57368 -const RATE = 57369 -const SUM = 57370 -const AVG = 57371 -const MAX = 57372 -const MIN = 57373 -const COUNT = 57374 -const STDDEV = 57375 -const STDVAR = 57376 -const BOTTOMK = 57377 -const TOPK = 57378 -const BYTES_OVER_TIME = 57379 -const BYTES_RATE = 57380 -const BOOL = 57381 -const JSON = 57382 -const REGEXP = 57383 -const LOGFMT = 57384 -const PIPE = 57385 -const LINE_FMT = 57386 -const LABEL_FMT = 57387 -const UNWRAP = 57388 -const AVG_OVER_TIME = 57389 -const SUM_OVER_TIME = 57390 -const MIN_OVER_TIME = 57391 -const MAX_OVER_TIME = 57392 -const STDVAR_OVER_TIME = 57393 -const STDDEV_OVER_TIME = 57394 -const QUANTILE_OVER_TIME = 57395 -const DURATION_CONV = 57396 -const DURATION_SECONDS_CONV = 57397 -const OR = 57398 -const AND = 57399 -const UNLESS = 57400 -const CMP_EQ = 57401 -const NEQ = 57402 -const LT = 57403 -const LTE = 57404 -const GT = 57405 -const 
GTE = 57406 -const ADD = 57407 -const SUB = 57408 -const MUL = 57409 -const DIV = 57410 -const MOD = 57411 -const POW = 57412 +const BYTES = 57346 +const IDENTIFIER = 57347 +const STRING = 57348 +const NUMBER = 57349 +const DURATION = 57350 +const RANGE = 57351 +const MATCHERS = 57352 +const LABELS = 57353 +const EQ = 57354 +const RE = 57355 +const NRE = 57356 +const OPEN_BRACE = 57357 +const CLOSE_BRACE = 57358 +const OPEN_BRACKET = 57359 +const CLOSE_BRACKET = 57360 +const COMMA = 57361 +const DOT = 57362 +const PIPE_MATCH = 57363 +const PIPE_EXACT = 57364 +const OPEN_PARENTHESIS = 57365 +const CLOSE_PARENTHESIS = 57366 +const BY = 57367 +const WITHOUT = 57368 +const COUNT_OVER_TIME = 57369 +const RATE = 57370 +const SUM = 57371 +const AVG = 57372 +const MAX = 57373 +const MIN = 57374 +const COUNT = 57375 +const STDDEV = 57376 +const STDVAR = 57377 +const BOTTOMK = 57378 +const TOPK = 57379 +const BYTES_OVER_TIME = 57380 +const BYTES_RATE = 57381 +const BOOL = 57382 +const JSON = 57383 +const REGEXP = 57384 +const LOGFMT = 57385 +const PIPE = 57386 +const LINE_FMT = 57387 +const LABEL_FMT = 57388 +const UNWRAP = 57389 +const AVG_OVER_TIME = 57390 +const SUM_OVER_TIME = 57391 +const MIN_OVER_TIME = 57392 +const MAX_OVER_TIME = 57393 +const STDVAR_OVER_TIME = 57394 +const STDDEV_OVER_TIME = 57395 +const QUANTILE_OVER_TIME = 57396 +const DURATION_CONV = 57397 +const DURATION_SECONDS_CONV = 57398 +const OR = 57399 +const AND = 57400 +const UNLESS = 57401 +const CMP_EQ = 57402 +const NEQ = 57403 +const LT = 57404 +const LTE = 57405 +const GT = 57406 +const GTE = 57407 +const ADD = 57408 +const SUB = 57409 +const MUL = 57410 +const DIV = 57411 +const MOD = 57412 +const POW = 57413 var exprToknames = [...]string{ "$end", "error", "$unk", + "BYTES", "IDENTIFIER", "STRING", "NUMBER", @@ -191,13 +196,14 @@ var exprToknames = [...]string{ "MOD", "POW", } + var exprStatenames = [...]string{} const exprEofCode = 1 const exprErrCode = 2 const exprInitialStackSize = 16 -//line pkg/logql/expr.y:323 +//line expr.y:343 //line yacctab:1 var exprExca = [...]int{ @@ -208,105 +214,105 @@ var exprExca = [...]int{ const exprPrivate = 57344 -const exprLast = 392 +const exprLast = 390 var exprAct = [...]int{ - - 70, 169, 53, 151, 4, 143, 177, 100, 45, 52, - 207, 61, 238, 5, 76, 118, 63, 2, 56, 66, - 245, 14, 40, 41, 42, 43, 44, 45, 225, 11, - 42, 43, 44, 45, 114, 116, 117, 6, 153, 116, - 117, 17, 18, 28, 29, 31, 32, 30, 33, 34, - 35, 36, 19, 20, 233, 91, 69, 106, 71, 72, - 214, 94, 21, 22, 23, 24, 25, 26, 27, 59, - 174, 145, 109, 92, 122, 103, 57, 58, 227, 228, - 15, 16, 120, 115, 71, 72, 159, 154, 157, 158, - 155, 156, 127, 224, 128, 129, 130, 131, 132, 133, - 134, 135, 136, 137, 138, 139, 140, 141, 241, 148, - 144, 204, 215, 165, 106, 165, 60, 217, 215, 11, - 160, 108, 184, 216, 236, 175, 230, 121, 145, 176, - 170, 126, 103, 179, 222, 172, 211, 173, 46, 47, - 50, 51, 48, 49, 40, 41, 42, 43, 44, 45, - 180, 181, 182, 38, 39, 46, 47, 50, 51, 48, - 49, 40, 41, 42, 43, 44, 45, 200, 119, 106, - 202, 125, 206, 91, 209, 212, 11, 94, 203, 124, - 213, 120, 210, 201, 121, 68, 165, 103, 218, 37, - 38, 39, 46, 47, 50, 51, 48, 49, 40, 41, - 42, 43, 44, 45, 111, 223, 91, 166, 167, 113, - 185, 244, 232, 91, 205, 106, 123, 110, 74, 59, - 112, 240, 239, 235, 11, 229, 57, 58, 224, 231, - 204, 237, 6, 103, 242, 73, 17, 18, 28, 29, - 31, 32, 30, 33, 34, 35, 36, 19, 20, 171, - 164, 97, 99, 98, 163, 104, 105, 21, 22, 23, - 24, 25, 26, 27, 59, 168, 60, 220, 221, 205, - 59, 57, 58, 162, 59, 15, 16, 57, 58, 168, - 208, 57, 58, 161, 59, 
162, 189, 188, 161, 187, - 186, 57, 58, 75, 171, 106, 199, 198, 3, 59, - 171, 106, 197, 196, 171, 62, 57, 58, 219, 145, - 152, 60, 149, 103, 171, 145, 147, 60, 142, 103, - 183, 60, 195, 194, 193, 192, 191, 190, 107, 55, - 243, 60, 77, 78, 79, 80, 81, 82, 83, 84, - 85, 86, 87, 88, 89, 90, 60, 146, 144, 106, - 65, 234, 67, 146, 144, 178, 67, 152, 150, 96, - 95, 54, 101, 102, 93, 10, 9, 103, 13, 8, - 226, 12, 7, 64, 1, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 97, 99, 98, 0, 104, - 105, 207, + 70, 171, 53, 153, 4, 145, 179, 100, 45, 52, + 216, 61, 247, 5, 56, 120, 63, 2, 76, 66, + 234, 14, 40, 41, 42, 43, 44, 45, 254, 11, + 42, 43, 44, 45, 116, 118, 119, 6, 155, 118, + 119, 17, 18, 28, 29, 31, 32, 30, 33, 34, + 35, 36, 19, 20, 69, 91, 71, 72, 71, 72, + 233, 94, 21, 22, 23, 24, 25, 26, 27, 92, + 236, 237, 242, 223, 124, 250, 176, 59, 213, 111, + 15, 16, 122, 117, 57, 58, 161, 156, 159, 160, + 157, 158, 129, 239, 130, 131, 132, 133, 134, 135, + 136, 137, 138, 139, 140, 141, 142, 143, 167, 150, + 46, 47, 50, 51, 48, 49, 40, 41, 42, 43, + 44, 45, 162, 110, 60, 106, 106, 245, 106, 224, + 231, 178, 172, 224, 226, 181, 214, 174, 225, 175, + 147, 59, 147, 103, 103, 128, 103, 127, 57, 58, + 186, 240, 182, 183, 184, 38, 39, 46, 47, 50, + 51, 48, 49, 40, 41, 42, 43, 44, 45, 209, + 11, 173, 211, 126, 215, 91, 218, 221, 123, 94, + 212, 146, 222, 122, 219, 210, 167, 167, 60, 68, + 227, 37, 38, 39, 46, 47, 50, 51, 48, 49, + 40, 41, 42, 43, 44, 45, 177, 214, 220, 168, + 121, 169, 59, 113, 232, 91, 115, 187, 11, 57, + 58, 241, 91, 170, 253, 125, 123, 112, 59, 249, + 114, 248, 244, 11, 238, 57, 58, 233, 217, 213, + 246, 6, 173, 251, 74, 17, 18, 28, 29, 31, + 32, 30, 33, 34, 35, 36, 19, 20, 173, 60, + 106, 192, 166, 164, 193, 191, 21, 22, 23, 24, + 25, 26, 27, 59, 170, 60, 229, 230, 103, 59, + 57, 58, 59, 73, 15, 16, 57, 58, 3, 57, + 58, 75, 106, 101, 165, 62, 97, 99, 98, 106, + 104, 105, 216, 173, 207, 164, 147, 208, 206, 173, + 103, 185, 55, 147, 163, 228, 151, 103, 154, 189, + 60, 163, 190, 188, 149, 144, 60, 109, 106, 60, + 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, + 87, 88, 89, 90, 148, 146, 103, 252, 243, 180, + 204, 148, 146, 205, 203, 201, 152, 198, 202, 200, + 199, 197, 67, 154, 97, 99, 98, 195, 104, 105, + 196, 194, 65, 96, 95, 67, 54, 107, 102, 108, + 93, 10, 9, 13, 8, 235, 12, 7, 64, 1, } -var exprPact = [...]int{ - 15, -1000, 133, -1000, -1000, 286, 15, -1000, -1000, -1000, - -1000, 348, 163, 34, -1000, 229, 212, -1000, -1000, -1000, +var exprPact = [...]int{ + 14, -1000, 134, -1000, -1000, 268, 14, -1000, -1000, -1000, + -1000, 370, 166, 31, -1000, 276, 237, -1000, -1000, -1000, + -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, + -1000, -1000, -1000, -1000, -1000, -1000, -1000, -22, -22, -22, + -22, -22, -22, -22, -22, -22, -22, -22, -22, -22, + -22, -22, 268, -1000, 63, 323, 321, -1000, -1000, -1000, + -1000, 99, 55, 134, 211, 200, -1000, 22, 203, 218, + 150, 124, 122, -1000, -1000, 14, -1000, 14, 14, 14, + 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, + 14, -1000, 319, -1000, 294, -1000, -1000, -1000, -1000, 318, + -1000, -1000, -1000, 120, 310, 358, 26, -1000, -1000, -1000, + -1000, -1000, -1000, -1000, 357, -1000, 308, 299, 288, 256, + 185, 192, 265, 155, 52, 187, 14, 344, 344, 97, + 50, 50, -38, -38, -63, -63, -63, -63, -44, -44, + -44, -44, -44, -44, -1000, 294, 120, 120, 120, -1000, + 287, -1000, 131, -1000, 205, 315, 257, 363, 353, 351, + 346, 300, -1000, -1000, -1000, -1000, -1000, -1000, 33, 155, + 259, 230, 198, 255, 214, 184, 33, 14, 49, 114, + -1000, 110, 121, 294, 123, -1000, 
313, 271, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, - -1000, -1000, -1000, -1000, -1000, -1000, -1000, -25, -25, -25, - -25, -25, -25, -25, -25, -25, -25, -25, -25, -25, - -25, -25, 286, -1000, 56, 211, 323, -1000, -1000, -1000, - -1000, 98, 49, 133, 202, 194, -1000, 23, 162, 210, - 157, 149, 109, -1000, -1000, 15, -1000, 15, 15, 15, - 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, - 15, -1000, 313, -1000, 291, -1000, -1000, -1000, -1000, 311, - -1000, -1000, -1000, 165, 307, 353, 27, -1000, -1000, -1000, - -1000, -1000, 352, -1000, 278, 268, 249, 245, 184, 190, - 271, 105, 47, 107, 15, 351, 351, 96, 79, 79, - -37, -37, -62, -62, -62, -62, -43, -43, -43, -43, - -43, -43, -1000, 291, 165, 165, 165, -1000, 297, -1000, - 104, -1000, 199, 283, 280, 320, 318, 316, 296, 290, - -1000, -1000, -1000, -1000, -1000, -1000, 60, 105, 251, 222, - 261, 345, 257, 113, 60, 15, 37, 100, -1000, 94, - 110, 291, 53, -1000, 306, 263, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, - -1000, 111, -1000, 251, -1000, -1000, 220, 24, 217, 103, - 206, -1000, -1000, 31, -1000, 347, -1000, -1000, -1000, -1000, - -1000, -1000, 60, -1000, -1000, -1000, 102, -1000, -1000, -31, - 214, 213, 85, 60, -1000, -1000, 326, -1000, -36, -1000, - -1000, 203, -1000, -3, -1000, -1000, + 106, -1000, 259, -1000, -1000, 228, 15, 225, 69, 127, + -1000, -1000, 48, -1000, 343, -1000, -1000, -1000, -1000, -1000, + -1000, 33, -1000, -1000, -1000, 104, -1000, -1000, -32, 222, + 220, 51, 33, -1000, -1000, 342, -1000, -37, -1000, -1000, + 215, -1000, 4, -1000, -1000, } -var exprPgo = [...]int{ - 0, 374, 16, 18, 0, 6, 298, 4, 15, 7, - 373, 372, 371, 370, 13, 369, 368, 366, 365, 293, - 364, 9, 2, 363, 362, 5, 361, 360, 359, 3, - 358, 1, +var exprPgo = [...]int{ + 0, 389, 16, 14, 0, 6, 288, 4, 15, 7, + 388, 387, 386, 385, 13, 384, 383, 382, 381, 291, + 380, 9, 2, 379, 378, 377, 5, 376, 374, 373, + 3, 356, 1, 293, } -var exprR1 = [...]int{ +var exprR1 = [...]int{ 0, 1, 2, 2, 7, 7, 7, 7, 7, 6, 6, 6, 8, 8, 8, 8, 8, 8, 8, 8, - 8, 8, 8, 8, 8, 8, 31, 31, 13, 13, + 8, 8, 8, 8, 8, 8, 32, 32, 13, 13, 11, 11, 11, 11, 15, 15, 15, 15, 15, 3, 3, 3, 3, 14, 14, 14, 10, 10, 9, 9, - 9, 9, 21, 21, 22, 22, 22, 22, 22, 26, - 26, 20, 20, 20, 27, 29, 29, 30, 30, 30, - 28, 25, 25, 25, 25, 25, 25, 25, 25, 24, - 24, 24, 24, 24, 24, 24, 23, 23, 23, 23, - 23, 23, 23, 17, 17, 17, 17, 17, 17, 17, - 17, 17, 17, 17, 17, 17, 17, 17, 19, 19, - 18, 18, 18, 16, 16, 16, 16, 16, 16, 16, - 16, 16, 12, 12, 12, 12, 12, 12, 12, 12, - 12, 12, 12, 5, 5, 4, 4, + 9, 9, 21, 21, 22, 22, 22, 22, 22, 27, + 27, 20, 20, 20, 28, 30, 30, 31, 31, 31, + 29, 26, 26, 26, 26, 26, 26, 26, 26, 33, + 33, 25, 25, 25, 25, 25, 25, 25, 23, 23, + 23, 23, 23, 23, 23, 24, 24, 24, 24, 24, + 24, 24, 17, 17, 17, 17, 17, 17, 17, 17, + 17, 17, 17, 17, 17, 17, 17, 19, 19, 18, + 18, 18, 16, 16, 16, 16, 16, 16, 16, 16, + 16, 12, 12, 12, 12, 12, 12, 12, 12, 12, + 12, 12, 5, 5, 4, 4, } -var exprR2 = [...]int{ +var exprR2 = [...]int{ 0, 1, 1, 1, 1, 1, 1, 1, 3, 1, 2, 3, 2, 4, 3, 5, 3, 5, 3, 5, 4, 6, 3, 4, 3, 2, 3, 6, 1, 1, @@ -314,84 +320,88 @@ var exprR2 = [...]int{ 1, 1, 1, 3, 3, 3, 1, 3, 3, 3, 3, 3, 1, 2, 1, 2, 2, 2, 2, 2, 3, 1, 1, 2, 2, 3, 3, 1, 3, 3, - 2, 1, 1, 1, 3, 2, 3, 3, 3, 3, + 2, 1, 1, 1, 3, 2, 3, 3, 3, 1, + 1, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, - 3, 3, 3, 4, 4, 4, 4, 4, 4, 4, - 4, 4, 4, 4, 4, 4, 4, 4, 0, 1, - 1, 2, 2, 1, 1, 1, 1, 1, 1, 1, + 3, 3, 4, 4, 4, 4, 4, 4, 4, 4, + 4, 4, 4, 4, 4, 4, 4, 0, 1, 1, + 2, 2, 
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, - 1, 1, 1, 1, 3, 4, 4, + 1, 1, 1, 3, 4, 4, } -var exprChk = [...]int{ - -1000, -1, -2, -6, -7, -14, 22, -11, -15, -17, - -18, 14, -12, -16, 6, 65, 66, 26, 27, 37, - 38, 47, 48, 49, 50, 51, 52, 53, 28, 29, - 32, 30, 31, 33, 34, 35, 36, 56, 57, 58, - 65, 66, 67, 68, 69, 70, 59, 60, 63, 64, - 61, 62, -21, -22, -26, 43, -3, 20, 21, 13, - 60, -7, -6, -2, -10, 2, -9, 4, 22, 22, - -4, 24, 25, 6, 6, -19, 39, -19, -19, -19, +var exprChk = [...]int{ + -1000, -1, -2, -6, -7, -14, 23, -11, -15, -17, + -18, 15, -12, -16, 7, 66, 67, 27, 28, 38, + 39, 48, 49, 50, 51, 52, 53, 54, 29, 30, + 33, 31, 32, 34, 35, 36, 37, 57, 58, 59, + 66, 67, 68, 69, 70, 71, 60, 61, 64, 65, + 62, 63, -21, -22, -27, 44, -3, 21, 22, 14, + 61, -7, -6, -2, -10, 2, -9, 5, 23, 23, + -4, 25, 26, 7, 7, -19, 40, -19, -19, -19, -19, -19, -19, -19, -19, -19, -19, -19, -19, -19, - -19, -22, -3, -20, -25, -27, -28, 40, 42, 41, - -9, -24, -23, 22, 44, 45, 4, 5, 23, 23, - 15, 2, 18, 15, 11, 60, 12, 13, -8, 6, - -14, 22, -7, 6, 22, 22, 22, -2, -2, -2, + -19, -22, -3, -20, -26, -28, -29, 41, 43, 42, + -9, -33, -24, 23, 45, 46, 5, -25, -23, 6, + 24, 24, 16, 2, 19, 16, 12, 61, 13, 14, + -8, 7, -14, 23, -7, 7, 23, 23, 23, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, -2, - -2, -2, 5, -25, 57, 18, 56, 5, -25, 5, - -30, -29, 4, 11, 60, 63, 64, 61, 62, 59, - -9, 5, 5, 5, 5, 2, 23, 18, 8, -31, - -21, 43, -14, -8, 23, 18, -7, -5, 4, -5, - -25, -25, -25, 23, 18, 11, 7, 6, 7, 6, - 7, 6, 7, 6, 7, 6, 7, 6, 7, 6, - -4, -8, -31, -21, 8, 8, -31, 46, 23, -31, - -21, 23, -4, -7, 23, 18, 23, 23, -29, 2, - 4, 5, 23, -31, 8, 4, -13, 54, 55, 8, - 23, 23, -31, 23, 4, -4, 22, -31, 43, 8, - 8, 23, -4, 4, 8, 23, + -2, -2, -2, -2, 6, -26, 58, 19, 57, 6, + -26, 6, -31, -30, 5, 12, 61, 64, 65, 62, + 63, 60, -9, 6, 6, 6, 6, 2, 24, 19, + 9, -32, -21, 44, -14, -8, 24, 19, -7, -5, + 5, -5, -26, -26, -26, 24, 19, 12, 8, 4, + 7, 8, 4, 7, 8, 4, 7, 8, 4, 7, + 8, 4, 7, 8, 4, 7, 8, 4, 7, -4, + -8, -32, -21, 9, 9, -32, 47, 24, -32, -21, + 24, -4, -7, 24, 19, 24, 24, -30, 2, 5, + 6, 24, -32, 9, 5, -13, 55, 56, 9, 24, + 24, -32, 24, 5, -4, 23, -32, 44, 9, 9, + 24, -4, 5, 9, 24, } -var exprDef = [...]int{ +var exprDef = [...]int{ 0, -2, 1, 2, 3, 9, 0, 4, 5, 6, - 7, 0, 0, 0, 110, 0, 0, 122, 123, 124, - 125, 126, 127, 128, 129, 130, 131, 132, 113, 114, - 115, 116, 117, 118, 119, 120, 121, 108, 108, 108, - 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, - 108, 108, 10, 52, 54, 0, 0, 39, 40, 41, + 7, 0, 0, 0, 119, 0, 0, 131, 132, 133, + 134, 135, 136, 137, 138, 139, 140, 141, 122, 123, + 124, 125, 126, 127, 128, 129, 130, 117, 117, 117, + 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, + 117, 117, 10, 52, 54, 0, 0, 39, 40, 41, 42, 3, 2, 0, 0, 0, 46, 0, 0, 0, - 0, 0, 0, 111, 112, 0, 109, 0, 0, 0, + 0, 0, 0, 120, 121, 0, 118, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 53, 0, 55, 56, 57, 58, 61, 62, 0, - 71, 72, 73, 0, 0, 0, 0, 59, 8, 11, - 43, 44, 0, 45, 0, 0, 0, 0, 0, 0, - 0, 0, 3, 110, 0, 0, 0, 93, 94, 95, - 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, - 106, 107, 60, 75, 0, 0, 0, 63, 0, 64, - 70, 67, 0, 0, 0, 0, 0, 0, 0, 0, - 47, 48, 49, 50, 51, 25, 30, 0, 12, 0, - 0, 0, 0, 0, 34, 0, 3, 0, 133, 0, - 76, 77, 78, 74, 0, 0, 84, 91, 83, 90, - 79, 86, 80, 87, 81, 88, 82, 89, 85, 92, - 32, 0, 14, 22, 16, 18, 0, 0, 0, 0, - 0, 24, 36, 3, 35, 0, 135, 136, 68, 69, - 65, 66, 31, 23, 20, 26, 0, 28, 29, 13, - 0, 0, 0, 37, 134, 33, 0, 15, 0, 17, - 19, 0, 38, 0, 21, 27, + 71, 72, 73, 0, 0, 0, 0, 79, 80, 59, + 8, 11, 43, 44, 0, 45, 0, 0, 0, 0, + 0, 0, 
0, 0, 3, 119, 0, 0, 0, 102, + 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, + 113, 114, 115, 116, 60, 75, 0, 0, 0, 63, + 0, 64, 70, 67, 0, 0, 0, 0, 0, 0, + 0, 0, 47, 48, 49, 50, 51, 25, 30, 0, + 12, 0, 0, 0, 0, 0, 34, 0, 3, 0, + 142, 0, 76, 77, 78, 74, 0, 0, 86, 93, + 100, 85, 92, 99, 81, 88, 95, 82, 89, 96, + 83, 90, 97, 84, 91, 98, 87, 94, 101, 32, + 0, 14, 22, 16, 18, 0, 0, 0, 0, 0, + 24, 36, 3, 35, 0, 144, 145, 68, 69, 65, + 66, 31, 23, 20, 26, 0, 28, 29, 13, 0, + 0, 0, 37, 143, 33, 0, 15, 0, 17, 19, + 0, 38, 0, 21, 27, } -var exprTok1 = [...]int{ +var exprTok1 = [...]int{ 1, } -var exprTok2 = [...]int{ +var exprTok2 = [...]int{ 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, - 62, 63, 64, 65, 66, 67, 68, 69, 70, + 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, } + var exprTok3 = [...]int{ 0, } @@ -735,804 +745,858 @@ exprdefault: case 1: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:97 +//line expr.y:103 { exprlex.(*lexer).expr = exprDollar[1].Expr } case 2: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:100 +//line expr.y:106 { exprVAL.Expr = exprDollar[1].LogExpr } case 3: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:101 +//line expr.y:107 { exprVAL.Expr = exprDollar[1].MetricExpr } case 4: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:105 +//line expr.y:111 { exprVAL.MetricExpr = exprDollar[1].RangeAggregationExpr } case 5: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:106 +//line expr.y:112 { exprVAL.MetricExpr = exprDollar[1].VectorAggregationExpr } case 6: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:107 +//line expr.y:113 { exprVAL.MetricExpr = exprDollar[1].BinOpExpr } case 7: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:108 +//line expr.y:114 { exprVAL.MetricExpr = exprDollar[1].LiteralExpr } case 8: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:109 +//line expr.y:115 { exprVAL.MetricExpr = exprDollar[2].MetricExpr } case 9: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:113 +//line expr.y:119 { exprVAL.LogExpr = newMatcherExpr(exprDollar[1].Selector) } case 10: exprDollar = exprS[exprpt-2 : exprpt+1] -//line pkg/logql/expr.y:114 +//line expr.y:120 { exprVAL.LogExpr = newPipelineExpr(newMatcherExpr(exprDollar[1].Selector), exprDollar[2].PipelineExpr) } case 11: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:115 +//line expr.y:121 { exprVAL.LogExpr = exprDollar[2].LogExpr } case 12: exprDollar = exprS[exprpt-2 : exprpt+1] -//line pkg/logql/expr.y:119 +//line expr.y:125 { exprVAL.LogRangeExpr = newLogRange(newMatcherExpr(exprDollar[1].Selector), exprDollar[2].duration, nil) } case 13: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:120 +//line expr.y:126 { exprVAL.LogRangeExpr = newLogRange(newMatcherExpr(exprDollar[2].Selector), exprDollar[4].duration, nil) } case 14: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:121 +//line expr.y:127 { exprVAL.LogRangeExpr = newLogRange(newMatcherExpr(exprDollar[1].Selector), exprDollar[2].duration, exprDollar[3].UnwrapExpr) } case 15: exprDollar = exprS[exprpt-5 : exprpt+1] -//line pkg/logql/expr.y:122 +//line expr.y:128 { exprVAL.LogRangeExpr = newLogRange(newMatcherExpr(exprDollar[2].Selector), exprDollar[4].duration, 
exprDollar[5].UnwrapExpr) } case 16: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:123 +//line expr.y:129 { exprVAL.LogRangeExpr = newLogRange(newMatcherExpr(exprDollar[1].Selector), exprDollar[3].duration, exprDollar[2].UnwrapExpr) } case 17: exprDollar = exprS[exprpt-5 : exprpt+1] -//line pkg/logql/expr.y:124 +//line expr.y:130 { exprVAL.LogRangeExpr = newLogRange(newMatcherExpr(exprDollar[2].Selector), exprDollar[5].duration, exprDollar[3].UnwrapExpr) } case 18: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:125 +//line expr.y:131 { exprVAL.LogRangeExpr = newLogRange(newPipelineExpr(newMatcherExpr(exprDollar[1].Selector), exprDollar[2].PipelineExpr), exprDollar[3].duration, nil) } case 19: exprDollar = exprS[exprpt-5 : exprpt+1] -//line pkg/logql/expr.y:126 +//line expr.y:132 { exprVAL.LogRangeExpr = newLogRange(newPipelineExpr(newMatcherExpr(exprDollar[2].Selector), exprDollar[3].PipelineExpr), exprDollar[5].duration, nil) } case 20: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:127 +//line expr.y:133 { exprVAL.LogRangeExpr = newLogRange(newPipelineExpr(newMatcherExpr(exprDollar[1].Selector), exprDollar[2].PipelineExpr), exprDollar[4].duration, exprDollar[3].UnwrapExpr) } case 21: exprDollar = exprS[exprpt-6 : exprpt+1] -//line pkg/logql/expr.y:128 +//line expr.y:134 { exprVAL.LogRangeExpr = newLogRange(newPipelineExpr(newMatcherExpr(exprDollar[2].Selector), exprDollar[3].PipelineExpr), exprDollar[6].duration, exprDollar[4].UnwrapExpr) } case 22: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:129 +//line expr.y:135 { exprVAL.LogRangeExpr = newLogRange(newPipelineExpr(newMatcherExpr(exprDollar[1].Selector), exprDollar[3].PipelineExpr), exprDollar[2].duration, nil) } case 23: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:130 +//line expr.y:136 { exprVAL.LogRangeExpr = newLogRange(newPipelineExpr(newMatcherExpr(exprDollar[1].Selector), exprDollar[3].PipelineExpr), exprDollar[2].duration, exprDollar[4].UnwrapExpr) } case 24: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:131 +//line expr.y:137 { exprVAL.LogRangeExpr = exprDollar[2].LogRangeExpr } case 26: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:136 +//line expr.y:142 { exprVAL.UnwrapExpr = newUnwrapExpr(exprDollar[3].str, "") } case 27: exprDollar = exprS[exprpt-6 : exprpt+1] -//line pkg/logql/expr.y:137 +//line expr.y:143 { exprVAL.UnwrapExpr = newUnwrapExpr(exprDollar[5].str, exprDollar[3].ConvOp) } case 28: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:141 +//line expr.y:147 { exprVAL.ConvOp = OpConvDuration } case 29: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:142 +//line expr.y:148 { exprVAL.ConvOp = OpConvDurationSeconds } case 30: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:146 +//line expr.y:152 { exprVAL.RangeAggregationExpr = newRangeAggregationExpr(exprDollar[3].LogRangeExpr, exprDollar[1].RangeOp, nil, nil) } case 31: exprDollar = exprS[exprpt-6 : exprpt+1] -//line pkg/logql/expr.y:147 +//line expr.y:153 { exprVAL.RangeAggregationExpr = newRangeAggregationExpr(exprDollar[5].LogRangeExpr, exprDollar[1].RangeOp, nil, &exprDollar[3].str) } case 32: exprDollar = exprS[exprpt-5 : exprpt+1] -//line pkg/logql/expr.y:148 +//line expr.y:154 { exprVAL.RangeAggregationExpr = newRangeAggregationExpr(exprDollar[3].LogRangeExpr, exprDollar[1].RangeOp, exprDollar[5].Grouping, nil) } case 33: exprDollar = exprS[exprpt-7 : exprpt+1] -//line 
pkg/logql/expr.y:149 +//line expr.y:155 { exprVAL.RangeAggregationExpr = newRangeAggregationExpr(exprDollar[5].LogRangeExpr, exprDollar[1].RangeOp, exprDollar[7].Grouping, &exprDollar[3].str) } case 34: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:154 +//line expr.y:160 { exprVAL.VectorAggregationExpr = mustNewVectorAggregationExpr(exprDollar[3].MetricExpr, exprDollar[1].VectorOp, nil, nil) } case 35: exprDollar = exprS[exprpt-5 : exprpt+1] -//line pkg/logql/expr.y:155 +//line expr.y:161 { exprVAL.VectorAggregationExpr = mustNewVectorAggregationExpr(exprDollar[4].MetricExpr, exprDollar[1].VectorOp, exprDollar[2].Grouping, nil) } case 36: exprDollar = exprS[exprpt-5 : exprpt+1] -//line pkg/logql/expr.y:156 +//line expr.y:162 { exprVAL.VectorAggregationExpr = mustNewVectorAggregationExpr(exprDollar[3].MetricExpr, exprDollar[1].VectorOp, exprDollar[5].Grouping, nil) } case 37: exprDollar = exprS[exprpt-6 : exprpt+1] -//line pkg/logql/expr.y:158 +//line expr.y:164 { exprVAL.VectorAggregationExpr = mustNewVectorAggregationExpr(exprDollar[5].MetricExpr, exprDollar[1].VectorOp, nil, &exprDollar[3].str) } case 38: exprDollar = exprS[exprpt-7 : exprpt+1] -//line pkg/logql/expr.y:159 +//line expr.y:165 { exprVAL.VectorAggregationExpr = mustNewVectorAggregationExpr(exprDollar[5].MetricExpr, exprDollar[1].VectorOp, exprDollar[7].Grouping, &exprDollar[3].str) } case 39: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:163 +//line expr.y:169 { exprVAL.Filter = labels.MatchRegexp } case 40: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:164 +//line expr.y:170 { exprVAL.Filter = labels.MatchEqual } case 41: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:165 +//line expr.y:171 { exprVAL.Filter = labels.MatchNotRegexp } case 42: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:166 +//line expr.y:172 { exprVAL.Filter = labels.MatchNotEqual } case 43: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:170 +//line expr.y:176 { exprVAL.Selector = exprDollar[2].Matchers } case 44: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:171 +//line expr.y:177 { exprVAL.Selector = exprDollar[2].Matchers } case 45: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:172 +//line expr.y:178 { } case 46: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:176 +//line expr.y:182 { exprVAL.Matchers = []*labels.Matcher{exprDollar[1].Matcher} } case 47: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:177 +//line expr.y:183 { exprVAL.Matchers = append(exprDollar[1].Matchers, exprDollar[3].Matcher) } case 48: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:181 +//line expr.y:187 { exprVAL.Matcher = mustNewMatcher(labels.MatchEqual, exprDollar[1].str, exprDollar[3].str) } case 49: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:182 +//line expr.y:188 { exprVAL.Matcher = mustNewMatcher(labels.MatchNotEqual, exprDollar[1].str, exprDollar[3].str) } case 50: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:183 +//line expr.y:189 { exprVAL.Matcher = mustNewMatcher(labels.MatchRegexp, exprDollar[1].str, exprDollar[3].str) } case 51: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:184 +//line expr.y:190 { exprVAL.Matcher = mustNewMatcher(labels.MatchNotRegexp, exprDollar[1].str, exprDollar[3].str) } case 52: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:188 +//line expr.y:194 { 
exprVAL.PipelineExpr = MultiPipelineExpr{exprDollar[1].PipelineStage} } case 53: exprDollar = exprS[exprpt-2 : exprpt+1] -//line pkg/logql/expr.y:189 +//line expr.y:195 { exprVAL.PipelineExpr = append(exprDollar[1].PipelineExpr, exprDollar[2].PipelineStage) } case 54: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:193 +//line expr.y:199 { exprVAL.PipelineStage = exprDollar[1].LineFilters } case 55: exprDollar = exprS[exprpt-2 : exprpt+1] -//line pkg/logql/expr.y:194 +//line expr.y:200 { exprVAL.PipelineStage = exprDollar[2].LabelParser } case 56: exprDollar = exprS[exprpt-2 : exprpt+1] -//line pkg/logql/expr.y:195 +//line expr.y:201 { exprVAL.PipelineStage = &labelFilterExpr{Filterer: exprDollar[2].LabelFilter} } case 57: exprDollar = exprS[exprpt-2 : exprpt+1] -//line pkg/logql/expr.y:196 +//line expr.y:202 { exprVAL.PipelineStage = exprDollar[2].LineFormatExpr } case 58: exprDollar = exprS[exprpt-2 : exprpt+1] -//line pkg/logql/expr.y:197 +//line expr.y:203 { exprVAL.PipelineStage = exprDollar[2].LabelFormatExpr } case 59: exprDollar = exprS[exprpt-2 : exprpt+1] -//line pkg/logql/expr.y:201 +//line expr.y:207 { exprVAL.LineFilters = newLineFilterExpr(nil, exprDollar[1].Filter, exprDollar[2].str) } case 60: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:202 +//line expr.y:208 { exprVAL.LineFilters = newLineFilterExpr(exprDollar[1].LineFilters, exprDollar[2].Filter, exprDollar[3].str) } case 61: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:205 +//line expr.y:211 { exprVAL.LabelParser = newLabelParserExpr(OpParserTypeJSON, "") } case 62: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:206 +//line expr.y:212 { exprVAL.LabelParser = newLabelParserExpr(OpParserTypeLogfmt, "") } case 63: exprDollar = exprS[exprpt-2 : exprpt+1] -//line pkg/logql/expr.y:207 +//line expr.y:213 { exprVAL.LabelParser = newLabelParserExpr(OpParserTypeRegexp, exprDollar[2].str) } case 64: exprDollar = exprS[exprpt-2 : exprpt+1] -//line pkg/logql/expr.y:210 +//line expr.y:216 { exprVAL.LineFormatExpr = newLineFmtExpr(exprDollar[2].str) } case 65: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:213 +//line expr.y:219 { exprVAL.LabelFormat = newRenameLabelFmt(exprDollar[1].str, exprDollar[3].str) } case 66: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:214 +//line expr.y:220 { exprVAL.LabelFormat = newTemplateLabelFmt(exprDollar[1].str, exprDollar[3].str) } case 67: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:218 +//line expr.y:224 { exprVAL.LabelsFormat = []labelFmt{exprDollar[1].LabelFormat} } case 68: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:219 +//line expr.y:225 { exprVAL.LabelsFormat = append(exprDollar[1].LabelsFormat, exprDollar[3].LabelFormat) } case 70: exprDollar = exprS[exprpt-2 : exprpt+1] -//line pkg/logql/expr.y:223 +//line expr.y:229 { exprVAL.LabelFormatExpr = newLabelFmtExpr(exprDollar[2].LabelsFormat) } case 71: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:226 +//line expr.y:232 { exprVAL.LabelFilter = labelfilter.NewString(exprDollar[1].Matcher) } case 72: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:227 +//line expr.y:233 { - exprVAL.LabelFilter = exprDollar[1].DurationFilter + exprVAL.LabelFilter = exprDollar[1].UnitFilter } case 73: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:228 +//line expr.y:234 { exprVAL.LabelFilter = exprDollar[1].NumberFilter } case 74: exprDollar = exprS[exprpt-3 
: exprpt+1] -//line pkg/logql/expr.y:229 +//line expr.y:235 { exprVAL.LabelFilter = exprDollar[2].LabelFilter } case 75: exprDollar = exprS[exprpt-2 : exprpt+1] -//line pkg/logql/expr.y:230 +//line expr.y:236 { exprVAL.LabelFilter = labelfilter.NewAnd(exprDollar[1].LabelFilter, exprDollar[2].LabelFilter) } case 76: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:231 +//line expr.y:237 { exprVAL.LabelFilter = labelfilter.NewAnd(exprDollar[1].LabelFilter, exprDollar[3].LabelFilter) } case 77: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:232 +//line expr.y:238 { exprVAL.LabelFilter = labelfilter.NewAnd(exprDollar[1].LabelFilter, exprDollar[3].LabelFilter) } case 78: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:233 +//line expr.y:239 { exprVAL.LabelFilter = labelfilter.NewOr(exprDollar[1].LabelFilter, exprDollar[3].LabelFilter) } case 79: + exprDollar = exprS[exprpt-1 : exprpt+1] +//line expr.y:243 + { + exprVAL.UnitFilter = exprDollar[1].DurationFilter + } + case 80: + exprDollar = exprS[exprpt-1 : exprpt+1] +//line expr.y:244 + { + exprVAL.UnitFilter = exprDollar[1].BytesFilter + } + case 81: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:237 +//line expr.y:247 { exprVAL.DurationFilter = labelfilter.NewDuration(labelfilter.FilterGreaterThan, exprDollar[1].str, exprDollar[3].duration) } - case 80: + case 82: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:238 +//line expr.y:248 { exprVAL.DurationFilter = labelfilter.NewDuration(labelfilter.FilterGreaterThanOrEqual, exprDollar[1].str, exprDollar[3].duration) } - case 81: + case 83: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:239 +//line expr.y:249 { exprVAL.DurationFilter = labelfilter.NewDuration(labelfilter.FilterLesserThan, exprDollar[1].str, exprDollar[3].duration) } - case 82: + case 84: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:240 +//line expr.y:250 { exprVAL.DurationFilter = labelfilter.NewDuration(labelfilter.FilterLesserThanOrEqual, exprDollar[1].str, exprDollar[3].duration) } - case 83: + case 85: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:241 +//line expr.y:251 { exprVAL.DurationFilter = labelfilter.NewDuration(labelfilter.FilterNotEqual, exprDollar[1].str, exprDollar[3].duration) } - case 84: + case 86: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:242 +//line expr.y:252 { exprVAL.DurationFilter = labelfilter.NewDuration(labelfilter.FilterEqual, exprDollar[1].str, exprDollar[3].duration) } - case 85: + case 87: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:243 +//line expr.y:253 { exprVAL.DurationFilter = labelfilter.NewDuration(labelfilter.FilterEqual, exprDollar[1].str, exprDollar[3].duration) } - case 86: + case 88: + exprDollar = exprS[exprpt-3 : exprpt+1] +//line expr.y:257 + { + exprVAL.BytesFilter = labelfilter.NewBytes(labelfilter.FilterGreaterThan, exprDollar[1].str, exprDollar[3].bytes) + } + case 89: + exprDollar = exprS[exprpt-3 : exprpt+1] +//line expr.y:258 + { + exprVAL.BytesFilter = labelfilter.NewBytes(labelfilter.FilterGreaterThanOrEqual, exprDollar[1].str, exprDollar[3].bytes) + } + case 90: + exprDollar = exprS[exprpt-3 : exprpt+1] +//line expr.y:259 + { + exprVAL.BytesFilter = labelfilter.NewBytes(labelfilter.FilterLesserThan, exprDollar[1].str, exprDollar[3].bytes) + } + case 91: + exprDollar = exprS[exprpt-3 : exprpt+1] +//line expr.y:260 + { + exprVAL.BytesFilter = 
labelfilter.NewBytes(labelfilter.FilterLesserThanOrEqual, exprDollar[1].str, exprDollar[3].bytes) + } + case 92: + exprDollar = exprS[exprpt-3 : exprpt+1] +//line expr.y:261 + { + exprVAL.BytesFilter = labelfilter.NewBytes(labelfilter.FilterNotEqual, exprDollar[1].str, exprDollar[3].bytes) + } + case 93: + exprDollar = exprS[exprpt-3 : exprpt+1] +//line expr.y:262 + { + exprVAL.BytesFilter = labelfilter.NewBytes(labelfilter.FilterEqual, exprDollar[1].str, exprDollar[3].bytes) + } + case 94: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:247 +//line expr.y:263 + { + exprVAL.BytesFilter = labelfilter.NewBytes(labelfilter.FilterEqual, exprDollar[1].str, exprDollar[3].bytes) + } + case 95: + exprDollar = exprS[exprpt-3 : exprpt+1] +//line expr.y:267 { exprVAL.NumberFilter = labelfilter.NewNumeric(labelfilter.FilterGreaterThan, exprDollar[1].str, mustNewFloat(exprDollar[3].str)) } - case 87: + case 96: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:248 +//line expr.y:268 { exprVAL.NumberFilter = labelfilter.NewNumeric(labelfilter.FilterGreaterThanOrEqual, exprDollar[1].str, mustNewFloat(exprDollar[3].str)) } - case 88: + case 97: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:249 +//line expr.y:269 { exprVAL.NumberFilter = labelfilter.NewNumeric(labelfilter.FilterLesserThan, exprDollar[1].str, mustNewFloat(exprDollar[3].str)) } - case 89: + case 98: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:250 +//line expr.y:270 { exprVAL.NumberFilter = labelfilter.NewNumeric(labelfilter.FilterLesserThanOrEqual, exprDollar[1].str, mustNewFloat(exprDollar[3].str)) } - case 90: + case 99: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:251 +//line expr.y:271 { exprVAL.NumberFilter = labelfilter.NewNumeric(labelfilter.FilterNotEqual, exprDollar[1].str, mustNewFloat(exprDollar[3].str)) } - case 91: + case 100: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:252 +//line expr.y:272 { exprVAL.NumberFilter = labelfilter.NewNumeric(labelfilter.FilterEqual, exprDollar[1].str, mustNewFloat(exprDollar[3].str)) } - case 92: + case 101: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:253 +//line expr.y:273 { exprVAL.NumberFilter = labelfilter.NewNumeric(labelfilter.FilterEqual, exprDollar[1].str, mustNewFloat(exprDollar[3].str)) } - case 93: + case 102: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:259 +//line expr.y:279 { exprVAL.BinOpExpr = mustNewBinOpExpr("or", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } - case 94: + case 103: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:260 +//line expr.y:280 { exprVAL.BinOpExpr = mustNewBinOpExpr("and", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } - case 95: + case 104: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:261 +//line expr.y:281 { exprVAL.BinOpExpr = mustNewBinOpExpr("unless", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } - case 96: + case 105: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:262 +//line expr.y:282 { exprVAL.BinOpExpr = mustNewBinOpExpr("+", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } - case 97: + case 106: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:263 +//line expr.y:283 { exprVAL.BinOpExpr = mustNewBinOpExpr("-", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } - case 98: + case 107: exprDollar = exprS[exprpt-4 : 
exprpt+1] -//line pkg/logql/expr.y:264 +//line expr.y:284 { exprVAL.BinOpExpr = mustNewBinOpExpr("*", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } - case 99: + case 108: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:265 +//line expr.y:285 { exprVAL.BinOpExpr = mustNewBinOpExpr("/", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } - case 100: + case 109: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:266 +//line expr.y:286 { exprVAL.BinOpExpr = mustNewBinOpExpr("%", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } - case 101: + case 110: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:267 +//line expr.y:287 { exprVAL.BinOpExpr = mustNewBinOpExpr("^", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } - case 102: + case 111: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:268 +//line expr.y:288 { exprVAL.BinOpExpr = mustNewBinOpExpr("==", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } - case 103: + case 112: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:269 +//line expr.y:289 { exprVAL.BinOpExpr = mustNewBinOpExpr("!=", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } - case 104: + case 113: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:270 +//line expr.y:290 { exprVAL.BinOpExpr = mustNewBinOpExpr(">", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } - case 105: + case 114: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:271 +//line expr.y:291 { exprVAL.BinOpExpr = mustNewBinOpExpr(">=", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } - case 106: + case 115: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:272 +//line expr.y:292 { exprVAL.BinOpExpr = mustNewBinOpExpr("<", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } - case 107: + case 116: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:273 +//line expr.y:293 { exprVAL.BinOpExpr = mustNewBinOpExpr("<=", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } - case 108: + case 117: exprDollar = exprS[exprpt-0 : exprpt+1] -//line pkg/logql/expr.y:277 +//line expr.y:297 { exprVAL.BinOpModifier = BinOpOptions{} } - case 109: + case 118: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:278 +//line expr.y:298 { exprVAL.BinOpModifier = BinOpOptions{ReturnBool: true} } - case 110: + case 119: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:282 +//line expr.y:302 { exprVAL.LiteralExpr = mustNewLiteralExpr(exprDollar[1].str, false) } - case 111: + case 120: exprDollar = exprS[exprpt-2 : exprpt+1] -//line pkg/logql/expr.y:283 +//line expr.y:303 { exprVAL.LiteralExpr = mustNewLiteralExpr(exprDollar[2].str, false) } - case 112: + case 121: exprDollar = exprS[exprpt-2 : exprpt+1] -//line pkg/logql/expr.y:284 +//line expr.y:304 { exprVAL.LiteralExpr = mustNewLiteralExpr(exprDollar[2].str, true) } - case 113: + case 122: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:288 +//line expr.y:308 { exprVAL.VectorOp = OpTypeSum } - case 114: + case 123: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:289 +//line expr.y:309 { exprVAL.VectorOp = OpTypeAvg } - case 115: + case 124: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:290 +//line expr.y:310 { exprVAL.VectorOp = OpTypeCount } - case 116: + case 125: 
exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:291 +//line expr.y:311 { exprVAL.VectorOp = OpTypeMax } - case 117: + case 126: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:292 +//line expr.y:312 { exprVAL.VectorOp = OpTypeMin } - case 118: + case 127: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:293 +//line expr.y:313 { exprVAL.VectorOp = OpTypeStddev } - case 119: + case 128: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:294 +//line expr.y:314 { exprVAL.VectorOp = OpTypeStdvar } - case 120: + case 129: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:295 +//line expr.y:315 { exprVAL.VectorOp = OpTypeBottomK } - case 121: + case 130: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:296 +//line expr.y:316 { exprVAL.VectorOp = OpTypeTopK } - case 122: + case 131: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:300 +//line expr.y:320 { exprVAL.RangeOp = OpRangeTypeCount } - case 123: + case 132: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:301 +//line expr.y:321 { exprVAL.RangeOp = OpRangeTypeRate } - case 124: + case 133: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:302 +//line expr.y:322 { exprVAL.RangeOp = OpRangeTypeBytes } - case 125: + case 134: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:303 +//line expr.y:323 { exprVAL.RangeOp = OpRangeTypeBytesRate } - case 126: + case 135: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:304 +//line expr.y:324 { exprVAL.RangeOp = OpRangeTypeAvg } - case 127: + case 136: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:305 +//line expr.y:325 { exprVAL.RangeOp = OpRangeTypeSum } - case 128: + case 137: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:306 +//line expr.y:326 { exprVAL.RangeOp = OpRangeTypeMin } - case 129: + case 138: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:307 +//line expr.y:327 { exprVAL.RangeOp = OpRangeTypeMax } - case 130: + case 139: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:308 +//line expr.y:328 { exprVAL.RangeOp = OpRangeTypeStdvar } - case 131: + case 140: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:309 +//line expr.y:329 { exprVAL.RangeOp = OpRangeTypeStddev } - case 132: + case 141: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:310 +//line expr.y:330 { exprVAL.RangeOp = OpRangeTypeQuantile } - case 133: + case 142: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:315 +//line expr.y:335 { exprVAL.Labels = []string{exprDollar[1].str} } - case 134: + case 143: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:316 +//line expr.y:336 { exprVAL.Labels = append(exprDollar[1].Labels, exprDollar[3].str) } - case 135: + case 144: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:320 +//line expr.y:340 { exprVAL.Grouping = &grouping{without: false, groups: exprDollar[3].Labels} } - case 136: + case 145: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:321 +//line expr.y:341 { exprVAL.Grouping = &grouping{without: true, groups: exprDollar[3].Labels} } diff --git a/pkg/logql/labelfilter/bytes.go b/pkg/logql/labelfilter/bytes.go new file mode 100644 index 0000000000000..a07f6dce9a6e3 --- /dev/null +++ b/pkg/logql/labelfilter/bytes.go @@ -0,0 +1,55 @@ +package labelfilter + +import ( + "fmt" + + "github.com/dustin/go-humanize" + 
"github.com/prometheus/prometheus/pkg/labels" +) + +type Bytes struct { + Name string + Value uint64 + Type FilterType +} + +func NewBytes(t FilterType, name string, b uint64) *Bytes{ + return &Bytes{ + Name: name, + Type: t, + Value: b, + } +} + +func (d *Bytes) Filter(lbs labels.Labels) (bool, error) { + for _, l := range lbs { + if l.Name == d.Name { + value, err := humanize.ParseBytes(l.Value) + if err != nil { + return false, errConversion + } + switch d.Type { + case FilterEqual: + return value == d.Value, nil + case FilterNotEqual: + return value != d.Value, nil + case FilterGreaterThan: + return value > d.Value, nil + case FilterGreaterThanOrEqual: + return value >= d.Value, nil + case FilterLesserThan: + return value < d.Value, nil + case FilterLesserThanOrEqual: + return value <= d.Value, nil + default: + return false, errUnsupportedType + } + } + } + // we have not found this label. + return false, nil +} + +func (d *Bytes) String() string { + return fmt.Sprintf("%s%s%d", d.Name, d.Type, d.Value) +} \ No newline at end of file diff --git a/pkg/logql/labelfilter/filter_test.go b/pkg/logql/labelfilter/filter_test.go index 2da0bf5cf77d6..9399fe1e48c2f 100644 --- a/pkg/logql/labelfilter/filter_test.go +++ b/pkg/logql/labelfilter/filter_test.go @@ -22,6 +22,12 @@ func TestBinary_Filter(t *testing.T) { true, false, }, + { + NewAnd(NewNumeric(FilterEqual, "foo", 5), NewBytes(FilterEqual, "bar", 42)), + labels.Labels{labels.Label{Name: "foo", Value: "5"}, labels.Label{Name: "bar", Value: "42B"}}, + true, + false, + }, { NewAnd( NewNumeric(FilterEqual, "foo", 5), diff --git a/pkg/logql/lex.go b/pkg/logql/lex.go index 2bd897635c7f6..336b56674afc5 100644 --- a/pkg/logql/lex.go +++ b/pkg/logql/lex.go @@ -7,6 +7,7 @@ import ( "time" "unicode" + "github.com/dustin/go-humanize" "github.com/prometheus/common/model" ) @@ -104,13 +105,21 @@ func (l *lexer) Lex(lval *exprSymType) int { case scanner.Int, scanner.Float: numberText := l.TokenText() + duration, ok := tryScanDuration(numberText, &l.Scanner) - if !ok { - lval.str = numberText - return NUMBER + if ok { + lval.duration = duration + return DURATION + } + + bytes, ok := tryScanBytes(numberText, &l.Scanner) + if ok { + lval.bytes = bytes + return BYTES } - lval.duration = duration - return DURATION + + lval.str = numberText + return NUMBER case scanner.String, scanner.RawString: var err error @@ -212,6 +221,47 @@ func isDurationRune(r rune) bool { } } +func tryScanBytes(number string, l *scanner.Scanner) (uint64, bool) { + var sb strings.Builder + sb.WriteString(number) + //copy the scanner to avoid advancing it in case it's not a duration. + s := *l + consumed := 0 + for r := s.Peek(); r != scanner.EOF && !unicode.IsSpace(r); r = s.Peek() { + if !unicode.IsNumber(r) && !isBytesSizeRune(r) && r != '.' { + break + } + _, _ = sb.WriteRune(r) + _ = s.Next() + consumed++ + } + + if consumed == 0 { + return 0, false + } + // we've found more characters before a whitespace or the end + b, err := humanize.ParseBytes(sb.String()) + if err != nil { + return 0, false + } + // we need to consume the scanner, now that we know this is a duration. 
+	for i := 0; i < consumed; i++ {
+		_ = l.Next()
+	}
+	return b, true
+}
+
+func isBytesSizeRune(r rune) bool {
+	// B, kB, MB, GB, TB, PB, EB, ZB, YB
+	// KB, KiB, MiB, GiB, TiB, PiB, EiB, ZiB, YiB
+	switch r {
+	case 'B', 'i', 'k', 'K', 'M', 'G', 'T', 'P', 'E', 'Z', 'Y':
+		return true
+	default:
+		return false
+	}
+}
+
 // isFunction checks if the next runes are either an open parenthesis
 // or by/without tokens. This allows functions and identifiers to be dissociated correctly.
 func isFunction(sc scanner.Scanner) bool {
diff --git a/pkg/logql/lex_test.go b/pkg/logql/lex_test.go
index 205c18f274cd9..1d49fd8dfe8ec 100644
--- a/pkg/logql/lex_test.go
+++ b/pkg/logql/lex_test.go
@@ -29,6 +29,9 @@ func TestLex(t *testing.T) {
 		{`{foo="bar"} |~ "\\w+" | latency > 1h0.0m0s or foo == 4.00 and bar ="foo" | unwrap foo`,
 			[]int{OPEN_BRACE, IDENTIFIER, EQ, STRING, CLOSE_BRACE, PIPE_MATCH, STRING, PIPE, IDENTIFIER, GT, DURATION, OR, IDENTIFIER, CMP_EQ, NUMBER, AND, IDENTIFIER, EQ, STRING, PIPE, UNWRAP, IDENTIFIER}},
+		{`{foo="bar"} |~ "\\w+" | size > 250kB`, []int{OPEN_BRACE, IDENTIFIER, EQ, STRING, CLOSE_BRACE, PIPE_MATCH, STRING, PIPE, IDENTIFIER, GT, BYTES}},
+		{`{foo="bar"} |~ "\\w+" | size > 200MiB or foo == 4.00`,
+			[]int{OPEN_BRACE, IDENTIFIER, EQ, STRING, CLOSE_BRACE, PIPE_MATCH, STRING, PIPE, IDENTIFIER, GT, BYTES, OR, IDENTIFIER, CMP_EQ, NUMBER}},
 		{`{ foo = "bar" }`, []int{OPEN_BRACE, IDENTIFIER, EQ, STRING, CLOSE_BRACE}},
 		{`{ foo != "bar" }`, []int{OPEN_BRACE, IDENTIFIER, NEQ, STRING, CLOSE_BRACE}},
 		{`{ foo =~ "bar" }`, []int{OPEN_BRACE, IDENTIFIER, RE, STRING, CLOSE_BRACE}},

From 13132ad8a5eb70593437df0db52f29bbcb33e6c6 Mon Sep 17 00:00:00 2001
From: Cyril Tovena
Date: Mon, 12 Oct 2020 10:07:24 +0200
Subject: [PATCH 28/45] WIP on error handling.

Signed-off-by: Cyril Tovena
---
 pkg/logql/ast.go                   | 17 +-
 pkg/logql/ast_test.go              |  8 +
 pkg/logql/error.go                 |  9 +
 pkg/logql/expr.y                   |  1 +
 pkg/logql/expr.y.go                | 569 +++++++++++++++--------------
 pkg/logql/functions.go             |  2 +-
 pkg/logql/labels_parser.go         |  9 +-
 pkg/logql/labels_parser_test.go    |  2 +-
 pkg/logql/series_extractor.go      | 21 +-
 pkg/logql/series_extractor_test.go |  6 +-
 10 files changed, 342 insertions(+), 302 deletions(-)
 create mode 100644 pkg/logql/error.go

diff --git a/pkg/logql/ast.go b/pkg/logql/ast.go
index 2e8ffa3a1f497..82eef2dac26cf 100644
--- a/pkg/logql/ast.go
+++ b/pkg/logql/ast.go
@@ -473,13 +473,26 @@ func mustNewFloat(s string) float64 {
 type unwrapExpr struct {
 	identifier string
 	operation  string
+
+	postFilters []labelfilter.Filterer
 }
 
 func (u unwrapExpr) String() string {
+	var sb strings.Builder
 	if u.operation != "" {
-		return fmt.Sprintf("%s %s %s(%s)", OpPipe, OpUnwrap, u.operation, u.identifier)
+		sb.WriteString(fmt.Sprintf(" %s %s %s(%s)", OpPipe, OpUnwrap, u.operation, u.identifier))
+	} else {
+		sb.WriteString(fmt.Sprintf(" %s %s %s", OpPipe, OpUnwrap, u.identifier))
+	}
+	for _, f := range u.postFilters {
+		sb.WriteString(fmt.Sprintf(" %s %s", OpPipe, f))
 	}
-	return fmt.Sprintf("%s %s %s", OpPipe, OpUnwrap, u.identifier)
+	return sb.String()
+}
+
+func (u *unwrapExpr) addPostFilter(f labelfilter.Filterer) *unwrapExpr {
+	u.postFilters = append(u.postFilters, f)
+	return u
 }
 
 func newUnwrapExpr(id string, operation string) *unwrapExpr {
diff --git a/pkg/logql/ast_test.go b/pkg/logql/ast_test.go
index e3250b0d4fdb6..2340ecaab002b 100644
--- a/pkg/logql/ast_test.go
+++ b/pkg/logql/ast_test.go
@@ -90,6 +90,14 @@ func Test_SampleExpr_String(t *testing.T) {
 		/
 		count_over_time({namespace="tns"} | logfmt | label_format foo=bar[5m])
 	)`,
+
`sum_over_time({namespace="tns"} |= "level=error" | json |foo>=5,bar<25ms | unwrap latency | __error__!~".*" | foo >5[5m])`, + `sum by (job) ( + sum_over_time( + {namespace="tns"} |= "level=error" | json | avg=5 and bar<25ms | unwrap duration(latency) | __error__!~".*" [5m] + ) + / + count_over_time({namespace="tns"} | logfmt | label_format foo=bar[5m]) + )`, } { t.Run(tc, func(t *testing.T) { expr, err := ParseExpr(tc) diff --git a/pkg/logql/error.go b/pkg/logql/error.go new file mode 100644 index 0000000000000..7f0ce0e34bac6 --- /dev/null +++ b/pkg/logql/error.go @@ -0,0 +1,9 @@ +package logql + +var ( + errJSON = "JSONParser" + errLogfmt = "LogfmtParser" + errSampleExtraction = "SampleExtraction" + + errorLabel = "__error__" +) diff --git a/pkg/logql/expr.y b/pkg/logql/expr.y index 5e885e0ebde3e..f2718b52bc243 100644 --- a/pkg/logql/expr.y +++ b/pkg/logql/expr.y @@ -135,6 +135,7 @@ logRangeExpr: unwrapExpr: PIPE UNWRAP IDENTIFIER { $$ = newUnwrapExpr($3, "")} | PIPE UNWRAP convOp OPEN_PARENTHESIS IDENTIFIER CLOSE_PARENTHESIS { $$ = newUnwrapExpr($5, $3)} + | unwrapExpr PIPE labelFilter { $$ = $1.addPostFilter($3) } ; convOp: diff --git a/pkg/logql/expr.y.go b/pkg/logql/expr.y.go index 88dab45cc41ad..aa97f77e5468f 100644 --- a/pkg/logql/expr.y.go +++ b/pkg/logql/expr.y.go @@ -197,7 +197,7 @@ const exprEofCode = 1 const exprErrCode = 2 const exprInitialStackSize = 16 -//line pkg/logql/expr.y:323 +//line pkg/logql/expr.y:324 //line yacctab:1 var exprExca = [...]int{ @@ -208,119 +208,118 @@ var exprExca = [...]int{ const exprPrivate = 57344 -const exprLast = 392 +const exprLast = 387 var exprAct = [...]int{ - 70, 169, 53, 151, 4, 143, 177, 100, 45, 52, - 207, 61, 238, 5, 76, 118, 63, 2, 56, 66, - 245, 14, 40, 41, 42, 43, 44, 45, 225, 11, - 42, 43, 44, 45, 114, 116, 117, 6, 153, 116, - 117, 17, 18, 28, 29, 31, 32, 30, 33, 34, - 35, 36, 19, 20, 233, 91, 69, 106, 71, 72, - 214, 94, 21, 22, 23, 24, 25, 26, 27, 59, - 174, 145, 109, 92, 122, 103, 57, 58, 227, 228, - 15, 16, 120, 115, 71, 72, 159, 154, 157, 158, - 155, 156, 127, 224, 128, 129, 130, 131, 132, 133, - 134, 135, 136, 137, 138, 139, 140, 141, 241, 148, - 144, 204, 215, 165, 106, 165, 60, 217, 215, 11, - 160, 108, 184, 216, 236, 175, 230, 121, 145, 176, - 170, 126, 103, 179, 222, 172, 211, 173, 46, 47, - 50, 51, 48, 49, 40, 41, 42, 43, 44, 45, - 180, 181, 182, 38, 39, 46, 47, 50, 51, 48, - 49, 40, 41, 42, 43, 44, 45, 200, 119, 106, - 202, 125, 206, 91, 209, 212, 11, 94, 203, 124, - 213, 120, 210, 201, 121, 68, 165, 103, 218, 37, - 38, 39, 46, 47, 50, 51, 48, 49, 40, 41, - 42, 43, 44, 45, 111, 223, 91, 166, 167, 113, - 185, 244, 232, 91, 205, 106, 123, 110, 74, 59, - 112, 240, 239, 235, 11, 229, 57, 58, 224, 231, - 204, 237, 6, 103, 242, 73, 17, 18, 28, 29, - 31, 32, 30, 33, 34, 35, 36, 19, 20, 171, - 164, 97, 99, 98, 163, 104, 105, 21, 22, 23, - 24, 25, 26, 27, 59, 168, 60, 220, 221, 205, - 59, 57, 58, 162, 59, 15, 16, 57, 58, 168, - 208, 57, 58, 161, 59, 162, 189, 188, 161, 187, - 186, 57, 58, 75, 171, 106, 199, 198, 3, 59, - 171, 106, 197, 196, 171, 62, 57, 58, 219, 145, - 152, 60, 149, 103, 171, 145, 147, 60, 142, 103, - 183, 60, 195, 194, 193, 192, 191, 190, 107, 55, - 243, 60, 77, 78, 79, 80, 81, 82, 83, 84, - 85, 86, 87, 88, 89, 90, 60, 146, 144, 106, - 65, 234, 67, 146, 144, 178, 67, 152, 150, 96, - 95, 54, 101, 102, 93, 10, 9, 103, 13, 8, - 226, 12, 7, 64, 1, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 97, 99, 98, 0, 104, - 105, 207, + 70, 169, 53, 151, 143, 4, 177, 100, 63, 2, + 52, 45, 61, 208, 5, 56, 118, 205, 226, 66, + 14, 40, 41, 42, 
43, 44, 45, 240, 11, 42, + 43, 44, 45, 243, 76, 69, 6, 71, 72, 247, + 17, 18, 28, 29, 31, 32, 30, 33, 34, 35, + 36, 19, 20, 205, 106, 91, 114, 116, 117, 227, + 94, 21, 22, 23, 24, 25, 26, 27, 145, 206, + 92, 235, 103, 183, 59, 122, 153, 116, 117, 15, + 16, 57, 58, 120, 127, 106, 128, 129, 130, 131, + 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, + 204, 165, 226, 103, 171, 115, 146, 144, 148, 229, + 230, 71, 72, 204, 215, 232, 174, 238, 216, 126, + 160, 60, 223, 218, 159, 154, 157, 158, 155, 156, + 176, 170, 109, 179, 168, 205, 172, 205, 173, 59, + 216, 108, 125, 124, 113, 217, 57, 58, 205, 180, + 181, 182, 37, 38, 39, 46, 47, 50, 51, 48, + 49, 40, 41, 42, 43, 44, 45, 200, 68, 171, + 202, 184, 207, 91, 210, 213, 94, 11, 175, 203, + 167, 214, 120, 211, 201, 121, 60, 165, 219, 38, + 39, 46, 47, 50, 51, 48, 49, 40, 41, 42, + 43, 44, 45, 119, 165, 224, 91, 185, 212, 106, + 225, 11, 246, 234, 91, 242, 206, 106, 123, 121, + 164, 59, 241, 145, 237, 166, 11, 103, 57, 58, + 231, 233, 74, 239, 6, 103, 244, 73, 17, 18, + 28, 29, 31, 32, 30, 33, 34, 35, 36, 19, + 20, 171, 106, 97, 99, 98, 163, 104, 105, 21, + 22, 23, 24, 25, 26, 27, 145, 162, 60, 75, + 103, 162, 189, 188, 161, 187, 186, 15, 16, 46, + 47, 50, 51, 48, 49, 40, 41, 42, 43, 44, + 45, 168, 59, 161, 59, 149, 59, 199, 198, 57, + 58, 57, 58, 57, 58, 144, 209, 147, 77, 78, + 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, + 89, 90, 171, 59, 55, 142, 171, 106, 106, 245, + 57, 58, 197, 196, 111, 195, 194, 193, 192, 60, + 107, 60, 145, 60, 3, 103, 103, 110, 191, 190, + 112, 62, 221, 222, 220, 65, 152, 67, 236, 178, + 67, 152, 150, 97, 99, 98, 96, 104, 105, 208, + 60, 95, 54, 101, 102, 93, 10, 9, 13, 8, + 146, 144, 228, 12, 7, 64, 1, } var exprPact = [...]int{ - 15, -1000, 133, -1000, -1000, 286, 15, -1000, -1000, -1000, - -1000, 348, 163, 34, -1000, 229, 212, -1000, -1000, -1000, + 14, -1000, 96, -1000, -1000, 281, 14, -1000, -1000, -1000, + -1000, 353, 146, 13, -1000, 231, 226, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, - -1000, -1000, -1000, -1000, -1000, -1000, -1000, -25, -25, -25, - -25, -25, -25, -25, -25, -25, -25, -25, -25, -25, - -25, -25, 286, -1000, 56, 211, 323, -1000, -1000, -1000, - -1000, 98, 49, 133, 202, 194, -1000, 23, 162, 210, - 157, 149, 109, -1000, -1000, 15, -1000, 15, 15, 15, - 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, - 15, -1000, 313, -1000, 291, -1000, -1000, -1000, -1000, 311, - -1000, -1000, -1000, 165, 307, 353, 27, -1000, -1000, -1000, - -1000, -1000, 352, -1000, 278, 268, 249, 245, 184, 190, - 271, 105, 47, 107, 15, 351, 351, 96, 79, 79, - -37, -37, -62, -62, -62, -62, -43, -43, -43, -43, - -43, -43, -1000, 291, 165, 165, 165, -1000, 297, -1000, - 104, -1000, 199, 283, 280, 320, 318, 316, 296, 290, - -1000, -1000, -1000, -1000, -1000, -1000, 60, 105, 251, 222, - 261, 345, 257, 113, 60, 15, 37, 100, -1000, 94, - 110, 291, 53, -1000, 306, 263, -1000, -1000, -1000, -1000, + -1000, -1000, -1000, -1000, -1000, -1000, -1000, -5, -5, -5, + -5, -5, -5, -5, -5, -5, -5, -5, -5, -5, + -5, -5, 281, -1000, 310, 213, 335, -1000, -1000, -1000, + -1000, 118, 109, 96, 332, 129, -1000, 45, 197, 212, + 121, 120, 97, -1000, -1000, 14, -1000, 14, 14, 14, + 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, + 14, -1000, 320, -1000, 324, -1000, -1000, -1000, -1000, 302, + -1000, -1000, -1000, 81, 290, 357, 65, -1000, -1000, -1000, + -1000, -1000, 356, -1000, 288, 262, 251, 215, 202, 162, + 126, 163, 93, 160, 14, 355, 355, 132, 220, 220, + -38, -38, -59, -59, -59, -59, -44, -44, -44, -44, + -44, -44, -1000, 
324, 81, 81, 81, -1000, 50, -1000, + 153, -1000, 196, 269, 266, 342, 331, 329, 326, 291, + -1000, -1000, -1000, -1000, -1000, -1000, 87, 163, 279, 105, + 61, 323, 283, 185, 87, 14, 91, 122, -1000, 100, + 205, 324, 248, -1000, 352, 348, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, - -1000, 111, -1000, 251, -1000, -1000, 220, 24, 217, 103, - 206, -1000, -1000, 31, -1000, 347, -1000, -1000, -1000, -1000, - -1000, -1000, 60, -1000, -1000, -1000, 102, -1000, -1000, -31, - 214, 213, 85, 60, -1000, -1000, 326, -1000, -36, -1000, - -1000, 203, -1000, -3, -1000, -1000, + -1000, 99, -26, 279, -1000, 81, -1000, 94, 55, 222, + 92, 208, -1000, -1000, 48, -1000, 354, -1000, -1000, -1000, + -1000, -1000, -1000, 87, -26, 324, -1000, -1000, 95, -1000, + -1000, -16, 214, 207, 10, 87, -1000, -1000, 325, -26, + -33, -1000, -1000, 204, -1000, 16, -1000, -1000, } var exprPgo = [...]int{ - 0, 374, 16, 18, 0, 6, 298, 4, 15, 7, - 373, 372, 371, 370, 13, 369, 368, 366, 365, 293, - 364, 9, 2, 363, 362, 5, 361, 360, 359, 3, - 358, 1, + 0, 386, 8, 15, 0, 6, 344, 5, 16, 7, + 385, 384, 383, 382, 14, 379, 378, 377, 376, 269, + 375, 10, 2, 374, 373, 4, 372, 371, 366, 3, + 362, 1, } var exprR1 = [...]int{ 0, 1, 2, 2, 7, 7, 7, 7, 7, 6, 6, 6, 8, 8, 8, 8, 8, 8, 8, 8, - 8, 8, 8, 8, 8, 8, 31, 31, 13, 13, - 11, 11, 11, 11, 15, 15, 15, 15, 15, 3, - 3, 3, 3, 14, 14, 14, 10, 10, 9, 9, - 9, 9, 21, 21, 22, 22, 22, 22, 22, 26, - 26, 20, 20, 20, 27, 29, 29, 30, 30, 30, - 28, 25, 25, 25, 25, 25, 25, 25, 25, 24, - 24, 24, 24, 24, 24, 24, 23, 23, 23, 23, - 23, 23, 23, 17, 17, 17, 17, 17, 17, 17, - 17, 17, 17, 17, 17, 17, 17, 17, 19, 19, - 18, 18, 18, 16, 16, 16, 16, 16, 16, 16, - 16, 16, 12, 12, 12, 12, 12, 12, 12, 12, - 12, 12, 12, 5, 5, 4, 4, + 8, 8, 8, 8, 8, 8, 31, 31, 31, 13, + 13, 11, 11, 11, 11, 15, 15, 15, 15, 15, + 3, 3, 3, 3, 14, 14, 14, 10, 10, 9, + 9, 9, 9, 21, 21, 22, 22, 22, 22, 22, + 26, 26, 20, 20, 20, 27, 29, 29, 30, 30, + 30, 28, 25, 25, 25, 25, 25, 25, 25, 25, + 24, 24, 24, 24, 24, 24, 24, 23, 23, 23, + 23, 23, 23, 23, 17, 17, 17, 17, 17, 17, + 17, 17, 17, 17, 17, 17, 17, 17, 17, 19, + 19, 18, 18, 18, 16, 16, 16, 16, 16, 16, + 16, 16, 16, 12, 12, 12, 12, 12, 12, 12, + 12, 12, 12, 12, 5, 5, 4, 4, } var exprR2 = [...]int{ 0, 1, 1, 1, 1, 1, 1, 1, 3, 1, 2, 3, 2, 4, 3, 5, 3, 5, 3, 5, - 4, 6, 3, 4, 3, 2, 3, 6, 1, 1, - 4, 6, 5, 7, 4, 5, 5, 6, 7, 1, - 1, 1, 1, 3, 3, 3, 1, 3, 3, 3, - 3, 3, 1, 2, 1, 2, 2, 2, 2, 2, - 3, 1, 1, 2, 2, 3, 3, 1, 3, 3, - 2, 1, 1, 1, 3, 2, 3, 3, 3, 3, + 4, 6, 3, 4, 3, 2, 3, 6, 3, 1, + 1, 4, 6, 5, 7, 4, 5, 5, 6, 7, + 1, 1, 1, 1, 3, 3, 3, 1, 3, 3, + 3, 3, 3, 1, 2, 1, 2, 2, 2, 2, + 2, 3, 1, 1, 2, 2, 3, 3, 1, 3, + 3, 2, 1, 1, 1, 3, 2, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, - 3, 3, 3, 4, 4, 4, 4, 4, 4, 4, - 4, 4, 4, 4, 4, 4, 4, 4, 0, 1, - 1, 2, 2, 1, 1, 1, 1, 1, 1, 1, + 3, 3, 3, 3, 4, 4, 4, 4, 4, 4, + 4, 4, 4, 4, 4, 4, 4, 4, 4, 0, + 1, 1, 2, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, - 1, 1, 1, 1, 3, 4, 4, + 1, 1, 1, 1, 1, 3, 4, 4, } var exprChk = [...]int{ @@ -344,39 +343,39 @@ var exprChk = [...]int{ -21, 43, -14, -8, 23, 18, -7, -5, 4, -5, -25, -25, -25, 23, 18, 11, 7, 6, 7, 6, 7, 6, 7, 6, 7, 6, 7, 6, 7, 6, - -4, -8, -31, -21, 8, 8, -31, 46, 23, -31, - -21, 23, -4, -7, 23, 18, 23, 23, -29, 2, - 4, 5, 23, -31, 8, 4, -13, 54, 55, 8, - 23, 23, -31, 23, 4, -4, 22, -31, 43, 8, - 8, 23, -4, 4, 8, 23, + -4, -8, -31, -21, 8, 43, 8, -31, 46, 23, + -31, -21, 23, -4, -7, 23, 18, 23, 23, -29, + 2, 4, 5, 23, -31, -25, 8, 4, -13, 54, + 55, 8, 23, 23, -31, 23, 
4, -4, 22, -31, + 43, 8, 8, 23, -4, 4, 8, 23, } var exprDef = [...]int{ 0, -2, 1, 2, 3, 9, 0, 4, 5, 6, - 7, 0, 0, 0, 110, 0, 0, 122, 123, 124, - 125, 126, 127, 128, 129, 130, 131, 132, 113, 114, - 115, 116, 117, 118, 119, 120, 121, 108, 108, 108, - 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, - 108, 108, 10, 52, 54, 0, 0, 39, 40, 41, - 42, 3, 2, 0, 0, 0, 46, 0, 0, 0, - 0, 0, 0, 111, 112, 0, 109, 0, 0, 0, + 7, 0, 0, 0, 111, 0, 0, 123, 124, 125, + 126, 127, 128, 129, 130, 131, 132, 133, 114, 115, + 116, 117, 118, 119, 120, 121, 122, 109, 109, 109, + 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, + 109, 109, 10, 53, 55, 0, 0, 40, 41, 42, + 43, 3, 2, 0, 0, 0, 47, 0, 0, 0, + 0, 0, 0, 112, 113, 0, 110, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 53, 0, 55, 56, 57, 58, 61, 62, 0, - 71, 72, 73, 0, 0, 0, 0, 59, 8, 11, - 43, 44, 0, 45, 0, 0, 0, 0, 0, 0, - 0, 0, 3, 110, 0, 0, 0, 93, 94, 95, - 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, - 106, 107, 60, 75, 0, 0, 0, 63, 0, 64, - 70, 67, 0, 0, 0, 0, 0, 0, 0, 0, - 47, 48, 49, 50, 51, 25, 30, 0, 12, 0, - 0, 0, 0, 0, 34, 0, 3, 0, 133, 0, - 76, 77, 78, 74, 0, 0, 84, 91, 83, 90, - 79, 86, 80, 87, 81, 88, 82, 89, 85, 92, - 32, 0, 14, 22, 16, 18, 0, 0, 0, 0, - 0, 24, 36, 3, 35, 0, 135, 136, 68, 69, - 65, 66, 31, 23, 20, 26, 0, 28, 29, 13, - 0, 0, 0, 37, 134, 33, 0, 15, 0, 17, - 19, 0, 38, 0, 21, 27, + 0, 54, 0, 56, 57, 58, 59, 62, 63, 0, + 72, 73, 74, 0, 0, 0, 0, 60, 8, 11, + 44, 45, 0, 46, 0, 0, 0, 0, 0, 0, + 0, 0, 3, 111, 0, 0, 0, 94, 95, 96, + 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, + 107, 108, 61, 76, 0, 0, 0, 64, 0, 65, + 71, 68, 0, 0, 0, 0, 0, 0, 0, 0, + 48, 49, 50, 51, 52, 25, 31, 0, 12, 0, + 0, 0, 0, 0, 35, 0, 3, 0, 134, 0, + 77, 78, 79, 75, 0, 0, 85, 92, 84, 91, + 80, 87, 81, 88, 82, 89, 83, 90, 86, 93, + 33, 0, 14, 22, 16, 0, 18, 0, 0, 0, + 0, 0, 24, 37, 3, 36, 0, 136, 137, 69, + 70, 66, 67, 32, 23, 28, 20, 26, 0, 29, + 30, 13, 0, 0, 0, 38, 135, 34, 0, 15, + 0, 17, 19, 0, 39, 0, 21, 27, } var exprTok1 = [...]int{ @@ -890,100 +889,100 @@ exprdefault: exprVAL.UnwrapExpr = newUnwrapExpr(exprDollar[5].str, exprDollar[3].ConvOp) } case 28: - exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:141 + exprDollar = exprS[exprpt-3 : exprpt+1] +//line pkg/logql/expr.y:138 { - exprVAL.ConvOp = OpConvDuration + exprVAL.UnwrapExpr = exprDollar[1].UnwrapExpr.addPostFilter(exprDollar[3].LabelFilter) } case 29: exprDollar = exprS[exprpt-1 : exprpt+1] //line pkg/logql/expr.y:142 { - exprVAL.ConvOp = OpConvDurationSeconds + exprVAL.ConvOp = OpConvDuration } case 30: - exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:146 + exprDollar = exprS[exprpt-1 : exprpt+1] +//line pkg/logql/expr.y:143 { - exprVAL.RangeAggregationExpr = newRangeAggregationExpr(exprDollar[3].LogRangeExpr, exprDollar[1].RangeOp, nil, nil) + exprVAL.ConvOp = OpConvDurationSeconds } case 31: - exprDollar = exprS[exprpt-6 : exprpt+1] + exprDollar = exprS[exprpt-4 : exprpt+1] //line pkg/logql/expr.y:147 { - exprVAL.RangeAggregationExpr = newRangeAggregationExpr(exprDollar[5].LogRangeExpr, exprDollar[1].RangeOp, nil, &exprDollar[3].str) + exprVAL.RangeAggregationExpr = newRangeAggregationExpr(exprDollar[3].LogRangeExpr, exprDollar[1].RangeOp, nil, nil) } case 32: - exprDollar = exprS[exprpt-5 : exprpt+1] + exprDollar = exprS[exprpt-6 : exprpt+1] //line pkg/logql/expr.y:148 { - exprVAL.RangeAggregationExpr = newRangeAggregationExpr(exprDollar[3].LogRangeExpr, exprDollar[1].RangeOp, exprDollar[5].Grouping, nil) + exprVAL.RangeAggregationExpr = 
newRangeAggregationExpr(exprDollar[5].LogRangeExpr, exprDollar[1].RangeOp, nil, &exprDollar[3].str) } case 33: - exprDollar = exprS[exprpt-7 : exprpt+1] + exprDollar = exprS[exprpt-5 : exprpt+1] //line pkg/logql/expr.y:149 { - exprVAL.RangeAggregationExpr = newRangeAggregationExpr(exprDollar[5].LogRangeExpr, exprDollar[1].RangeOp, exprDollar[7].Grouping, &exprDollar[3].str) + exprVAL.RangeAggregationExpr = newRangeAggregationExpr(exprDollar[3].LogRangeExpr, exprDollar[1].RangeOp, exprDollar[5].Grouping, nil) } case 34: - exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:154 + exprDollar = exprS[exprpt-7 : exprpt+1] +//line pkg/logql/expr.y:150 { - exprVAL.VectorAggregationExpr = mustNewVectorAggregationExpr(exprDollar[3].MetricExpr, exprDollar[1].VectorOp, nil, nil) + exprVAL.RangeAggregationExpr = newRangeAggregationExpr(exprDollar[5].LogRangeExpr, exprDollar[1].RangeOp, exprDollar[7].Grouping, &exprDollar[3].str) } case 35: - exprDollar = exprS[exprpt-5 : exprpt+1] + exprDollar = exprS[exprpt-4 : exprpt+1] //line pkg/logql/expr.y:155 { - exprVAL.VectorAggregationExpr = mustNewVectorAggregationExpr(exprDollar[4].MetricExpr, exprDollar[1].VectorOp, exprDollar[2].Grouping, nil) + exprVAL.VectorAggregationExpr = mustNewVectorAggregationExpr(exprDollar[3].MetricExpr, exprDollar[1].VectorOp, nil, nil) } case 36: exprDollar = exprS[exprpt-5 : exprpt+1] //line pkg/logql/expr.y:156 { - exprVAL.VectorAggregationExpr = mustNewVectorAggregationExpr(exprDollar[3].MetricExpr, exprDollar[1].VectorOp, exprDollar[5].Grouping, nil) + exprVAL.VectorAggregationExpr = mustNewVectorAggregationExpr(exprDollar[4].MetricExpr, exprDollar[1].VectorOp, exprDollar[2].Grouping, nil) } case 37: - exprDollar = exprS[exprpt-6 : exprpt+1] -//line pkg/logql/expr.y:158 + exprDollar = exprS[exprpt-5 : exprpt+1] +//line pkg/logql/expr.y:157 { - exprVAL.VectorAggregationExpr = mustNewVectorAggregationExpr(exprDollar[5].MetricExpr, exprDollar[1].VectorOp, nil, &exprDollar[3].str) + exprVAL.VectorAggregationExpr = mustNewVectorAggregationExpr(exprDollar[3].MetricExpr, exprDollar[1].VectorOp, exprDollar[5].Grouping, nil) } case 38: - exprDollar = exprS[exprpt-7 : exprpt+1] + exprDollar = exprS[exprpt-6 : exprpt+1] //line pkg/logql/expr.y:159 { - exprVAL.VectorAggregationExpr = mustNewVectorAggregationExpr(exprDollar[5].MetricExpr, exprDollar[1].VectorOp, exprDollar[7].Grouping, &exprDollar[3].str) + exprVAL.VectorAggregationExpr = mustNewVectorAggregationExpr(exprDollar[5].MetricExpr, exprDollar[1].VectorOp, nil, &exprDollar[3].str) } case 39: - exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:163 + exprDollar = exprS[exprpt-7 : exprpt+1] +//line pkg/logql/expr.y:160 { - exprVAL.Filter = labels.MatchRegexp + exprVAL.VectorAggregationExpr = mustNewVectorAggregationExpr(exprDollar[5].MetricExpr, exprDollar[1].VectorOp, exprDollar[7].Grouping, &exprDollar[3].str) } case 40: exprDollar = exprS[exprpt-1 : exprpt+1] //line pkg/logql/expr.y:164 { - exprVAL.Filter = labels.MatchEqual + exprVAL.Filter = labels.MatchRegexp } case 41: exprDollar = exprS[exprpt-1 : exprpt+1] //line pkg/logql/expr.y:165 { - exprVAL.Filter = labels.MatchNotRegexp + exprVAL.Filter = labels.MatchEqual } case 42: exprDollar = exprS[exprpt-1 : exprpt+1] //line pkg/logql/expr.y:166 { - exprVAL.Filter = labels.MatchNotEqual + exprVAL.Filter = labels.MatchNotRegexp } case 43: - exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:170 + exprDollar = exprS[exprpt-1 : exprpt+1] +//line pkg/logql/expr.y:167 { - exprVAL.Selector 
= exprDollar[2].Matchers + exprVAL.Filter = labels.MatchNotEqual } case 44: exprDollar = exprS[exprpt-3 : exprpt+1] @@ -995,186 +994,186 @@ exprdefault: exprDollar = exprS[exprpt-3 : exprpt+1] //line pkg/logql/expr.y:172 { + exprVAL.Selector = exprDollar[2].Matchers } case 46: - exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:176 + exprDollar = exprS[exprpt-3 : exprpt+1] +//line pkg/logql/expr.y:173 { - exprVAL.Matchers = []*labels.Matcher{exprDollar[1].Matcher} } case 47: - exprDollar = exprS[exprpt-3 : exprpt+1] + exprDollar = exprS[exprpt-1 : exprpt+1] //line pkg/logql/expr.y:177 { - exprVAL.Matchers = append(exprDollar[1].Matchers, exprDollar[3].Matcher) + exprVAL.Matchers = []*labels.Matcher{exprDollar[1].Matcher} } case 48: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:181 +//line pkg/logql/expr.y:178 { - exprVAL.Matcher = mustNewMatcher(labels.MatchEqual, exprDollar[1].str, exprDollar[3].str) + exprVAL.Matchers = append(exprDollar[1].Matchers, exprDollar[3].Matcher) } case 49: exprDollar = exprS[exprpt-3 : exprpt+1] //line pkg/logql/expr.y:182 { - exprVAL.Matcher = mustNewMatcher(labels.MatchNotEqual, exprDollar[1].str, exprDollar[3].str) + exprVAL.Matcher = mustNewMatcher(labels.MatchEqual, exprDollar[1].str, exprDollar[3].str) } case 50: exprDollar = exprS[exprpt-3 : exprpt+1] //line pkg/logql/expr.y:183 { - exprVAL.Matcher = mustNewMatcher(labels.MatchRegexp, exprDollar[1].str, exprDollar[3].str) + exprVAL.Matcher = mustNewMatcher(labels.MatchNotEqual, exprDollar[1].str, exprDollar[3].str) } case 51: exprDollar = exprS[exprpt-3 : exprpt+1] //line pkg/logql/expr.y:184 { - exprVAL.Matcher = mustNewMatcher(labels.MatchNotRegexp, exprDollar[1].str, exprDollar[3].str) + exprVAL.Matcher = mustNewMatcher(labels.MatchRegexp, exprDollar[1].str, exprDollar[3].str) } case 52: - exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:188 + exprDollar = exprS[exprpt-3 : exprpt+1] +//line pkg/logql/expr.y:185 { - exprVAL.PipelineExpr = MultiPipelineExpr{exprDollar[1].PipelineStage} + exprVAL.Matcher = mustNewMatcher(labels.MatchNotRegexp, exprDollar[1].str, exprDollar[3].str) } case 53: - exprDollar = exprS[exprpt-2 : exprpt+1] + exprDollar = exprS[exprpt-1 : exprpt+1] //line pkg/logql/expr.y:189 { - exprVAL.PipelineExpr = append(exprDollar[1].PipelineExpr, exprDollar[2].PipelineStage) + exprVAL.PipelineExpr = MultiPipelineExpr{exprDollar[1].PipelineStage} } case 54: - exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:193 + exprDollar = exprS[exprpt-2 : exprpt+1] +//line pkg/logql/expr.y:190 { - exprVAL.PipelineStage = exprDollar[1].LineFilters + exprVAL.PipelineExpr = append(exprDollar[1].PipelineExpr, exprDollar[2].PipelineStage) } case 55: - exprDollar = exprS[exprpt-2 : exprpt+1] + exprDollar = exprS[exprpt-1 : exprpt+1] //line pkg/logql/expr.y:194 { - exprVAL.PipelineStage = exprDollar[2].LabelParser + exprVAL.PipelineStage = exprDollar[1].LineFilters } case 56: exprDollar = exprS[exprpt-2 : exprpt+1] //line pkg/logql/expr.y:195 { - exprVAL.PipelineStage = &labelFilterExpr{Filterer: exprDollar[2].LabelFilter} + exprVAL.PipelineStage = exprDollar[2].LabelParser } case 57: exprDollar = exprS[exprpt-2 : exprpt+1] //line pkg/logql/expr.y:196 { - exprVAL.PipelineStage = exprDollar[2].LineFormatExpr + exprVAL.PipelineStage = &labelFilterExpr{Filterer: exprDollar[2].LabelFilter} } case 58: exprDollar = exprS[exprpt-2 : exprpt+1] //line pkg/logql/expr.y:197 { - exprVAL.PipelineStage = exprDollar[2].LabelFormatExpr + exprVAL.PipelineStage 
= exprDollar[2].LineFormatExpr } case 59: exprDollar = exprS[exprpt-2 : exprpt+1] -//line pkg/logql/expr.y:201 +//line pkg/logql/expr.y:198 { - exprVAL.LineFilters = newLineFilterExpr(nil, exprDollar[1].Filter, exprDollar[2].str) + exprVAL.PipelineStage = exprDollar[2].LabelFormatExpr } case 60: - exprDollar = exprS[exprpt-3 : exprpt+1] + exprDollar = exprS[exprpt-2 : exprpt+1] //line pkg/logql/expr.y:202 { - exprVAL.LineFilters = newLineFilterExpr(exprDollar[1].LineFilters, exprDollar[2].Filter, exprDollar[3].str) + exprVAL.LineFilters = newLineFilterExpr(nil, exprDollar[1].Filter, exprDollar[2].str) } case 61: - exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:205 + exprDollar = exprS[exprpt-3 : exprpt+1] +//line pkg/logql/expr.y:203 { - exprVAL.LabelParser = newLabelParserExpr(OpParserTypeJSON, "") + exprVAL.LineFilters = newLineFilterExpr(exprDollar[1].LineFilters, exprDollar[2].Filter, exprDollar[3].str) } case 62: exprDollar = exprS[exprpt-1 : exprpt+1] //line pkg/logql/expr.y:206 { - exprVAL.LabelParser = newLabelParserExpr(OpParserTypeLogfmt, "") + exprVAL.LabelParser = newLabelParserExpr(OpParserTypeJSON, "") } case 63: - exprDollar = exprS[exprpt-2 : exprpt+1] + exprDollar = exprS[exprpt-1 : exprpt+1] //line pkg/logql/expr.y:207 { - exprVAL.LabelParser = newLabelParserExpr(OpParserTypeRegexp, exprDollar[2].str) + exprVAL.LabelParser = newLabelParserExpr(OpParserTypeLogfmt, "") } case 64: exprDollar = exprS[exprpt-2 : exprpt+1] -//line pkg/logql/expr.y:210 +//line pkg/logql/expr.y:208 { - exprVAL.LineFormatExpr = newLineFmtExpr(exprDollar[2].str) + exprVAL.LabelParser = newLabelParserExpr(OpParserTypeRegexp, exprDollar[2].str) } case 65: - exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:213 + exprDollar = exprS[exprpt-2 : exprpt+1] +//line pkg/logql/expr.y:211 { - exprVAL.LabelFormat = newRenameLabelFmt(exprDollar[1].str, exprDollar[3].str) + exprVAL.LineFormatExpr = newLineFmtExpr(exprDollar[2].str) } case 66: exprDollar = exprS[exprpt-3 : exprpt+1] //line pkg/logql/expr.y:214 { - exprVAL.LabelFormat = newTemplateLabelFmt(exprDollar[1].str, exprDollar[3].str) + exprVAL.LabelFormat = newRenameLabelFmt(exprDollar[1].str, exprDollar[3].str) } case 67: + exprDollar = exprS[exprpt-3 : exprpt+1] +//line pkg/logql/expr.y:215 + { + exprVAL.LabelFormat = newTemplateLabelFmt(exprDollar[1].str, exprDollar[3].str) + } + case 68: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:218 +//line pkg/logql/expr.y:219 { exprVAL.LabelsFormat = []labelFmt{exprDollar[1].LabelFormat} } - case 68: + case 69: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:219 +//line pkg/logql/expr.y:220 { exprVAL.LabelsFormat = append(exprDollar[1].LabelsFormat, exprDollar[3].LabelFormat) } - case 70: + case 71: exprDollar = exprS[exprpt-2 : exprpt+1] -//line pkg/logql/expr.y:223 +//line pkg/logql/expr.y:224 { exprVAL.LabelFormatExpr = newLabelFmtExpr(exprDollar[2].LabelsFormat) } - case 71: - exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:226 - { - exprVAL.LabelFilter = labelfilter.NewString(exprDollar[1].Matcher) - } case 72: exprDollar = exprS[exprpt-1 : exprpt+1] //line pkg/logql/expr.y:227 { - exprVAL.LabelFilter = exprDollar[1].DurationFilter + exprVAL.LabelFilter = labelfilter.NewString(exprDollar[1].Matcher) } case 73: exprDollar = exprS[exprpt-1 : exprpt+1] //line pkg/logql/expr.y:228 { - exprVAL.LabelFilter = exprDollar[1].NumberFilter + exprVAL.LabelFilter = exprDollar[1].DurationFilter } case 74: - exprDollar = exprS[exprpt-3 
: exprpt+1] + exprDollar = exprS[exprpt-1 : exprpt+1] //line pkg/logql/expr.y:229 { - exprVAL.LabelFilter = exprDollar[2].LabelFilter + exprVAL.LabelFilter = exprDollar[1].NumberFilter } case 75: - exprDollar = exprS[exprpt-2 : exprpt+1] + exprDollar = exprS[exprpt-3 : exprpt+1] //line pkg/logql/expr.y:230 { - exprVAL.LabelFilter = labelfilter.NewAnd(exprDollar[1].LabelFilter, exprDollar[2].LabelFilter) + exprVAL.LabelFilter = exprDollar[2].LabelFilter } case 76: - exprDollar = exprS[exprpt-3 : exprpt+1] + exprDollar = exprS[exprpt-2 : exprpt+1] //line pkg/logql/expr.y:231 { - exprVAL.LabelFilter = labelfilter.NewAnd(exprDollar[1].LabelFilter, exprDollar[3].LabelFilter) + exprVAL.LabelFilter = labelfilter.NewAnd(exprDollar[1].LabelFilter, exprDollar[2].LabelFilter) } case 77: exprDollar = exprS[exprpt-3 : exprpt+1] @@ -1186,43 +1185,43 @@ exprdefault: exprDollar = exprS[exprpt-3 : exprpt+1] //line pkg/logql/expr.y:233 { - exprVAL.LabelFilter = labelfilter.NewOr(exprDollar[1].LabelFilter, exprDollar[3].LabelFilter) + exprVAL.LabelFilter = labelfilter.NewAnd(exprDollar[1].LabelFilter, exprDollar[3].LabelFilter) } case 79: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:237 +//line pkg/logql/expr.y:234 { - exprVAL.DurationFilter = labelfilter.NewDuration(labelfilter.FilterGreaterThan, exprDollar[1].str, exprDollar[3].duration) + exprVAL.LabelFilter = labelfilter.NewOr(exprDollar[1].LabelFilter, exprDollar[3].LabelFilter) } case 80: exprDollar = exprS[exprpt-3 : exprpt+1] //line pkg/logql/expr.y:238 { - exprVAL.DurationFilter = labelfilter.NewDuration(labelfilter.FilterGreaterThanOrEqual, exprDollar[1].str, exprDollar[3].duration) + exprVAL.DurationFilter = labelfilter.NewDuration(labelfilter.FilterGreaterThan, exprDollar[1].str, exprDollar[3].duration) } case 81: exprDollar = exprS[exprpt-3 : exprpt+1] //line pkg/logql/expr.y:239 { - exprVAL.DurationFilter = labelfilter.NewDuration(labelfilter.FilterLesserThan, exprDollar[1].str, exprDollar[3].duration) + exprVAL.DurationFilter = labelfilter.NewDuration(labelfilter.FilterGreaterThanOrEqual, exprDollar[1].str, exprDollar[3].duration) } case 82: exprDollar = exprS[exprpt-3 : exprpt+1] //line pkg/logql/expr.y:240 { - exprVAL.DurationFilter = labelfilter.NewDuration(labelfilter.FilterLesserThanOrEqual, exprDollar[1].str, exprDollar[3].duration) + exprVAL.DurationFilter = labelfilter.NewDuration(labelfilter.FilterLesserThan, exprDollar[1].str, exprDollar[3].duration) } case 83: exprDollar = exprS[exprpt-3 : exprpt+1] //line pkg/logql/expr.y:241 { - exprVAL.DurationFilter = labelfilter.NewDuration(labelfilter.FilterNotEqual, exprDollar[1].str, exprDollar[3].duration) + exprVAL.DurationFilter = labelfilter.NewDuration(labelfilter.FilterLesserThanOrEqual, exprDollar[1].str, exprDollar[3].duration) } case 84: exprDollar = exprS[exprpt-3 : exprpt+1] //line pkg/logql/expr.y:242 { - exprVAL.DurationFilter = labelfilter.NewDuration(labelfilter.FilterEqual, exprDollar[1].str, exprDollar[3].duration) + exprVAL.DurationFilter = labelfilter.NewDuration(labelfilter.FilterNotEqual, exprDollar[1].str, exprDollar[3].duration) } case 85: exprDollar = exprS[exprpt-3 : exprpt+1] @@ -1232,39 +1231,39 @@ exprdefault: } case 86: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:247 +//line pkg/logql/expr.y:244 { - exprVAL.NumberFilter = labelfilter.NewNumeric(labelfilter.FilterGreaterThan, exprDollar[1].str, mustNewFloat(exprDollar[3].str)) + exprVAL.DurationFilter = labelfilter.NewDuration(labelfilter.FilterEqual, exprDollar[1].str, 
exprDollar[3].duration) } case 87: exprDollar = exprS[exprpt-3 : exprpt+1] //line pkg/logql/expr.y:248 { - exprVAL.NumberFilter = labelfilter.NewNumeric(labelfilter.FilterGreaterThanOrEqual, exprDollar[1].str, mustNewFloat(exprDollar[3].str)) + exprVAL.NumberFilter = labelfilter.NewNumeric(labelfilter.FilterGreaterThan, exprDollar[1].str, mustNewFloat(exprDollar[3].str)) } case 88: exprDollar = exprS[exprpt-3 : exprpt+1] //line pkg/logql/expr.y:249 { - exprVAL.NumberFilter = labelfilter.NewNumeric(labelfilter.FilterLesserThan, exprDollar[1].str, mustNewFloat(exprDollar[3].str)) + exprVAL.NumberFilter = labelfilter.NewNumeric(labelfilter.FilterGreaterThanOrEqual, exprDollar[1].str, mustNewFloat(exprDollar[3].str)) } case 89: exprDollar = exprS[exprpt-3 : exprpt+1] //line pkg/logql/expr.y:250 { - exprVAL.NumberFilter = labelfilter.NewNumeric(labelfilter.FilterLesserThanOrEqual, exprDollar[1].str, mustNewFloat(exprDollar[3].str)) + exprVAL.NumberFilter = labelfilter.NewNumeric(labelfilter.FilterLesserThan, exprDollar[1].str, mustNewFloat(exprDollar[3].str)) } case 90: exprDollar = exprS[exprpt-3 : exprpt+1] //line pkg/logql/expr.y:251 { - exprVAL.NumberFilter = labelfilter.NewNumeric(labelfilter.FilterNotEqual, exprDollar[1].str, mustNewFloat(exprDollar[3].str)) + exprVAL.NumberFilter = labelfilter.NewNumeric(labelfilter.FilterLesserThanOrEqual, exprDollar[1].str, mustNewFloat(exprDollar[3].str)) } case 91: exprDollar = exprS[exprpt-3 : exprpt+1] //line pkg/logql/expr.y:252 { - exprVAL.NumberFilter = labelfilter.NewNumeric(labelfilter.FilterEqual, exprDollar[1].str, mustNewFloat(exprDollar[3].str)) + exprVAL.NumberFilter = labelfilter.NewNumeric(labelfilter.FilterNotEqual, exprDollar[1].str, mustNewFloat(exprDollar[3].str)) } case 92: exprDollar = exprS[exprpt-3 : exprpt+1] @@ -1273,266 +1272,272 @@ exprdefault: exprVAL.NumberFilter = labelfilter.NewNumeric(labelfilter.FilterEqual, exprDollar[1].str, mustNewFloat(exprDollar[3].str)) } case 93: - exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:259 + exprDollar = exprS[exprpt-3 : exprpt+1] +//line pkg/logql/expr.y:254 { - exprVAL.BinOpExpr = mustNewBinOpExpr("or", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) + exprVAL.NumberFilter = labelfilter.NewNumeric(labelfilter.FilterEqual, exprDollar[1].str, mustNewFloat(exprDollar[3].str)) } case 94: exprDollar = exprS[exprpt-4 : exprpt+1] //line pkg/logql/expr.y:260 { - exprVAL.BinOpExpr = mustNewBinOpExpr("and", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) + exprVAL.BinOpExpr = mustNewBinOpExpr("or", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } case 95: exprDollar = exprS[exprpt-4 : exprpt+1] //line pkg/logql/expr.y:261 { - exprVAL.BinOpExpr = mustNewBinOpExpr("unless", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) + exprVAL.BinOpExpr = mustNewBinOpExpr("and", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } case 96: exprDollar = exprS[exprpt-4 : exprpt+1] //line pkg/logql/expr.y:262 { - exprVAL.BinOpExpr = mustNewBinOpExpr("+", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) + exprVAL.BinOpExpr = mustNewBinOpExpr("unless", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } case 97: exprDollar = exprS[exprpt-4 : exprpt+1] //line pkg/logql/expr.y:263 { - exprVAL.BinOpExpr = mustNewBinOpExpr("-", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) + exprVAL.BinOpExpr = mustNewBinOpExpr("+", 
exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } case 98: exprDollar = exprS[exprpt-4 : exprpt+1] //line pkg/logql/expr.y:264 { - exprVAL.BinOpExpr = mustNewBinOpExpr("*", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) + exprVAL.BinOpExpr = mustNewBinOpExpr("-", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } case 99: exprDollar = exprS[exprpt-4 : exprpt+1] //line pkg/logql/expr.y:265 { - exprVAL.BinOpExpr = mustNewBinOpExpr("/", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) + exprVAL.BinOpExpr = mustNewBinOpExpr("*", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } case 100: exprDollar = exprS[exprpt-4 : exprpt+1] //line pkg/logql/expr.y:266 { - exprVAL.BinOpExpr = mustNewBinOpExpr("%", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) + exprVAL.BinOpExpr = mustNewBinOpExpr("/", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } case 101: exprDollar = exprS[exprpt-4 : exprpt+1] //line pkg/logql/expr.y:267 { - exprVAL.BinOpExpr = mustNewBinOpExpr("^", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) + exprVAL.BinOpExpr = mustNewBinOpExpr("%", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } case 102: exprDollar = exprS[exprpt-4 : exprpt+1] //line pkg/logql/expr.y:268 { - exprVAL.BinOpExpr = mustNewBinOpExpr("==", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) + exprVAL.BinOpExpr = mustNewBinOpExpr("^", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } case 103: exprDollar = exprS[exprpt-4 : exprpt+1] //line pkg/logql/expr.y:269 { - exprVAL.BinOpExpr = mustNewBinOpExpr("!=", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) + exprVAL.BinOpExpr = mustNewBinOpExpr("==", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } case 104: exprDollar = exprS[exprpt-4 : exprpt+1] //line pkg/logql/expr.y:270 { - exprVAL.BinOpExpr = mustNewBinOpExpr(">", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) + exprVAL.BinOpExpr = mustNewBinOpExpr("!=", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } case 105: exprDollar = exprS[exprpt-4 : exprpt+1] //line pkg/logql/expr.y:271 { - exprVAL.BinOpExpr = mustNewBinOpExpr(">=", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) + exprVAL.BinOpExpr = mustNewBinOpExpr(">", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } case 106: exprDollar = exprS[exprpt-4 : exprpt+1] //line pkg/logql/expr.y:272 { - exprVAL.BinOpExpr = mustNewBinOpExpr("<", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) + exprVAL.BinOpExpr = mustNewBinOpExpr(">=", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } case 107: exprDollar = exprS[exprpt-4 : exprpt+1] //line pkg/logql/expr.y:273 { - exprVAL.BinOpExpr = mustNewBinOpExpr("<=", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) + exprVAL.BinOpExpr = mustNewBinOpExpr("<", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } case 108: - exprDollar = exprS[exprpt-0 : exprpt+1] -//line pkg/logql/expr.y:277 + exprDollar = exprS[exprpt-4 : exprpt+1] +//line pkg/logql/expr.y:274 { - exprVAL.BinOpModifier = BinOpOptions{} + exprVAL.BinOpExpr = mustNewBinOpExpr("<=", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } case 109: - exprDollar = exprS[exprpt-1 : exprpt+1] + exprDollar = exprS[exprpt-0 : 
exprpt+1] //line pkg/logql/expr.y:278 { - exprVAL.BinOpModifier = BinOpOptions{ReturnBool: true} + exprVAL.BinOpModifier = BinOpOptions{} } case 110: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:282 +//line pkg/logql/expr.y:279 { - exprVAL.LiteralExpr = mustNewLiteralExpr(exprDollar[1].str, false) + exprVAL.BinOpModifier = BinOpOptions{ReturnBool: true} } case 111: - exprDollar = exprS[exprpt-2 : exprpt+1] + exprDollar = exprS[exprpt-1 : exprpt+1] //line pkg/logql/expr.y:283 { - exprVAL.LiteralExpr = mustNewLiteralExpr(exprDollar[2].str, false) + exprVAL.LiteralExpr = mustNewLiteralExpr(exprDollar[1].str, false) } case 112: exprDollar = exprS[exprpt-2 : exprpt+1] //line pkg/logql/expr.y:284 { - exprVAL.LiteralExpr = mustNewLiteralExpr(exprDollar[2].str, true) + exprVAL.LiteralExpr = mustNewLiteralExpr(exprDollar[2].str, false) } case 113: - exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:288 + exprDollar = exprS[exprpt-2 : exprpt+1] +//line pkg/logql/expr.y:285 { - exprVAL.VectorOp = OpTypeSum + exprVAL.LiteralExpr = mustNewLiteralExpr(exprDollar[2].str, true) } case 114: exprDollar = exprS[exprpt-1 : exprpt+1] //line pkg/logql/expr.y:289 { - exprVAL.VectorOp = OpTypeAvg + exprVAL.VectorOp = OpTypeSum } case 115: exprDollar = exprS[exprpt-1 : exprpt+1] //line pkg/logql/expr.y:290 { - exprVAL.VectorOp = OpTypeCount + exprVAL.VectorOp = OpTypeAvg } case 116: exprDollar = exprS[exprpt-1 : exprpt+1] //line pkg/logql/expr.y:291 { - exprVAL.VectorOp = OpTypeMax + exprVAL.VectorOp = OpTypeCount } case 117: exprDollar = exprS[exprpt-1 : exprpt+1] //line pkg/logql/expr.y:292 { - exprVAL.VectorOp = OpTypeMin + exprVAL.VectorOp = OpTypeMax } case 118: exprDollar = exprS[exprpt-1 : exprpt+1] //line pkg/logql/expr.y:293 { - exprVAL.VectorOp = OpTypeStddev + exprVAL.VectorOp = OpTypeMin } case 119: exprDollar = exprS[exprpt-1 : exprpt+1] //line pkg/logql/expr.y:294 { - exprVAL.VectorOp = OpTypeStdvar + exprVAL.VectorOp = OpTypeStddev } case 120: exprDollar = exprS[exprpt-1 : exprpt+1] //line pkg/logql/expr.y:295 { - exprVAL.VectorOp = OpTypeBottomK + exprVAL.VectorOp = OpTypeStdvar } case 121: exprDollar = exprS[exprpt-1 : exprpt+1] //line pkg/logql/expr.y:296 { - exprVAL.VectorOp = OpTypeTopK + exprVAL.VectorOp = OpTypeBottomK } case 122: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:300 +//line pkg/logql/expr.y:297 { - exprVAL.RangeOp = OpRangeTypeCount + exprVAL.VectorOp = OpTypeTopK } case 123: exprDollar = exprS[exprpt-1 : exprpt+1] //line pkg/logql/expr.y:301 { - exprVAL.RangeOp = OpRangeTypeRate + exprVAL.RangeOp = OpRangeTypeCount } case 124: exprDollar = exprS[exprpt-1 : exprpt+1] //line pkg/logql/expr.y:302 { - exprVAL.RangeOp = OpRangeTypeBytes + exprVAL.RangeOp = OpRangeTypeRate } case 125: exprDollar = exprS[exprpt-1 : exprpt+1] //line pkg/logql/expr.y:303 { - exprVAL.RangeOp = OpRangeTypeBytesRate + exprVAL.RangeOp = OpRangeTypeBytes } case 126: exprDollar = exprS[exprpt-1 : exprpt+1] //line pkg/logql/expr.y:304 { - exprVAL.RangeOp = OpRangeTypeAvg + exprVAL.RangeOp = OpRangeTypeBytesRate } case 127: exprDollar = exprS[exprpt-1 : exprpt+1] //line pkg/logql/expr.y:305 { - exprVAL.RangeOp = OpRangeTypeSum + exprVAL.RangeOp = OpRangeTypeAvg } case 128: exprDollar = exprS[exprpt-1 : exprpt+1] //line pkg/logql/expr.y:306 { - exprVAL.RangeOp = OpRangeTypeMin + exprVAL.RangeOp = OpRangeTypeSum } case 129: exprDollar = exprS[exprpt-1 : exprpt+1] //line pkg/logql/expr.y:307 { - exprVAL.RangeOp = OpRangeTypeMax + exprVAL.RangeOp = 
OpRangeTypeMin } case 130: exprDollar = exprS[exprpt-1 : exprpt+1] //line pkg/logql/expr.y:308 { - exprVAL.RangeOp = OpRangeTypeStdvar + exprVAL.RangeOp = OpRangeTypeMax } case 131: exprDollar = exprS[exprpt-1 : exprpt+1] //line pkg/logql/expr.y:309 { - exprVAL.RangeOp = OpRangeTypeStddev + exprVAL.RangeOp = OpRangeTypeStdvar } case 132: exprDollar = exprS[exprpt-1 : exprpt+1] //line pkg/logql/expr.y:310 { - exprVAL.RangeOp = OpRangeTypeQuantile + exprVAL.RangeOp = OpRangeTypeStddev } case 133: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:315 +//line pkg/logql/expr.y:311 { - exprVAL.Labels = []string{exprDollar[1].str} + exprVAL.RangeOp = OpRangeTypeQuantile } case 134: - exprDollar = exprS[exprpt-3 : exprpt+1] + exprDollar = exprS[exprpt-1 : exprpt+1] //line pkg/logql/expr.y:316 { - exprVAL.Labels = append(exprDollar[1].Labels, exprDollar[3].str) + exprVAL.Labels = []string{exprDollar[1].str} } case 135: - exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:320 + exprDollar = exprS[exprpt-3 : exprpt+1] +//line pkg/logql/expr.y:317 { - exprVAL.Grouping = &grouping{without: false, groups: exprDollar[3].Labels} + exprVAL.Labels = append(exprDollar[1].Labels, exprDollar[3].str) } case 136: exprDollar = exprS[exprpt-4 : exprpt+1] //line pkg/logql/expr.y:321 + { + exprVAL.Grouping = &grouping{without: false, groups: exprDollar[3].Labels} + } + case 137: + exprDollar = exprS[exprpt-4 : exprpt+1] +//line pkg/logql/expr.y:322 { exprVAL.Grouping = &grouping{without: true, groups: exprDollar[3].Labels} } diff --git a/pkg/logql/functions.go b/pkg/logql/functions.go index ca2620317980d..b5c4d77ebf48d 100644 --- a/pkg/logql/functions.go +++ b/pkg/logql/functions.go @@ -16,7 +16,7 @@ func (r rangeAggregationExpr) Extractor() (SampleExtractor, error) { return nil, err } if r.left.unwrap != nil { - return newLabelSampleExtractor(r.left.unwrap.identifier, r.left.unwrap.operation, r.grouping), nil + return newLabelSampleExtractor(r.left.unwrap.identifier, r.left.unwrap.operation, r.left.unwrap.postFilters, r.grouping), nil } switch r.operation { case OpRangeTypeRate, OpRangeTypeCount: diff --git a/pkg/logql/labels_parser.go b/pkg/logql/labels_parser.go index cd9a13201292f..b85e69796fe78 100644 --- a/pkg/logql/labels_parser.go +++ b/pkg/logql/labels_parser.go @@ -14,12 +14,7 @@ import ( ) const ( - jsonSpacer = "_" - - errJson = "JSONParserError" - errLogfmt = "LogfmtParserError" - errorLabel = "__error__" - + jsonSpacer = "_" duplicateSuffix = "_extracted" ) @@ -57,7 +52,7 @@ func (j *jsonParser) Parse(line []byte, lbs labels.Labels) labels.Labels { j.builder.Reset(lbs) err := jsoniter.ConfigFastest.Unmarshal(line, &data) if err != nil { - j.builder.Set(errorLabel, errJson) + j.builder.Set(errorLabel, errJSON) return j.builder.Labels() } parseMap("", data, addLabel(j.builder, lbs)) diff --git a/pkg/logql/labels_parser_test.go b/pkg/logql/labels_parser_test.go index 4f952ab885b8d..166798b687774 100644 --- a/pkg/logql/labels_parser_test.go +++ b/pkg/logql/labels_parser_test.go @@ -54,7 +54,7 @@ func Test_jsonParser_Parse(t *testing.T) { []byte(`{n}`), labels.Labels{}, labels.Labels{ - labels.Label{Name: errorLabel, Value: errJson}, + labels.Label{Name: errorLabel, Value: errJSON}, }, }, { diff --git a/pkg/logql/series_extractor.go b/pkg/logql/series_extractor.go index 83f7d9608a927..ad9b7d2f37074 100644 --- a/pkg/logql/series_extractor.go +++ b/pkg/logql/series_extractor.go @@ -4,6 +4,7 @@ import ( "strconv" "time" + "github.com/grafana/loki/pkg/logql/labelfilter" 
"github.com/prometheus/prometheus/pkg/labels" ) @@ -31,10 +32,10 @@ func (bytesSampleExtractor) Extract(line []byte, lbs labels.Labels) (float64, la } type labelSampleExtractor struct { - labelName string - gr *grouping - - conversion string + labelName string + gr *grouping + postFilters []labelfilter.Filterer + conversion string // the sample conversion operation to attempt } func (l *labelSampleExtractor) Extract(_ []byte, lbs labels.Labels) (float64, labels.Labels) { @@ -55,16 +56,20 @@ func (l *labelSampleExtractor) Extract(_ []byte, lbs labels.Labels) (float64, la // todo(cyriltovena) handle errors. return 0, lbs } + return f, l.groupLabels(lbs) +} + +func (l *labelSampleExtractor) groupLabels(lbs labels.Labels) labels.Labels { if l.gr != nil { if l.gr.without { - return f, lbs.WithoutLabels(append(l.gr.groups, l.labelName)...) + return lbs.WithoutLabels(append(l.gr.groups, l.labelName)...) } - return f, lbs.WithLabels(l.gr.groups...) + return lbs.WithLabels(l.gr.groups...) } - return f, lbs.WithoutLabels(l.labelName) + return lbs.WithoutLabels(l.labelName) } -func newLabelSampleExtractor(labelName, conversion string, gr *grouping) *labelSampleExtractor { +func newLabelSampleExtractor(labelName, conversion string, postFilters []labelfilter.Filterer, gr *grouping) *labelSampleExtractor { return &labelSampleExtractor{ labelName: labelName, conversion: conversion, diff --git a/pkg/logql/series_extractor_test.go b/pkg/logql/series_extractor_test.go index 4920a438c4f87..d9f626e41a402 100644 --- a/pkg/logql/series_extractor_test.go +++ b/pkg/logql/series_extractor_test.go @@ -18,7 +18,7 @@ func Test_labelSampleExtractor_Extract(t *testing.T) { }{ { "convert float", - newLabelSampleExtractor("foo", "", nil), + newLabelSampleExtractor("foo", "", nil, nil), labels.Labels{labels.Label{Name: "foo", Value: "15.0"}}, 15, labels.Labels{}, @@ -27,6 +27,7 @@ func Test_labelSampleExtractor_Extract(t *testing.T) { "convert float without", newLabelSampleExtractor("foo", "", + nil, &grouping{without: true, groups: []string{"bar", "buzz"}}, ), labels.Labels{ @@ -44,6 +45,7 @@ func Test_labelSampleExtractor_Extract(t *testing.T) { "convert float with", newLabelSampleExtractor("foo", "", + nil, &grouping{without: false, groups: []string{"bar", "buzz"}}, ), labels.Labels{ @@ -62,6 +64,7 @@ func Test_labelSampleExtractor_Extract(t *testing.T) { "convert duration with", newLabelSampleExtractor("foo", OpConvDuration, + nil, &grouping{without: false, groups: []string{"bar", "buzz"}}, ), labels.Labels{ @@ -80,6 +83,7 @@ func Test_labelSampleExtractor_Extract(t *testing.T) { "convert duration_seconds with", newLabelSampleExtractor("foo", OpConvDurationSeconds, + nil, &grouping{without: false, groups: []string{"bar", "buzz"}}, ), labels.Labels{ From db0744665422680835b3a8584400849ff24190eb Mon Sep 17 00:00:00 2001 From: Cyril Tovena Date: Mon, 12 Oct 2020 10:18:56 +0200 Subject: [PATCH 29/45] Fixes json parser with prometheus label name rules. 
Signed-off-by: Cyril Tovena --- pkg/logql/labels_parser.go | 12 ++++++------ pkg/logql/labels_parser_test.go | 9 +++++++++ 2 files changed, 15 insertions(+), 6 deletions(-) diff --git a/pkg/logql/labels_parser.go b/pkg/logql/labels_parser.go index b85e69796fe78..d01df7ecca011 100644 --- a/pkg/logql/labels_parser.go +++ b/pkg/logql/labels_parser.go @@ -1,11 +1,11 @@ package logql import ( - "bytes" "errors" "fmt" "regexp" "strconv" + "strings" "github.com/grafana/loki/pkg/logql/logfmt" jsoniter "github.com/json-iterator/go" @@ -22,9 +22,9 @@ var ( errMissingCapture = errors.New("at least one named capture must be supplied") NoopLabelParser = noopParser{} - underscore = []byte("_") - point = []byte(".") - dash = []byte("-") + underscore = "_" + point = "." + dash = "-" ) type LabelParser interface { @@ -61,6 +61,7 @@ func (j *jsonParser) Parse(line []byte, lbs labels.Labels) labels.Labels { func addLabel(builder *labels.Builder, lbs labels.Labels) func(key, value string) { return func(key, value string) { + key = strings.ReplaceAll(strings.ReplaceAll(key, point, underscore), dash, underscore) if lbs.Has(key) { key = fmt.Sprintf("%s%s", key, duplicateSuffix) } @@ -162,8 +163,7 @@ func (l *logfmtParser) Parse(line []byte, lbs labels.Labels) labels.Labels { l.dec.Reset(line) for l.dec.ScanKeyval() { - k := string(bytes.ReplaceAll(bytes.ReplaceAll(l.dec.Key(), point, underscore), dash, underscore)) - addLabel(l.builder, lbs)(k, string(l.dec.Value())) + addLabel(l.builder, lbs)(string(l.dec.Key()), string(l.dec.Value())) } if l.dec.Err() != nil { l.builder.Set(errorLabel, errLogfmt) diff --git a/pkg/logql/labels_parser_test.go b/pkg/logql/labels_parser_test.go index 166798b687774..6df0f1c1f0d69 100644 --- a/pkg/logql/labels_parser_test.go +++ b/pkg/logql/labels_parser_test.go @@ -48,6 +48,15 @@ func Test_jsonParser_Parse(t *testing.T) { labels.Label{Name: "counter", Value: "1"}, }, }, + { + "bad key replaced", + NewJSONParser(), + []byte(`{"cou-nter":1}`), + labels.Labels{}, + labels.Labels{ + labels.Label{Name: "cou_nter", Value: "1"}, + }, + }, { "errors", NewJSONParser(), From 78973cfec0e2a880e57154c40b537a1386eee5fc Mon Sep 17 00:00:00 2001 From: Karsten Jeschkies Date: Mon, 12 Oct 2020 11:50:44 +0200 Subject: [PATCH 30/45] fixup! Support byte sizes in label filters. 
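
Byte-size filters now lex and parse in combination with duration and numeric
label filters, as exercised by the new tests, e.g.:

    {foo="bar"} |~ "\\w+" | size > 250kB and latency <= 1h15m30s or bar=1
    {app="foo"} |= "bar" | json | status_code < 500 or status_code > 200 and size >= 2.5KiB

Humanized values are converted to bytes at parse time, so 2.5KiB becomes 2560.
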
--- pkg/logql/expr.y | 8 +- pkg/logql/expr.y.go | 296 ++++++++++++++++----------------- pkg/logql/labelfilter/bytes.go | 2 +- pkg/logql/lex_test.go | 3 + pkg/logql/parser_test.go | 19 +++ 5 files changed, 175 insertions(+), 153 deletions(-) diff --git a/pkg/logql/expr.y b/pkg/logql/expr.y index fcafd3cf4ed21..afb2d3a070485 100644 --- a/pkg/logql/expr.y +++ b/pkg/logql/expr.y @@ -244,7 +244,7 @@ unitFilter: | bytesFilter { $$ = $1 } durationFilter: - IDENTIFIER GT DURATION { $$ = labelfilter.NewDuration(labelfilter.FilterGreaterThan, $1, $3) } + IDENTIFIER GT DURATION { $$ = labelfilter.NewDuration(labelfilter.FilterGreaterThan, $1, $3) } | IDENTIFIER GTE DURATION { $$ = labelfilter.NewDuration(labelfilter.FilterGreaterThanOrEqual, $1, $3) } | IDENTIFIER LT DURATION { $$ = labelfilter.NewDuration(labelfilter.FilterLesserThan, $1, $3) } | IDENTIFIER LTE DURATION { $$ = labelfilter.NewDuration(labelfilter.FilterLesserThanOrEqual, $1, $3) } @@ -254,12 +254,12 @@ durationFilter: ; bytesFilter: - IDENTIFIER GT BYTES { $$ = labelfilter.NewBytes(labelfilter.FilterGreaterThan, $1, $3) } + IDENTIFIER GT BYTES { $$ = labelfilter.NewBytes(labelfilter.FilterGreaterThan, $1, $3) } | IDENTIFIER GTE BYTES { $$ = labelfilter.NewBytes(labelfilter.FilterGreaterThanOrEqual, $1, $3) } - | IDENTIFIER LT BYTES { $$ = labelfilter.NewBytes(labelfilter.FilterLesserThan, $1, $3) } + | IDENTIFIER LT BYTES { $$ = labelfilter.NewBytes(labelfilter.FilterLesserThan, $1, $3) } | IDENTIFIER LTE BYTES { $$ = labelfilter.NewBytes(labelfilter.FilterLesserThanOrEqual, $1, $3) } | IDENTIFIER NEQ BYTES { $$ = labelfilter.NewBytes(labelfilter.FilterNotEqual, $1, $3) } - | IDENTIFIER EQ BYTES { $$ = labelfilter.NewBytes(labelfilter.FilterEqual, $1, $3) } + | IDENTIFIER EQ BYTES { $$ = labelfilter.NewBytes(labelfilter.FilterEqual, $1, $3) } | IDENTIFIER CMP_EQ BYTES { $$ = labelfilter.NewBytes(labelfilter.FilterEqual, $1, $3) } ; diff --git a/pkg/logql/expr.y.go b/pkg/logql/expr.y.go index 2005118255f92..71b9cbb2aa1ae 100644 --- a/pkg/logql/expr.y.go +++ b/pkg/logql/expr.y.go @@ -1,11 +1,11 @@ -// Code generated by goyacc -p expr -o expr.y.go expr.y. DO NOT EDIT. +// Code generated by goyacc -p expr -o pkg/logql/expr.y.go pkg/logql/expr.y. DO NOT EDIT. 
-//line expr.y:2 +//line pkg/logql/expr.y:2 package logql import __yyfmt__ "fmt" -//line expr.y:2 +//line pkg/logql/expr.y:2 import ( "github.com/grafana/loki/pkg/logql/labelfilter" @@ -13,7 +13,7 @@ import ( "time" ) -//line expr.y:11 +//line pkg/logql/expr.y:11 type exprSymType struct { yys int Expr Expr @@ -203,7 +203,7 @@ const exprEofCode = 1 const exprErrCode = 2 const exprInitialStackSize = 16 -//line expr.y:343 +//line pkg/logql/expr.y:343 //line yacctab:1 var exprExca = [...]int{ @@ -745,858 +745,858 @@ exprdefault: case 1: exprDollar = exprS[exprpt-1 : exprpt+1] -//line expr.y:103 +//line pkg/logql/expr.y:103 { exprlex.(*lexer).expr = exprDollar[1].Expr } case 2: exprDollar = exprS[exprpt-1 : exprpt+1] -//line expr.y:106 +//line pkg/logql/expr.y:106 { exprVAL.Expr = exprDollar[1].LogExpr } case 3: exprDollar = exprS[exprpt-1 : exprpt+1] -//line expr.y:107 +//line pkg/logql/expr.y:107 { exprVAL.Expr = exprDollar[1].MetricExpr } case 4: exprDollar = exprS[exprpt-1 : exprpt+1] -//line expr.y:111 +//line pkg/logql/expr.y:111 { exprVAL.MetricExpr = exprDollar[1].RangeAggregationExpr } case 5: exprDollar = exprS[exprpt-1 : exprpt+1] -//line expr.y:112 +//line pkg/logql/expr.y:112 { exprVAL.MetricExpr = exprDollar[1].VectorAggregationExpr } case 6: exprDollar = exprS[exprpt-1 : exprpt+1] -//line expr.y:113 +//line pkg/logql/expr.y:113 { exprVAL.MetricExpr = exprDollar[1].BinOpExpr } case 7: exprDollar = exprS[exprpt-1 : exprpt+1] -//line expr.y:114 +//line pkg/logql/expr.y:114 { exprVAL.MetricExpr = exprDollar[1].LiteralExpr } case 8: exprDollar = exprS[exprpt-3 : exprpt+1] -//line expr.y:115 +//line pkg/logql/expr.y:115 { exprVAL.MetricExpr = exprDollar[2].MetricExpr } case 9: exprDollar = exprS[exprpt-1 : exprpt+1] -//line expr.y:119 +//line pkg/logql/expr.y:119 { exprVAL.LogExpr = newMatcherExpr(exprDollar[1].Selector) } case 10: exprDollar = exprS[exprpt-2 : exprpt+1] -//line expr.y:120 +//line pkg/logql/expr.y:120 { exprVAL.LogExpr = newPipelineExpr(newMatcherExpr(exprDollar[1].Selector), exprDollar[2].PipelineExpr) } case 11: exprDollar = exprS[exprpt-3 : exprpt+1] -//line expr.y:121 +//line pkg/logql/expr.y:121 { exprVAL.LogExpr = exprDollar[2].LogExpr } case 12: exprDollar = exprS[exprpt-2 : exprpt+1] -//line expr.y:125 +//line pkg/logql/expr.y:125 { exprVAL.LogRangeExpr = newLogRange(newMatcherExpr(exprDollar[1].Selector), exprDollar[2].duration, nil) } case 13: exprDollar = exprS[exprpt-4 : exprpt+1] -//line expr.y:126 +//line pkg/logql/expr.y:126 { exprVAL.LogRangeExpr = newLogRange(newMatcherExpr(exprDollar[2].Selector), exprDollar[4].duration, nil) } case 14: exprDollar = exprS[exprpt-3 : exprpt+1] -//line expr.y:127 +//line pkg/logql/expr.y:127 { exprVAL.LogRangeExpr = newLogRange(newMatcherExpr(exprDollar[1].Selector), exprDollar[2].duration, exprDollar[3].UnwrapExpr) } case 15: exprDollar = exprS[exprpt-5 : exprpt+1] -//line expr.y:128 +//line pkg/logql/expr.y:128 { exprVAL.LogRangeExpr = newLogRange(newMatcherExpr(exprDollar[2].Selector), exprDollar[4].duration, exprDollar[5].UnwrapExpr) } case 16: exprDollar = exprS[exprpt-3 : exprpt+1] -//line expr.y:129 +//line pkg/logql/expr.y:129 { exprVAL.LogRangeExpr = newLogRange(newMatcherExpr(exprDollar[1].Selector), exprDollar[3].duration, exprDollar[2].UnwrapExpr) } case 17: exprDollar = exprS[exprpt-5 : exprpt+1] -//line expr.y:130 +//line pkg/logql/expr.y:130 { exprVAL.LogRangeExpr = newLogRange(newMatcherExpr(exprDollar[2].Selector), exprDollar[5].duration, exprDollar[3].UnwrapExpr) } case 18: exprDollar = exprS[exprpt-3 
: exprpt+1] -//line expr.y:131 +//line pkg/logql/expr.y:131 { exprVAL.LogRangeExpr = newLogRange(newPipelineExpr(newMatcherExpr(exprDollar[1].Selector), exprDollar[2].PipelineExpr), exprDollar[3].duration, nil) } case 19: exprDollar = exprS[exprpt-5 : exprpt+1] -//line expr.y:132 +//line pkg/logql/expr.y:132 { exprVAL.LogRangeExpr = newLogRange(newPipelineExpr(newMatcherExpr(exprDollar[2].Selector), exprDollar[3].PipelineExpr), exprDollar[5].duration, nil) } case 20: exprDollar = exprS[exprpt-4 : exprpt+1] -//line expr.y:133 +//line pkg/logql/expr.y:133 { exprVAL.LogRangeExpr = newLogRange(newPipelineExpr(newMatcherExpr(exprDollar[1].Selector), exprDollar[2].PipelineExpr), exprDollar[4].duration, exprDollar[3].UnwrapExpr) } case 21: exprDollar = exprS[exprpt-6 : exprpt+1] -//line expr.y:134 +//line pkg/logql/expr.y:134 { exprVAL.LogRangeExpr = newLogRange(newPipelineExpr(newMatcherExpr(exprDollar[2].Selector), exprDollar[3].PipelineExpr), exprDollar[6].duration, exprDollar[4].UnwrapExpr) } case 22: exprDollar = exprS[exprpt-3 : exprpt+1] -//line expr.y:135 +//line pkg/logql/expr.y:135 { exprVAL.LogRangeExpr = newLogRange(newPipelineExpr(newMatcherExpr(exprDollar[1].Selector), exprDollar[3].PipelineExpr), exprDollar[2].duration, nil) } case 23: exprDollar = exprS[exprpt-4 : exprpt+1] -//line expr.y:136 +//line pkg/logql/expr.y:136 { exprVAL.LogRangeExpr = newLogRange(newPipelineExpr(newMatcherExpr(exprDollar[1].Selector), exprDollar[3].PipelineExpr), exprDollar[2].duration, exprDollar[4].UnwrapExpr) } case 24: exprDollar = exprS[exprpt-3 : exprpt+1] -//line expr.y:137 +//line pkg/logql/expr.y:137 { exprVAL.LogRangeExpr = exprDollar[2].LogRangeExpr } case 26: exprDollar = exprS[exprpt-3 : exprpt+1] -//line expr.y:142 +//line pkg/logql/expr.y:142 { exprVAL.UnwrapExpr = newUnwrapExpr(exprDollar[3].str, "") } case 27: exprDollar = exprS[exprpt-6 : exprpt+1] -//line expr.y:143 +//line pkg/logql/expr.y:143 { exprVAL.UnwrapExpr = newUnwrapExpr(exprDollar[5].str, exprDollar[3].ConvOp) } case 28: exprDollar = exprS[exprpt-1 : exprpt+1] -//line expr.y:147 +//line pkg/logql/expr.y:147 { exprVAL.ConvOp = OpConvDuration } case 29: exprDollar = exprS[exprpt-1 : exprpt+1] -//line expr.y:148 +//line pkg/logql/expr.y:148 { exprVAL.ConvOp = OpConvDurationSeconds } case 30: exprDollar = exprS[exprpt-4 : exprpt+1] -//line expr.y:152 +//line pkg/logql/expr.y:152 { exprVAL.RangeAggregationExpr = newRangeAggregationExpr(exprDollar[3].LogRangeExpr, exprDollar[1].RangeOp, nil, nil) } case 31: exprDollar = exprS[exprpt-6 : exprpt+1] -//line expr.y:153 +//line pkg/logql/expr.y:153 { exprVAL.RangeAggregationExpr = newRangeAggregationExpr(exprDollar[5].LogRangeExpr, exprDollar[1].RangeOp, nil, &exprDollar[3].str) } case 32: exprDollar = exprS[exprpt-5 : exprpt+1] -//line expr.y:154 +//line pkg/logql/expr.y:154 { exprVAL.RangeAggregationExpr = newRangeAggregationExpr(exprDollar[3].LogRangeExpr, exprDollar[1].RangeOp, exprDollar[5].Grouping, nil) } case 33: exprDollar = exprS[exprpt-7 : exprpt+1] -//line expr.y:155 +//line pkg/logql/expr.y:155 { exprVAL.RangeAggregationExpr = newRangeAggregationExpr(exprDollar[5].LogRangeExpr, exprDollar[1].RangeOp, exprDollar[7].Grouping, &exprDollar[3].str) } case 34: exprDollar = exprS[exprpt-4 : exprpt+1] -//line expr.y:160 +//line pkg/logql/expr.y:160 { exprVAL.VectorAggregationExpr = mustNewVectorAggregationExpr(exprDollar[3].MetricExpr, exprDollar[1].VectorOp, nil, nil) } case 35: exprDollar = exprS[exprpt-5 : exprpt+1] -//line expr.y:161 +//line pkg/logql/expr.y:161 { 
exprVAL.VectorAggregationExpr = mustNewVectorAggregationExpr(exprDollar[4].MetricExpr, exprDollar[1].VectorOp, exprDollar[2].Grouping, nil) } case 36: exprDollar = exprS[exprpt-5 : exprpt+1] -//line expr.y:162 +//line pkg/logql/expr.y:162 { exprVAL.VectorAggregationExpr = mustNewVectorAggregationExpr(exprDollar[3].MetricExpr, exprDollar[1].VectorOp, exprDollar[5].Grouping, nil) } case 37: exprDollar = exprS[exprpt-6 : exprpt+1] -//line expr.y:164 +//line pkg/logql/expr.y:164 { exprVAL.VectorAggregationExpr = mustNewVectorAggregationExpr(exprDollar[5].MetricExpr, exprDollar[1].VectorOp, nil, &exprDollar[3].str) } case 38: exprDollar = exprS[exprpt-7 : exprpt+1] -//line expr.y:165 +//line pkg/logql/expr.y:165 { exprVAL.VectorAggregationExpr = mustNewVectorAggregationExpr(exprDollar[5].MetricExpr, exprDollar[1].VectorOp, exprDollar[7].Grouping, &exprDollar[3].str) } case 39: exprDollar = exprS[exprpt-1 : exprpt+1] -//line expr.y:169 +//line pkg/logql/expr.y:169 { exprVAL.Filter = labels.MatchRegexp } case 40: exprDollar = exprS[exprpt-1 : exprpt+1] -//line expr.y:170 +//line pkg/logql/expr.y:170 { exprVAL.Filter = labels.MatchEqual } case 41: exprDollar = exprS[exprpt-1 : exprpt+1] -//line expr.y:171 +//line pkg/logql/expr.y:171 { exprVAL.Filter = labels.MatchNotRegexp } case 42: exprDollar = exprS[exprpt-1 : exprpt+1] -//line expr.y:172 +//line pkg/logql/expr.y:172 { exprVAL.Filter = labels.MatchNotEqual } case 43: exprDollar = exprS[exprpt-3 : exprpt+1] -//line expr.y:176 +//line pkg/logql/expr.y:176 { exprVAL.Selector = exprDollar[2].Matchers } case 44: exprDollar = exprS[exprpt-3 : exprpt+1] -//line expr.y:177 +//line pkg/logql/expr.y:177 { exprVAL.Selector = exprDollar[2].Matchers } case 45: exprDollar = exprS[exprpt-3 : exprpt+1] -//line expr.y:178 +//line pkg/logql/expr.y:178 { } case 46: exprDollar = exprS[exprpt-1 : exprpt+1] -//line expr.y:182 +//line pkg/logql/expr.y:182 { exprVAL.Matchers = []*labels.Matcher{exprDollar[1].Matcher} } case 47: exprDollar = exprS[exprpt-3 : exprpt+1] -//line expr.y:183 +//line pkg/logql/expr.y:183 { exprVAL.Matchers = append(exprDollar[1].Matchers, exprDollar[3].Matcher) } case 48: exprDollar = exprS[exprpt-3 : exprpt+1] -//line expr.y:187 +//line pkg/logql/expr.y:187 { exprVAL.Matcher = mustNewMatcher(labels.MatchEqual, exprDollar[1].str, exprDollar[3].str) } case 49: exprDollar = exprS[exprpt-3 : exprpt+1] -//line expr.y:188 +//line pkg/logql/expr.y:188 { exprVAL.Matcher = mustNewMatcher(labels.MatchNotEqual, exprDollar[1].str, exprDollar[3].str) } case 50: exprDollar = exprS[exprpt-3 : exprpt+1] -//line expr.y:189 +//line pkg/logql/expr.y:189 { exprVAL.Matcher = mustNewMatcher(labels.MatchRegexp, exprDollar[1].str, exprDollar[3].str) } case 51: exprDollar = exprS[exprpt-3 : exprpt+1] -//line expr.y:190 +//line pkg/logql/expr.y:190 { exprVAL.Matcher = mustNewMatcher(labels.MatchNotRegexp, exprDollar[1].str, exprDollar[3].str) } case 52: exprDollar = exprS[exprpt-1 : exprpt+1] -//line expr.y:194 +//line pkg/logql/expr.y:194 { exprVAL.PipelineExpr = MultiPipelineExpr{exprDollar[1].PipelineStage} } case 53: exprDollar = exprS[exprpt-2 : exprpt+1] -//line expr.y:195 +//line pkg/logql/expr.y:195 { exprVAL.PipelineExpr = append(exprDollar[1].PipelineExpr, exprDollar[2].PipelineStage) } case 54: exprDollar = exprS[exprpt-1 : exprpt+1] -//line expr.y:199 +//line pkg/logql/expr.y:199 { exprVAL.PipelineStage = exprDollar[1].LineFilters } case 55: exprDollar = exprS[exprpt-2 : exprpt+1] -//line expr.y:200 +//line pkg/logql/expr.y:200 { exprVAL.PipelineStage = 
exprDollar[2].LabelParser } case 56: exprDollar = exprS[exprpt-2 : exprpt+1] -//line expr.y:201 +//line pkg/logql/expr.y:201 { exprVAL.PipelineStage = &labelFilterExpr{Filterer: exprDollar[2].LabelFilter} } case 57: exprDollar = exprS[exprpt-2 : exprpt+1] -//line expr.y:202 +//line pkg/logql/expr.y:202 { exprVAL.PipelineStage = exprDollar[2].LineFormatExpr } case 58: exprDollar = exprS[exprpt-2 : exprpt+1] -//line expr.y:203 +//line pkg/logql/expr.y:203 { exprVAL.PipelineStage = exprDollar[2].LabelFormatExpr } case 59: exprDollar = exprS[exprpt-2 : exprpt+1] -//line expr.y:207 +//line pkg/logql/expr.y:207 { exprVAL.LineFilters = newLineFilterExpr(nil, exprDollar[1].Filter, exprDollar[2].str) } case 60: exprDollar = exprS[exprpt-3 : exprpt+1] -//line expr.y:208 +//line pkg/logql/expr.y:208 { exprVAL.LineFilters = newLineFilterExpr(exprDollar[1].LineFilters, exprDollar[2].Filter, exprDollar[3].str) } case 61: exprDollar = exprS[exprpt-1 : exprpt+1] -//line expr.y:211 +//line pkg/logql/expr.y:211 { exprVAL.LabelParser = newLabelParserExpr(OpParserTypeJSON, "") } case 62: exprDollar = exprS[exprpt-1 : exprpt+1] -//line expr.y:212 +//line pkg/logql/expr.y:212 { exprVAL.LabelParser = newLabelParserExpr(OpParserTypeLogfmt, "") } case 63: exprDollar = exprS[exprpt-2 : exprpt+1] -//line expr.y:213 +//line pkg/logql/expr.y:213 { exprVAL.LabelParser = newLabelParserExpr(OpParserTypeRegexp, exprDollar[2].str) } case 64: exprDollar = exprS[exprpt-2 : exprpt+1] -//line expr.y:216 +//line pkg/logql/expr.y:216 { exprVAL.LineFormatExpr = newLineFmtExpr(exprDollar[2].str) } case 65: exprDollar = exprS[exprpt-3 : exprpt+1] -//line expr.y:219 +//line pkg/logql/expr.y:219 { exprVAL.LabelFormat = newRenameLabelFmt(exprDollar[1].str, exprDollar[3].str) } case 66: exprDollar = exprS[exprpt-3 : exprpt+1] -//line expr.y:220 +//line pkg/logql/expr.y:220 { exprVAL.LabelFormat = newTemplateLabelFmt(exprDollar[1].str, exprDollar[3].str) } case 67: exprDollar = exprS[exprpt-1 : exprpt+1] -//line expr.y:224 +//line pkg/logql/expr.y:224 { exprVAL.LabelsFormat = []labelFmt{exprDollar[1].LabelFormat} } case 68: exprDollar = exprS[exprpt-3 : exprpt+1] -//line expr.y:225 +//line pkg/logql/expr.y:225 { exprVAL.LabelsFormat = append(exprDollar[1].LabelsFormat, exprDollar[3].LabelFormat) } case 70: exprDollar = exprS[exprpt-2 : exprpt+1] -//line expr.y:229 +//line pkg/logql/expr.y:229 { exprVAL.LabelFormatExpr = newLabelFmtExpr(exprDollar[2].LabelsFormat) } case 71: exprDollar = exprS[exprpt-1 : exprpt+1] -//line expr.y:232 +//line pkg/logql/expr.y:232 { exprVAL.LabelFilter = labelfilter.NewString(exprDollar[1].Matcher) } case 72: exprDollar = exprS[exprpt-1 : exprpt+1] -//line expr.y:233 +//line pkg/logql/expr.y:233 { exprVAL.LabelFilter = exprDollar[1].UnitFilter } case 73: exprDollar = exprS[exprpt-1 : exprpt+1] -//line expr.y:234 +//line pkg/logql/expr.y:234 { exprVAL.LabelFilter = exprDollar[1].NumberFilter } case 74: exprDollar = exprS[exprpt-3 : exprpt+1] -//line expr.y:235 +//line pkg/logql/expr.y:235 { exprVAL.LabelFilter = exprDollar[2].LabelFilter } case 75: exprDollar = exprS[exprpt-2 : exprpt+1] -//line expr.y:236 +//line pkg/logql/expr.y:236 { exprVAL.LabelFilter = labelfilter.NewAnd(exprDollar[1].LabelFilter, exprDollar[2].LabelFilter) } case 76: exprDollar = exprS[exprpt-3 : exprpt+1] -//line expr.y:237 +//line pkg/logql/expr.y:237 { exprVAL.LabelFilter = labelfilter.NewAnd(exprDollar[1].LabelFilter, exprDollar[3].LabelFilter) } case 77: exprDollar = exprS[exprpt-3 : exprpt+1] -//line expr.y:238 +//line 
pkg/logql/expr.y:238 { exprVAL.LabelFilter = labelfilter.NewAnd(exprDollar[1].LabelFilter, exprDollar[3].LabelFilter) } case 78: exprDollar = exprS[exprpt-3 : exprpt+1] -//line expr.y:239 +//line pkg/logql/expr.y:239 { exprVAL.LabelFilter = labelfilter.NewOr(exprDollar[1].LabelFilter, exprDollar[3].LabelFilter) } case 79: exprDollar = exprS[exprpt-1 : exprpt+1] -//line expr.y:243 +//line pkg/logql/expr.y:243 { exprVAL.UnitFilter = exprDollar[1].DurationFilter } case 80: exprDollar = exprS[exprpt-1 : exprpt+1] -//line expr.y:244 +//line pkg/logql/expr.y:244 { exprVAL.UnitFilter = exprDollar[1].BytesFilter } case 81: exprDollar = exprS[exprpt-3 : exprpt+1] -//line expr.y:247 +//line pkg/logql/expr.y:247 { exprVAL.DurationFilter = labelfilter.NewDuration(labelfilter.FilterGreaterThan, exprDollar[1].str, exprDollar[3].duration) } case 82: exprDollar = exprS[exprpt-3 : exprpt+1] -//line expr.y:248 +//line pkg/logql/expr.y:248 { exprVAL.DurationFilter = labelfilter.NewDuration(labelfilter.FilterGreaterThanOrEqual, exprDollar[1].str, exprDollar[3].duration) } case 83: exprDollar = exprS[exprpt-3 : exprpt+1] -//line expr.y:249 +//line pkg/logql/expr.y:249 { exprVAL.DurationFilter = labelfilter.NewDuration(labelfilter.FilterLesserThan, exprDollar[1].str, exprDollar[3].duration) } case 84: exprDollar = exprS[exprpt-3 : exprpt+1] -//line expr.y:250 +//line pkg/logql/expr.y:250 { exprVAL.DurationFilter = labelfilter.NewDuration(labelfilter.FilterLesserThanOrEqual, exprDollar[1].str, exprDollar[3].duration) } case 85: exprDollar = exprS[exprpt-3 : exprpt+1] -//line expr.y:251 +//line pkg/logql/expr.y:251 { exprVAL.DurationFilter = labelfilter.NewDuration(labelfilter.FilterNotEqual, exprDollar[1].str, exprDollar[3].duration) } case 86: exprDollar = exprS[exprpt-3 : exprpt+1] -//line expr.y:252 +//line pkg/logql/expr.y:252 { exprVAL.DurationFilter = labelfilter.NewDuration(labelfilter.FilterEqual, exprDollar[1].str, exprDollar[3].duration) } case 87: exprDollar = exprS[exprpt-3 : exprpt+1] -//line expr.y:253 +//line pkg/logql/expr.y:253 { exprVAL.DurationFilter = labelfilter.NewDuration(labelfilter.FilterEqual, exprDollar[1].str, exprDollar[3].duration) } case 88: exprDollar = exprS[exprpt-3 : exprpt+1] -//line expr.y:257 +//line pkg/logql/expr.y:257 { exprVAL.BytesFilter = labelfilter.NewBytes(labelfilter.FilterGreaterThan, exprDollar[1].str, exprDollar[3].bytes) } case 89: exprDollar = exprS[exprpt-3 : exprpt+1] -//line expr.y:258 +//line pkg/logql/expr.y:258 { exprVAL.BytesFilter = labelfilter.NewBytes(labelfilter.FilterGreaterThanOrEqual, exprDollar[1].str, exprDollar[3].bytes) } case 90: exprDollar = exprS[exprpt-3 : exprpt+1] -//line expr.y:259 +//line pkg/logql/expr.y:259 { exprVAL.BytesFilter = labelfilter.NewBytes(labelfilter.FilterLesserThan, exprDollar[1].str, exprDollar[3].bytes) } case 91: exprDollar = exprS[exprpt-3 : exprpt+1] -//line expr.y:260 +//line pkg/logql/expr.y:260 { exprVAL.BytesFilter = labelfilter.NewBytes(labelfilter.FilterLesserThanOrEqual, exprDollar[1].str, exprDollar[3].bytes) } case 92: exprDollar = exprS[exprpt-3 : exprpt+1] -//line expr.y:261 +//line pkg/logql/expr.y:261 { exprVAL.BytesFilter = labelfilter.NewBytes(labelfilter.FilterNotEqual, exprDollar[1].str, exprDollar[3].bytes) } case 93: exprDollar = exprS[exprpt-3 : exprpt+1] -//line expr.y:262 +//line pkg/logql/expr.y:262 { exprVAL.BytesFilter = labelfilter.NewBytes(labelfilter.FilterEqual, exprDollar[1].str, exprDollar[3].bytes) } case 94: exprDollar = exprS[exprpt-3 : exprpt+1] -//line expr.y:263 +//line 
pkg/logql/expr.y:263 { exprVAL.BytesFilter = labelfilter.NewBytes(labelfilter.FilterEqual, exprDollar[1].str, exprDollar[3].bytes) } case 95: exprDollar = exprS[exprpt-3 : exprpt+1] -//line expr.y:267 +//line pkg/logql/expr.y:267 { exprVAL.NumberFilter = labelfilter.NewNumeric(labelfilter.FilterGreaterThan, exprDollar[1].str, mustNewFloat(exprDollar[3].str)) } case 96: exprDollar = exprS[exprpt-3 : exprpt+1] -//line expr.y:268 +//line pkg/logql/expr.y:268 { exprVAL.NumberFilter = labelfilter.NewNumeric(labelfilter.FilterGreaterThanOrEqual, exprDollar[1].str, mustNewFloat(exprDollar[3].str)) } case 97: exprDollar = exprS[exprpt-3 : exprpt+1] -//line expr.y:269 +//line pkg/logql/expr.y:269 { exprVAL.NumberFilter = labelfilter.NewNumeric(labelfilter.FilterLesserThan, exprDollar[1].str, mustNewFloat(exprDollar[3].str)) } case 98: exprDollar = exprS[exprpt-3 : exprpt+1] -//line expr.y:270 +//line pkg/logql/expr.y:270 { exprVAL.NumberFilter = labelfilter.NewNumeric(labelfilter.FilterLesserThanOrEqual, exprDollar[1].str, mustNewFloat(exprDollar[3].str)) } case 99: exprDollar = exprS[exprpt-3 : exprpt+1] -//line expr.y:271 +//line pkg/logql/expr.y:271 { exprVAL.NumberFilter = labelfilter.NewNumeric(labelfilter.FilterNotEqual, exprDollar[1].str, mustNewFloat(exprDollar[3].str)) } case 100: exprDollar = exprS[exprpt-3 : exprpt+1] -//line expr.y:272 +//line pkg/logql/expr.y:272 { exprVAL.NumberFilter = labelfilter.NewNumeric(labelfilter.FilterEqual, exprDollar[1].str, mustNewFloat(exprDollar[3].str)) } case 101: exprDollar = exprS[exprpt-3 : exprpt+1] -//line expr.y:273 +//line pkg/logql/expr.y:273 { exprVAL.NumberFilter = labelfilter.NewNumeric(labelfilter.FilterEqual, exprDollar[1].str, mustNewFloat(exprDollar[3].str)) } case 102: exprDollar = exprS[exprpt-4 : exprpt+1] -//line expr.y:279 +//line pkg/logql/expr.y:279 { exprVAL.BinOpExpr = mustNewBinOpExpr("or", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } case 103: exprDollar = exprS[exprpt-4 : exprpt+1] -//line expr.y:280 +//line pkg/logql/expr.y:280 { exprVAL.BinOpExpr = mustNewBinOpExpr("and", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } case 104: exprDollar = exprS[exprpt-4 : exprpt+1] -//line expr.y:281 +//line pkg/logql/expr.y:281 { exprVAL.BinOpExpr = mustNewBinOpExpr("unless", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } case 105: exprDollar = exprS[exprpt-4 : exprpt+1] -//line expr.y:282 +//line pkg/logql/expr.y:282 { exprVAL.BinOpExpr = mustNewBinOpExpr("+", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } case 106: exprDollar = exprS[exprpt-4 : exprpt+1] -//line expr.y:283 +//line pkg/logql/expr.y:283 { exprVAL.BinOpExpr = mustNewBinOpExpr("-", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } case 107: exprDollar = exprS[exprpt-4 : exprpt+1] -//line expr.y:284 +//line pkg/logql/expr.y:284 { exprVAL.BinOpExpr = mustNewBinOpExpr("*", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } case 108: exprDollar = exprS[exprpt-4 : exprpt+1] -//line expr.y:285 +//line pkg/logql/expr.y:285 { exprVAL.BinOpExpr = mustNewBinOpExpr("/", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } case 109: exprDollar = exprS[exprpt-4 : exprpt+1] -//line expr.y:286 +//line pkg/logql/expr.y:286 { exprVAL.BinOpExpr = mustNewBinOpExpr("%", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } case 110: exprDollar = exprS[exprpt-4 : exprpt+1] -//line expr.y:287 +//line pkg/logql/expr.y:287 { 
exprVAL.BinOpExpr = mustNewBinOpExpr("^", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } case 111: exprDollar = exprS[exprpt-4 : exprpt+1] -//line expr.y:288 +//line pkg/logql/expr.y:288 { exprVAL.BinOpExpr = mustNewBinOpExpr("==", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } case 112: exprDollar = exprS[exprpt-4 : exprpt+1] -//line expr.y:289 +//line pkg/logql/expr.y:289 { exprVAL.BinOpExpr = mustNewBinOpExpr("!=", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } case 113: exprDollar = exprS[exprpt-4 : exprpt+1] -//line expr.y:290 +//line pkg/logql/expr.y:290 { exprVAL.BinOpExpr = mustNewBinOpExpr(">", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } case 114: exprDollar = exprS[exprpt-4 : exprpt+1] -//line expr.y:291 +//line pkg/logql/expr.y:291 { exprVAL.BinOpExpr = mustNewBinOpExpr(">=", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } case 115: exprDollar = exprS[exprpt-4 : exprpt+1] -//line expr.y:292 +//line pkg/logql/expr.y:292 { exprVAL.BinOpExpr = mustNewBinOpExpr("<", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } case 116: exprDollar = exprS[exprpt-4 : exprpt+1] -//line expr.y:293 +//line pkg/logql/expr.y:293 { exprVAL.BinOpExpr = mustNewBinOpExpr("<=", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } case 117: exprDollar = exprS[exprpt-0 : exprpt+1] -//line expr.y:297 +//line pkg/logql/expr.y:297 { exprVAL.BinOpModifier = BinOpOptions{} } case 118: exprDollar = exprS[exprpt-1 : exprpt+1] -//line expr.y:298 +//line pkg/logql/expr.y:298 { exprVAL.BinOpModifier = BinOpOptions{ReturnBool: true} } case 119: exprDollar = exprS[exprpt-1 : exprpt+1] -//line expr.y:302 +//line pkg/logql/expr.y:302 { exprVAL.LiteralExpr = mustNewLiteralExpr(exprDollar[1].str, false) } case 120: exprDollar = exprS[exprpt-2 : exprpt+1] -//line expr.y:303 +//line pkg/logql/expr.y:303 { exprVAL.LiteralExpr = mustNewLiteralExpr(exprDollar[2].str, false) } case 121: exprDollar = exprS[exprpt-2 : exprpt+1] -//line expr.y:304 +//line pkg/logql/expr.y:304 { exprVAL.LiteralExpr = mustNewLiteralExpr(exprDollar[2].str, true) } case 122: exprDollar = exprS[exprpt-1 : exprpt+1] -//line expr.y:308 +//line pkg/logql/expr.y:308 { exprVAL.VectorOp = OpTypeSum } case 123: exprDollar = exprS[exprpt-1 : exprpt+1] -//line expr.y:309 +//line pkg/logql/expr.y:309 { exprVAL.VectorOp = OpTypeAvg } case 124: exprDollar = exprS[exprpt-1 : exprpt+1] -//line expr.y:310 +//line pkg/logql/expr.y:310 { exprVAL.VectorOp = OpTypeCount } case 125: exprDollar = exprS[exprpt-1 : exprpt+1] -//line expr.y:311 +//line pkg/logql/expr.y:311 { exprVAL.VectorOp = OpTypeMax } case 126: exprDollar = exprS[exprpt-1 : exprpt+1] -//line expr.y:312 +//line pkg/logql/expr.y:312 { exprVAL.VectorOp = OpTypeMin } case 127: exprDollar = exprS[exprpt-1 : exprpt+1] -//line expr.y:313 +//line pkg/logql/expr.y:313 { exprVAL.VectorOp = OpTypeStddev } case 128: exprDollar = exprS[exprpt-1 : exprpt+1] -//line expr.y:314 +//line pkg/logql/expr.y:314 { exprVAL.VectorOp = OpTypeStdvar } case 129: exprDollar = exprS[exprpt-1 : exprpt+1] -//line expr.y:315 +//line pkg/logql/expr.y:315 { exprVAL.VectorOp = OpTypeBottomK } case 130: exprDollar = exprS[exprpt-1 : exprpt+1] -//line expr.y:316 +//line pkg/logql/expr.y:316 { exprVAL.VectorOp = OpTypeTopK } case 131: exprDollar = exprS[exprpt-1 : exprpt+1] -//line expr.y:320 +//line pkg/logql/expr.y:320 { exprVAL.RangeOp = OpRangeTypeCount } case 132: exprDollar = 
exprS[exprpt-1 : exprpt+1] -//line expr.y:321 +//line pkg/logql/expr.y:321 { exprVAL.RangeOp = OpRangeTypeRate } case 133: exprDollar = exprS[exprpt-1 : exprpt+1] -//line expr.y:322 +//line pkg/logql/expr.y:322 { exprVAL.RangeOp = OpRangeTypeBytes } case 134: exprDollar = exprS[exprpt-1 : exprpt+1] -//line expr.y:323 +//line pkg/logql/expr.y:323 { exprVAL.RangeOp = OpRangeTypeBytesRate } case 135: exprDollar = exprS[exprpt-1 : exprpt+1] -//line expr.y:324 +//line pkg/logql/expr.y:324 { exprVAL.RangeOp = OpRangeTypeAvg } case 136: exprDollar = exprS[exprpt-1 : exprpt+1] -//line expr.y:325 +//line pkg/logql/expr.y:325 { exprVAL.RangeOp = OpRangeTypeSum } case 137: exprDollar = exprS[exprpt-1 : exprpt+1] -//line expr.y:326 +//line pkg/logql/expr.y:326 { exprVAL.RangeOp = OpRangeTypeMin } case 138: exprDollar = exprS[exprpt-1 : exprpt+1] -//line expr.y:327 +//line pkg/logql/expr.y:327 { exprVAL.RangeOp = OpRangeTypeMax } case 139: exprDollar = exprS[exprpt-1 : exprpt+1] -//line expr.y:328 +//line pkg/logql/expr.y:328 { exprVAL.RangeOp = OpRangeTypeStdvar } case 140: exprDollar = exprS[exprpt-1 : exprpt+1] -//line expr.y:329 +//line pkg/logql/expr.y:329 { exprVAL.RangeOp = OpRangeTypeStddev } case 141: exprDollar = exprS[exprpt-1 : exprpt+1] -//line expr.y:330 +//line pkg/logql/expr.y:330 { exprVAL.RangeOp = OpRangeTypeQuantile } case 142: exprDollar = exprS[exprpt-1 : exprpt+1] -//line expr.y:335 +//line pkg/logql/expr.y:335 { exprVAL.Labels = []string{exprDollar[1].str} } case 143: exprDollar = exprS[exprpt-3 : exprpt+1] -//line expr.y:336 +//line pkg/logql/expr.y:336 { exprVAL.Labels = append(exprDollar[1].Labels, exprDollar[3].str) } case 144: exprDollar = exprS[exprpt-4 : exprpt+1] -//line expr.y:340 +//line pkg/logql/expr.y:340 { exprVAL.Grouping = &grouping{without: false, groups: exprDollar[3].Labels} } case 145: exprDollar = exprS[exprpt-4 : exprpt+1] -//line expr.y:341 +//line pkg/logql/expr.y:341 { exprVAL.Grouping = &grouping{without: true, groups: exprDollar[3].Labels} } diff --git a/pkg/logql/labelfilter/bytes.go b/pkg/logql/labelfilter/bytes.go index a07f6dce9a6e3..1f04ee62efd0d 100644 --- a/pkg/logql/labelfilter/bytes.go +++ b/pkg/logql/labelfilter/bytes.go @@ -52,4 +52,4 @@ func (d *Bytes) Filter(lbs labels.Labels) (bool, error) { func (d *Bytes) String() string { return fmt.Sprintf("%s%s%d", d.Name, d.Type, d.Value) -} \ No newline at end of file +} diff --git a/pkg/logql/lex_test.go b/pkg/logql/lex_test.go index 1d49fd8dfe8ec..7c004b90eef97 100644 --- a/pkg/logql/lex_test.go +++ b/pkg/logql/lex_test.go @@ -30,6 +30,9 @@ func TestLex(t *testing.T) { []int{OPEN_BRACE, IDENTIFIER, EQ, STRING, CLOSE_BRACE, PIPE_MATCH, STRING, PIPE, IDENTIFIER, GT, DURATION, OR, IDENTIFIER, CMP_EQ, NUMBER, AND, IDENTIFIER, EQ, STRING, PIPE, UNWRAP, IDENTIFIER}}, {`{foo="bar"} |~ "\\w+" | size > 250kB`, []int{OPEN_BRACE, IDENTIFIER, EQ, STRING, CLOSE_BRACE, PIPE_MATCH, STRING, PIPE, IDENTIFIER, GT, BYTES}}, + {`{foo="bar"} |~ "\\w+" | size > 250kB and latency <= 1h15m30s or bar=1`, + []int{OPEN_BRACE, IDENTIFIER, EQ, STRING, CLOSE_BRACE, PIPE_MATCH, STRING, PIPE, + IDENTIFIER, GT, BYTES, AND, IDENTIFIER, LTE, DURATION, OR, IDENTIFIER, EQ, NUMBER}}, {`{foo="bar"} |~ "\\w+" | size > 200MiB or foo == 4.00`, []int{OPEN_BRACE, IDENTIFIER, EQ, STRING, CLOSE_BRACE, PIPE_MATCH, STRING, PIPE, IDENTIFIER, GT, BYTES, OR, IDENTIFIER, CMP_EQ, NUMBER}}, {`{ foo = "bar" }`, []int{OPEN_BRACE, IDENTIFIER, EQ, STRING, CLOSE_BRACE}}, diff --git a/pkg/logql/parser_test.go b/pkg/logql/parser_test.go index 
d3a20baad0ee4..54ea789699af2 100644 --- a/pkg/logql/parser_test.go +++ b/pkg/logql/parser_test.go @@ -1150,6 +1150,25 @@ func TestParse(t *testing.T) { exp: nil, err: ParseError{msg: "invalid aggregation count_over_time with unwrap"}, }, + { + in: `{app="foo"} |= "bar" | json | status_code < 500 or status_code > 200 and size >= 2.5KiB `, + exp: &pipelineExpr{ + left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}), + pipeline: MultiPipelineExpr{ + newLineFilterExpr(nil, labels.MatchEqual, "bar"), + newLabelParserExpr(OpParserTypeJSON, ""), + &labelFilterExpr{ + Filterer: labelfilter.NewOr( + labelfilter.NewNumeric(labelfilter.FilterLesserThan, "status_code", 500.0), + labelfilter.NewAnd( + labelfilter.NewNumeric(labelfilter.FilterGreaterThan, "status_code", 200.0), + labelfilter.NewBytes(labelfilter.FilterGreaterThanOrEqual, "size", 2560), + ), + ), + }, + }, + }, + }, { in: `stdvar_over_time({app="foo"} |= "bar" | json | latency >= 250ms or ( status_code < 500 and status_code > 200) | line_format "blip{{ .foo }}blop {{.status_code}}" | label_format foo=bar,status_code="buzz{{.bar}}" | unwrap foo [5m])`, From c054a5d75efa6f0cca78bcc57a51023a9be93b5d Mon Sep 17 00:00:00 2001 From: Cyril Tovena Date: Tue, 13 Oct 2020 10:12:20 +0200 Subject: [PATCH 31/45] Wip error handling, commit before big refactoring. Signed-off-by: Cyril Tovena --- pkg/chunkenc/memchunk.go | 14 +++++--- pkg/logql/ast.go | 27 ++++++++++++++-- pkg/logql/error.go | 1 + pkg/logql/labelfilter/filter.go | 21 ++++++++++++ pkg/logql/series_extractor.go | 52 +++++++++++++++++------------- pkg/logql/series_extractor_test.go | 19 ++++++----- pkg/logql/test_utils.go | 5 ++- 7 files changed, 101 insertions(+), 38 deletions(-) diff --git a/pkg/chunkenc/memchunk.go b/pkg/chunkenc/memchunk.go index 949e8468f5f9f..6250e11d0efb9 100644 --- a/pkg/chunkenc/memchunk.go +++ b/pkg/chunkenc/memchunk.go @@ -629,7 +629,10 @@ func (hb *headBlock) sampleIterator(ctx context.Context, mint, maxt int64, lbs l } var value float64 var found bool - value, parsedLabels = extractor.Extract(newLine, parsedLabels) + ok, value, parsedLabels = extractor.Extract(newLine, parsedLabels) + if !ok { + continue + } var s *logproto.Series lhash := parsedLabels.Hash() if s, found = series[lhash]; !found { @@ -845,10 +848,13 @@ type sampleBufferedIterator struct { } func (e *sampleBufferedIterator) Next() bool { - var newLabels labels.Labels for e.bufferedIterator.Next() { - e.currValue, newLabels = e.extractor.Extract(e.currLine, e.bufferedIterator.currLabels) - e.currLabels = newLabels.String() + ok, val, labels := e.extractor.Extract(e.currLine, e.bufferedIterator.currLabels) + if !ok { + continue + } + e.currValue = val + e.currLabels = labels.String() return true } return false diff --git a/pkg/logql/ast.go b/pkg/logql/ast.go index 82eef2dac26cf..7364ce9f9eb25 100644 --- a/pkg/logql/ast.go +++ b/pkg/logql/ast.go @@ -374,13 +374,36 @@ type labelFilterExpr struct { } func (e *labelFilterExpr) Pipeline() (Pipeline, error) { + f := newLabelFilter(e.Filterer) return PipelineFunc(func(line []byte, lbs labels.Labels) ([]byte, labels.Labels, bool) { - //todo (cyriltovena): handle error - ok, _ := e.Filterer.Filter(lbs) + ok, lbs := f.Filter(lbs) return line, lbs, ok }), nil } +type labelFilter struct { + labelfilter.Filterer + + builder *labels.Builder +} + +func newLabelFilter(f labelfilter.Filterer) *labelFilter { + return &labelFilter{ + Filterer: f, + builder: labels.NewBuilder(nil), + } +} + +func (l *labelFilter) Filter(lbs 
labels.Labels) (bool, labels.Labels) {
+	l.builder.Reset(lbs)
+	ok, err := l.Filterer.Filter(lbs)
+	if err != nil {
+		l.builder.Set(errorLabel, errFilter)
+		return true, l.builder.Labels()
+	}
+	return ok, lbs
+}
+
 func (e *labelFilterExpr) String() string {
 	return fmt.Sprintf("%s %s", OpPipe, e.Filterer.String())
 }
diff --git a/pkg/logql/error.go b/pkg/logql/error.go
index 7f0ce0e34bac6..82eecb1bba1c8 100644
--- a/pkg/logql/error.go
+++ b/pkg/logql/error.go
@@ -4,6 +4,7 @@ var (
 	errJSON             = "JSONParser"
 	errLogfmt           = "LogfmtParser"
 	errSampleExtraction = "SampleExtraction"
+	errFilter           = "Filter"
 	errorLabel          = "__error__"
 )
diff --git a/pkg/logql/labelfilter/filter.go b/pkg/logql/labelfilter/filter.go
index 84f120055e083..177585c31bd23 100644
--- a/pkg/logql/labelfilter/filter.go
+++ b/pkg/logql/labelfilter/filter.go
@@ -7,6 +7,10 @@ import (
 	"github.com/prometheus/prometheus/pkg/labels"
 )
 
+var (
+	Noop = noopFilter{}
+)
+
 type Filterer interface {
 	Filter(lbs labels.Labels) (bool, error)
 	fmt.Stringer
@@ -64,3 +68,20 @@ func (b *Binary) String() string {
 	sb.WriteString(" )")
 	return sb.String()
 }
+
+type noopFilter struct{}
+
+func (noopFilter) Filter(lbs labels.Labels) (bool, error) { return true, nil }
+
+func (noopFilter) String() string { return "" }
+
+func ReduceAnd(filters []Filterer) Filterer {
+	if len(filters) == 0 {
+		return Noop
+	}
+	result := filters[0]
+	for _, f := range filters[1:] {
+		result = NewAnd(result, f)
+	}
+	return result
+}
diff --git a/pkg/logql/series_extractor.go b/pkg/logql/series_extractor.go
index ad9b7d2f37074..e7cdec934ff1b 100644
--- a/pkg/logql/series_extractor.go
+++ b/pkg/logql/series_extractor.go
@@ -5,6 +5,7 @@ import (
 	"time"
 
 	"github.com/grafana/loki/pkg/logql/labelfilter"
+
 	"github.com/prometheus/prometheus/pkg/labels"
 )
 
@@ -16,33 +17,46 @@ var (
 // SampleExtractor transforms a log entry into a sample.
 // In case of failure the first return value will be false.
type SampleExtractor interface {
-	Extract(line []byte, lbs labels.Labels) (float64, labels.Labels)
+	Extract(line []byte, lbs labels.Labels) (bool, float64, labels.Labels)
 }
 
 type countSampleExtractor struct{}
 
-func (countSampleExtractor) Extract(line []byte, lbs labels.Labels) (float64, labels.Labels) {
-	return 1., lbs
+func (countSampleExtractor) Extract(line []byte, lbs labels.Labels) (bool, float64, labels.Labels) {
+	return true, 1., lbs
 }
 
 type bytesSampleExtractor struct{}
 
-func (bytesSampleExtractor) Extract(line []byte, lbs labels.Labels) (float64, labels.Labels) {
-	return float64(len(line)), lbs
+func (bytesSampleExtractor) Extract(line []byte, lbs labels.Labels) (bool, float64, labels.Labels) {
+	return true, float64(len(line)), lbs
 }
 
 type labelSampleExtractor struct {
-	labelName   string
-	gr          *grouping
-	postFilters []labelfilter.Filterer
-	conversion  string // the sample conversion operation to attempt
+	labelName  string
+	gr         *grouping
+	postFilter labelfilter.Filterer
+	conversion string // the sample conversion operation to attempt
+
+	builder *labels.Builder
+}
+
+func newLabelSampleExtractor(labelName, conversion string, postFilters []labelfilter.Filterer, gr *grouping) *labelSampleExtractor {
+	return &labelSampleExtractor{
+		labelName:  labelName,
+		conversion: conversion,
+		gr:         gr,
+		postFilter: labelfilter.ReduceAnd(postFilters),
+		builder:    labels.NewBuilder(nil),
+	}
 }
 
-func (l *labelSampleExtractor) Extract(_ []byte, lbs labels.Labels) (float64, labels.Labels) {
+func (l *labelSampleExtractor) Extract(_ []byte, lbs labels.Labels) (bool, float64, labels.Labels) {
 	stringValue := lbs.Get(l.labelName)
+	l.builder.Reset(lbs)
 	if stringValue == "" {
-		// todo(cyriltovena) handle errors.
-		return 0, lbs
+		l.builder.Set(errorLabel, errSampleExtraction)
+		return true, 0, l.builder.Labels()
 	}
 	var f float64
 	var err error
@@ -53,10 +67,10 @@ func (l *labelSampleExtractor) Extract(_ []byte, lbs labels.Labels) (float64, la
 		f, err = convertFloat(stringValue)
 	}
 	if err != nil {
-		// todo(cyriltovena) handle errors.
-		return 0, lbs
+		l.builder.Set(errorLabel, errSampleExtraction)
+		return true, 0, l.builder.Labels()
 	}
-	return f, l.groupLabels(lbs)
+	return true, f, l.groupLabels(lbs)
 }
 
 func (l *labelSampleExtractor) groupLabels(lbs labels.Labels) labels.Labels {
@@ -69,14 +83,6 @@ func (l *labelSampleExtractor) groupLabels(lbs labels.Labels) labels.Labels {
 	return lbs.WithoutLabels(l.labelName)
 }
 
-func newLabelSampleExtractor(labelName, conversion string, postFilters []labelfilter.Filterer, gr *grouping) *labelSampleExtractor {
-	return &labelSampleExtractor{
-		labelName:  labelName,
-		conversion: conversion,
-		gr:         gr,
-	}
-}
-
 func convertFloat(v string) (float64, error) {
 	return strconv.ParseFloat(v, 64)
 }
diff --git a/pkg/logql/series_extractor_test.go b/pkg/logql/series_extractor_test.go
index d9f626e41a402..9f949a00432e7 100644
--- a/pkg/logql/series_extractor_test.go
+++ b/pkg/logql/series_extractor_test.go
@@ -1,11 +1,11 @@
 package logql
 
 import (
-	"reflect"
 	"sort"
 	"testing"
 
 	"github.com/prometheus/prometheus/pkg/labels"
+	"github.com/stretchr/testify/require"
 )
 
 func Test_labelSampleExtractor_Extract(t *testing.T) {
@@ -15,6 +15,7 @@ func Test_labelSampleExtractor_Extract(t *testing.T) {
 		in      labels.Labels
 		want    float64
 		wantLbs labels.Labels
+		wantOk  bool
 	}{
 		{
 			"convert float",
@@ -22,6 +23,7 @@ func Test_labelSampleExtractor_Extract(t *testing.T) {
 			labels.Labels{labels.Label{Name: "foo", Value: "15.0"}},
 			15,
 			labels.Labels{},
+			true,
 		},
 		{
 			"convert float without",
@@ -40,6 +42,7 @@ func Test_labelSampleExtractor_Extract(t *testing.T) {
 			labels.Labels{
 				{Name: "namespace", Value: "dev"},
 			},
+			true,
 		},
 		{
 			"convert float with",
@@ -59,6 +62,7 @@ func Test_labelSampleExtractor_Extract(t *testing.T) {
 				{Name: "bar", Value: "foo"},
 				{Name: "buzz", Value: "blip"},
 			},
+			true,
 		},
 		{
 			"convert duration with",
@@ -78,6 +82,7 @@ func Test_labelSampleExtractor_Extract(t *testing.T) {
 				{Name: "bar", Value: "foo"},
 				{Name: "buzz", Value: "blip"},
 			},
+			true,
 		},
 		{
 			"convert duration_seconds with",
@@ -97,18 +102,16 @@ func Test_labelSampleExtractor_Extract(t *testing.T) {
 				{Name: "bar", Value: "foo"},
 				{Name: "buzz", Value: "blip"},
 			},
+			true,
 		},
 	}
 	for _, tt := range tests {
 		t.Run(tt.name, func(t *testing.T) {
 			sort.Sort(tt.in)
-			outval, outlbs := tt.ex.Extract([]byte(""), tt.in)
-			if outval != tt.want {
-				t.Errorf("labelSampleExtractor.Extract() val = %v, want %v", outval, tt.want)
-			}
-			if !reflect.DeepEqual(outlbs, tt.wantLbs) {
-				t.Errorf("labelSampleExtractor.Extract() lbs = %v, want %v", outlbs, tt.wantLbs)
-			}
+			ok, outval, outlbs := tt.ex.Extract([]byte(""), tt.in)
+			require.Equal(t, tt.wantOk, ok)
+			require.Equal(t, tt.want, outval)
+			require.Equal(t, tt.wantLbs, outlbs)
 		})
 	}
 }
diff --git a/pkg/logql/test_utils.go b/pkg/logql/test_utils.go
index 77d7e471fb1ed..50d6eebc39dd1 100644
--- a/pkg/logql/test_utils.go
+++ b/pkg/logql/test_utils.go
@@ -122,7 +122,10 @@ func processSeries(in []logproto.Stream, pipeline Pipeline, ex SampleExtractor)
 	for _, stream := range in {
 		for _, e := range stream.Entries {
 			if l, out, ok := pipeline.Process([]byte(e.Line), mustParseLabels(stream.Labels)); ok {
-				f, lbs := ex.Extract(l, out)
+				ok, f, lbs := ex.Extract(l, out)
+				if !ok {
+					continue
+				}
 				var s *logproto.Series
 				var found bool
 				s, found = resBySeries[lbs.String()]

From 5ab8b5c80139925a3e1f40c50672bb09914fdeb2 Mon Sep 17 00:00:00 2001
From: Cyril Tovena
Date: Tue, 13 Oct 2020 12:02:25 +0200
Subject: [PATCH 32/45] Refactoring in progress.
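
This starts moving the line processing primitives out of package logql and
into a dedicated pkg/logql/log package. Roughly, every pipeline step becomes
a log.Stage and a pipeline is an ordered list of stages; a sketch of the
intended shape (signatures may still change while the refactoring is in
flight):

    // Stage transforms a log line and its extracted labels; the boolean
    // reports whether the line should be kept.
    type Stage interface {
        Process(line []byte, lbs Labels) ([]byte, bool)
    }

    // MultiStage chains stages in order, stopping as soon as one of them
    // drops the line.
    type MultiStage []Stage

On the AST side MultiPipelineExpr becomes MultiStageExpr, and stage
expressions contribute a log.Stage through Stage() instead of each building a
full pipeline.
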
Signed-off-by: Cyril Tovena --- pkg/logql/ast.go | 69 ++++++------------- pkg/logql/expr.y | 6 +- pkg/logql/labelfilter/bytes.go | 4 +- pkg/logql/{ => log}/error.go | 2 +- pkg/logql/{ => log}/filter.go | 56 +++++++-------- pkg/logql/{ => log}/filter_test.go | 18 ++--- pkg/logql/{ => log}/fmt.go | 36 +++++----- pkg/logql/{ => log}/fmt_test.go | 38 +++++----- pkg/logql/log/labels.go | 3 + pkg/logql/{ => log}/logfmt/decode.go | 0 pkg/logql/{ => log}/logfmt/jsonstring.go | 0 pkg/logql/{labels_parser.go => log/parser.go} | 9 +-- .../parser_test.go} | 2 +- pkg/logql/log/pipeline.go | 53 ++++++++++++++ 14 files changed, 163 insertions(+), 133 deletions(-) rename pkg/logql/{ => log}/error.go (92%) rename pkg/logql/{ => log}/filter.go (85%) rename pkg/logql/{ => log}/filter_test.go (94%) rename pkg/logql/{ => log}/fmt.go (75%) rename pkg/logql/{ => log}/fmt_test.go (77%) create mode 100644 pkg/logql/log/labels.go rename pkg/logql/{ => log}/logfmt/decode.go (100%) rename pkg/logql/{ => log}/logfmt/jsonstring.go (100%) rename pkg/logql/{labels_parser.go => log/parser.go} (96%) rename pkg/logql/{labels_parser_test.go => log/parser_test.go} (99%) create mode 100644 pkg/logql/log/pipeline.go diff --git a/pkg/logql/ast.go b/pkg/logql/ast.go index 7364ce9f9eb25..69f6a95d34236 100644 --- a/pkg/logql/ast.go +++ b/pkg/logql/ast.go @@ -14,6 +14,7 @@ import ( "github.com/grafana/loki/pkg/iter" "github.com/grafana/loki/pkg/logproto" "github.com/grafana/loki/pkg/logql/labelfilter" + "github.com/grafana/loki/pkg/logql/log" ) // Expr is the root expression which can be a SampleExpr or LogSelectorExpr @@ -81,62 +82,36 @@ type LogSelectorExpr interface { } type PipelineExpr interface { - Pipeline() (Pipeline, error) + Pipeline() (log.Pipeline, error) Expr } -type Pipeline interface { - Process(line []byte, lbs labels.Labels) ([]byte, labels.Labels, bool) -} - -var NoopPipeline = &noopPipeline{} - -type noopPipeline struct{} - -func (noopPipeline) Process(line []byte, lbs labels.Labels) ([]byte, labels.Labels, bool) { - return line, lbs, true -} - -type PipelineFunc func(line []byte, lbs labels.Labels) ([]byte, labels.Labels, bool) - -func (fn PipelineFunc) Process(line []byte, lbs labels.Labels) ([]byte, labels.Labels, bool) { - return fn(line, lbs) -} - -type MultiPipeline []Pipeline - -func (m MultiPipeline) Process(line []byte, lbs labels.Labels) ([]byte, labels.Labels, bool) { - var ok bool - for _, p := range m { - line, lbs, ok = p.Process(line, lbs) - if !ok { - return line, lbs, ok - } - } - return line, lbs, ok +type StageExpr interface { + Stage() (log.Stage, error) + Expr } -type MultiPipelineExpr []PipelineExpr +type MultiStageExpr []StageExpr -func (m MultiPipelineExpr) Pipeline() (Pipeline, error) { - c := make(MultiPipeline, 0, len(m)) +func (m MultiStageExpr) Pipeline() (log.Pipeline, error) { + c := make(log.MultiStage, 0, len(m)) for _, e := range m { - p, err := e.Pipeline() + p, err := e.Stage() if err != nil { return nil, err } - if p == NoopPipeline { + if p == log.NoopStage { continue } c = append(c, p) } if len(c) == 0 { - return NoopPipeline, nil + return log.NoopPipeline, nil } return c, nil } -func (m MultiPipelineExpr) String() string { +func (m MultiStageExpr) String() string { var sb strings.Builder for i, e := range m { sb.WriteString(e.String()) @@ -147,7 +122,7 @@ func (m MultiPipelineExpr) String() string { return sb.String() } -func (MultiPipelineExpr) logQLExpr() {} +func (MultiStageExpr) logQLExpr() {} func FilterToPipeline(f LineFilter) Pipeline { if f == nil || f == TrueFilter 
{ @@ -191,8 +166,8 @@ func (e *matchersExpr) String() string { return sb.String() } -func (e *matchersExpr) Pipeline() (Pipeline, error) { - return NoopPipeline, nil +func (e *matchersExpr) Pipeline() (log.Pipeline, error) { + return log.NoopPipeline, nil } func (e *matchersExpr) HasFilter() bool { @@ -200,12 +175,12 @@ func (e *matchersExpr) HasFilter() bool { } type pipelineExpr struct { - pipeline MultiPipelineExpr + pipeline MultiStageExpr left *matchersExpr implicit } -func newPipelineExpr(left *matchersExpr, pipeline MultiPipelineExpr) LogSelectorExpr { +func newPipelineExpr(left *matchersExpr, pipeline MultiStageExpr) LogSelectorExpr { return &pipelineExpr{ left: left, pipeline: pipeline, @@ -224,7 +199,7 @@ func (e *pipelineExpr) String() string { return sb.String() } -func (e *pipelineExpr) Pipeline() (Pipeline, error) { +func (e *pipelineExpr) Pipeline() (log.Pipeline, error) { return e.pipeline.Pipeline() } @@ -291,8 +266,8 @@ func (e *lineFilterExpr) String() string { return sb.String() } -func (e *lineFilterExpr) Filter() (LineFilter, error) { - f, err := newFilter(e.match, e.ty) +func (e *lineFilterExpr) Filter() (log.Filterer, error) { + f, err := log.NewFilter(e.match, e.ty) if err != nil { return nil, err } @@ -302,11 +277,11 @@ func (e *lineFilterExpr) Filter() (LineFilter, error) { return nil, err } if nextFilter != nil { - f = newAndFilter(nextFilter, f) + f = log.NewAndFilter(nextFilter, f) } } - if f == TrueFilter { + if f == log.TrueFilter { return nil, nil } diff --git a/pkg/logql/expr.y b/pkg/logql/expr.y index 55a97d4a324b3..23f386ce0e670 100644 --- a/pkg/logql/expr.y +++ b/pkg/logql/expr.y @@ -33,8 +33,8 @@ import ( BinOpModifier BinOpOptions LabelParser *labelParserExpr LineFilters *lineFilterExpr - PipelineExpr MultiPipelineExpr - PipelineStage PipelineExpr + PipelineExpr MultiStageExpr + PipelineStage StageExpr BytesFilter labelfilter.Filterer NumberFilter labelfilter.Filterer DurationFilter labelfilter.Filterer @@ -192,7 +192,7 @@ matcher: ; pipelineExpr: - pipelineStage { $$ = MultiPipelineExpr{ $1 } } + pipelineStage { $$ = MultiStageExpr{ $1 } } | pipelineExpr pipelineStage { $$ = append($1, $2)} ; diff --git a/pkg/logql/labelfilter/bytes.go b/pkg/logql/labelfilter/bytes.go index 1f04ee62efd0d..b6faaa59b6a9f 100644 --- a/pkg/logql/labelfilter/bytes.go +++ b/pkg/logql/labelfilter/bytes.go @@ -9,11 +9,11 @@ import ( type Bytes struct { Name string - Value uint64 + Value uint64 Type FilterType } -func NewBytes(t FilterType, name string, b uint64) *Bytes{ +func NewBytes(t FilterType, name string, b uint64) *Bytes { return &Bytes{ Name: name, Type: t, diff --git a/pkg/logql/error.go b/pkg/logql/log/error.go similarity index 92% rename from pkg/logql/error.go rename to pkg/logql/log/error.go index 82eecb1bba1c8..b0bf9442e47fd 100644 --- a/pkg/logql/error.go +++ b/pkg/logql/log/error.go @@ -1,4 +1,4 @@ -package logql +package log var ( errJSON = "JSONParser" diff --git a/pkg/logql/filter.go b/pkg/logql/log/filter.go similarity index 85% rename from pkg/logql/filter.go rename to pkg/logql/log/filter.go index b9d77a903efa6..bb5a948fc9b8f 100644 --- a/pkg/logql/filter.go +++ b/pkg/logql/log/filter.go @@ -1,4 +1,4 @@ -package logql +package log import ( "bytes" @@ -9,15 +9,15 @@ import ( "github.com/prometheus/prometheus/pkg/labels" ) -// LineFilter is a interface to filter log lines. -type LineFilter interface { +// Filterer is a interface to filter log lines. 
+type Filterer interface { Filter(line []byte) bool } // LineFilterFunc is a syntax sugar for creating line filter from a function -type LineFilterFunc func(line []byte) bool +type FiltererFunc func(line []byte) bool -func (f LineFilterFunc) Filter(line []byte) bool { +func (f FiltererFunc) Filter(line []byte) bool { return f(line) } @@ -29,30 +29,30 @@ func (trueFilter) Filter(_ []byte) bool { return true } var TrueFilter = trueFilter{} type notFilter struct { - LineFilter + Filterer } func (n notFilter) Filter(line []byte) bool { - return !n.LineFilter.Filter(line) + return !n.Filterer.Filter(line) } // newNotFilter creates a new filter which matches only if the base filter doesn't match. // If the base filter is a `or` it will recursively simplify with `and` operations. -func newNotFilter(base LineFilter) LineFilter { +func newNotFilter(base Filterer) Filterer { // not(a|b) = not(a) and not(b) , and operation can't benefit from this optimization because both legs always needs to be executed. if or, ok := base.(orFilter); ok { - return newAndFilter(newNotFilter(or.left), newNotFilter(or.right)) + return NewAndFilter(newNotFilter(or.left), newNotFilter(or.right)) } - return notFilter{LineFilter: base} + return notFilter{Filterer: base} } type andFilter struct { - left LineFilter - right LineFilter + left Filterer + right Filterer } -// newAndFilter creates a new filter which matches only if left and right matches. -func newAndFilter(left LineFilter, right LineFilter) LineFilter { +// NewAndFilter creates a new filter which matches only if left and right matches. +func NewAndFilter(left Filterer, right Filterer) Filterer { // Make sure we take care of panics in case a nil or noop filter is passed. if right == nil || right == TrueFilter { return left @@ -73,12 +73,12 @@ func (a andFilter) Filter(line []byte) bool { } type orFilter struct { - left LineFilter - right LineFilter + left Filterer + right Filterer } // newOrFilter creates a new filter which matches only if left or right matches. -func newOrFilter(left LineFilter, right LineFilter) LineFilter { +func newOrFilter(left Filterer, right Filterer) Filterer { if left == nil || left == TrueFilter { return right } @@ -94,7 +94,7 @@ func newOrFilter(left LineFilter, right LineFilter) LineFilter { } // chainOrFilter is a syntax sugar to chain multiple `or` filters. (1 or many) -func chainOrFilter(curr, new LineFilter) LineFilter { +func chainOrFilter(curr, new Filterer) Filterer { if curr == nil { return new } @@ -111,7 +111,7 @@ type regexpFilter struct { // newRegexpFilter creates a new line filter for a given regexp. // If match is false the filter is the negation of the regexp. -func newRegexpFilter(re string, match bool) (LineFilter, error) { +func newRegexpFilter(re string, match bool) (Filterer, error) { reg, err := regexp.Compile(re) if err != nil { return nil, err @@ -143,7 +143,7 @@ func (l containsFilter) String() string { return string(l.match) } -func newContainsFilter(match []byte, caseInsensitive bool) LineFilter { +func newContainsFilter(match []byte, caseInsensitive bool) Filterer { if len(match) == 0 { return TrueFilter } @@ -156,8 +156,8 @@ func newContainsFilter(match []byte, caseInsensitive bool) LineFilter { } } -// newFilter creates a new line filter from a match string and type. -func newFilter(match string, mt labels.MatchType) (LineFilter, error) { +// NewFilter creates a new line filter from a match string and type. 
+func NewFilter(match string, mt labels.MatchType) (Filterer, error) { switch mt { case labels.MatchRegexp: return parseRegexpFilter(match, true) @@ -174,7 +174,7 @@ func newFilter(match string, mt labels.MatchType) (LineFilter, error) { // parseRegexpFilter parses a regexp and attempt to simplify it with only literal filters. // If not possible it will returns the original regexp filter. -func parseRegexpFilter(re string, match bool) (LineFilter, error) { +func parseRegexpFilter(re string, match bool) (Filterer, error) { reg, err := syntax.Parse(re, syntax.Perl) if err != nil { return nil, err @@ -194,7 +194,7 @@ func parseRegexpFilter(re string, match bool) (LineFilter, error) { // simplify a regexp expression by replacing it, when possible, with a succession of literal filters. // For example `(foo|bar)` will be replaced by `containsFilter(foo) or containsFilter(bar)` -func simplify(reg *syntax.Regexp) (LineFilter, bool) { +func simplify(reg *syntax.Regexp) (Filterer, bool) { switch reg.Op { case syntax.OpAlternate: return simplifyAlternate(reg) @@ -230,7 +230,7 @@ func clearCapture(regs ...*syntax.Regexp) { // simplifyAlternate simplifies, when possible, alternate regexp expressions such as: // (foo|bar) or (foo|(bar|buzz)). -func simplifyAlternate(reg *syntax.Regexp) (LineFilter, bool) { +func simplifyAlternate(reg *syntax.Regexp) (Filterer, bool) { clearCapture(reg.Sub...) // attempt to simplify the first leg f, ok := simplify(reg.Sub[0]) @@ -253,7 +253,7 @@ func simplifyAlternate(reg *syntax.Regexp) (LineFilter, bool) { // which is a literalFilter. // Or a literal and alternates operation (see simplifyConcatAlternate), which represent a multiplication of alternates. // Anything else is rejected. -func simplifyConcat(reg *syntax.Regexp, baseLiteral []byte) (LineFilter, bool) { +func simplifyConcat(reg *syntax.Regexp, baseLiteral []byte) (Filterer, bool) { clearCapture(reg.Sub...) // we support only simplication of concat operation with 3 sub expressions. // for instance .*foo.*bar contains 4 subs (.*+foo+.*+bar) and can't be simplified. @@ -261,7 +261,7 @@ func simplifyConcat(reg *syntax.Regexp, baseLiteral []byte) (LineFilter, bool) { return nil, false } - var curr LineFilter + var curr Filterer var ok bool literals := 0 for _, sub := range reg.Sub { @@ -304,7 +304,7 @@ func simplifyConcat(reg *syntax.Regexp, baseLiteral []byte) (LineFilter, bool) { // A concat alternate is found when a concat operation has a sub alternate and is preceded by a literal. // For instance bar|b|buzz is expressed as b(ar|(?:)|uzz) => b concat alternate(ar,(?:),uzz). // (?:) being an OpEmptyMatch and b being the literal to concat all alternates (ar,(?:),uzz) with. -func simplifyConcatAlternate(reg *syntax.Regexp, literal []byte, curr LineFilter) (LineFilter, bool) { +func simplifyConcatAlternate(reg *syntax.Regexp, literal []byte, curr Filterer) (Filterer, bool) { for _, alt := range reg.Sub { switch alt.Op { case syntax.OpEmptyMatch: diff --git a/pkg/logql/filter_test.go b/pkg/logql/log/filter_test.go similarity index 94% rename from pkg/logql/filter_test.go rename to pkg/logql/log/filter_test.go index bd6fd5748dbfe..59936a420f982 100644 --- a/pkg/logql/filter_test.go +++ b/pkg/logql/log/filter_test.go @@ -1,4 +1,4 @@ -package logql +package log import ( "fmt" @@ -14,7 +14,7 @@ func Test_SimplifiedRegex(t *testing.T) { for _, test := range []struct { re string simplified bool - expected LineFilter + expected Filterer match bool }{ // regex we intend to support. 
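		// NOTE (reader annotation, not part of the original test):
		// "simplified" asserts that parseRegexpFilter replaced the regexp
		// with contains/and/or combinators instead of a regexpFilter.
		// For example, per simplifyAlternate above,
		//	parseRegexpFilter(`foo|bar`, true)
		// is expected to produce the equivalent of
		//	newOrFilter(newContainsFilter([]byte("foo"), false),
		//		newContainsFilter([]byte("bar"), false))
		// so matching costs two bytes.Contains calls, and "match" is the
		// expected Filter() verdict for the case's input line.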
@@ -97,23 +97,23 @@ func Test_TrueFilter(t *testing.T) { empty := []byte("") for _, test := range []struct { name string - f LineFilter + f Filterer expectTrue bool }{ {"empty match", newContainsFilter(empty, false), true}, {"not empty match", newNotFilter(newContainsFilter(empty, true)), false}, {"match", newContainsFilter([]byte("foo"), false), false}, - {"empty match and", newAndFilter(newContainsFilter(empty, false), newContainsFilter(empty, false)), true}, + {"empty match and", NewAndFilter(newContainsFilter(empty, false), newContainsFilter(empty, false)), true}, {"empty match or", newOrFilter(newContainsFilter(empty, false), newContainsFilter(empty, false)), true}, - {"nil right and", newAndFilter(newContainsFilter(empty, false), nil), true}, + {"nil right and", NewAndFilter(newContainsFilter(empty, false), nil), true}, {"nil left or", newOrFilter(nil, newContainsFilter(empty, false)), true}, - {"nil right and not empty", newAndFilter(newContainsFilter([]byte("foo"), false), nil), false}, + {"nil right and not empty", NewAndFilter(newContainsFilter([]byte("foo"), false), nil), false}, {"nil left or not empty", newOrFilter(nil, newContainsFilter([]byte("foo"), false)), false}, - {"nil both and", newAndFilter(nil, nil), false}, // returns nil + {"nil both and", NewAndFilter(nil, nil), false}, // returns nil {"nil both or", newOrFilter(nil, nil), false}, // returns nil - {"empty match and chained", newAndFilter(newContainsFilter(empty, false), newAndFilter(newContainsFilter(empty, false), newAndFilter(newContainsFilter(empty, false), newContainsFilter(empty, false)))), true}, + {"empty match and chained", NewAndFilter(newContainsFilter(empty, false), NewAndFilter(newContainsFilter(empty, false), NewAndFilter(newContainsFilter(empty, false), newContainsFilter(empty, false)))), true}, {"empty match or chained", newOrFilter(newContainsFilter(empty, false), newOrFilter(newContainsFilter(empty, true), newOrFilter(newContainsFilter(empty, false), newContainsFilter(empty, false)))), true}, - {"empty match and", newNotFilter(newAndFilter(newContainsFilter(empty, false), newContainsFilter(empty, false))), false}, + {"empty match and", newNotFilter(NewAndFilter(newContainsFilter(empty, false), newContainsFilter(empty, false))), false}, {"empty match or", newNotFilter(newOrFilter(newContainsFilter(empty, false), newContainsFilter(empty, false))), false}, } { t.Run(test.name, func(t *testing.T) { diff --git a/pkg/logql/fmt.go b/pkg/logql/log/fmt.go similarity index 75% rename from pkg/logql/fmt.go rename to pkg/logql/log/fmt.go index 1e96aa180cc62..c3b43f34681c7 100644 --- a/pkg/logql/fmt.go +++ b/pkg/logql/log/fmt.go @@ -1,4 +1,4 @@ -package logql +package log import ( "bytes" @@ -6,8 +6,6 @@ import ( "regexp" "strings" "text/template" - - "github.com/prometheus/prometheus/pkg/labels" ) var ( @@ -37,8 +35,8 @@ type lineFormatter struct { buf *bytes.Buffer } -func newLineFormatter(tmpl string) (*lineFormatter, error) { - t, err := template.New(OpFmtLine).Option("missingkey=zero").Funcs(functionMap).Parse(tmpl) +func NewFormatter(tmpl string) (*lineFormatter, error) { + t, err := template.New("line").Option("missingkey=zero").Funcs(functionMap).Parse(tmpl) if err != nil { return nil, fmt.Errorf("invalid line template: %s", err) } @@ -48,14 +46,14 @@ func newLineFormatter(tmpl string) (*lineFormatter, error) { }, nil } -func (lf *lineFormatter) Format(_ []byte, lbs labels.Labels) ([]byte, labels.Labels) { +func (lf *lineFormatter) Process(_ []byte, lbs Labels) ([]byte, bool) { lf.buf.Reset() // 
todo(cyriltovena) handle error - _ = lf.Template.Execute(lf.buf, lbs.Map()) + _ = lf.Template.Execute(lf.buf, lbs) // todo we might want to reuse the input line. res := make([]byte, len(lf.buf.Bytes())) copy(res, lf.buf.Bytes()) - return res, lbs + return res, true } type labelFmt struct { @@ -87,11 +85,10 @@ type labelFormatter struct { type labelsFormatter struct { formats []labelFormatter - builder *labels.Builder buf *bytes.Buffer } -func newLabelsFormatter(fmts []labelFmt) (*labelsFormatter, error) { +func NewLabelsFormatter(fmts []labelFmt) (*labelsFormatter, error) { if err := validate(fmts); err != nil { return nil, err } @@ -99,7 +96,7 @@ func newLabelsFormatter(fmts []labelFmt) (*labelsFormatter, error) { for _, fm := range fmts { toAdd := labelFormatter{labelFmt: fm} if !fm.rename { - t, err := template.New(OpFmtLabel).Option("missingkey=zero").Funcs(functionMap).Parse(fm.value) + t, err := template.New("label").Option("missingkey=zero").Funcs(functionMap).Parse(fm.value) if err != nil { return nil, fmt.Errorf("invalid template for label '%s': %s", fm.name, err) } @@ -109,7 +106,6 @@ func newLabelsFormatter(fmts []labelFmt) (*labelsFormatter, error) { } return &labelsFormatter{ formats: formats, - builder: labels.NewBuilder(nil), buf: bytes.NewBuffer(make([]byte, 1024)), }, nil } @@ -119,6 +115,9 @@ func validate(fmts []labelFmt) error { // To avoid confusion we allow to have a label name only once per stage. uniqueLabelName := map[string]struct{}{} for _, f := range fmts { + if f.name == errorLabel { + return fmt.Errorf("%s cannot be formatted", f.name) + } if _, ok := uniqueLabelName[f.name]; ok { return fmt.Errorf("multiple label name '%s' not allowed in a single format operation", f.name) } @@ -127,18 +126,17 @@ func validate(fmts []labelFmt) error { return nil } -func (lf *labelsFormatter) Format(lbs labels.Labels) labels.Labels { - lf.builder.Reset(lbs) +func (lf *labelsFormatter) Process(l []byte, lbs Labels) ([]byte, bool) { for _, f := range lf.formats { if f.rename { - lf.builder.Set(f.name, lbs.Get(f.value)) - lf.builder.Del(f.value) + lbs[f.name] = lbs[f.value] + delete(lbs, f.value) continue } lf.buf.Reset() //todo (cyriltovena): handle error - _ = f.Template.Execute(lf.buf, lbs.Map()) - lf.builder.Set(f.name, lf.buf.String()) + _ = f.Template.Execute(lf.buf, lbs) + lbs[f.name] = lf.buf.String() } - return lf.builder.Labels() + return l, true } diff --git a/pkg/logql/fmt_test.go b/pkg/logql/log/fmt_test.go similarity index 77% rename from pkg/logql/fmt_test.go rename to pkg/logql/log/fmt_test.go index ec984b26addf6..cb87bf278b58e 100644 --- a/pkg/logql/fmt_test.go +++ b/pkg/logql/log/fmt_test.go @@ -1,4 +1,4 @@ -package logql +package log import ( "sort" @@ -12,46 +12,44 @@ func Test_lineFormatter_Format(t *testing.T) { tests := []struct { name string fmter *lineFormatter - lbs labels.Labels + lbs map[string]string want []byte - wantLbs labels.Labels + wantLbs map[string]string }{ { "combining", newMustLineFormatter("foo{{.foo}}buzz{{ .bar }}"), - labels.Labels{{Name: "foo", Value: "blip"}, {Name: "bar", Value: "blop"}}, + map[string]string{"foo": "blip", "bar": "blop"}, []byte("fooblipbuzzblop"), - labels.Labels{{Name: "foo", Value: "blip"}, {Name: "bar", Value: "blop"}}, + map[string]string{"foo": "blip", "bar": "blop"}, }, { "missing", newMustLineFormatter("foo {{.foo}}buzz{{ .bar }}"), - labels.Labels{{Name: "bar", Value: "blop"}}, + map[string]string{"bar": "blop"}, []byte("foo buzzblop"), - labels.Labels{{Name: "bar", Value: "blop"}}, + 
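			// NOTE (reader annotation, not part of the original test): the
			// template is parsed with Option("missingkey=zero"), and the zero
			// value of a map[string]string element is "", so the absent .foo
			// above renders as an empty string, giving "foo buzzblop" rather
			// than "foo <no value>buzzblop".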
map[string]string{"bar": "blop"}, }, { "function", newMustLineFormatter("foo {{.foo | ToUpper }} buzz{{ .bar }}"), - labels.Labels{{Name: "foo", Value: "blip"}, {Name: "bar", Value: "blop"}}, + map[string]string{"foo": "blip", "bar": "blop"}, []byte("foo BLIP buzzblop"), - labels.Labels{{Name: "foo", Value: "blip"}, {Name: "bar", Value: "blop"}}, + map[string]string{"foo": "blip", "bar": "blop"}, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - outLine, outLbs := tt.fmter.Format(nil, tt.lbs) + outLine, _ := tt.fmter.Process(nil, tt.lbs) require.Equal(t, tt.want, outLine) - sort.Sort(tt.wantLbs) - sort.Sort(outLbs) - require.Equal(t, tt.wantLbs, outLbs) + require.Equal(t, tt.wantLbs, tt.lbs) }) } } func newMustLineFormatter(tmpl string) *lineFormatter { - l, err := newLineFormatter(tmpl) + l, err := NewFormatter(tmpl) if err != nil { panic(err) } @@ -62,8 +60,9 @@ func Test_labelsFormatter_Format(t *testing.T) { tests := []struct { name string fmter *labelsFormatter - in labels.Labels - want labels.Labels + + in labels.Labels + want labels.Labels }{ { "combined with template", @@ -93,14 +92,14 @@ func Test_labelsFormatter_Format(t *testing.T) { for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { sort.Sort(tt.want) - out := tt.fmter.Format(tt.in) - require.Equal(t, tt.want, out) + _, _ = tt.fmter.Process(nil, tt.in) + require.Equal(t, tt.want, tt.in) }) } } func mustNewLabelsFormatter(fmts []labelFmt) *labelsFormatter { - lf, err := newLabelsFormatter(fmts) + lf, err := NewLabelsFormatter(fmts) if err != nil { panic(err) } @@ -115,6 +114,7 @@ func Test_validate(t *testing.T) { }{ {"no dup", []labelFmt{newRenameLabelFmt("foo", "bar"), newRenameLabelFmt("bar", "foo")}, false}, {"dup", []labelFmt{newRenameLabelFmt("foo", "bar"), newRenameLabelFmt("foo", "blip")}, true}, + {"no error", []labelFmt{newRenameLabelFmt(errorLabel, "bar")}, true}, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { diff --git a/pkg/logql/log/labels.go b/pkg/logql/log/labels.go new file mode 100644 index 0000000000000..04652c7f4e647 --- /dev/null +++ b/pkg/logql/log/labels.go @@ -0,0 +1,3 @@ +package log + +type Labels map[string]string diff --git a/pkg/logql/logfmt/decode.go b/pkg/logql/log/logfmt/decode.go similarity index 100% rename from pkg/logql/logfmt/decode.go rename to pkg/logql/log/logfmt/decode.go diff --git a/pkg/logql/logfmt/jsonstring.go b/pkg/logql/log/logfmt/jsonstring.go similarity index 100% rename from pkg/logql/logfmt/jsonstring.go rename to pkg/logql/log/logfmt/jsonstring.go diff --git a/pkg/logql/labels_parser.go b/pkg/logql/log/parser.go similarity index 96% rename from pkg/logql/labels_parser.go rename to pkg/logql/log/parser.go index d01df7ecca011..8896d38d90819 100644 --- a/pkg/logql/labels_parser.go +++ b/pkg/logql/log/parser.go @@ -1,4 +1,4 @@ -package logql +package log import ( "errors" @@ -7,7 +7,8 @@ import ( "strconv" "strings" - "github.com/grafana/loki/pkg/logql/logfmt" + "github.com/grafana/loki/pkg/logql/log/logfmt" + jsoniter "github.com/json-iterator/go" "github.com/prometheus/common/model" "github.com/prometheus/prometheus/pkg/labels" @@ -27,8 +28,8 @@ var ( dash = "-" ) -type LabelParser interface { - Parse(line []byte, lbs labels.Labels) labels.Labels +type Parser interface { + Parse(line []byte, lbs Labels) } type noopParser struct{} diff --git a/pkg/logql/labels_parser_test.go b/pkg/logql/log/parser_test.go similarity index 99% rename from pkg/logql/labels_parser_test.go rename to pkg/logql/log/parser_test.go index 
6df0f1c1f0d69..9b6eac12bd1a6 100644 --- a/pkg/logql/labels_parser_test.go +++ b/pkg/logql/log/parser_test.go @@ -1,4 +1,4 @@ -package logql +package log import ( "sort" diff --git a/pkg/logql/log/pipeline.go b/pkg/logql/log/pipeline.go new file mode 100644 index 0000000000000..cbfa734ee26ac --- /dev/null +++ b/pkg/logql/log/pipeline.go @@ -0,0 +1,53 @@ +package log + +import ( + "github.com/prometheus/prometheus/pkg/labels" +) + +type Pipeline interface { + Process(line []byte, lbs labels.Labels) ([]byte, labels.Labels, bool) +} + +type Stage interface { + Process(line []byte, lbs Labels) ([]byte, bool) +} + +var ( + NoopPipeline Pipeline = &noopPipeline{} + NoopStage Stage = &noopStage{} +) + +type noopPipeline struct{} + +func (noopPipeline) Process(line []byte, lbs labels.Labels) ([]byte, labels.Labels, bool) { + return line, lbs, true +} + +type noopStage struct{} + +func (noopStage) Process(line []byte, lbs Labels) ([]byte, bool) { + return line, true +} + +type PipelineFunc func(line []byte, lbs labels.Labels) ([]byte, labels.Labels, bool) + +func (fn PipelineFunc) Process(line []byte, lbs labels.Labels) ([]byte, labels.Labels, bool) { + return fn(line, lbs) +} + +type MultiStage []Stage + +func (m MultiStage) Process(line []byte, lbs labels.Labels) ([]byte, labels.Labels, bool) { + var ok bool + if len(m) == 0 { + return line, lbs, ok + } + labelmap := lbs.Map() + for _, p := range m { + line, ok = p.Process(line, labelmap) + if !ok { + return line, labels.FromMap(labelmap), ok + } + } + return line, labels.FromMap(labelmap), ok +} From 5272d91d3a3b854ae430e10b8633490ea498de66 Mon Sep 17 00:00:00 2001 From: Cyril Tovena Date: Tue, 13 Oct 2020 17:57:56 +0200 Subject: [PATCH 33/45] Work in progress. Signed-off-by: Cyril Tovena --- pkg/logql/ast.go | 113 ++---- pkg/logql/expr.y | 16 +- pkg/logql/expr.y.go | 332 +++++++++--------- pkg/logql/labelfilter/bytes.go | 55 --- pkg/logql/labelfilter/duration.go | 93 ----- pkg/logql/labelfilter/filter.go | 87 ----- pkg/logql/labelfilter/number.go | 54 --- pkg/logql/log/filter.go | 26 ++ pkg/logql/log/fmt.go | 69 ++-- pkg/logql/log/fmt_test.go | 43 ++- pkg/logql/log/labelfilter/bytes.go | 63 ++++ pkg/logql/log/labelfilter/duration.go | 61 ++++ pkg/logql/log/labelfilter/filter.go | 118 +++++++ .../{ => log}/labelfilter/filter_test.go | 95 ++--- pkg/logql/log/labelfilter/number.go | 60 ++++ pkg/logql/{ => log}/labelfilter/string.go | 12 +- pkg/logql/log/labels.go | 14 + pkg/logql/log/parser.go | 85 ++--- pkg/logql/log/parser_test.go | 211 ++++++----- pkg/logql/log/pipeline.go | 4 +- pkg/logql/{ => log}/series_extractor.go | 2 +- pkg/logql/{ => log}/series_extractor_test.go | 0 pkg/logql/parser_test.go | 2 +- 23 files changed, 798 insertions(+), 817 deletions(-) delete mode 100644 pkg/logql/labelfilter/bytes.go delete mode 100644 pkg/logql/labelfilter/duration.go delete mode 100644 pkg/logql/labelfilter/filter.go delete mode 100644 pkg/logql/labelfilter/number.go create mode 100644 pkg/logql/log/labelfilter/bytes.go create mode 100644 pkg/logql/log/labelfilter/duration.go create mode 100644 pkg/logql/log/labelfilter/filter.go rename pkg/logql/{ => log}/labelfilter/filter_test.go (59%) create mode 100644 pkg/logql/log/labelfilter/number.go rename pkg/logql/{ => log}/labelfilter/string.go (50%) rename pkg/logql/{ => log}/series_extractor.go (97%) rename pkg/logql/{ => log}/series_extractor_test.go (100%) diff --git a/pkg/logql/ast.go b/pkg/logql/ast.go index 69f6a95d34236..12e595ed77750 100644 --- a/pkg/logql/ast.go +++ b/pkg/logql/ast.go @@ -13,8 
+13,8 @@ import ( "github.com/grafana/loki/pkg/iter" "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/logql/labelfilter" "github.com/grafana/loki/pkg/logql/log" + "github.com/grafana/loki/pkg/logql/log/labelfilter" ) // Expr is the root expression which can be a SampleExpr or LogSelectorExpr @@ -124,22 +124,6 @@ func (m MultiStageExpr) String() string { func (MultiStageExpr) logQLExpr() {} -func FilterToPipeline(f LineFilter) Pipeline { - if f == nil || f == TrueFilter { - return NoopPipeline - } - return PipelineFunc(func(line []byte, lbs labels.Labels) ([]byte, labels.Labels, bool) { - return line, lbs, f.Filter(line) - }) -} - -func ParserToPipeline(p LabelParser) Pipeline { - return PipelineFunc(func(line []byte, lbs labels.Labels) ([]byte, labels.Labels, bool) { - lbs = p.Parse(line, lbs) - return line, lbs, true - }) -} - type matchersExpr struct { matchers []*labels.Matcher implicit @@ -235,7 +219,7 @@ func AddFilterExpr(expr LogSelectorExpr, ty labels.MatchType, match string) (Log filter := newLineFilterExpr(nil, ty, match) switch e := expr.(type) { case *matchersExpr: - return newPipelineExpr(e, MultiPipelineExpr{filter}), nil + return newPipelineExpr(e, MultiStageExpr{filter}), nil case *pipelineExpr: e.pipeline = append(e.pipeline, filter) return e, nil @@ -281,19 +265,15 @@ func (e *lineFilterExpr) Filter() (log.Filterer, error) { } } - if f == log.TrueFilter { - return nil, nil - } - return f, nil } -func (e *lineFilterExpr) Pipeline() (Pipeline, error) { +func (e *lineFilterExpr) Stage() (log.Stage, error) { f, err := e.Filter() if err != nil { return nil, err } - return FilterToPipeline(f), nil + return f.ToStage(), nil } type labelParserExpr struct { @@ -310,27 +290,19 @@ func newLabelParserExpr(op, param string) *labelParserExpr { } } -func (e *labelParserExpr) parser() (LabelParser, error) { +func (e *labelParserExpr) Stage() (log.Stage, error) { switch e.op { case OpParserTypeJSON: - return NewJSONParser(), nil + return log.NewJSONParser(), nil case OpParserTypeLogfmt: - return NewLogfmtParser(), nil + return log.NewLogfmtParser(), nil case OpParserTypeRegexp: - return NewRegexpParser(e.param) + return log.NewRegexpParser(e.param) default: return nil, fmt.Errorf("unknown parser operator: %s", e.op) } } -func (e *labelParserExpr) Pipeline() (Pipeline, error) { - p, err := e.parser() - if err != nil { - return nil, err - } - return ParserToPipeline(p), nil -} - func (e *labelParserExpr) String() string { var sb strings.Builder sb.WriteString(OpPipe) @@ -348,35 +320,8 @@ type labelFilterExpr struct { implicit } -func (e *labelFilterExpr) Pipeline() (Pipeline, error) { - f := newLabelFilter(e.Filterer) - return PipelineFunc(func(line []byte, lbs labels.Labels) ([]byte, labels.Labels, bool) { - ok, lbs := f.Filter(lbs) - return line, lbs, ok - }), nil -} - -type labelFilter struct { - labelfilter.Filterer - - builder *labels.Builder -} - -func newLabelFilter(f labelfilter.Filterer) *labelFilter { - return &labelFilter{ - Filterer: f, - builder: labels.NewBuilder(nil), - } -} - -func (l *labelFilter) Filter(lbs labels.Labels) (bool, labels.Labels) { - l.builder.Reset(lbs) - ok, err := l.Filterer.Filter(lbs) - if err != nil { - l.builder.Set(errorLabel, errFilter) - return true, l.builder.Labels() - } - return ok, nil +func (e *labelFilterExpr) Stage() (log.Stage, error) { + return e.Filterer, nil } func (e *labelFilterExpr) String() string { @@ -394,15 +339,8 @@ func newLineFmtExpr(value string) *lineFmtExpr { } } -func (e *lineFmtExpr) Pipeline() 
(Pipeline, error) { - f, err := newLineFormatter(e.value) - if err != nil { - return nil, err - } - return PipelineFunc(func(line []byte, lbs labels.Labels) ([]byte, labels.Labels, bool) { - line, lbs = f.Format(line, lbs) - return line, lbs, true - }), nil +func (e *lineFmtExpr) Stage() (log.Stage, error) { + return log.NewFormatter(e.value) } func (e *lineFmtExpr) String() string { @@ -410,40 +348,31 @@ func (e *lineFmtExpr) String() string { } type labelFmtExpr struct { - formats []labelFmt + formats []log.LabelFmt implicit } -func newLabelFmtExpr(fmts []labelFmt) *labelFmtExpr { - if err := validate(fmts); err != nil { - panic(newParseError(err.Error(), 0, 0)) - } +func newLabelFmtExpr(fmts []log.LabelFmt) *labelFmtExpr { return &labelFmtExpr{ formats: fmts, } } -func (e *labelFmtExpr) Pipeline() (Pipeline, error) { - f, err := newLabelsFormatter(e.formats) - if err != nil { - return nil, err - } - return PipelineFunc(func(line []byte, lbs labels.Labels) ([]byte, labels.Labels, bool) { - return line, f.Format(lbs), true - }), nil +func (e *labelFmtExpr) Stage() (log.Stage, error) { + return log.NewLabelsFormatter(e.formats) } func (e *labelFmtExpr) String() string { var sb strings.Builder sb.WriteString(fmt.Sprintf("%s %s ", OpPipe, OpFmtLabel)) for i, f := range e.formats { - sb.WriteString(f.name) + sb.WriteString(f.Name) sb.WriteString("=") - if f.rename { - sb.WriteString(f.value) + if f.Rename { + sb.WriteString(f.Value) } else { - sb.WriteString(strconv.Quote(f.value)) + sb.WriteString(strconv.Quote(f.Value)) } if i+1 != len(e.formats) { sb.WriteString(",") @@ -907,7 +836,7 @@ func (e *literalExpr) String() string { func (e *literalExpr) Selector() LogSelectorExpr { return e } func (e *literalExpr) HasFilter() bool { return false } func (e *literalExpr) Operations() []string { return nil } -func (e *literalExpr) Pipeline() (Pipeline, error) { return NoopPipeline, nil } +func (e *literalExpr) Pipeline() (log.Pipeline, error) { return log.NoopPipeline, nil } func (e *literalExpr) Matchers() []*labels.Matcher { return nil } func (e *literalExpr) Extractor() (SampleExtractor, error) { return nil, nil } diff --git a/pkg/logql/expr.y b/pkg/logql/expr.y index 23f386ce0e670..de2bd80e0248d 100644 --- a/pkg/logql/expr.y +++ b/pkg/logql/expr.y @@ -4,7 +4,9 @@ package logql import ( "time" "github.com/prometheus/prometheus/pkg/labels" - "github.com/grafana/loki/pkg/logql/labelfilter" + "github.com/grafana/loki/pkg/logql/log/labelfilter" + "github.com/grafana/loki/pkg/logql/log" + ) %} @@ -42,8 +44,8 @@ import ( UnitFilter labelfilter.Filterer LineFormatExpr *lineFmtExpr LabelFormatExpr *labelFmtExpr - LabelFormat labelFmt - LabelsFormat []labelFmt + LabelFormat log.LabelFmt + LabelsFormat []log.LabelFmt UnwrapExpr *unwrapExpr } @@ -217,17 +219,17 @@ labelParser: lineFormatExpr: LINE_FMT STRING { $$ = newLineFmtExpr($2) }; labelFormat: - IDENTIFIER EQ IDENTIFIER { $$ = newRenameLabelFmt($1, $3)} - | IDENTIFIER EQ STRING { $$ = newTemplateLabelFmt($1, $3)} + IDENTIFIER EQ IDENTIFIER { $$ = log.NewRenameLabelFmt($1, $3)} + | IDENTIFIER EQ STRING { $$ = log.NewTemplateLabelFmt($1, $3)} ; labelsFormat: - labelFormat { $$ = []labelFmt{ $1 } } + labelFormat { $$ = []LabelFmt{ $1 } } | labelsFormat COMMA labelFormat { $$ = append($1, $3) } | labelsFormat COMMA error ; -labelFormatExpr: LABEL_FMT labelsFormat { $$ = newLabelFmtExpr($2) }; +labelFormatExpr: LABEL_FMT labelsFormat { $$ = log.NewLabelFmtExpr($2) }; labelFilter: matcher { $$ = labelfilter.NewString($1) } diff --git a/pkg/logql/expr.y.go 
b/pkg/logql/expr.y.go index b047d4e39605c..bedbf991d2c77 100644 --- a/pkg/logql/expr.y.go +++ b/pkg/logql/expr.y.go @@ -8,12 +8,13 @@ import __yyfmt__ "fmt" //line pkg/logql/expr.y:2 import ( - "github.com/grafana/loki/pkg/logql/labelfilter" + "github.com/grafana/loki/pkg/logql/log" + "github.com/grafana/loki/pkg/logql/log/labelfilter" "github.com/prometheus/prometheus/pkg/labels" "time" ) -//line pkg/logql/expr.y:11 +//line pkg/logql/expr.y:13 type exprSymType struct { yys int Expr Expr @@ -40,8 +41,8 @@ type exprSymType struct { BinOpModifier BinOpOptions LabelParser *labelParserExpr LineFilters *lineFilterExpr - PipelineExpr MultiPipelineExpr - PipelineStage PipelineExpr + PipelineExpr MultiStageExpr + PipelineStage StageExpr BytesFilter labelfilter.Filterer NumberFilter labelfilter.Filterer DurationFilter labelfilter.Filterer @@ -49,8 +50,8 @@ type exprSymType struct { UnitFilter labelfilter.Filterer LineFormatExpr *lineFmtExpr LabelFormatExpr *labelFmtExpr - LabelFormat labelFmt - LabelsFormat []labelFmt + LabelFormat log.LabelFmt + LabelsFormat []log.LabelFmt UnwrapExpr *unwrapExpr } @@ -196,14 +197,13 @@ var exprToknames = [...]string{ "MOD", "POW", } - var exprStatenames = [...]string{} const exprEofCode = 1 const exprErrCode = 2 const exprInitialStackSize = 16 -//line pkg/logql/expr.y:344 +//line pkg/logql/expr.y:346 //line yacctab:1 var exprExca = [...]int{ @@ -217,6 +217,7 @@ const exprPrivate = 57344 const exprLast = 396 var exprAct = [...]int{ + 70, 171, 53, 153, 145, 4, 179, 100, 63, 2, 52, 45, 61, 56, 5, 217, 120, 214, 235, 66, 14, 40, 41, 42, 43, 44, 45, 249, 11, 42, @@ -258,8 +259,8 @@ var exprAct = [...]int{ 108, 93, 10, 9, 13, 8, 97, 99, 98, 237, 104, 105, 12, 7, 64, 1, } - var exprPact = [...]int{ + 13, -1000, 97, -1000, -1000, 271, 13, -1000, -1000, -1000, -1000, 370, 124, 77, -1000, 232, 224, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, @@ -287,15 +288,15 @@ var exprPact = [...]int{ -17, 204, 198, 9, 81, -1000, -1000, 324, -27, -32, -1000, -1000, 196, -1000, 14, -1000, -1000, } - var exprPgo = [...]int{ + 0, 395, 8, 13, 0, 6, 324, 5, 16, 7, 394, 393, 392, 389, 14, 385, 384, 383, 382, 294, 381, 10, 2, 380, 379, 378, 4, 377, 367, 359, 3, 352, 1, 318, } - var exprR1 = [...]int{ + 0, 1, 2, 2, 7, 7, 7, 7, 7, 6, 6, 6, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 32, 32, 32, 13, @@ -312,8 +313,8 @@ var exprR1 = [...]int{ 16, 16, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 5, 5, 4, 4, } - var exprR2 = [...]int{ + 0, 1, 1, 1, 1, 1, 1, 1, 3, 1, 2, 3, 2, 4, 3, 5, 3, 5, 3, 5, 4, 6, 3, 4, 3, 2, 3, 6, 3, 1, @@ -330,8 +331,8 @@ var exprR2 = [...]int{ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 3, 4, 4, } - var exprChk = [...]int{ + -1000, -1, -2, -6, -7, -14, 23, -11, -15, -17, -18, 15, -12, -16, 7, 66, 67, 27, 28, 38, 39, 48, 49, 50, 51, 52, 53, 54, 29, 30, @@ -359,8 +360,8 @@ var exprChk = [...]int{ 9, 24, 24, -32, 24, 5, -4, 23, -32, 44, 9, 9, 24, -4, 5, 9, 24, } - var exprDef = [...]int{ + 0, -2, 1, 2, 3, 9, 0, 4, 5, 6, 7, 0, 0, 0, 120, 0, 0, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 123, 124, @@ -388,12 +389,12 @@ var exprDef = [...]int{ 13, 0, 0, 0, 38, 144, 34, 0, 15, 0, 17, 19, 0, 39, 0, 21, 27, } - var exprTok1 = [...]int{ + 1, } - var exprTok2 = [...]int{ + 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, @@ -402,7 +403,6 @@ var exprTok2 = [...]int{ 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, } - var exprTok3 = [...]int{ 
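	// NOTE (reader annotation, not part of the generated file): exprAct,
	// exprPact and the exprTok* tables are emitted by goyacc from
	// pkg/logql/expr.y and should not be edited by hand. The uniform +2
	// shift in every //line directive further down comes from the two lines
	// added to the .y import block in the previous hunk; the +/- churn
	// around these tables is regeneration noise.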
0, } @@ -746,864 +746,864 @@ exprdefault: case 1: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:103 +//line pkg/logql/expr.y:105 { exprlex.(*lexer).expr = exprDollar[1].Expr } case 2: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:106 +//line pkg/logql/expr.y:108 { exprVAL.Expr = exprDollar[1].LogExpr } case 3: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:107 +//line pkg/logql/expr.y:109 { exprVAL.Expr = exprDollar[1].MetricExpr } case 4: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:111 +//line pkg/logql/expr.y:113 { exprVAL.MetricExpr = exprDollar[1].RangeAggregationExpr } case 5: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:112 +//line pkg/logql/expr.y:114 { exprVAL.MetricExpr = exprDollar[1].VectorAggregationExpr } case 6: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:113 +//line pkg/logql/expr.y:115 { exprVAL.MetricExpr = exprDollar[1].BinOpExpr } case 7: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:114 +//line pkg/logql/expr.y:116 { exprVAL.MetricExpr = exprDollar[1].LiteralExpr } case 8: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:115 +//line pkg/logql/expr.y:117 { exprVAL.MetricExpr = exprDollar[2].MetricExpr } case 9: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:119 +//line pkg/logql/expr.y:121 { exprVAL.LogExpr = newMatcherExpr(exprDollar[1].Selector) } case 10: exprDollar = exprS[exprpt-2 : exprpt+1] -//line pkg/logql/expr.y:120 +//line pkg/logql/expr.y:122 { exprVAL.LogExpr = newPipelineExpr(newMatcherExpr(exprDollar[1].Selector), exprDollar[2].PipelineExpr) } case 11: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:121 +//line pkg/logql/expr.y:123 { exprVAL.LogExpr = exprDollar[2].LogExpr } case 12: exprDollar = exprS[exprpt-2 : exprpt+1] -//line pkg/logql/expr.y:125 +//line pkg/logql/expr.y:127 { exprVAL.LogRangeExpr = newLogRange(newMatcherExpr(exprDollar[1].Selector), exprDollar[2].duration, nil) } case 13: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:126 +//line pkg/logql/expr.y:128 { exprVAL.LogRangeExpr = newLogRange(newMatcherExpr(exprDollar[2].Selector), exprDollar[4].duration, nil) } case 14: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:127 +//line pkg/logql/expr.y:129 { exprVAL.LogRangeExpr = newLogRange(newMatcherExpr(exprDollar[1].Selector), exprDollar[2].duration, exprDollar[3].UnwrapExpr) } case 15: exprDollar = exprS[exprpt-5 : exprpt+1] -//line pkg/logql/expr.y:128 +//line pkg/logql/expr.y:130 { exprVAL.LogRangeExpr = newLogRange(newMatcherExpr(exprDollar[2].Selector), exprDollar[4].duration, exprDollar[5].UnwrapExpr) } case 16: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:129 +//line pkg/logql/expr.y:131 { exprVAL.LogRangeExpr = newLogRange(newMatcherExpr(exprDollar[1].Selector), exprDollar[3].duration, exprDollar[2].UnwrapExpr) } case 17: exprDollar = exprS[exprpt-5 : exprpt+1] -//line pkg/logql/expr.y:130 +//line pkg/logql/expr.y:132 { exprVAL.LogRangeExpr = newLogRange(newMatcherExpr(exprDollar[2].Selector), exprDollar[5].duration, exprDollar[3].UnwrapExpr) } case 18: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:131 +//line pkg/logql/expr.y:133 { exprVAL.LogRangeExpr = newLogRange(newPipelineExpr(newMatcherExpr(exprDollar[1].Selector), exprDollar[2].PipelineExpr), exprDollar[3].duration, nil) } case 19: exprDollar = exprS[exprpt-5 : exprpt+1] -//line pkg/logql/expr.y:132 +//line 
pkg/logql/expr.y:134 { exprVAL.LogRangeExpr = newLogRange(newPipelineExpr(newMatcherExpr(exprDollar[2].Selector), exprDollar[3].PipelineExpr), exprDollar[5].duration, nil) } case 20: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:133 +//line pkg/logql/expr.y:135 { exprVAL.LogRangeExpr = newLogRange(newPipelineExpr(newMatcherExpr(exprDollar[1].Selector), exprDollar[2].PipelineExpr), exprDollar[4].duration, exprDollar[3].UnwrapExpr) } case 21: exprDollar = exprS[exprpt-6 : exprpt+1] -//line pkg/logql/expr.y:134 +//line pkg/logql/expr.y:136 { exprVAL.LogRangeExpr = newLogRange(newPipelineExpr(newMatcherExpr(exprDollar[2].Selector), exprDollar[3].PipelineExpr), exprDollar[6].duration, exprDollar[4].UnwrapExpr) } case 22: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:135 +//line pkg/logql/expr.y:137 { exprVAL.LogRangeExpr = newLogRange(newPipelineExpr(newMatcherExpr(exprDollar[1].Selector), exprDollar[3].PipelineExpr), exprDollar[2].duration, nil) } case 23: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:136 +//line pkg/logql/expr.y:138 { exprVAL.LogRangeExpr = newLogRange(newPipelineExpr(newMatcherExpr(exprDollar[1].Selector), exprDollar[3].PipelineExpr), exprDollar[2].duration, exprDollar[4].UnwrapExpr) } case 24: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:137 +//line pkg/logql/expr.y:139 { exprVAL.LogRangeExpr = exprDollar[2].LogRangeExpr } case 26: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:142 +//line pkg/logql/expr.y:144 { exprVAL.UnwrapExpr = newUnwrapExpr(exprDollar[3].str, "") } case 27: exprDollar = exprS[exprpt-6 : exprpt+1] -//line pkg/logql/expr.y:143 +//line pkg/logql/expr.y:145 { exprVAL.UnwrapExpr = newUnwrapExpr(exprDollar[5].str, exprDollar[3].ConvOp) } case 28: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:144 +//line pkg/logql/expr.y:146 { exprVAL.UnwrapExpr = exprDollar[1].UnwrapExpr.addPostFilter(exprDollar[3].LabelFilter) } case 29: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:148 +//line pkg/logql/expr.y:150 { exprVAL.ConvOp = OpConvDuration } case 30: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:149 +//line pkg/logql/expr.y:151 { exprVAL.ConvOp = OpConvDurationSeconds } case 31: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:153 +//line pkg/logql/expr.y:155 { exprVAL.RangeAggregationExpr = newRangeAggregationExpr(exprDollar[3].LogRangeExpr, exprDollar[1].RangeOp, nil, nil) } case 32: exprDollar = exprS[exprpt-6 : exprpt+1] -//line pkg/logql/expr.y:154 +//line pkg/logql/expr.y:156 { exprVAL.RangeAggregationExpr = newRangeAggregationExpr(exprDollar[5].LogRangeExpr, exprDollar[1].RangeOp, nil, &exprDollar[3].str) } case 33: exprDollar = exprS[exprpt-5 : exprpt+1] -//line pkg/logql/expr.y:155 +//line pkg/logql/expr.y:157 { exprVAL.RangeAggregationExpr = newRangeAggregationExpr(exprDollar[3].LogRangeExpr, exprDollar[1].RangeOp, exprDollar[5].Grouping, nil) } case 34: exprDollar = exprS[exprpt-7 : exprpt+1] -//line pkg/logql/expr.y:156 +//line pkg/logql/expr.y:158 { exprVAL.RangeAggregationExpr = newRangeAggregationExpr(exprDollar[5].LogRangeExpr, exprDollar[1].RangeOp, exprDollar[7].Grouping, &exprDollar[3].str) } case 35: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:161 +//line pkg/logql/expr.y:163 { exprVAL.VectorAggregationExpr = mustNewVectorAggregationExpr(exprDollar[3].MetricExpr, exprDollar[1].VectorOp, nil, nil) } case 36: exprDollar = exprS[exprpt-5 : exprpt+1] -//line 
pkg/logql/expr.y:162 +//line pkg/logql/expr.y:164 { exprVAL.VectorAggregationExpr = mustNewVectorAggregationExpr(exprDollar[4].MetricExpr, exprDollar[1].VectorOp, exprDollar[2].Grouping, nil) } case 37: exprDollar = exprS[exprpt-5 : exprpt+1] -//line pkg/logql/expr.y:163 +//line pkg/logql/expr.y:165 { exprVAL.VectorAggregationExpr = mustNewVectorAggregationExpr(exprDollar[3].MetricExpr, exprDollar[1].VectorOp, exprDollar[5].Grouping, nil) } case 38: exprDollar = exprS[exprpt-6 : exprpt+1] -//line pkg/logql/expr.y:165 +//line pkg/logql/expr.y:167 { exprVAL.VectorAggregationExpr = mustNewVectorAggregationExpr(exprDollar[5].MetricExpr, exprDollar[1].VectorOp, nil, &exprDollar[3].str) } case 39: exprDollar = exprS[exprpt-7 : exprpt+1] -//line pkg/logql/expr.y:166 +//line pkg/logql/expr.y:168 { exprVAL.VectorAggregationExpr = mustNewVectorAggregationExpr(exprDollar[5].MetricExpr, exprDollar[1].VectorOp, exprDollar[7].Grouping, &exprDollar[3].str) } case 40: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:170 +//line pkg/logql/expr.y:172 { exprVAL.Filter = labels.MatchRegexp } case 41: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:171 +//line pkg/logql/expr.y:173 { exprVAL.Filter = labels.MatchEqual } case 42: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:172 +//line pkg/logql/expr.y:174 { exprVAL.Filter = labels.MatchNotRegexp } case 43: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:173 +//line pkg/logql/expr.y:175 { exprVAL.Filter = labels.MatchNotEqual } case 44: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:177 +//line pkg/logql/expr.y:179 { exprVAL.Selector = exprDollar[2].Matchers } case 45: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:178 +//line pkg/logql/expr.y:180 { exprVAL.Selector = exprDollar[2].Matchers } case 46: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:179 +//line pkg/logql/expr.y:181 { } case 47: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:183 +//line pkg/logql/expr.y:185 { exprVAL.Matchers = []*labels.Matcher{exprDollar[1].Matcher} } case 48: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:184 +//line pkg/logql/expr.y:186 { exprVAL.Matchers = append(exprDollar[1].Matchers, exprDollar[3].Matcher) } case 49: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:188 +//line pkg/logql/expr.y:190 { exprVAL.Matcher = mustNewMatcher(labels.MatchEqual, exprDollar[1].str, exprDollar[3].str) } case 50: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:189 +//line pkg/logql/expr.y:191 { exprVAL.Matcher = mustNewMatcher(labels.MatchNotEqual, exprDollar[1].str, exprDollar[3].str) } case 51: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:190 +//line pkg/logql/expr.y:192 { exprVAL.Matcher = mustNewMatcher(labels.MatchRegexp, exprDollar[1].str, exprDollar[3].str) } case 52: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:191 +//line pkg/logql/expr.y:193 { exprVAL.Matcher = mustNewMatcher(labels.MatchNotRegexp, exprDollar[1].str, exprDollar[3].str) } case 53: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:195 +//line pkg/logql/expr.y:197 { - exprVAL.PipelineExpr = MultiPipelineExpr{exprDollar[1].PipelineStage} + exprVAL.PipelineExpr = MultiStageExpr{exprDollar[1].PipelineStage} } case 54: exprDollar = exprS[exprpt-2 : exprpt+1] -//line pkg/logql/expr.y:196 +//line pkg/logql/expr.y:198 { exprVAL.PipelineExpr = append(exprDollar[1].PipelineExpr, 
exprDollar[2].PipelineStage) } case 55: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:200 +//line pkg/logql/expr.y:202 { exprVAL.PipelineStage = exprDollar[1].LineFilters } case 56: exprDollar = exprS[exprpt-2 : exprpt+1] -//line pkg/logql/expr.y:201 +//line pkg/logql/expr.y:203 { exprVAL.PipelineStage = exprDollar[2].LabelParser } case 57: exprDollar = exprS[exprpt-2 : exprpt+1] -//line pkg/logql/expr.y:202 +//line pkg/logql/expr.y:204 { exprVAL.PipelineStage = &labelFilterExpr{Filterer: exprDollar[2].LabelFilter} } case 58: exprDollar = exprS[exprpt-2 : exprpt+1] -//line pkg/logql/expr.y:203 +//line pkg/logql/expr.y:205 { exprVAL.PipelineStage = exprDollar[2].LineFormatExpr } case 59: exprDollar = exprS[exprpt-2 : exprpt+1] -//line pkg/logql/expr.y:204 +//line pkg/logql/expr.y:206 { exprVAL.PipelineStage = exprDollar[2].LabelFormatExpr } case 60: exprDollar = exprS[exprpt-2 : exprpt+1] -//line pkg/logql/expr.y:208 +//line pkg/logql/expr.y:210 { exprVAL.LineFilters = newLineFilterExpr(nil, exprDollar[1].Filter, exprDollar[2].str) } case 61: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:209 +//line pkg/logql/expr.y:211 { exprVAL.LineFilters = newLineFilterExpr(exprDollar[1].LineFilters, exprDollar[2].Filter, exprDollar[3].str) } case 62: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:212 +//line pkg/logql/expr.y:214 { exprVAL.LabelParser = newLabelParserExpr(OpParserTypeJSON, "") } case 63: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:213 +//line pkg/logql/expr.y:215 { exprVAL.LabelParser = newLabelParserExpr(OpParserTypeLogfmt, "") } case 64: exprDollar = exprS[exprpt-2 : exprpt+1] -//line pkg/logql/expr.y:214 +//line pkg/logql/expr.y:216 { exprVAL.LabelParser = newLabelParserExpr(OpParserTypeRegexp, exprDollar[2].str) } case 65: exprDollar = exprS[exprpt-2 : exprpt+1] -//line pkg/logql/expr.y:217 +//line pkg/logql/expr.y:219 { exprVAL.LineFormatExpr = newLineFmtExpr(exprDollar[2].str) } case 66: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:220 +//line pkg/logql/expr.y:222 { - exprVAL.LabelFormat = newRenameLabelFmt(exprDollar[1].str, exprDollar[3].str) + exprVAL.LabelFormat = log.NewRenameLabelFmt(exprDollar[1].str, exprDollar[3].str) } case 67: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:221 +//line pkg/logql/expr.y:223 { - exprVAL.LabelFormat = newTemplateLabelFmt(exprDollar[1].str, exprDollar[3].str) + exprVAL.LabelFormat = log.NewTemplateLabelFmt(exprDollar[1].str, exprDollar[3].str) } case 68: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:225 +//line pkg/logql/expr.y:227 { - exprVAL.LabelsFormat = []labelFmt{exprDollar[1].LabelFormat} + exprVAL.LabelsFormat = []LabelFmt{exprDollar[1].LabelFormat} } case 69: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:226 +//line pkg/logql/expr.y:228 { exprVAL.LabelsFormat = append(exprDollar[1].LabelsFormat, exprDollar[3].LabelFormat) } case 71: exprDollar = exprS[exprpt-2 : exprpt+1] -//line pkg/logql/expr.y:230 +//line pkg/logql/expr.y:232 { - exprVAL.LabelFormatExpr = newLabelFmtExpr(exprDollar[2].LabelsFormat) + exprVAL.LabelFormatExpr = log.NewLabelFmtExpr(exprDollar[2].LabelsFormat) } case 72: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:233 +//line pkg/logql/expr.y:235 { exprVAL.LabelFilter = labelfilter.NewString(exprDollar[1].Matcher) } case 73: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:234 +//line pkg/logql/expr.y:236 { exprVAL.LabelFilter = 
exprDollar[1].UnitFilter } case 74: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:235 +//line pkg/logql/expr.y:237 { exprVAL.LabelFilter = exprDollar[1].NumberFilter } case 75: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:236 +//line pkg/logql/expr.y:238 { exprVAL.LabelFilter = exprDollar[2].LabelFilter } case 76: exprDollar = exprS[exprpt-2 : exprpt+1] -//line pkg/logql/expr.y:237 +//line pkg/logql/expr.y:239 { exprVAL.LabelFilter = labelfilter.NewAnd(exprDollar[1].LabelFilter, exprDollar[2].LabelFilter) } case 77: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:238 +//line pkg/logql/expr.y:240 { exprVAL.LabelFilter = labelfilter.NewAnd(exprDollar[1].LabelFilter, exprDollar[3].LabelFilter) } case 78: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:239 +//line pkg/logql/expr.y:241 { exprVAL.LabelFilter = labelfilter.NewAnd(exprDollar[1].LabelFilter, exprDollar[3].LabelFilter) } case 79: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:240 +//line pkg/logql/expr.y:242 { exprVAL.LabelFilter = labelfilter.NewOr(exprDollar[1].LabelFilter, exprDollar[3].LabelFilter) } case 80: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:244 +//line pkg/logql/expr.y:246 { exprVAL.UnitFilter = exprDollar[1].DurationFilter } case 81: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:245 +//line pkg/logql/expr.y:247 { exprVAL.UnitFilter = exprDollar[1].BytesFilter } case 82: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:248 +//line pkg/logql/expr.y:250 { exprVAL.DurationFilter = labelfilter.NewDuration(labelfilter.FilterGreaterThan, exprDollar[1].str, exprDollar[3].duration) } case 83: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:249 +//line pkg/logql/expr.y:251 { exprVAL.DurationFilter = labelfilter.NewDuration(labelfilter.FilterGreaterThanOrEqual, exprDollar[1].str, exprDollar[3].duration) } case 84: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:250 +//line pkg/logql/expr.y:252 { exprVAL.DurationFilter = labelfilter.NewDuration(labelfilter.FilterLesserThan, exprDollar[1].str, exprDollar[3].duration) } case 85: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:251 +//line pkg/logql/expr.y:253 { exprVAL.DurationFilter = labelfilter.NewDuration(labelfilter.FilterLesserThanOrEqual, exprDollar[1].str, exprDollar[3].duration) } case 86: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:252 +//line pkg/logql/expr.y:254 { exprVAL.DurationFilter = labelfilter.NewDuration(labelfilter.FilterNotEqual, exprDollar[1].str, exprDollar[3].duration) } case 87: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:253 +//line pkg/logql/expr.y:255 { exprVAL.DurationFilter = labelfilter.NewDuration(labelfilter.FilterEqual, exprDollar[1].str, exprDollar[3].duration) } case 88: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:254 +//line pkg/logql/expr.y:256 { exprVAL.DurationFilter = labelfilter.NewDuration(labelfilter.FilterEqual, exprDollar[1].str, exprDollar[3].duration) } case 89: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:258 +//line pkg/logql/expr.y:260 { exprVAL.BytesFilter = labelfilter.NewBytes(labelfilter.FilterGreaterThan, exprDollar[1].str, exprDollar[3].bytes) } case 90: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:259 +//line pkg/logql/expr.y:261 { exprVAL.BytesFilter = labelfilter.NewBytes(labelfilter.FilterGreaterThanOrEqual, 
exprDollar[1].str, exprDollar[3].bytes) } case 91: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:260 +//line pkg/logql/expr.y:262 { exprVAL.BytesFilter = labelfilter.NewBytes(labelfilter.FilterLesserThan, exprDollar[1].str, exprDollar[3].bytes) } case 92: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:261 +//line pkg/logql/expr.y:263 { exprVAL.BytesFilter = labelfilter.NewBytes(labelfilter.FilterLesserThanOrEqual, exprDollar[1].str, exprDollar[3].bytes) } case 93: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:262 +//line pkg/logql/expr.y:264 { exprVAL.BytesFilter = labelfilter.NewBytes(labelfilter.FilterNotEqual, exprDollar[1].str, exprDollar[3].bytes) } case 94: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:263 +//line pkg/logql/expr.y:265 { exprVAL.BytesFilter = labelfilter.NewBytes(labelfilter.FilterEqual, exprDollar[1].str, exprDollar[3].bytes) } case 95: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:264 +//line pkg/logql/expr.y:266 { exprVAL.BytesFilter = labelfilter.NewBytes(labelfilter.FilterEqual, exprDollar[1].str, exprDollar[3].bytes) } case 96: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:268 +//line pkg/logql/expr.y:270 { exprVAL.NumberFilter = labelfilter.NewNumeric(labelfilter.FilterGreaterThan, exprDollar[1].str, mustNewFloat(exprDollar[3].str)) } case 97: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:269 +//line pkg/logql/expr.y:271 { exprVAL.NumberFilter = labelfilter.NewNumeric(labelfilter.FilterGreaterThanOrEqual, exprDollar[1].str, mustNewFloat(exprDollar[3].str)) } case 98: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:270 +//line pkg/logql/expr.y:272 { exprVAL.NumberFilter = labelfilter.NewNumeric(labelfilter.FilterLesserThan, exprDollar[1].str, mustNewFloat(exprDollar[3].str)) } case 99: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:271 +//line pkg/logql/expr.y:273 { exprVAL.NumberFilter = labelfilter.NewNumeric(labelfilter.FilterLesserThanOrEqual, exprDollar[1].str, mustNewFloat(exprDollar[3].str)) } case 100: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:272 +//line pkg/logql/expr.y:274 { exprVAL.NumberFilter = labelfilter.NewNumeric(labelfilter.FilterNotEqual, exprDollar[1].str, mustNewFloat(exprDollar[3].str)) } case 101: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:273 +//line pkg/logql/expr.y:275 { exprVAL.NumberFilter = labelfilter.NewNumeric(labelfilter.FilterEqual, exprDollar[1].str, mustNewFloat(exprDollar[3].str)) } case 102: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:274 +//line pkg/logql/expr.y:276 { exprVAL.NumberFilter = labelfilter.NewNumeric(labelfilter.FilterEqual, exprDollar[1].str, mustNewFloat(exprDollar[3].str)) } case 103: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:280 +//line pkg/logql/expr.y:282 { exprVAL.BinOpExpr = mustNewBinOpExpr("or", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } case 104: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:281 +//line pkg/logql/expr.y:283 { exprVAL.BinOpExpr = mustNewBinOpExpr("and", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } case 105: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:282 +//line pkg/logql/expr.y:284 { exprVAL.BinOpExpr = mustNewBinOpExpr("unless", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } case 106: exprDollar = exprS[exprpt-4 : 
exprpt+1] -//line pkg/logql/expr.y:283 +//line pkg/logql/expr.y:285 { exprVAL.BinOpExpr = mustNewBinOpExpr("+", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } case 107: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:284 +//line pkg/logql/expr.y:286 { exprVAL.BinOpExpr = mustNewBinOpExpr("-", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } case 108: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:285 +//line pkg/logql/expr.y:287 { exprVAL.BinOpExpr = mustNewBinOpExpr("*", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } case 109: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:286 +//line pkg/logql/expr.y:288 { exprVAL.BinOpExpr = mustNewBinOpExpr("/", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } case 110: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:287 +//line pkg/logql/expr.y:289 { exprVAL.BinOpExpr = mustNewBinOpExpr("%", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } case 111: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:288 +//line pkg/logql/expr.y:290 { exprVAL.BinOpExpr = mustNewBinOpExpr("^", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } case 112: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:289 +//line pkg/logql/expr.y:291 { exprVAL.BinOpExpr = mustNewBinOpExpr("==", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } case 113: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:290 +//line pkg/logql/expr.y:292 { exprVAL.BinOpExpr = mustNewBinOpExpr("!=", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } case 114: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:291 +//line pkg/logql/expr.y:293 { exprVAL.BinOpExpr = mustNewBinOpExpr(">", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } case 115: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:292 +//line pkg/logql/expr.y:294 { exprVAL.BinOpExpr = mustNewBinOpExpr(">=", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } case 116: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:293 +//line pkg/logql/expr.y:295 { exprVAL.BinOpExpr = mustNewBinOpExpr("<", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } case 117: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:294 +//line pkg/logql/expr.y:296 { exprVAL.BinOpExpr = mustNewBinOpExpr("<=", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } case 118: exprDollar = exprS[exprpt-0 : exprpt+1] -//line pkg/logql/expr.y:298 +//line pkg/logql/expr.y:300 { exprVAL.BinOpModifier = BinOpOptions{} } case 119: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:299 +//line pkg/logql/expr.y:301 { exprVAL.BinOpModifier = BinOpOptions{ReturnBool: true} } case 120: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:303 +//line pkg/logql/expr.y:305 { exprVAL.LiteralExpr = mustNewLiteralExpr(exprDollar[1].str, false) } case 121: exprDollar = exprS[exprpt-2 : exprpt+1] -//line pkg/logql/expr.y:304 +//line pkg/logql/expr.y:306 { exprVAL.LiteralExpr = mustNewLiteralExpr(exprDollar[2].str, false) } case 122: exprDollar = exprS[exprpt-2 : exprpt+1] -//line pkg/logql/expr.y:305 +//line pkg/logql/expr.y:307 { exprVAL.LiteralExpr = mustNewLiteralExpr(exprDollar[2].str, true) } case 123: exprDollar = exprS[exprpt-1 : exprpt+1] -//line 
pkg/logql/expr.y:309 +//line pkg/logql/expr.y:311 { exprVAL.VectorOp = OpTypeSum } case 124: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:310 +//line pkg/logql/expr.y:312 { exprVAL.VectorOp = OpTypeAvg } case 125: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:311 +//line pkg/logql/expr.y:313 { exprVAL.VectorOp = OpTypeCount } case 126: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:312 +//line pkg/logql/expr.y:314 { exprVAL.VectorOp = OpTypeMax } case 127: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:313 +//line pkg/logql/expr.y:315 { exprVAL.VectorOp = OpTypeMin } case 128: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:314 +//line pkg/logql/expr.y:316 { exprVAL.VectorOp = OpTypeStddev } case 129: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:315 +//line pkg/logql/expr.y:317 { exprVAL.VectorOp = OpTypeStdvar } case 130: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:316 +//line pkg/logql/expr.y:318 { exprVAL.VectorOp = OpTypeBottomK } case 131: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:317 +//line pkg/logql/expr.y:319 { exprVAL.VectorOp = OpTypeTopK } case 132: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:321 +//line pkg/logql/expr.y:323 { exprVAL.RangeOp = OpRangeTypeCount } case 133: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:322 +//line pkg/logql/expr.y:324 { exprVAL.RangeOp = OpRangeTypeRate } case 134: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:323 +//line pkg/logql/expr.y:325 { exprVAL.RangeOp = OpRangeTypeBytes } case 135: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:324 +//line pkg/logql/expr.y:326 { exprVAL.RangeOp = OpRangeTypeBytesRate } case 136: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:325 +//line pkg/logql/expr.y:327 { exprVAL.RangeOp = OpRangeTypeAvg } case 137: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:326 +//line pkg/logql/expr.y:328 { exprVAL.RangeOp = OpRangeTypeSum } case 138: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:327 +//line pkg/logql/expr.y:329 { exprVAL.RangeOp = OpRangeTypeMin } case 139: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:328 +//line pkg/logql/expr.y:330 { exprVAL.RangeOp = OpRangeTypeMax } case 140: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:329 +//line pkg/logql/expr.y:331 { exprVAL.RangeOp = OpRangeTypeStdvar } case 141: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:330 +//line pkg/logql/expr.y:332 { exprVAL.RangeOp = OpRangeTypeStddev } case 142: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:331 +//line pkg/logql/expr.y:333 { exprVAL.RangeOp = OpRangeTypeQuantile } case 143: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:336 +//line pkg/logql/expr.y:338 { exprVAL.Labels = []string{exprDollar[1].str} } case 144: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:337 +//line pkg/logql/expr.y:339 { exprVAL.Labels = append(exprDollar[1].Labels, exprDollar[3].str) } case 145: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:341 +//line pkg/logql/expr.y:343 { exprVAL.Grouping = &grouping{without: false, groups: exprDollar[3].Labels} } case 146: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:342 +//line pkg/logql/expr.y:344 { exprVAL.Grouping = &grouping{without: true, groups: exprDollar[3].Labels} } diff 
--git a/pkg/logql/labelfilter/bytes.go b/pkg/logql/labelfilter/bytes.go deleted file mode 100644 index b6faaa59b6a9f..0000000000000 --- a/pkg/logql/labelfilter/bytes.go +++ /dev/null @@ -1,55 +0,0 @@ -package labelfilter - -import ( - "fmt" - - "github.com/dustin/go-humanize" - "github.com/prometheus/prometheus/pkg/labels" -) - -type Bytes struct { - Name string - Value uint64 - Type FilterType -} - -func NewBytes(t FilterType, name string, b uint64) *Bytes { - return &Bytes{ - Name: name, - Type: t, - Value: b, - } -} - -func (d *Bytes) Filter(lbs labels.Labels) (bool, error) { - for _, l := range lbs { - if l.Name == d.Name { - value, err := humanize.ParseBytes(l.Value) - if err != nil { - return false, errConversion - } - switch d.Type { - case FilterEqual: - return value == d.Value, nil - case FilterNotEqual: - return value != d.Value, nil - case FilterGreaterThan: - return value > d.Value, nil - case FilterGreaterThanOrEqual: - return value >= d.Value, nil - case FilterLesserThan: - return value < d.Value, nil - case FilterLesserThanOrEqual: - return value <= d.Value, nil - default: - return false, errUnsupportedType - } - } - } - // we have not found this label. - return false, nil -} - -func (d *Bytes) String() string { - return fmt.Sprintf("%s%s%d", d.Name, d.Type, d.Value) -} diff --git a/pkg/logql/labelfilter/duration.go b/pkg/logql/labelfilter/duration.go deleted file mode 100644 index 122945932896a..0000000000000 --- a/pkg/logql/labelfilter/duration.go +++ /dev/null @@ -1,93 +0,0 @@ -package labelfilter - -import ( - "errors" - "fmt" - "time" - - "github.com/prometheus/prometheus/pkg/labels" -) - -// FilterType is an enum for label filtering types. -type FilterType int - -func (f FilterType) String() string { - switch f { - case FilterEqual: - return "==" - case FilterNotEqual: - return "!=" - case FilterGreaterThan: - return ">" - case FilterGreaterThanOrEqual: - return ">=" - case FilterLesserThan: - return "<" - case FilterLesserThanOrEqual: - return "<=" - default: - return "" - } -} - -// Possible FilterTypes. -const ( - FilterEqual FilterType = iota - FilterNotEqual - FilterGreaterThan - FilterGreaterThanOrEqual - FilterLesserThan - FilterLesserThanOrEqual -) - -var ( - errConversion = errors.New("converting label value failed") - errUnsupportedType = errors.New("unsupported filter type") -) - -type Duration struct { - Name string - Value time.Duration - Type FilterType -} - -func NewDuration(t FilterType, name string, d time.Duration) *Duration { - return &Duration{ - Name: name, - Type: t, - Value: d, - } -} - -func (d *Duration) Filter(lbs labels.Labels) (bool, error) { - for _, l := range lbs { - if l.Name == d.Name { - value, err := time.ParseDuration(l.Value) - if err != nil { - return false, errConversion - } - switch d.Type { - case FilterEqual: - return value == d.Value, nil - case FilterNotEqual: - return value != d.Value, nil - case FilterGreaterThan: - return value > d.Value, nil - case FilterGreaterThanOrEqual: - return value >= d.Value, nil - case FilterLesserThan: - return value < d.Value, nil - case FilterLesserThanOrEqual: - return value <= d.Value, nil - default: - return false, errUnsupportedType - } - } - } - // we have not found this label. 
- return false, nil -} - -func (d *Duration) String() string { - return fmt.Sprintf("%s%s%s", d.Name, d.Type, d.Value) -} diff --git a/pkg/logql/labelfilter/filter.go b/pkg/logql/labelfilter/filter.go deleted file mode 100644 index 177585c31bd23..0000000000000 --- a/pkg/logql/labelfilter/filter.go +++ /dev/null @@ -1,87 +0,0 @@ -package labelfilter - -import ( - "fmt" - "strings" - - "github.com/prometheus/prometheus/pkg/labels" -) - -var ( - Noop = noopFilter{} -) - -type Filterer interface { - Filter(lbs labels.Labels) (bool, error) - fmt.Stringer -} - -type Binary struct { - Left Filterer - Right Filterer - and bool -} - -func NewAnd(left Filterer, right Filterer) *Binary { - return &Binary{ - Left: left, - Right: right, - and: true, - } -} - -func NewOr(left Filterer, right Filterer) *Binary { - return &Binary{ - Left: left, - Right: right, - } -} - -func (b *Binary) Filter(lbs labels.Labels) (bool, error) { - l, err := b.Left.Filter(lbs) - if err != nil { - return false, err - } - if !b.and && l { - return true, nil - } - r, err := b.Right.Filter(lbs) - if err != nil { - return false, err - } - if !b.and { - return l || r, nil - } - return l && r, nil -} - -func (b *Binary) String() string { - var sb strings.Builder - sb.WriteString("( ") - sb.WriteString(b.Left.String()) - if b.and { - sb.WriteString(" , ") - } else { - sb.WriteString(" or ") - } - sb.WriteString(b.Right.String()) - sb.WriteString(" )") - return sb.String() -} - -type noopFilter struct{} - -func (noopFilter) Filter(lbs labels.Labels) (bool, error) { return true, nil } - -func (noopFilter) String() string { return "" } - -func ReduceAnd(filters []Filterer) Filterer { - if len(filters) == 0 { - return Noop - } - result := filters[0] - for _, f := range filters[0:] { - result = NewAnd(result, f) - } - return result -} diff --git a/pkg/logql/labelfilter/number.go b/pkg/logql/labelfilter/number.go deleted file mode 100644 index 52510c93c6272..0000000000000 --- a/pkg/logql/labelfilter/number.go +++ /dev/null @@ -1,54 +0,0 @@ -package labelfilter - -import ( - "fmt" - "strconv" - - "github.com/prometheus/prometheus/pkg/labels" -) - -type Numeric struct { - Name string - Value float64 - Type FilterType -} - -func NewNumeric(t FilterType, name string, v float64) *Numeric { - return &Numeric{ - Name: name, - Type: t, - Value: v, - } -} - -func (n *Numeric) Filter(lbs labels.Labels) (bool, error) { - for _, l := range lbs { - if l.Name == n.Name { - value, err := strconv.ParseFloat(l.Value, 64) - if err != nil { - return false, errConversion - } - switch n.Type { - case FilterEqual: - return value == n.Value, nil - case FilterNotEqual: - return value != n.Value, nil - case FilterGreaterThan: - return value > n.Value, nil - case FilterGreaterThanOrEqual: - return value >= n.Value, nil - case FilterLesserThan: - return value < n.Value, nil - case FilterLesserThanOrEqual: - return value <= n.Value, nil - default: - return false, errUnsupportedType - } - } - } - return false, nil -} - -func (n *Numeric) String() string { - return fmt.Sprintf("%s%s%s", n.Name, n.Type, strconv.FormatFloat(n.Value, 'f', -1, 64)) -} diff --git a/pkg/logql/log/filter.go b/pkg/logql/log/filter.go index bb5a948fc9b8f..95bea98aba367 100644 --- a/pkg/logql/log/filter.go +++ b/pkg/logql/log/filter.go @@ -12,6 +12,7 @@ import ( // Filterer is a interface to filter log lines. 
type Filterer interface { Filter(line []byte) bool + ToStage() Stage } // LineFilterFunc is a syntax sugar for creating line filter from a function @@ -24,6 +25,7 @@ func (f FiltererFunc) Filter(line []byte) bool { type trueFilter struct{} func (trueFilter) Filter(_ []byte) bool { return true } +func (trueFilter) ToStage() Stage { return NoopStage } // TrueFilter is a filter that returns and matches all log lines whatever their content. var TrueFilter = trueFilter{} @@ -72,6 +74,12 @@ func (a andFilter) Filter(line []byte) bool { return a.left.Filter(line) && a.right.Filter(line) } +func (a andFilter) ToStage() Stage { + return StageFunc(func(line []byte, lbs Labels) ([]byte, bool) { + return line, a.Filter(line) + }) +} + type orFilter struct { left Filterer right Filterer @@ -105,6 +113,12 @@ func (a orFilter) Filter(line []byte) bool { return a.left.Filter(line) || a.right.Filter(line) } +func (a orFilter) ToStage() Stage { + return StageFunc(func(line []byte, lbs Labels) ([]byte, bool) { + return line, a.Filter(line) + }) +} + type regexpFilter struct { *regexp.Regexp } @@ -127,6 +141,12 @@ func (r regexpFilter) Filter(line []byte) bool { return r.Match(line) } +func (r regexpFilter) ToStage() Stage { + return StageFunc(func(line []byte, lbs Labels) ([]byte, bool) { + return line, r.Filter(line) + }) +} + type containsFilter struct { match []byte caseInsensitive bool @@ -139,6 +159,12 @@ func (l containsFilter) Filter(line []byte) bool { return bytes.Contains(line, l.match) } +func (l containsFilter) ToStage() Stage { + return StageFunc(func(line []byte, lbs Labels) ([]byte, bool) { + return line, l.Filter(line) + }) +} + func (l containsFilter) String() string { return string(l.match) } diff --git a/pkg/logql/log/fmt.go b/pkg/logql/log/fmt.go index c3b43f34681c7..689f1cc1dec08 100644 --- a/pkg/logql/log/fmt.go +++ b/pkg/logql/log/fmt.go @@ -9,6 +9,9 @@ import ( ) var ( + _ Stage = &lineFormatter{} + _ Stage = &labelsFormatter{} + functionMap = template.FuncMap{ "ToLower": strings.ToLower, "ToUpper": strings.ToUpper, @@ -56,31 +59,31 @@ func (lf *lineFormatter) Process(_ []byte, lbs Labels) ([]byte, bool) { return res, true } -type labelFmt struct { - name string +type LabelFmt struct { + Name string + Value string - value string - rename bool + Rename bool } -func newRenameLabelFmt(dst, target string) labelFmt { - return labelFmt{ - name: dst, - rename: true, - value: target, +func NewRenameLabelFmt(dst, target string) LabelFmt { + return LabelFmt{ + Name: dst, + Rename: true, + Value: target, } } -func newTemplateLabelFmt(dst, template string) labelFmt { - return labelFmt{ - name: dst, - rename: false, - value: template, +func NewTemplateLabelFmt(dst, template string) LabelFmt { + return LabelFmt{ + Name: dst, + Rename: false, + Value: template, } } type labelFormatter struct { - *template.Template - labelFmt + tmpl *template.Template + LabelFmt } type labelsFormatter struct { @@ -88,19 +91,19 @@ type labelsFormatter struct { buf *bytes.Buffer } -func NewLabelsFormatter(fmts []labelFmt) (*labelsFormatter, error) { +func NewLabelsFormatter(fmts []LabelFmt) (*labelsFormatter, error) { if err := validate(fmts); err != nil { return nil, err } formats := make([]labelFormatter, 0, len(fmts)) for _, fm := range fmts { - toAdd := labelFormatter{labelFmt: fm} - if !fm.rename { - t, err := template.New("label").Option("missingkey=zero").Funcs(functionMap).Parse(fm.value) + toAdd := labelFormatter{LabelFmt: fm} + if !fm.Rename { + t, err := 
template.New("label").Option("missingkey=zero").Funcs(functionMap).Parse(fm.Value) if err != nil { - return nil, fmt.Errorf("invalid template for label '%s': %s", fm.name, err) + return nil, fmt.Errorf("invalid template for label '%s': %s", fm.Name, err) } - toAdd.Template = t + toAdd.tmpl = t } formats = append(formats, toAdd) } @@ -110,33 +113,33 @@ func NewLabelsFormatter(fmts []labelFmt) (*labelsFormatter, error) { }, nil } -func validate(fmts []labelFmt) error { +func validate(fmts []LabelFmt) error { // it would be too confusing to rename and change the same label value. // To avoid confusion we allow to have a label name only once per stage. uniqueLabelName := map[string]struct{}{} for _, f := range fmts { - if f.name == errorLabel { - return fmt.Errorf("%s cannot be formatted", f.name) + if f.Name == errorLabel { + return fmt.Errorf("%s cannot be formatted", f.Name) } - if _, ok := uniqueLabelName[f.name]; ok { - return fmt.Errorf("multiple label name '%s' not allowed in a single format operation", f.name) + if _, ok := uniqueLabelName[f.Name]; ok { + return fmt.Errorf("multiple label name '%s' not allowed in a single format operation", f.Name) } - uniqueLabelName[f.name] = struct{}{} + uniqueLabelName[f.Name] = struct{}{} } return nil } func (lf *labelsFormatter) Process(l []byte, lbs Labels) ([]byte, bool) { for _, f := range lf.formats { - if f.rename { - lbs[f.name] = lbs[f.value] - delete(lbs, f.value) + if f.Rename { + lbs[f.Name] = lbs[f.Value] + delete(lbs, f.Value) continue } lf.buf.Reset() //todo (cyriltovena): handle error - _ = f.Template.Execute(lf.buf, lbs) - lbs[f.name] = lf.buf.String() + _ = f.tmpl.Execute(lf.buf, lbs) + lbs[f.Name] = lf.buf.String() } return l, true } diff --git a/pkg/logql/log/fmt_test.go b/pkg/logql/log/fmt_test.go index cb87bf278b58e..bd87285aff6dc 100644 --- a/pkg/logql/log/fmt_test.go +++ b/pkg/logql/log/fmt_test.go @@ -1,10 +1,8 @@ package log import ( - "sort" "testing" - "github.com/prometheus/prometheus/pkg/labels" "github.com/stretchr/testify/require" ) @@ -61,44 +59,43 @@ func Test_labelsFormatter_Format(t *testing.T) { name string fmter *labelsFormatter - in labels.Labels - want labels.Labels + in Labels + want Labels }{ { "combined with template", - mustNewLabelsFormatter([]labelFmt{newTemplateLabelFmt("foo", "{{.foo}} and {{.bar}}")}), - labels.Labels{{Name: "foo", Value: "blip"}, {Name: "bar", Value: "blop"}}, - labels.Labels{{Name: "foo", Value: "blip and blop"}, {Name: "bar", Value: "blop"}}, + mustNewLabelsFormatter([]LabelFmt{NewTemplateLabelFmt("foo", "{{.foo}} and {{.bar}}")}), + map[string]string{"foo": "blip", "bar": "blop"}, + map[string]string{"foo": "blip and blop", "bar": "blop"}, }, { "combined with template and rename", - mustNewLabelsFormatter([]labelFmt{ - newTemplateLabelFmt("blip", "{{.foo}} and {{.bar}}"), - newRenameLabelFmt("bar", "foo"), + mustNewLabelsFormatter([]LabelFmt{ + NewTemplateLabelFmt("blip", "{{.foo}} and {{.bar}}"), + NewRenameLabelFmt("bar", "foo"), }), - labels.Labels{{Name: "foo", Value: "blip"}, {Name: "bar", Value: "blop"}}, - labels.Labels{{Name: "blip", Value: "blip and blop"}, {Name: "bar", Value: "blip"}}, + map[string]string{"foo": "blip", "bar": "blop"}, + map[string]string{"blip": "blip and blop", "bar": "blip"}, }, { "fn", - mustNewLabelsFormatter([]labelFmt{ - newTemplateLabelFmt("blip", "{{.foo | ToUpper }} and {{.bar}}"), - newRenameLabelFmt("bar", "foo"), + mustNewLabelsFormatter([]LabelFmt{ + NewTemplateLabelFmt("blip", "{{.foo | ToUpper }} and {{.bar}}"), + 
NewRenameLabelFmt("bar", "foo"), }), - labels.Labels{{Name: "foo", Value: "blip"}, {Name: "bar", Value: "blop"}}, - labels.Labels{{Name: "blip", Value: "BLIP and blop"}, {Name: "bar", Value: "blip"}}, + map[string]string{"foo": "blip", "bar": "blop"}, + map[string]string{"blip": "BLIP and blop", "bar": "blip"}, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - sort.Sort(tt.want) _, _ = tt.fmter.Process(nil, tt.in) require.Equal(t, tt.want, tt.in) }) } } -func mustNewLabelsFormatter(fmts []labelFmt) *labelsFormatter { +func mustNewLabelsFormatter(fmts []LabelFmt) *labelsFormatter { lf, err := NewLabelsFormatter(fmts) if err != nil { panic(err) @@ -109,12 +106,12 @@ func mustNewLabelsFormatter(fmts []labelFmt) *labelsFormatter { func Test_validate(t *testing.T) { tests := []struct { name string - fmts []labelFmt + fmts []LabelFmt wantErr bool }{ - {"no dup", []labelFmt{newRenameLabelFmt("foo", "bar"), newRenameLabelFmt("bar", "foo")}, false}, - {"dup", []labelFmt{newRenameLabelFmt("foo", "bar"), newRenameLabelFmt("foo", "blip")}, true}, - {"no error", []labelFmt{newRenameLabelFmt(errorLabel, "bar")}, true}, + {"no dup", []LabelFmt{NewRenameLabelFmt("foo", "bar"), NewRenameLabelFmt("bar", "foo")}, false}, + {"dup", []LabelFmt{NewRenameLabelFmt("foo", "bar"), NewRenameLabelFmt("foo", "blip")}, true}, + {"no error", []LabelFmt{NewRenameLabelFmt(errorLabel, "bar")}, true}, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { diff --git a/pkg/logql/log/labelfilter/bytes.go b/pkg/logql/log/labelfilter/bytes.go new file mode 100644 index 0000000000000..86337fa4be2e8 --- /dev/null +++ b/pkg/logql/log/labelfilter/bytes.go @@ -0,0 +1,63 @@ +package labelfilter + +import ( + "fmt" + + "github.com/dustin/go-humanize" + "github.com/grafana/loki/pkg/logql/log" +) + +type Bytes struct { + Name string + Value uint64 + Type FilterType +} + +func NewBytes(t FilterType, name string, b uint64) *Bytes { + return &Bytes{ + Name: name, + Type: t, + Value: b, + } +} + +func (d *Bytes) Process(line []byte, lbs log.Labels) ([]byte, bool) { + if lbs.HasError() { + // if there's an error only the string matchers can filter it out. + return line, true + } + for k, v := range lbs { + if k == d.Name { + value, err := humanize.ParseBytes(v) + if err != nil { + lbs.SetError("LabelFilterError") + return line, true + } + switch d.Type { + case FilterEqual: + return line, value == d.Value + case FilterNotEqual: + return line, value != d.Value + case FilterGreaterThan: + return line, value > d.Value + case FilterGreaterThanOrEqual: + return line, value >= d.Value + case FilterLesserThan: + return line, value < d.Value + case FilterLesserThanOrEqual: + return line, value <= d.Value + default: + if err != nil { + lbs.SetError("LabelFilterError") + return line, true + } + } + } + } + // we have not found this label. 
+ return line, false +} + +func (d *Bytes) String() string { + return fmt.Sprintf("%s%s%d", d.Name, d.Type, d.Value) +} diff --git a/pkg/logql/log/labelfilter/duration.go b/pkg/logql/log/labelfilter/duration.go new file mode 100644 index 0000000000000..ff4cea0826a5c --- /dev/null +++ b/pkg/logql/log/labelfilter/duration.go @@ -0,0 +1,61 @@ +package labelfilter + +import ( + "fmt" + "time" + + "github.com/grafana/loki/pkg/logql/log" +) + +type Duration struct { + Name string + Value time.Duration + Type FilterType +} + +func NewDuration(t FilterType, name string, d time.Duration) *Duration { + return &Duration{ + Name: name, + Type: t, + Value: d, + } +} + +func (d *Duration) Process(line []byte, lbs log.Labels) ([]byte, bool) { + if lbs.HasError() { + // if there's an error only the string matchers can filter out. + return line, true + } + for k, v := range lbs { + if k == d.Name { + value, err := time.ParseDuration(v) + if err != nil { + lbs.SetError("LabelFilterError") + return line, true + } + switch d.Type { + case FilterEqual: + return line, value == d.Value + case FilterNotEqual: + return line, value != d.Value + case FilterGreaterThan: + return line, value > d.Value + case FilterGreaterThanOrEqual: + return line, value >= d.Value + case FilterLesserThan: + return line, value < d.Value + case FilterLesserThanOrEqual: + return line, value <= d.Value + default: + lbs.SetError("LabelFilterError") + return line, true + } + } + } + // we have not found this label. + return line, false +} + +func (d *Duration) String() string { + return fmt.Sprintf("%s%s%s", d.Name, d.Type, d.Value) +} diff --git a/pkg/logql/log/labelfilter/filter.go b/pkg/logql/log/labelfilter/filter.go new file mode 100644 index 0000000000000..8fdd144fdeb18 --- /dev/null +++ b/pkg/logql/log/labelfilter/filter.go @@ -0,0 +1,118 @@ +package labelfilter + +import ( + "fmt" + "strings" + + "github.com/grafana/loki/pkg/logql/log" +) + +var ( + _ Filterer = &Binary{} + _ Filterer = &Bytes{} + _ Filterer = &Duration{} + _ Filterer = &String{} + _ Filterer = &Numeric{} + + Noop = noopFilter{} +) + +// FilterType is an enum for label filtering types. +type FilterType int + +func (f FilterType) String() string { + switch f { + case FilterEqual: + return "==" + case FilterNotEqual: + return "!=" + case FilterGreaterThan: + return ">" + case FilterGreaterThanOrEqual: + return ">=" + case FilterLesserThan: + return "<" + case FilterLesserThanOrEqual: + return "<=" + default: + return "" + } +} + +// Possible FilterTypes. 
+const ( + FilterEqual FilterType = iota + FilterNotEqual + FilterGreaterThan + FilterGreaterThanOrEqual + FilterLesserThan + FilterLesserThanOrEqual +) + +type Filterer interface { + log.Stage + fmt.Stringer +} + +type Binary struct { + Left Filterer + Right Filterer + and bool +} + +func NewAnd(left Filterer, right Filterer) *Binary { + return &Binary{ + Left: left, + Right: right, + and: true, + } +} + +func NewOr(left Filterer, right Filterer) *Binary { + return &Binary{ + Left: left, + Right: right, + } +} + +func (b *Binary) Process(line []byte, lbs log.Labels) ([]byte, bool) { + line, lok := b.Left.Process(line, lbs) + if !b.and && lok { + return line, true + } + line, rok := b.Right.Process(line, lbs) + if !b.and { + return line, lok || rok + } + return line, lok && rok +} + +func (b *Binary) String() string { + var sb strings.Builder + sb.WriteString("( ") + sb.WriteString(b.Left.String()) + if b.and { + sb.WriteString(" , ") + } else { + sb.WriteString(" or ") + } + sb.WriteString(b.Right.String()) + sb.WriteString(" )") + return sb.String() +} + +type noopFilter struct{} + +func (noopFilter) String() string { return "" } +func (noopFilter) Process(line []byte, lbs log.Labels) ([]byte, bool) { return line, true } + +func ReduceAnd(filters []Filterer) Filterer { + if len(filters) == 0 { + return Noop + } + result := filters[0] + for _, f := range filters[1:] { + result = NewAnd(result, f) + } + return result +} diff --git a/pkg/logql/labelfilter/filter_test.go b/pkg/logql/log/labelfilter/filter_test.go similarity index 59% rename from pkg/logql/labelfilter/filter_test.go rename to pkg/logql/log/labelfilter/filter_test.go index 9399fe1e48c2f..33b7179accef1 100644 --- a/pkg/logql/labelfilter/filter_test.go +++ b/pkg/logql/log/labelfilter/filter_test.go @@ -6,63 +6,65 @@ import ( "github.com/prometheus/prometheus/pkg/labels" "github.com/stretchr/testify/require" + + "github.com/grafana/loki/pkg/logql/log" ) func TestBinary_Filter(t *testing.T) { tests := []struct { f *Binary - lbs labels.Labels + lbs log.Labels want bool - wantErr bool + wantLbs log.Labels }{ { NewAnd(NewNumeric(FilterEqual, "foo", 5), NewDuration(FilterEqual, "bar", 1*time.Second)), - labels.Labels{labels.Label{Name: "foo", Value: "5"}, labels.Label{Name: "bar", Value: "1s"}}, + log.Labels{"foo": "5", "bar": "1s"}, true, - false, + log.Labels{"foo": "5", "bar": "1s"}, }, { NewAnd(NewNumeric(FilterEqual, "foo", 5), NewBytes(FilterEqual, "bar", 42)), - labels.Labels{labels.Label{Name: "foo", Value: "5"}, labels.Label{Name: "bar", Value: "42B"}}, + log.Labels{"foo": "5", "bar": "42B"}, true, - false, + log.Labels{"foo": "5", "bar": "42B"}, }, { NewAnd( NewNumeric(FilterEqual, "foo", 5), NewDuration(FilterEqual, "bar", 1*time.Second), ), - labels.Labels{labels.Label{Name: "foo", Value: "6"}, labels.Label{Name: "bar", Value: "1s"}}, - false, + log.Labels{"foo": "6", "bar": "1s"}, false, + log.Labels{"foo": "6", "bar": "1s"}, }, { NewAnd( NewNumeric(FilterEqual, "foo", 5), NewDuration(FilterEqual, "bar", 1*time.Second), ), - labels.Labels{labels.Label{Name: "foo", Value: "5"}, labels.Label{Name: "bar", Value: "2s"}}, - false, + log.Labels{"foo": "5", "bar": "2s"}, false, + log.Labels{"foo": "5", "bar": "2s"}, }, { NewAnd( NewString(labels.MustNewMatcher(labels.MatchEqual, "foo", "5")), NewDuration(FilterEqual, "bar", 1*time.Second), ), - labels.Labels{labels.Label{Name: "foo", Value: "5"}, labels.Label{Name: "bar", Value: "1s"}}, + log.Labels{"foo": "5", "bar": "1s"}, true, - false, + log.Labels{"foo": "5", "bar": "1s"},
}, { NewAnd( NewString(labels.MustNewMatcher(labels.MatchEqual, "foo", "5")), NewDuration(FilterEqual, "bar", 1*time.Second), ), - labels.Labels{labels.Label{Name: "foo", Value: "6"}, labels.Label{Name: "bar", Value: "1s"}}, - false, + log.Labels{"foo": "6", "bar": "1s"}, false, + log.Labels{"foo": "6", "bar": "1s"}, }, { NewAnd( @@ -72,13 +74,17 @@ func TestBinary_Filter(t *testing.T) { ), NewString(labels.MustNewMatcher(labels.MatchNotEqual, "method", "POST")), ), - labels.Labels{ - {Name: "duration", Value: "2s"}, - {Name: "status", Value: "200"}, - {Name: "method", Value: "GET"}, + log.Labels{ + "duration": "2s", + "status": "200", + "method": "GET", }, true, - false, + log.Labels{ + "duration": "2s", + "status": "200", + "method": "GET", + }, }, { NewAnd( @@ -88,13 +94,17 @@ func TestBinary_Filter(t *testing.T) { ), NewString(labels.MustNewMatcher(labels.MatchNotEqual, "method", "POST")), ), - labels.Labels{ - {Name: "duration", Value: "2s"}, - {Name: "status", Value: "200"}, - {Name: "method", Value: "POST"}, + log.Labels{ + "duration": "2s", + "status": "200", + "method": "POST", }, false, - false, + log.Labels{ + "duration": "2s", + "status": "200", + "method": "POST", + }, }, { NewAnd( @@ -104,13 +114,17 @@ func TestBinary_Filter(t *testing.T) { ), NewString(labels.MustNewMatcher(labels.MatchNotEqual, "method", "POST")), ), - labels.Labels{ - {Name: "duration", Value: "2s"}, - {Name: "status", Value: "500"}, - {Name: "method", Value: "POST"}, + log.Labels{ + "duration": "2s", + "status": "500", + "method": "POST", }, false, - false, + log.Labels{ + "duration": "2s", + "status": "500", + "method": "POST", + }, }, { NewAnd( @@ -120,23 +134,24 @@ func TestBinary_Filter(t *testing.T) { ), NewString(labels.MustNewMatcher(labels.MatchNotEqual, "method", "POST")), ), - labels.Labels{ - {Name: "duration", Value: "2s"}, - {Name: "status", Value: "200"}, - {Name: "method", Value: "POST"}, + log.Labels{ + "duration": "2s", + "status": "200", + "method": "POST", }, false, - false, + log.Labels{ + "duration": "2s", + "status": "200", + "method": "POST", + }, }, } for _, tt := range tests { t.Run(tt.f.String(), func(t *testing.T) { - got, err := tt.f.Filter(tt.lbs) - if (err != nil) != tt.wantErr { - t.Errorf("Binary.Filter() error = %v, wantErr %v", err, tt.wantErr) - return - } - require.Equal(t, got, tt.want, tt.lbs) + _, got := tt.f.Process(nil, tt.lbs) + require.Equal(t, tt.want, got) + require.Equal(t, tt.wantLbs, tt.lbs) }) } } diff --git a/pkg/logql/log/labelfilter/number.go b/pkg/logql/log/labelfilter/number.go new file mode 100644 index 0000000000000..6a61d3ebcea89 --- /dev/null +++ b/pkg/logql/log/labelfilter/number.go @@ -0,0 +1,60 @@ +package labelfilter + +import ( + "fmt" + "strconv" + + "github.com/grafana/loki/pkg/logql/log" +) + +type Numeric struct { + Name string + Value float64 + Type FilterType +} + +func NewNumeric(t FilterType, name string, v float64) *Numeric { + return &Numeric{ + Name: name, + Type: t, + Value: v, + } +} + +func (n *Numeric) Process(line []byte, lbs log.Labels) ([]byte, bool) { + if lbs.HasError() { + // if there's an error only the string matchers can filter out. 
+ return line, true + } + for k, v := range lbs { + if k == n.Name { + value, err := strconv.ParseFloat(v, 64) + if err != nil { + lbs.SetError("LabelFilterError") + return line, true + } + switch n.Type { + case FilterEqual: + return line, value == n.Value + case FilterNotEqual: + return line, value != n.Value + case FilterGreaterThan: + return line, value > n.Value + case FilterGreaterThanOrEqual: + return line, value >= n.Value + case FilterLesserThan: + return line, value < n.Value + case FilterLesserThanOrEqual: + return line, value <= n.Value + default: + lbs.SetError("LabelFilterError") + return line, true + } + } + } + return line, false +} + +func (n *Numeric) String() string { + return fmt.Sprintf("%s%s%s", n.Name, n.Type, strconv.FormatFloat(n.Value, 'f', -1, 64)) +} diff --git a/pkg/logql/labelfilter/string.go b/pkg/logql/log/labelfilter/string.go similarity index 50% rename from pkg/logql/labelfilter/string.go rename to pkg/logql/log/labelfilter/string.go index bcd9ac94dd226..1a168edfbc0a1 100644 --- a/pkg/logql/labelfilter/string.go +++ b/pkg/logql/log/labelfilter/string.go @@ -1,6 +1,8 @@ package labelfilter import ( + "github.com/grafana/loki/pkg/logql/log" + "github.com/prometheus/prometheus/pkg/labels" ) @@ -14,11 +16,11 @@ func NewString(m *labels.Matcher) *String { } } -func (s *String) Filter(lbs labels.Labels) (bool, error) { - for _, l := range lbs { - if l.Name == s.Name { - return s.Matches(l.Value), nil +func (s *String) Process(line []byte, lbs log.Labels) ([]byte, bool) { + for k, v := range lbs { + if k == s.Name { + return line, s.Matches(v) } } - return false, nil + return line, false } diff --git a/pkg/logql/log/labels.go b/pkg/logql/log/labels.go index 04652c7f4e647..b415bf6860dfc 100644 --- a/pkg/logql/log/labels.go +++ b/pkg/logql/log/labels.go @@ -1,3 +1,17 @@ package log type Labels map[string]string + +func (l Labels) Has(key string) bool { + _, ok := l[key] + return ok +} + +func (l Labels) SetError(err string) { + l[errorLabel] = err +} + +func (l Labels) HasError() bool { + _, ok := l[errorLabel] + return ok +} diff --git a/pkg/logql/log/parser.go b/pkg/logql/log/parser.go index 8896d38d90819..22b2bf69710de 100644 --- a/pkg/logql/log/parser.go +++ b/pkg/logql/log/parser.go @@ -11,7 +11,6 @@ import ( jsoniter "github.com/json-iterator/go" "github.com/prometheus/common/model" - "github.com/prometheus/prometheus/pkg/labels" ) const ( @@ -20,54 +19,48 @@ const ( ) var ( + _ Stage = &jsonParser{} + _ Stage = ®expParser{} + _ Stage = &logfmtParser{} + errMissingCapture = errors.New("at least one named capture must be supplied") - NoopLabelParser = noopParser{} underscore = "_" point = "." 
dash = "-" ) -type Parser interface { - Parse(line []byte, lbs Labels) -} - -type noopParser struct{} - -func (noopParser) Parse(_ []byte, lbs labels.Labels) labels.Labels { - return lbs +func addLabel(lbs Labels) func(key, value string) { + unique := map[string]struct{}{} + return func(key, value string) { + _, ok := unique[key] + if ok { + return + } + unique[key] = struct{}{} + key = strings.ReplaceAll(strings.ReplaceAll(key, point, underscore), dash, underscore) + if lbs.Has(key) { + key = fmt.Sprintf("%s%s", key, duplicateSuffix) + } + lbs[key] = value + } } -type jsonParser struct { - builder *labels.Builder -} +type jsonParser struct{} func NewJSONParser() *jsonParser { - return &jsonParser{ - builder: labels.NewBuilder(nil), - } + return &jsonParser{} } -func (j *jsonParser) Parse(line []byte, lbs labels.Labels) labels.Labels { +func (j *jsonParser) Process(line []byte, lbs Labels) ([]byte, bool) { data := map[string]interface{}{} - j.builder.Reset(lbs) err := jsoniter.ConfigFastest.Unmarshal(line, &data) if err != nil { - j.builder.Set(errorLabel, errJSON) - return j.builder.Labels() - } - parseMap("", data, addLabel(j.builder, lbs)) - return j.builder.Labels() -} - -func addLabel(builder *labels.Builder, lbs labels.Labels) func(key, value string) { - return func(key, value string) { - key = strings.ReplaceAll(strings.ReplaceAll(key, point, underscore), dash, underscore) - if lbs.Has(key) { - key = fmt.Sprintf("%s%s", key, duplicateSuffix) - } - builder.Set(key, value) + lbs.SetError(errJSON) + return line, true } + parseMap("", data, addLabel(lbs)) + return line, true } func parseMap(prefix string, data map[string]interface{}, add func(key, value string)) { @@ -93,7 +86,6 @@ func jsonKey(prefix, key string) string { type regexpParser struct { regex *regexp.Regexp - builder *labels.Builder nameIndex map[int]string } @@ -124,7 +116,6 @@ func NewRegexpParser(re string) (*regexpParser, error) { } return ®expParser{ regex: regex, - builder: labels.NewBuilder(nil), nameIndex: nameIndex, }, nil } @@ -137,37 +128,35 @@ func mustNewRegexParser(re string) *regexpParser { return r } -func (r *regexpParser) Parse(line []byte, lbs labels.Labels) labels.Labels { - r.builder.Reset(lbs) +func (r *regexpParser) Process(line []byte, lbs Labels) ([]byte, bool) { + add := addLabel(lbs) for i, value := range r.regex.FindSubmatch(line) { if name, ok := r.nameIndex[i]; ok { - addLabel(r.builder, lbs)(name, string(value)) + add(name, string(value)) } } - return r.builder.Labels() + return line, true } type logfmtParser struct { - builder *labels.Builder - dec *logfmt.Decoder + dec *logfmt.Decoder } func NewLogfmtParser() *logfmtParser { return &logfmtParser{ - builder: labels.NewBuilder(nil), - dec: logfmt.NewDecoder(), + dec: logfmt.NewDecoder(), } } -func (l *logfmtParser) Parse(line []byte, lbs labels.Labels) labels.Labels { - l.builder.Reset(lbs) +func (l *logfmtParser) Process(line []byte, lbs Labels) ([]byte, bool) { l.dec.Reset(line) - + add := addLabel(lbs) for l.dec.ScanKeyval() { - addLabel(l.builder, lbs)(string(l.dec.Key()), string(l.dec.Value())) + add(string(l.dec.Key()), string(l.dec.Value())) } if l.dec.Err() != nil { - l.builder.Set(errorLabel, errLogfmt) + lbs.SetError(errLogfmt) + return line, true } - return l.builder.Labels() + return line, true } diff --git a/pkg/logql/log/parser_test.go b/pkg/logql/log/parser_test.go index 9b6eac12bd1a6..ad32b6f2c0bf9 100644 --- a/pkg/logql/log/parser_test.go +++ b/pkg/logql/log/parser_test.go @@ -1,10 +1,8 @@ package log import ( - "sort" 
"testing" - "github.com/prometheus/prometheus/pkg/labels" "github.com/stretchr/testify/require" ) @@ -12,80 +10,74 @@ func Test_jsonParser_Parse(t *testing.T) { tests := []struct { name string - j *jsonParser line []byte - lbs labels.Labels - want labels.Labels + lbs Labels + want Labels }{ { "multi depth", - NewJSONParser(), []byte(`{"app":"foo","namespace":"prod","pod":{"uuid":"foo","deployment":{"ref":"foobar"}}}`), - labels.Labels{}, - labels.Labels{ - labels.Label{Name: "app", Value: "foo"}, - labels.Label{Name: "namespace", Value: "prod"}, - labels.Label{Name: "pod_uuid", Value: "foo"}, - labels.Label{Name: "pod_deployment_ref", Value: "foobar"}, + Labels{}, + Labels{ + "app": "foo", + "namespace": "prod", + "pod_uuid": "foo", + "pod_deployment_ref": "foobar", }, }, { "numeric", - NewJSONParser(), []byte(`{"counter":1, "price": {"_net_":5.56909}}`), - labels.Labels{}, - labels.Labels{ - labels.Label{Name: "counter", Value: "1"}, - labels.Label{Name: "price__net_", Value: "5.56909"}, + Labels{}, + Labels{ + "counter": "1", + "price__net_": "5.56909", }, }, { "skip arrays", - NewJSONParser(), []byte(`{"counter":1, "price": {"net_":["10","20"]}}`), - labels.Labels{}, - labels.Labels{ - labels.Label{Name: "counter", Value: "1"}, + Labels{}, + Labels{ + "counter": "1", }, }, { "bad key replaced", - NewJSONParser(), []byte(`{"cou-nter":1}`), - labels.Labels{}, - labels.Labels{ - labels.Label{Name: "cou_nter", Value: "1"}, + Labels{}, + Labels{ + "cou_nter": "1", }, }, { "errors", - NewJSONParser(), []byte(`{n}`), - labels.Labels{}, - labels.Labels{ - labels.Label{Name: errorLabel, Value: errJSON}, + Labels{}, + Labels{ + errorLabel: errJSON, }, }, { "duplicate extraction", - NewJSONParser(), []byte(`{"app":"foo","namespace":"prod","pod":{"uuid":"foo","deployment":{"ref":"foobar"}}}`), - labels.Labels{ - labels.Label{Name: "app", Value: "bar"}, + Labels{ + "app": "bar", + }, + Labels{ + "app": "bar", + "app_extracted": "foo", + "namespace": "prod", + "pod_uuid": "foo", + "pod_deployment_ref": "foobar", }, - labels.Labels{ - labels.Label{Name: "app", Value: "bar"}, - labels.Label{Name: "app_extracted", Value: "foo"}, - labels.Label{Name: "namespace", Value: "prod"}, - labels.Label{Name: "pod_uuid", Value: "foo"}, - labels.Label{Name: "pod_deployment_ref", Value: "foobar"}}, }, } for _, tt := range tests { + j := NewJSONParser() t.Run(tt.name, func(t *testing.T) { - sort.Sort(tt.want) - got := tt.j.Parse(tt.line, tt.lbs) - require.Equal(t, tt.want, got) + _, _ = j.Process(tt.line, tt.lbs) + require.Equal(t, tt.want, tt.lbs) }) } } @@ -119,63 +111,62 @@ func Test_regexpParser_Parse(t *testing.T) { name string parser *regexpParser line []byte - lbs labels.Labels - want labels.Labels + lbs Labels + want Labels }{ { "no matches", mustNewRegexParser("(?Pfoo|bar)buzz"), []byte("blah"), - labels.Labels{ - labels.Label{Name: "app", Value: "foo"}, + Labels{ + "app": "foo", }, - labels.Labels{ - labels.Label{Name: "app", Value: "foo"}, + Labels{ + "app": "foo", }, }, { "double matches", mustNewRegexParser("(?P.*)buzz"), []byte("matchebuzz barbuzz"), - labels.Labels{ - labels.Label{Name: "app", Value: "bar"}, + Labels{ + "app": "bar", }, - labels.Labels{ - labels.Label{Name: "app", Value: "bar"}, - labels.Label{Name: "foo", Value: "matchebuzz bar"}, + Labels{ + "app": "bar", + "foo": "matchebuzz bar", }, }, { "duplicate labels", mustNewRegexParser("(?Pbar)buzz"), []byte("barbuzz"), - labels.Labels{ - labels.Label{Name: "bar", Value: "foo"}, + Labels{ + "bar": "foo", }, - labels.Labels{ - 
labels.Label{Name: "bar", Value: "foo"}, - labels.Label{Name: "bar_extracted", Value: "bar"}, + Labels{ + "bar": "foo", + "bar_extracted": "bar", }, }, { "multiple labels extracted", mustNewRegexParser("status=(?P\\w+),latency=(?P\\w+)(ms|ns)"), []byte("status=200,latency=500ms"), - labels.Labels{ - labels.Label{Name: "app", Value: "foo"}, + Labels{ + "app": "foo", }, - labels.Labels{ - labels.Label{Name: "app", Value: "foo"}, - labels.Label{Name: "status", Value: "200"}, - labels.Label{Name: "latency", Value: "500"}, + Labels{ + "app": "foo", + "status": "200", + "latency": "500", }, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - sort.Sort(tt.want) - got := tt.parser.Parse(tt.line, tt.lbs) - require.Equal(t, tt.want, got) + _, _ = tt.parser.Process(tt.line, tt.lbs) + require.Equal(t, tt.want, tt.lbs) }) } } @@ -184,97 +175,97 @@ func Test_logfmtParser_Parse(t *testing.T) { tests := []struct { name string line []byte - lbs labels.Labels - want labels.Labels + lbs Labels + want Labels }{ { "not logfmt", []byte("foobar====wqe=sdad1r"), - labels.Labels{ - labels.Label{Name: "foo", Value: "bar"}, + Labels{ + "foo": "bar", }, - labels.Labels{ - labels.Label{Name: "foo", Value: "bar"}, - labels.Label{Name: errorLabel, Value: errLogfmt}, + Labels{ + "foo": "bar", + errorLabel: errLogfmt, }, }, { "key alone logfmt", []byte("buzz bar=foo"), - labels.Labels{ - labels.Label{Name: "foo", Value: "bar"}, + Labels{ + "foo": "bar", }, - labels.Labels{ - labels.Label{Name: "foo", Value: "bar"}, - labels.Label{Name: "bar", Value: "foo"}, + Labels{ + "foo": "bar", + "bar": "foo", + "buzz": "", }, }, { "quoted logfmt", []byte(`foobar="foo bar"`), - labels.Labels{ - labels.Label{Name: "foo", Value: "bar"}, + Labels{ + "foo": "bar", }, - labels.Labels{ - labels.Label{Name: "foo", Value: "bar"}, - labels.Label{Name: "foobar", Value: "foo bar"}, + Labels{ + "foo": "bar", + "foobar": "foo bar", }, }, { "double property logfmt", []byte(`foobar="foo bar" latency=10ms`), - labels.Labels{ - labels.Label{Name: "foo", Value: "bar"}, + Labels{ + "foo": "bar", }, - labels.Labels{ - labels.Label{Name: "foo", Value: "bar"}, - labels.Label{Name: "foobar", Value: "foo bar"}, - labels.Label{Name: "latency", Value: "10ms"}, + Labels{ + "foo": "bar", + "foobar": "foo bar", + "latency": "10ms", }, }, { "duplicate from line property", []byte(`foobar="foo bar" foobar=10ms`), - labels.Labels{ - labels.Label{Name: "foo", Value: "bar"}, + Labels{ + "foo": "bar", }, - labels.Labels{ - labels.Label{Name: "foo", Value: "bar"}, - labels.Label{Name: "foobar", Value: "10ms"}, + Labels{ + "foo": "bar", + "foobar": "foo bar", }, }, { "duplicate property", []byte(`foo="foo bar" foobar=10ms`), - labels.Labels{ - labels.Label{Name: "foo", Value: "bar"}, + Labels{ + "foo": "bar", }, - labels.Labels{ - labels.Label{Name: "foo", Value: "bar"}, - labels.Label{Name: "foo_extracted", Value: "foo bar"}, - labels.Label{Name: "foobar", Value: "10ms"}, + Labels{ + "foo": "bar", + "foo_extracted": "foo bar", + "foobar": "10ms", }, }, { "invalid key names", []byte(`foo="foo bar" foo.bar=10ms test-dash=foo`), - labels.Labels{ - labels.Label{Name: "foo", Value: "bar"}, + Labels{ + "foo": "bar", }, - labels.Labels{ - labels.Label{Name: "foo", Value: "bar"}, - labels.Label{Name: "foo_extracted", Value: "foo bar"}, - labels.Label{Name: "foo_bar", Value: "10ms"}, - labels.Label{Name: "test_dash", Value: "foo"}, + Labels{ + "foo": "bar", + "foo_extracted": "foo bar", + "foo_bar": "10ms", + "test_dash": "foo", }, }, } p := NewLogfmtParser() 
for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - sort.Sort(tt.want) - got := p.Parse(tt.line, tt.lbs) - require.Equal(t, tt.want, got) + _, _ = p.Process(tt.line, tt.lbs) + require.Equal(t, tt.want, tt.lbs) }) } } diff --git a/pkg/logql/log/pipeline.go b/pkg/logql/log/pipeline.go index cbfa734ee26ac..c5d1bbc3cb898 100644 --- a/pkg/logql/log/pipeline.go +++ b/pkg/logql/log/pipeline.go @@ -29,9 +29,9 @@ func (noopStage) Process(line []byte, lbs Labels) ([]byte, bool) { return line, true } -type PipelineFunc func(line []byte, lbs labels.Labels) ([]byte, labels.Labels, bool) +type StageFunc func(line []byte, lbs Labels) ([]byte, bool) -func (fn PipelineFunc) Process(line []byte, lbs labels.Labels) ([]byte, labels.Labels, bool) { +func (fn StageFunc) Process(line []byte, lbs Labels) ([]byte, bool) { return fn(line, lbs) } diff --git a/pkg/logql/series_extractor.go b/pkg/logql/log/series_extractor.go similarity index 97% rename from pkg/logql/series_extractor.go rename to pkg/logql/log/series_extractor.go index e7cdec934ff1b..96ebdf0212e6e 100644 --- a/pkg/logql/series_extractor.go +++ b/pkg/logql/log/series_extractor.go @@ -4,7 +4,7 @@ import ( "strconv" "time" - "github.com/grafana/loki/pkg/logql/labelfilter" + "github.com/grafana/loki/pkg/logql/log/labelfilter" "github.com/prometheus/prometheus/pkg/labels" ) diff --git a/pkg/logql/series_extractor_test.go b/pkg/logql/log/series_extractor_test.go similarity index 100% rename from pkg/logql/series_extractor_test.go rename to pkg/logql/log/series_extractor_test.go diff --git a/pkg/logql/parser_test.go b/pkg/logql/parser_test.go index 54ea789699af2..9826e1b34088b 100644 --- a/pkg/logql/parser_test.go +++ b/pkg/logql/parser_test.go @@ -9,7 +9,7 @@ import ( "github.com/prometheus/prometheus/pkg/labels" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/logql/labelfilter" + "github.com/grafana/loki/pkg/logql/log/labelfilter" ) func newString(s string) *string { From 1aa1609c0049d9d73a49ac3c964fbcb111d62d0c Mon Sep 17 00:00:00 2001 From: Cyril Tovena Date: Wed, 14 Oct 2020 18:29:28 +0200 Subject: [PATCH 34/45] Got something that builds and throws __error__ labels properly now.
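
The failure mode is the important part of this change: a stage that cannot
parse or convert a value no longer drops the line. It records the failure in
the __error__ label (LabelFilterError, and the parser errors such as
JSONParserError) and passes the line through, so failures stay visible and
only an explicit label filter can discard them. A minimal sketch of that
behaviour (illustrative only; it assumes the Labels helpers from
pkg/logql/log/labels.go and the error values used earlier in this series):

	package main

	import (
		"fmt"

		"github.com/grafana/loki/pkg/logql/log"
	)

	func main() {
		lbs := log.Labels{"app": "foo"}
		p := log.NewJSONParser()
		// Feeding the parser a broken JSON line does not drop it: Process
		// still returns true and the failure is surfaced via __error__.
		_, ok := p.Process([]byte(`{invalid`), lbs)
		fmt.Println(ok, lbs["__error__"]) // true JSONParserError
	}
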
Signed-off-by: Cyril Tovena --- pkg/chunkenc/dumb_chunk.go | 2 +- pkg/chunkenc/interface.go | 4 +- pkg/chunkenc/memchunk.go | 94 +- pkg/chunkenc/memchunk_test.go | 4 +- pkg/ingester/instance.go | 6 +- pkg/ingester/stream.go | 4 +- pkg/logql/ast.go | 50 +- pkg/logql/ast_test.go | 22 +- pkg/logql/expr.y | 75 +- pkg/logql/expr.y.go | 362 ++- pkg/logql/functions.go | 29 +- pkg/logql/log/error.go | 2 +- pkg/logql/log/filter.go | 6 + pkg/logql/log/label_filter.go | 301 +++ pkg/logql/log/label_filter_test.go | 155 ++ pkg/logql/log/labelfilter/bytes.go | 63 - pkg/logql/log/labelfilter/duration.go | 61 - pkg/logql/log/labelfilter/filter.go | 118 - pkg/logql/log/labelfilter/filter_test.go | 157 -- pkg/logql/log/labelfilter/number.go | 60 - pkg/logql/log/labelfilter/string.go | 26 - pkg/logql/log/metrics_extraction.go | 148 ++ ...tor_test.go => metrics_extraction_test.go} | 61 +- pkg/logql/log/pipeline.go | 22 +- pkg/logql/log/series_extractor.go | 96 - pkg/logql/parser_test.go | 2210 ++++++++--------- pkg/logql/shardmapper_test.go | 4 +- pkg/logql/test_utils.go | 47 +- pkg/storage/batch.go | 5 +- pkg/storage/batch_test.go | 2 +- pkg/storage/lazy_chunk.go | 5 +- pkg/storage/lazy_chunk_test.go | 2 +- pkg/storage/store.go | 7 +- 33 files changed, 2115 insertions(+), 2095 deletions(-) create mode 100644 pkg/logql/log/label_filter.go create mode 100644 pkg/logql/log/label_filter_test.go delete mode 100644 pkg/logql/log/labelfilter/bytes.go delete mode 100644 pkg/logql/log/labelfilter/duration.go delete mode 100644 pkg/logql/log/labelfilter/filter.go delete mode 100644 pkg/logql/log/labelfilter/filter_test.go delete mode 100644 pkg/logql/log/labelfilter/number.go delete mode 100644 pkg/logql/log/labelfilter/string.go create mode 100644 pkg/logql/log/metrics_extraction.go rename pkg/logql/log/{series_extractor_test.go => metrics_extraction_test.go} (60%) delete mode 100644 pkg/logql/log/series_extractor.go diff --git a/pkg/chunkenc/dumb_chunk.go b/pkg/chunkenc/dumb_chunk.go index ad4173e54d137..263d2c60ed8d2 100644 --- a/pkg/chunkenc/dumb_chunk.go +++ b/pkg/chunkenc/dumb_chunk.go @@ -94,7 +94,7 @@ func (c *dumbChunk) Iterator(_ context.Context, from, through time.Time, directi }, nil } -func (c *dumbChunk) SampleIterator(_ context.Context, from, through time.Time, _ labels.Labels, _ logql.Pipeline, _ logql.SampleExtractor) iter.SampleIterator { +func (c *dumbChunk) SampleIterator(_ context.Context, from, through time.Time, _ labels.Labels, _ logql.SampleExtractor) iter.SampleIterator { return nil } diff --git a/pkg/chunkenc/interface.go b/pkg/chunkenc/interface.go index 676420f11c362..fb0f46d5fb8fe 100644 --- a/pkg/chunkenc/interface.go +++ b/pkg/chunkenc/interface.go @@ -100,7 +100,7 @@ type Chunk interface { SpaceFor(*logproto.Entry) bool Append(*logproto.Entry) error Iterator(ctx context.Context, mintT, maxtT time.Time, direction logproto.Direction, lbs labels.Labels, pipeline logql.Pipeline) (iter.EntryIterator, error) - SampleIterator(ctx context.Context, from, through time.Time, lbs labels.Labels, pipeline logql.Pipeline, extractor logql.SampleExtractor) iter.SampleIterator + SampleIterator(ctx context.Context, from, through time.Time, lbs labels.Labels, extractor logql.SampleExtractor) iter.SampleIterator // Returns the list of blocks in the chunks. Blocks(mintT, maxtT time.Time) []Block Size() int @@ -125,5 +125,5 @@ type Block interface { // Iterator returns an entry iterator for the block. 
Iterator(ctx context.Context, lbs labels.Labels, pipeline logql.Pipeline) iter.EntryIterator // SampleIterator returns a sample iterator for the block. - SampleIterator(ctx context.Context, lbs labels.Labels, pipeline logql.Pipeline, extractor logql.SampleExtractor) iter.SampleIterator + SampleIterator(ctx context.Context, lbs labels.Labels, extractor logql.SampleExtractor) iter.SampleIterator } diff --git a/pkg/chunkenc/memchunk.go b/pkg/chunkenc/memchunk.go index 6250e11d0efb9..617561ce74ef9 100644 --- a/pkg/chunkenc/memchunk.go +++ b/pkg/chunkenc/memchunk.go @@ -505,7 +505,7 @@ func (c *MemChunk) Iterator(ctx context.Context, mintT, maxtT time.Time, directi } // Iterator implements Chunk. -func (c *MemChunk) SampleIterator(ctx context.Context, from, through time.Time, lbs labels.Labels, pipeline logql.Pipeline, extractor logql.SampleExtractor) iter.SampleIterator { +func (c *MemChunk) SampleIterator(ctx context.Context, from, through time.Time, lbs labels.Labels, extractor logql.SampleExtractor) iter.SampleIterator { mint, maxt := from.UnixNano(), through.UnixNano() its := make([]iter.SampleIterator, 0, len(c.blocks)+1) @@ -513,11 +513,11 @@ func (c *MemChunk) SampleIterator(ctx context.Context, from, through time.Time, if maxt < b.mint || b.maxt < mint { continue } - its = append(its, b.SampleIterator(ctx, lbs, pipeline, extractor)) + its = append(its, b.SampleIterator(ctx, lbs, extractor)) } if !c.head.isEmpty() { - its = append(its, c.head.sampleIterator(ctx, mint, maxt, lbs, pipeline, extractor)) + its = append(its, c.head.sampleIterator(ctx, mint, maxt, lbs, extractor)) } return iter.NewTimeRangedSampleIterator( @@ -547,11 +547,11 @@ func (b block) Iterator(ctx context.Context, lbs labels.Labels, pipeline logql.P return newEntryIterator(ctx, b.readers, b.b, lbs, pipeline) } -func (b block) SampleIterator(ctx context.Context, lbs labels.Labels, pipeline logql.Pipeline, extractor logql.SampleExtractor) iter.SampleIterator { +func (b block) SampleIterator(ctx context.Context, lbs labels.Labels, extractor logql.SampleExtractor) iter.SampleIterator { if len(b.b) == 0 { return iter.NoopIterator } - return newSampleIterator(ctx, b.readers, b.b, lbs, pipeline, extractor) + return newSampleIterator(ctx, b.readers, b.b, lbs, extractor) } func (b block) Offset() int { @@ -613,7 +613,7 @@ func (hb *headBlock) iterator(ctx context.Context, direction logproto.Direction, return iter.NewStreamsIterator(ctx, streamsResult, direction) } -func (hb *headBlock) sampleIterator(ctx context.Context, mint, maxt int64, lbs labels.Labels, pipeline logql.Pipeline, extractor logql.SampleExtractor) iter.SampleIterator { +func (hb *headBlock) sampleIterator(ctx context.Context, mint, maxt int64, lbs labels.Labels, extractor logql.SampleExtractor) iter.SampleIterator { if hb.isEmpty() || (maxt < hb.mint || hb.maxt < mint) { return iter.NoopIterator } @@ -623,16 +623,11 @@ func (hb *headBlock) sampleIterator(ctx context.Context, mint, maxt int64, lbs l for _, e := range hb.entries { chunkStats.HeadChunkBytes += int64(len(e.s)) line := []byte(e.s) - newLine, parsedLabels, ok := pipeline.Process(line, lbs) + value, parsedLabels, ok := extractor.Process(line, lbs) if !ok { continue } - var value float64 var found bool - ok, value, parsedLabels = extractor.Extract(newLine, parsedLabels) - if !ok { - continue - } var s *logproto.Series lhash := parsedLabels.Hash() if s, found = series[lhash]; !found { @@ -668,12 +663,10 @@ type bufferedIterator struct { err error - decBuf []byte // The buffer for decoding the lengths. 
- buf []byte // The buffer for a single entry. - currLine []byte // the current line, this is the same as the buffer but sliced the the line size. - currTs int64 - currLabels labels.Labels - consumed bool + decBuf []byte // The buffer for decoding the lengths. + buf []byte // The buffer for a single entry. + currLine []byte // the current line, this is the same as the buffer but sliced to the line size. + currTs int64 closed bool @@ -681,7 +674,7 @@ type bufferedIterator struct { pipeline logql.Pipeline } -func newBufferedIterator(ctx context.Context, pool ReaderPool, b []byte, lbs labels.Labels, pipeline logql.Pipeline) *bufferedIterator { +func newBufferedIterator(ctx context.Context, pool ReaderPool, b []byte, lbs labels.Labels) *bufferedIterator { chunkStats := stats.GetChunkData(ctx) chunkStats.CompressedBytes += int64(len(b)) return &bufferedIterator{ @@ -690,9 +683,7 @@ func newBufferedIterator(ctx context.Context, pool ReaderPool, b []byte, lbs lab reader: nil, // will be initialized later bufReader: nil, // will be initialized later pool: pool, - pipeline: pipeline, decBuf: make([]byte, binary.MaxVarintLen64), - consumed: true, baseLbs: lbs, } } @@ -714,15 +705,8 @@ func (si *bufferedIterator) Next() bool { si.stats.DecompressedBytes += int64(len(line)) + 2*binary.MaxVarintLen64 si.stats.DecompressedLines++ - newLine, lbs, ok := si.pipeline.Process(line, si.baseLbs) - if !ok { - continue - } si.currTs = ts - si.currLine = newLine - si.consumed = false - // todo(cyriltovena) add cache for building the string of labels via some sort of decode context. - si.currLabels = lbs + si.currLine = line return true } } @@ -807,31 +791,44 @@ func (si *bufferedIterator) close() { si.decBuf = nil } -func (si *bufferedIterator) Labels() string { return si.currLabels.String() } - func newEntryIterator(ctx context.Context, pool ReaderPool, b []byte, lbs labels.Labels, pipeline logql.Pipeline) iter.EntryIterator { return &entryBufferedIterator{ - bufferedIterator: newBufferedIterator(ctx, pool, b, lbs, pipeline), + bufferedIterator: newBufferedIterator(ctx, pool, b, lbs), + pipeline: pipeline, } } type entryBufferedIterator struct { *bufferedIterator - cur logproto.Entry + pipeline logql.Pipeline + + cur logproto.Entry + currLabels labels.Labels } func (e *entryBufferedIterator) Entry() logproto.Entry { - if !e.consumed { + return e.cur +} + +func (e *entryBufferedIterator) Labels() string { return e.currLabels.String() } + +func (e *entryBufferedIterator) Next() bool { + for e.bufferedIterator.Next() { + newLine, lbs, ok := e.pipeline.Process(e.currLine, e.baseLbs) + if !ok { + continue + } e.cur.Timestamp = time.Unix(0, e.currTs) - e.cur.Line = string(e.currLine) - e.consumed = true + e.cur.Line = string(newLine) + e.currLabels = lbs + return true } - return e.cur + return false } -func newSampleIterator(ctx context.Context, pool ReaderPool, b []byte, lbs labels.Labels, pipeline logql.Pipeline, extractor logql.SampleExtractor) iter.SampleIterator { +func newSampleIterator(ctx context.Context, pool ReaderPool, b []byte, lbs labels.Labels, extractor logql.SampleExtractor) iter.SampleIterator { it := &sampleBufferedIterator{ - bufferedIterator: newBufferedIterator(ctx, pool, b, lbs, pipeline), + bufferedIterator: newBufferedIterator(ctx, pool, b, lbs), extractor: extractor, } return it @@ -843,30 +840,25 @@ type sampleBufferedIterator struct { extractor logql.SampleExtractor cur logproto.Sample - currLabels string - currValue float64 + currLabels labels.Labels } func (e *sampleBufferedIterator)
Next() bool { for e.bufferedIterator.Next() { - ok, val, labels := e.extractor.Extract(e.currLine, e.bufferedIterator.currLabels) + val, labels, ok := e.extractor.Process(e.currLine, e.baseLbs) if !ok { continue } - e.currValue = val - e.currLabels = labels.String() + e.currLabels = labels + e.cur.Value = val + e.cur.Hash = xxhash.Sum64(e.currLine) + e.cur.Timestamp = e.currTs return true } return false } -func (e *sampleBufferedIterator) Labels() string { return e.currLabels } +func (e *sampleBufferedIterator) Labels() string { return e.currLabels.String() } func (e *sampleBufferedIterator) Sample() logproto.Sample { - if !e.consumed { - e.cur.Timestamp = e.currTs - e.cur.Hash = xxhash.Sum64(e.currLine) - e.cur.Value = e.currValue - e.consumed = true - } return e.cur } diff --git a/pkg/chunkenc/memchunk_test.go b/pkg/chunkenc/memchunk_test.go index 1637c1931eab8..f770a39789cb5 100644 --- a/pkg/chunkenc/memchunk_test.go +++ b/pkg/chunkenc/memchunk_test.go @@ -128,7 +128,7 @@ func TestBlock(t *testing.T) { require.NoError(t, it.Close()) require.Equal(t, len(cases), idx) - sampleIt := chk.SampleIterator(context.Background(), time.Unix(0, 0), time.Unix(0, math.MaxInt64), nil, logql.NoopPipeline, logql.ExtractCount) + sampleIt := chk.SampleIterator(context.Background(), time.Unix(0, 0), time.Unix(0, math.MaxInt64), nil, logql.ExtractCount) idx = 0 for sampleIt.Next() { s := sampleIt.Sample() @@ -276,7 +276,7 @@ func TestSerialization(t *testing.T) { } require.NoError(t, it.Error()) - sampleIt := bc.SampleIterator(context.Background(), time.Unix(0, 0), time.Unix(0, math.MaxInt64), nil, logql.NoopPipeline, logql.ExtractCount) + sampleIt := bc.SampleIterator(context.Background(), time.Unix(0, 0), time.Unix(0, math.MaxInt64), nil, logql.ExtractCount) for i := 0; i < numSamples; i++ { require.True(t, sampleIt.Next(), i) diff --git a/pkg/ingester/instance.go b/pkg/ingester/instance.go index a873e8dc79ca5..0654af3c8aff6 100644 --- a/pkg/ingester/instance.go +++ b/pkg/ingester/instance.go @@ -231,10 +231,6 @@ func (i *instance) QuerySample(ctx context.Context, req logql.SelectSampleParams if err != nil { return nil, err } - pipeline, err := expr.Selector().Pipeline() - if err != nil { - return nil, err - } extractor, err := expr.Extractor() if err != nil { return nil, err @@ -246,7 +242,7 @@ func (i *instance) QuerySample(ctx context.Context, req logql.SelectSampleParams expr.Selector().Matchers(), func(stream *stream) error { ingStats.TotalChunksMatched += int64(len(stream.chunks)) - iter, err := stream.SampleIterator(ctx, req.Start, req.End, pipeline, extractor) + iter, err := stream.SampleIterator(ctx, req.Start, req.End, extractor) if err != nil { return err } diff --git a/pkg/ingester/stream.go b/pkg/ingester/stream.go index a1b1cd079d90b..08c374dd6f0af 100644 --- a/pkg/ingester/stream.go +++ b/pkg/ingester/stream.go @@ -280,10 +280,10 @@ func (s *stream) Iterator(ctx context.Context, from, through time.Time, directio } // Returns an SampleIterator. 
-func (s *stream) SampleIterator(ctx context.Context, from, through time.Time, pipeline logql.Pipeline, extractor logql.SampleExtractor) (iter.SampleIterator, error) { +func (s *stream) SampleIterator(ctx context.Context, from, through time.Time, extractor logql.SampleExtractor) (iter.SampleIterator, error) { iterators := make([]iter.SampleIterator, 0, len(s.chunks)) for _, c := range s.chunks { - if itr := c.chunk.SampleIterator(ctx, from, through, s.labels, pipeline, extractor); itr != nil { + if itr := c.chunk.SampleIterator(ctx, from, through, s.labels, extractor); itr != nil { iterators = append(iterators, itr) } } diff --git a/pkg/logql/ast.go b/pkg/logql/ast.go index 12e595ed77750..868f6794872b4 100644 --- a/pkg/logql/ast.go +++ b/pkg/logql/ast.go @@ -14,7 +14,6 @@ import ( "github.com/grafana/loki/pkg/iter" "github.com/grafana/loki/pkg/logproto" "github.com/grafana/loki/pkg/logql/log" - "github.com/grafana/loki/pkg/logql/log/labelfilter" ) // Expr is the root expression which can be a SampleExpr or LogSelectorExpr @@ -81,8 +80,17 @@ type LogSelectorExpr interface { Expr } +// type alias for backward compatibility +type Pipeline = log.Pipeline +type SampleExtractor = log.SampleExtractor + +var ( + NoopPipeline = log.NoopPipeline + ExtractCount = log.CountExtractor.ToSampleExtractor() +) + type PipelineExpr interface { - Pipeline() (log.Pipeline, error) + Pipeline() (Pipeline, error) Expr } @@ -94,6 +102,17 @@ type StageExpr interface { type MultiStageExpr []StageExpr func (m MultiStageExpr) Pipeline() (log.Pipeline, error) { + stages, err := m.stages() + if err != nil { + return nil, err + } + if len(stages) == 0 { + return log.NoopPipeline, nil + } + return stages, nil +} + +func (m MultiStageExpr) stages() (log.MultiStage, error) { c := make(log.MultiStage, 0, len(m)) for _, e := range m { p, err := e.Stage() @@ -105,9 +124,6 @@ func (m MultiStageExpr) Pipeline() (log.Pipeline, error) { } c = append(c, p) } - if len(c) == 0 { - return log.NoopPipeline, nil - } return c, nil } @@ -316,16 +332,16 @@ func (e *labelParserExpr) String() string { } type labelFilterExpr struct { - labelfilter.Filterer + log.LabelFilterer implicit } func (e *labelFilterExpr) Stage() (log.Stage, error) { - return e.Filterer, nil + return e.LabelFilterer, nil } func (e *labelFilterExpr) String() string { - return fmt.Sprintf("%s %s", OpPipe, e.Filterer.String()) + return fmt.Sprintf("%s %s", OpPipe, e.LabelFilterer.String()) } type lineFmtExpr struct { @@ -401,7 +417,7 @@ type unwrapExpr struct { identifier string operation string - postFilters []labelfilter.Filterer + postFilters []log.LabelFilterer } func (u unwrapExpr) String() string { @@ -417,7 +433,7 @@ func (u unwrapExpr) String() string { return sb.String() } -func (u *unwrapExpr) addPostFilter(f labelfilter.Filterer) *unwrapExpr { +func (u *unwrapExpr) addPostFilter(f log.LabelFilterer) *unwrapExpr { u.postFilters = append(u.postFilters, f) return u } @@ -696,7 +712,7 @@ func (e *vectorAggregationExpr) Selector() LogSelectorExpr { return e.left.Selector() } -func (e *vectorAggregationExpr) Extractor() (SampleExtractor, error) { +func (e *vectorAggregationExpr) Extractor() (log.SampleExtractor, error) { return e.left.Extractor() } @@ -833,12 +849,12 @@ func (e *literalExpr) String() string { // literlExpr impls SampleExpr & LogSelectorExpr mainly to reduce the need for more complicated typings // to facilitate sum types. We'll be type switching when evaluating them anyways // and they will only be present in binary operation legs. 
-func (e *literalExpr) Selector() LogSelectorExpr { return e } -func (e *literalExpr) HasFilter() bool { return false } -func (e *literalExpr) Operations() []string { return nil } -func (e *literalExpr) Pipeline() (log.Pipeline, error) { return log.NoopPipeline, nil } -func (e *literalExpr) Matchers() []*labels.Matcher { return nil } -func (e *literalExpr) Extractor() (SampleExtractor, error) { return nil, nil } +func (e *literalExpr) Selector() LogSelectorExpr { return e } +func (e *literalExpr) HasFilter() bool { return false } +func (e *literalExpr) Operations() []string { return nil } +func (e *literalExpr) Pipeline() (log.Pipeline, error) { return log.NoopPipeline, nil } +func (e *literalExpr) Matchers() []*labels.Matcher { return nil } +func (e *literalExpr) Extractor() (log.SampleExtractor, error) { return nil, nil } // helper used to impl Stringer for vector and range aggregations // nolint:interfacer diff --git a/pkg/logql/ast_test.go b/pkg/logql/ast_test.go index 2340ecaab002b..1a6672fa1e792 100644 --- a/pkg/logql/ast_test.go +++ b/pkg/logql/ast_test.go @@ -3,6 +3,8 @@ package logql import ( "testing" + "github.com/grafana/loki/pkg/logql/log" + "github.com/prometheus/prometheus/pkg/labels" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" @@ -41,7 +43,7 @@ func Test_logSelectorExpr_String(t *testing.T) { if err != nil { t.Fatalf("failed to get filter: %s", err) } - require.Equal(t, tt.expectFilter, p != NoopPipeline) + require.Equal(t, tt.expectFilter, p != log.NoopPipeline) if expr.String() != tt.selector { t.Fatalf("error expected: %s got: %s", tt.selector, expr.String()) } @@ -209,7 +211,7 @@ func Test_FilterMatcher(t *testing.T) { p, err := expr.Pipeline() assert.Nil(t, err) if tt.lines == nil { - assert.Equal(t, p, NoopPipeline) + assert.Equal(t, p, log.NoopPipeline) } else { for _, lc := range tt.lines { _, _, ok := p.Process([]byte(lc.l), labelBar) @@ -286,11 +288,11 @@ func Test_parserExpr_Parser(t *testing.T) { name string op string param string - want LabelParser + want log.Stage wantErr bool }{ - {"json", OpParserTypeJSON, "", NewJSONParser(), false}, - {"logfmt", OpParserTypeLogfmt, "", NewLogfmtParser(), false}, + {"json", OpParserTypeJSON, "", log.NewJSONParser(), false}, + {"logfmt", OpParserTypeLogfmt, "", log.NewLogfmtParser(), false}, {"regexp", OpParserTypeRegexp, "(?Pfoo)", mustNewRegexParser("(?Pfoo)"), false}, {"regexp err ", OpParserTypeRegexp, "foo", nil, true}, } @@ -300,7 +302,7 @@ func Test_parserExpr_Parser(t *testing.T) { op: tt.op, param: tt.param, } - got, err := e.parser() + got, err := e.Stage() if (err != nil) != tt.wantErr { t.Errorf("parserExpr.Parser() error = %v, wantErr %v", err, tt.wantErr) return @@ -313,3 +315,11 @@ func Test_parserExpr_Parser(t *testing.T) { }) } } + +func mustNewRegexParser(re string) log.Stage { + r, err := log.NewRegexpParser(re) + if err != nil { + panic(err) + } + return r +} diff --git a/pkg/logql/expr.y b/pkg/logql/expr.y index de2bd80e0248d..a01241059fbdd 100644 --- a/pkg/logql/expr.y +++ b/pkg/logql/expr.y @@ -4,7 +4,6 @@ package logql import ( "time" "github.com/prometheus/prometheus/pkg/labels" - "github.com/grafana/loki/pkg/logql/log/labelfilter" "github.com/grafana/loki/pkg/logql/log" ) @@ -37,11 +36,11 @@ import ( LineFilters *lineFilterExpr PipelineExpr MultiStageExpr PipelineStage StageExpr - BytesFilter labelfilter.Filterer - NumberFilter labelfilter.Filterer - DurationFilter labelfilter.Filterer - LabelFilter labelfilter.Filterer - UnitFilter labelfilter.Filterer + BytesFilter 
log.LabelFilterer + NumberFilter log.LabelFilterer + DurationFilter log.LabelFilterer + LabelFilter log.LabelFilterer + UnitFilter log.LabelFilterer LineFormatExpr *lineFmtExpr LabelFormatExpr *labelFmtExpr LabelFormat log.LabelFmt @@ -201,7 +200,7 @@ pipelineExpr: pipelineStage: lineFilters { $$ = $1 } | PIPE labelParser { $$ = $2 } - | PIPE labelFilter { $$ = &labelFilterExpr{Filterer: $2 }} + | PIPE labelFilter { $$ = &labelFilterExpr{LabelFilterer: $2 }} | PIPE lineFormatExpr { $$ = $2 } | PIPE labelFormatExpr { $$ = $2 } ; @@ -224,56 +223,56 @@ labelFormat: ; labelsFormat: - labelFormat { $$ = []LabelFmt{ $1 } } + labelFormat { $$ = []log.LabelFmt{ $1 } } | labelsFormat COMMA labelFormat { $$ = append($1, $3) } | labelsFormat COMMA error ; -labelFormatExpr: LABEL_FMT labelsFormat { $$ = log.NewLabelFmtExpr($2) }; +labelFormatExpr: LABEL_FMT labelsFormat { $$ = newLabelFmtExpr($2) }; labelFilter: - matcher { $$ = labelfilter.NewString($1) } - | unitFilter { $$ = $1 } - | numberFilter { $$ = $1 } + matcher { $$ = log.NewStringLabelFilter($1) } + | unitFilter { $$ = $1 } + | numberFilter { $$ = $1 } | OPEN_PARENTHESIS labelFilter CLOSE_PARENTHESIS { $$ = $2 } - | labelFilter labelFilter { $$ = labelfilter.NewAnd($1, $2 ) } - | labelFilter AND labelFilter { $$ = labelfilter.NewAnd($1, $3 ) } - | labelFilter COMMA labelFilter { $$ = labelfilter.NewAnd($1, $3 ) } - | labelFilter OR labelFilter { $$ = labelfilter.NewOr($1, $3 ) } + | labelFilter labelFilter { $$ = log.NewAndLabelFilter($1, $2 ) } + | labelFilter AND labelFilter { $$ = log.NewAndLabelFilter($1, $3 ) } + | labelFilter COMMA labelFilter { $$ = log.NewAndLabelFilter($1, $3 ) } + | labelFilter OR labelFilter { $$ = log.NewOrLabelFilter($1, $3 ) } ; unitFilter: durationFilter { $$ = $1 } - | bytesFilter { $$ = $1 } + | bytesFilter { $$ = $1 } durationFilter: - IDENTIFIER GT DURATION { $$ = labelfilter.NewDuration(labelfilter.FilterGreaterThan, $1, $3) } - | IDENTIFIER GTE DURATION { $$ = labelfilter.NewDuration(labelfilter.FilterGreaterThanOrEqual, $1, $3) } - | IDENTIFIER LT DURATION { $$ = labelfilter.NewDuration(labelfilter.FilterLesserThan, $1, $3) } - | IDENTIFIER LTE DURATION { $$ = labelfilter.NewDuration(labelfilter.FilterLesserThanOrEqual, $1, $3) } - | IDENTIFIER NEQ DURATION { $$ = labelfilter.NewDuration(labelfilter.FilterNotEqual, $1, $3) } - | IDENTIFIER EQ DURATION { $$ = labelfilter.NewDuration(labelfilter.FilterEqual, $1, $3) } - | IDENTIFIER CMP_EQ DURATION { $$ = labelfilter.NewDuration(labelfilter.FilterEqual, $1, $3) } + IDENTIFIER GT DURATION { $$ = log.NewDurationLabelFilter(log.LabelFilterGreaterThan, $1, $3) } + | IDENTIFIER GTE DURATION { $$ = log.NewDurationLabelFilter(log.LabelFilterGreaterThanOrEqual, $1, $3) } + | IDENTIFIER LT DURATION { $$ = log.NewDurationLabelFilter(log.LabelFilterLesserThan, $1, $3) } + | IDENTIFIER LTE DURATION { $$ = log.NewDurationLabelFilter(log.LabelFilterLesserThanOrEqual, $1, $3) } + | IDENTIFIER NEQ DURATION { $$ = log.NewDurationLabelFilter(log.LabelFilterNotEqual, $1, $3) } + | IDENTIFIER EQ DURATION { $$ = log.NewDurationLabelFilter(log.LabelFilterEqual, $1, $3) } + | IDENTIFIER CMP_EQ DURATION { $$ = log.NewDurationLabelFilter(log.LabelFilterEqual, $1, $3) } ; bytesFilter: - IDENTIFIER GT BYTES { $$ = labelfilter.NewBytes(labelfilter.FilterGreaterThan, $1, $3) } - | IDENTIFIER GTE BYTES { $$ = labelfilter.NewBytes(labelfilter.FilterGreaterThanOrEqual, $1, $3) } - | IDENTIFIER LT BYTES { $$ = labelfilter.NewBytes(labelfilter.FilterLesserThan, $1, $3) } - | IDENTIFIER 
LTE BYTES { $$ = labelfilter.NewBytes(labelfilter.FilterLesserThanOrEqual, $1, $3) } - | IDENTIFIER NEQ BYTES { $$ = labelfilter.NewBytes(labelfilter.FilterNotEqual, $1, $3) } - | IDENTIFIER EQ BYTES { $$ = labelfilter.NewBytes(labelfilter.FilterEqual, $1, $3) } - | IDENTIFIER CMP_EQ BYTES { $$ = labelfilter.NewBytes(labelfilter.FilterEqual, $1, $3) } + IDENTIFIER GT BYTES { $$ = log.NewBytesLabelFilter(log.LabelFilterGreaterThan, $1, $3) } + | IDENTIFIER GTE BYTES { $$ = log.NewBytesLabelFilter(log.LabelFilterGreaterThanOrEqual, $1, $3) } + | IDENTIFIER LT BYTES { $$ = log.NewBytesLabelFilter(log.LabelFilterLesserThan, $1, $3) } + | IDENTIFIER LTE BYTES { $$ = log.NewBytesLabelFilter(log.LabelFilterLesserThanOrEqual, $1, $3) } + | IDENTIFIER NEQ BYTES { $$ = log.NewBytesLabelFilter(log.LabelFilterNotEqual, $1, $3) } + | IDENTIFIER EQ BYTES { $$ = log.NewBytesLabelFilter(log.LabelFilterEqual, $1, $3) } + | IDENTIFIER CMP_EQ BYTES { $$ = log.NewBytesLabelFilter(log.LabelFilterEqual, $1, $3) } ; numberFilter: - IDENTIFIER GT NUMBER { $$ = labelfilter.NewNumeric(labelfilter.FilterGreaterThan, $1, mustNewFloat($3))} - | IDENTIFIER GTE NUMBER { $$ = labelfilter.NewNumeric(labelfilter.FilterGreaterThanOrEqual, $1, mustNewFloat($3))} - | IDENTIFIER LT NUMBER { $$ = labelfilter.NewNumeric(labelfilter.FilterLesserThan, $1, mustNewFloat($3))} - | IDENTIFIER LTE NUMBER { $$ = labelfilter.NewNumeric(labelfilter.FilterLesserThanOrEqual, $1, mustNewFloat($3))} - | IDENTIFIER NEQ NUMBER { $$ = labelfilter.NewNumeric(labelfilter.FilterNotEqual, $1, mustNewFloat($3))} - | IDENTIFIER EQ NUMBER { $$ = labelfilter.NewNumeric(labelfilter.FilterEqual, $1, mustNewFloat($3))} - | IDENTIFIER CMP_EQ NUMBER { $$ = labelfilter.NewNumeric(labelfilter.FilterEqual, $1, mustNewFloat($3))} + IDENTIFIER GT NUMBER { $$ = log.NewNumericLabelFilter(log.LabelFilterGreaterThan, $1, mustNewFloat($3))} + | IDENTIFIER GTE NUMBER { $$ = log.NewNumericLabelFilter(log.LabelFilterGreaterThanOrEqual, $1, mustNewFloat($3))} + | IDENTIFIER LT NUMBER { $$ = log.NewNumericLabelFilter(log.LabelFilterLesserThan, $1, mustNewFloat($3))} + | IDENTIFIER LTE NUMBER { $$ = log.NewNumericLabelFilter(log.LabelFilterLesserThanOrEqual, $1, mustNewFloat($3))} + | IDENTIFIER NEQ NUMBER { $$ = log.NewNumericLabelFilter(log.LabelFilterNotEqual, $1, mustNewFloat($3))} + | IDENTIFIER EQ NUMBER { $$ = log.NewNumericLabelFilter(log.LabelFilterEqual, $1, mustNewFloat($3))} + | IDENTIFIER CMP_EQ NUMBER { $$ = log.NewNumericLabelFilter(log.LabelFilterEqual, $1, mustNewFloat($3))} ; // TODO(owen-d): add (on,ignoring) clauses to binOpExpr diff --git a/pkg/logql/expr.y.go b/pkg/logql/expr.y.go index bedbf991d2c77..1b99d052d7919 100644 --- a/pkg/logql/expr.y.go +++ b/pkg/logql/expr.y.go @@ -6,15 +6,13 @@ package logql import __yyfmt__ "fmt" //line pkg/logql/expr.y:2 - import ( "github.com/grafana/loki/pkg/logql/log" - "github.com/grafana/loki/pkg/logql/log/labelfilter" "github.com/prometheus/prometheus/pkg/labels" "time" ) -//line pkg/logql/expr.y:13 +//line pkg/logql/expr.y:12 type exprSymType struct { yys int Expr Expr @@ -43,11 +41,11 @@ type exprSymType struct { LineFilters *lineFilterExpr PipelineExpr MultiStageExpr PipelineStage StageExpr - BytesFilter labelfilter.Filterer - NumberFilter labelfilter.Filterer - DurationFilter labelfilter.Filterer - LabelFilter labelfilter.Filterer - UnitFilter labelfilter.Filterer + BytesFilter log.LabelFilterer + NumberFilter log.LabelFilterer + DurationFilter log.LabelFilterer + LabelFilter log.LabelFilterer + UnitFilter 
log.LabelFilterer LineFormatExpr *lineFmtExpr LabelFormatExpr *labelFmtExpr LabelFormat log.LabelFmt @@ -203,7 +201,7 @@ const exprEofCode = 1 const exprErrCode = 2 const exprInitialStackSize = 16 -//line pkg/logql/expr.y:346 +//line pkg/logql/expr.y:345 //line yacctab:1 var exprExca = [...]int{ @@ -746,864 +744,864 @@ exprdefault: case 1: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:105 +//line pkg/logql/expr.y:104 { exprlex.(*lexer).expr = exprDollar[1].Expr } case 2: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:108 +//line pkg/logql/expr.y:107 { exprVAL.Expr = exprDollar[1].LogExpr } case 3: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:109 +//line pkg/logql/expr.y:108 { exprVAL.Expr = exprDollar[1].MetricExpr } case 4: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:113 +//line pkg/logql/expr.y:112 { exprVAL.MetricExpr = exprDollar[1].RangeAggregationExpr } case 5: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:114 +//line pkg/logql/expr.y:113 { exprVAL.MetricExpr = exprDollar[1].VectorAggregationExpr } case 6: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:115 +//line pkg/logql/expr.y:114 { exprVAL.MetricExpr = exprDollar[1].BinOpExpr } case 7: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:116 +//line pkg/logql/expr.y:115 { exprVAL.MetricExpr = exprDollar[1].LiteralExpr } case 8: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:117 +//line pkg/logql/expr.y:116 { exprVAL.MetricExpr = exprDollar[2].MetricExpr } case 9: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:121 +//line pkg/logql/expr.y:120 { exprVAL.LogExpr = newMatcherExpr(exprDollar[1].Selector) } case 10: exprDollar = exprS[exprpt-2 : exprpt+1] -//line pkg/logql/expr.y:122 +//line pkg/logql/expr.y:121 { exprVAL.LogExpr = newPipelineExpr(newMatcherExpr(exprDollar[1].Selector), exprDollar[2].PipelineExpr) } case 11: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:123 +//line pkg/logql/expr.y:122 { exprVAL.LogExpr = exprDollar[2].LogExpr } case 12: exprDollar = exprS[exprpt-2 : exprpt+1] -//line pkg/logql/expr.y:127 +//line pkg/logql/expr.y:126 { exprVAL.LogRangeExpr = newLogRange(newMatcherExpr(exprDollar[1].Selector), exprDollar[2].duration, nil) } case 13: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:128 +//line pkg/logql/expr.y:127 { exprVAL.LogRangeExpr = newLogRange(newMatcherExpr(exprDollar[2].Selector), exprDollar[4].duration, nil) } case 14: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:129 +//line pkg/logql/expr.y:128 { exprVAL.LogRangeExpr = newLogRange(newMatcherExpr(exprDollar[1].Selector), exprDollar[2].duration, exprDollar[3].UnwrapExpr) } case 15: exprDollar = exprS[exprpt-5 : exprpt+1] -//line pkg/logql/expr.y:130 +//line pkg/logql/expr.y:129 { exprVAL.LogRangeExpr = newLogRange(newMatcherExpr(exprDollar[2].Selector), exprDollar[4].duration, exprDollar[5].UnwrapExpr) } case 16: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:131 +//line pkg/logql/expr.y:130 { exprVAL.LogRangeExpr = newLogRange(newMatcherExpr(exprDollar[1].Selector), exprDollar[3].duration, exprDollar[2].UnwrapExpr) } case 17: exprDollar = exprS[exprpt-5 : exprpt+1] -//line pkg/logql/expr.y:132 +//line pkg/logql/expr.y:131 { exprVAL.LogRangeExpr = newLogRange(newMatcherExpr(exprDollar[2].Selector), exprDollar[5].duration, exprDollar[3].UnwrapExpr) } case 18: exprDollar = exprS[exprpt-3 : exprpt+1] -//line 
pkg/logql/expr.y:133 +//line pkg/logql/expr.y:132 { exprVAL.LogRangeExpr = newLogRange(newPipelineExpr(newMatcherExpr(exprDollar[1].Selector), exprDollar[2].PipelineExpr), exprDollar[3].duration, nil) } case 19: exprDollar = exprS[exprpt-5 : exprpt+1] -//line pkg/logql/expr.y:134 +//line pkg/logql/expr.y:133 { exprVAL.LogRangeExpr = newLogRange(newPipelineExpr(newMatcherExpr(exprDollar[2].Selector), exprDollar[3].PipelineExpr), exprDollar[5].duration, nil) } case 20: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:135 +//line pkg/logql/expr.y:134 { exprVAL.LogRangeExpr = newLogRange(newPipelineExpr(newMatcherExpr(exprDollar[1].Selector), exprDollar[2].PipelineExpr), exprDollar[4].duration, exprDollar[3].UnwrapExpr) } case 21: exprDollar = exprS[exprpt-6 : exprpt+1] -//line pkg/logql/expr.y:136 +//line pkg/logql/expr.y:135 { exprVAL.LogRangeExpr = newLogRange(newPipelineExpr(newMatcherExpr(exprDollar[2].Selector), exprDollar[3].PipelineExpr), exprDollar[6].duration, exprDollar[4].UnwrapExpr) } case 22: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:137 +//line pkg/logql/expr.y:136 { exprVAL.LogRangeExpr = newLogRange(newPipelineExpr(newMatcherExpr(exprDollar[1].Selector), exprDollar[3].PipelineExpr), exprDollar[2].duration, nil) } case 23: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:138 +//line pkg/logql/expr.y:137 { exprVAL.LogRangeExpr = newLogRange(newPipelineExpr(newMatcherExpr(exprDollar[1].Selector), exprDollar[3].PipelineExpr), exprDollar[2].duration, exprDollar[4].UnwrapExpr) } case 24: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:139 +//line pkg/logql/expr.y:138 { exprVAL.LogRangeExpr = exprDollar[2].LogRangeExpr } case 26: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:144 +//line pkg/logql/expr.y:143 { exprVAL.UnwrapExpr = newUnwrapExpr(exprDollar[3].str, "") } case 27: exprDollar = exprS[exprpt-6 : exprpt+1] -//line pkg/logql/expr.y:145 +//line pkg/logql/expr.y:144 { exprVAL.UnwrapExpr = newUnwrapExpr(exprDollar[5].str, exprDollar[3].ConvOp) } case 28: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:146 +//line pkg/logql/expr.y:145 { exprVAL.UnwrapExpr = exprDollar[1].UnwrapExpr.addPostFilter(exprDollar[3].LabelFilter) } case 29: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:150 +//line pkg/logql/expr.y:149 { exprVAL.ConvOp = OpConvDuration } case 30: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:151 +//line pkg/logql/expr.y:150 { exprVAL.ConvOp = OpConvDurationSeconds } case 31: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:155 +//line pkg/logql/expr.y:154 { exprVAL.RangeAggregationExpr = newRangeAggregationExpr(exprDollar[3].LogRangeExpr, exprDollar[1].RangeOp, nil, nil) } case 32: exprDollar = exprS[exprpt-6 : exprpt+1] -//line pkg/logql/expr.y:156 +//line pkg/logql/expr.y:155 { exprVAL.RangeAggregationExpr = newRangeAggregationExpr(exprDollar[5].LogRangeExpr, exprDollar[1].RangeOp, nil, &exprDollar[3].str) } case 33: exprDollar = exprS[exprpt-5 : exprpt+1] -//line pkg/logql/expr.y:157 +//line pkg/logql/expr.y:156 { exprVAL.RangeAggregationExpr = newRangeAggregationExpr(exprDollar[3].LogRangeExpr, exprDollar[1].RangeOp, exprDollar[5].Grouping, nil) } case 34: exprDollar = exprS[exprpt-7 : exprpt+1] -//line pkg/logql/expr.y:158 +//line pkg/logql/expr.y:157 { exprVAL.RangeAggregationExpr = newRangeAggregationExpr(exprDollar[5].LogRangeExpr, exprDollar[1].RangeOp, exprDollar[7].Grouping, &exprDollar[3].str) } case 
35: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:163 +//line pkg/logql/expr.y:162 { exprVAL.VectorAggregationExpr = mustNewVectorAggregationExpr(exprDollar[3].MetricExpr, exprDollar[1].VectorOp, nil, nil) } case 36: exprDollar = exprS[exprpt-5 : exprpt+1] -//line pkg/logql/expr.y:164 +//line pkg/logql/expr.y:163 { exprVAL.VectorAggregationExpr = mustNewVectorAggregationExpr(exprDollar[4].MetricExpr, exprDollar[1].VectorOp, exprDollar[2].Grouping, nil) } case 37: exprDollar = exprS[exprpt-5 : exprpt+1] -//line pkg/logql/expr.y:165 +//line pkg/logql/expr.y:164 { exprVAL.VectorAggregationExpr = mustNewVectorAggregationExpr(exprDollar[3].MetricExpr, exprDollar[1].VectorOp, exprDollar[5].Grouping, nil) } case 38: exprDollar = exprS[exprpt-6 : exprpt+1] -//line pkg/logql/expr.y:167 +//line pkg/logql/expr.y:166 { exprVAL.VectorAggregationExpr = mustNewVectorAggregationExpr(exprDollar[5].MetricExpr, exprDollar[1].VectorOp, nil, &exprDollar[3].str) } case 39: exprDollar = exprS[exprpt-7 : exprpt+1] -//line pkg/logql/expr.y:168 +//line pkg/logql/expr.y:167 { exprVAL.VectorAggregationExpr = mustNewVectorAggregationExpr(exprDollar[5].MetricExpr, exprDollar[1].VectorOp, exprDollar[7].Grouping, &exprDollar[3].str) } case 40: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:172 +//line pkg/logql/expr.y:171 { exprVAL.Filter = labels.MatchRegexp } case 41: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:173 +//line pkg/logql/expr.y:172 { exprVAL.Filter = labels.MatchEqual } case 42: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:174 +//line pkg/logql/expr.y:173 { exprVAL.Filter = labels.MatchNotRegexp } case 43: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:175 +//line pkg/logql/expr.y:174 { exprVAL.Filter = labels.MatchNotEqual } case 44: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:179 +//line pkg/logql/expr.y:178 { exprVAL.Selector = exprDollar[2].Matchers } case 45: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:180 +//line pkg/logql/expr.y:179 { exprVAL.Selector = exprDollar[2].Matchers } case 46: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:181 +//line pkg/logql/expr.y:180 { } case 47: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:185 +//line pkg/logql/expr.y:184 { exprVAL.Matchers = []*labels.Matcher{exprDollar[1].Matcher} } case 48: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:186 +//line pkg/logql/expr.y:185 { exprVAL.Matchers = append(exprDollar[1].Matchers, exprDollar[3].Matcher) } case 49: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:190 +//line pkg/logql/expr.y:189 { exprVAL.Matcher = mustNewMatcher(labels.MatchEqual, exprDollar[1].str, exprDollar[3].str) } case 50: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:191 +//line pkg/logql/expr.y:190 { exprVAL.Matcher = mustNewMatcher(labels.MatchNotEqual, exprDollar[1].str, exprDollar[3].str) } case 51: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:192 +//line pkg/logql/expr.y:191 { exprVAL.Matcher = mustNewMatcher(labels.MatchRegexp, exprDollar[1].str, exprDollar[3].str) } case 52: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:193 +//line pkg/logql/expr.y:192 { exprVAL.Matcher = mustNewMatcher(labels.MatchNotRegexp, exprDollar[1].str, exprDollar[3].str) } case 53: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:197 +//line pkg/logql/expr.y:196 { exprVAL.PipelineExpr = 
MultiStageExpr{exprDollar[1].PipelineStage} } case 54: exprDollar = exprS[exprpt-2 : exprpt+1] -//line pkg/logql/expr.y:198 +//line pkg/logql/expr.y:197 { exprVAL.PipelineExpr = append(exprDollar[1].PipelineExpr, exprDollar[2].PipelineStage) } case 55: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:202 +//line pkg/logql/expr.y:201 { exprVAL.PipelineStage = exprDollar[1].LineFilters } case 56: exprDollar = exprS[exprpt-2 : exprpt+1] -//line pkg/logql/expr.y:203 +//line pkg/logql/expr.y:202 { exprVAL.PipelineStage = exprDollar[2].LabelParser } case 57: exprDollar = exprS[exprpt-2 : exprpt+1] -//line pkg/logql/expr.y:204 +//line pkg/logql/expr.y:203 { - exprVAL.PipelineStage = &labelFilterExpr{Filterer: exprDollar[2].LabelFilter} + exprVAL.PipelineStage = &labelFilterExpr{LabelFilterer: exprDollar[2].LabelFilter} } case 58: exprDollar = exprS[exprpt-2 : exprpt+1] -//line pkg/logql/expr.y:205 +//line pkg/logql/expr.y:204 { exprVAL.PipelineStage = exprDollar[2].LineFormatExpr } case 59: exprDollar = exprS[exprpt-2 : exprpt+1] -//line pkg/logql/expr.y:206 +//line pkg/logql/expr.y:205 { exprVAL.PipelineStage = exprDollar[2].LabelFormatExpr } case 60: exprDollar = exprS[exprpt-2 : exprpt+1] -//line pkg/logql/expr.y:210 +//line pkg/logql/expr.y:209 { exprVAL.LineFilters = newLineFilterExpr(nil, exprDollar[1].Filter, exprDollar[2].str) } case 61: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:211 +//line pkg/logql/expr.y:210 { exprVAL.LineFilters = newLineFilterExpr(exprDollar[1].LineFilters, exprDollar[2].Filter, exprDollar[3].str) } case 62: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:214 +//line pkg/logql/expr.y:213 { exprVAL.LabelParser = newLabelParserExpr(OpParserTypeJSON, "") } case 63: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:215 +//line pkg/logql/expr.y:214 { exprVAL.LabelParser = newLabelParserExpr(OpParserTypeLogfmt, "") } case 64: exprDollar = exprS[exprpt-2 : exprpt+1] -//line pkg/logql/expr.y:216 +//line pkg/logql/expr.y:215 { exprVAL.LabelParser = newLabelParserExpr(OpParserTypeRegexp, exprDollar[2].str) } case 65: exprDollar = exprS[exprpt-2 : exprpt+1] -//line pkg/logql/expr.y:219 +//line pkg/logql/expr.y:218 { exprVAL.LineFormatExpr = newLineFmtExpr(exprDollar[2].str) } case 66: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:222 +//line pkg/logql/expr.y:221 { exprVAL.LabelFormat = log.NewRenameLabelFmt(exprDollar[1].str, exprDollar[3].str) } case 67: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:223 +//line pkg/logql/expr.y:222 { exprVAL.LabelFormat = log.NewTemplateLabelFmt(exprDollar[1].str, exprDollar[3].str) } case 68: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:227 +//line pkg/logql/expr.y:226 { - exprVAL.LabelsFormat = []LabelFmt{exprDollar[1].LabelFormat} + exprVAL.LabelsFormat = []log.LabelFmt{exprDollar[1].LabelFormat} } case 69: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:228 +//line pkg/logql/expr.y:227 { exprVAL.LabelsFormat = append(exprDollar[1].LabelsFormat, exprDollar[3].LabelFormat) } case 71: exprDollar = exprS[exprpt-2 : exprpt+1] -//line pkg/logql/expr.y:232 +//line pkg/logql/expr.y:231 { - exprVAL.LabelFormatExpr = log.NewLabelFmtExpr(exprDollar[2].LabelsFormat) + exprVAL.LabelFormatExpr = newLabelFmtExpr(exprDollar[2].LabelsFormat) } case 72: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:235 +//line pkg/logql/expr.y:234 { - exprVAL.LabelFilter = 
labelfilter.NewString(exprDollar[1].Matcher) + exprVAL.LabelFilter = log.NewStringLabelFilter(exprDollar[1].Matcher) } case 73: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:236 +//line pkg/logql/expr.y:235 { exprVAL.LabelFilter = exprDollar[1].UnitFilter } case 74: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:237 +//line pkg/logql/expr.y:236 { exprVAL.LabelFilter = exprDollar[1].NumberFilter } case 75: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:238 +//line pkg/logql/expr.y:237 { exprVAL.LabelFilter = exprDollar[2].LabelFilter } case 76: exprDollar = exprS[exprpt-2 : exprpt+1] -//line pkg/logql/expr.y:239 +//line pkg/logql/expr.y:238 { - exprVAL.LabelFilter = labelfilter.NewAnd(exprDollar[1].LabelFilter, exprDollar[2].LabelFilter) + exprVAL.LabelFilter = log.NewAndLabelFilter(exprDollar[1].LabelFilter, exprDollar[2].LabelFilter) } case 77: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:240 +//line pkg/logql/expr.y:239 { - exprVAL.LabelFilter = labelfilter.NewAnd(exprDollar[1].LabelFilter, exprDollar[3].LabelFilter) + exprVAL.LabelFilter = log.NewAndLabelFilter(exprDollar[1].LabelFilter, exprDollar[3].LabelFilter) } case 78: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:241 +//line pkg/logql/expr.y:240 { - exprVAL.LabelFilter = labelfilter.NewAnd(exprDollar[1].LabelFilter, exprDollar[3].LabelFilter) + exprVAL.LabelFilter = log.NewAndLabelFilter(exprDollar[1].LabelFilter, exprDollar[3].LabelFilter) } case 79: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:242 +//line pkg/logql/expr.y:241 { - exprVAL.LabelFilter = labelfilter.NewOr(exprDollar[1].LabelFilter, exprDollar[3].LabelFilter) + exprVAL.LabelFilter = log.NewOrLabelFilter(exprDollar[1].LabelFilter, exprDollar[3].LabelFilter) } case 80: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:246 +//line pkg/logql/expr.y:245 { exprVAL.UnitFilter = exprDollar[1].DurationFilter } case 81: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:247 +//line pkg/logql/expr.y:246 { exprVAL.UnitFilter = exprDollar[1].BytesFilter } case 82: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:250 +//line pkg/logql/expr.y:249 { - exprVAL.DurationFilter = labelfilter.NewDuration(labelfilter.FilterGreaterThan, exprDollar[1].str, exprDollar[3].duration) + exprVAL.DurationFilter = log.NewDurationLabelFilter(log.LabelFilterGreaterThan, exprDollar[1].str, exprDollar[3].duration) } case 83: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:251 +//line pkg/logql/expr.y:250 { - exprVAL.DurationFilter = labelfilter.NewDuration(labelfilter.FilterGreaterThanOrEqual, exprDollar[1].str, exprDollar[3].duration) + exprVAL.DurationFilter = log.NewDurationLabelFilter(log.LabelFilterGreaterThanOrEqual, exprDollar[1].str, exprDollar[3].duration) } case 84: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:252 +//line pkg/logql/expr.y:251 { - exprVAL.DurationFilter = labelfilter.NewDuration(labelfilter.FilterLesserThan, exprDollar[1].str, exprDollar[3].duration) + exprVAL.DurationFilter = log.NewDurationLabelFilter(log.LabelFilterLesserThan, exprDollar[1].str, exprDollar[3].duration) } case 85: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:253 +//line pkg/logql/expr.y:252 { - exprVAL.DurationFilter = labelfilter.NewDuration(labelfilter.FilterLesserThanOrEqual, exprDollar[1].str, exprDollar[3].duration) + exprVAL.DurationFilter = 
log.NewDurationLabelFilter(log.LabelFilterLesserThanOrEqual, exprDollar[1].str, exprDollar[3].duration) } case 86: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:254 +//line pkg/logql/expr.y:253 { - exprVAL.DurationFilter = labelfilter.NewDuration(labelfilter.FilterNotEqual, exprDollar[1].str, exprDollar[3].duration) + exprVAL.DurationFilter = log.NewDurationLabelFilter(log.LabelFilterNotEqual, exprDollar[1].str, exprDollar[3].duration) } case 87: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:255 +//line pkg/logql/expr.y:254 { - exprVAL.DurationFilter = labelfilter.NewDuration(labelfilter.FilterEqual, exprDollar[1].str, exprDollar[3].duration) + exprVAL.DurationFilter = log.NewDurationLabelFilter(log.LabelFilterEqual, exprDollar[1].str, exprDollar[3].duration) } case 88: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:256 +//line pkg/logql/expr.y:255 { - exprVAL.DurationFilter = labelfilter.NewDuration(labelfilter.FilterEqual, exprDollar[1].str, exprDollar[3].duration) + exprVAL.DurationFilter = log.NewDurationLabelFilter(log.LabelFilterEqual, exprDollar[1].str, exprDollar[3].duration) } case 89: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:260 +//line pkg/logql/expr.y:259 { - exprVAL.BytesFilter = labelfilter.NewBytes(labelfilter.FilterGreaterThan, exprDollar[1].str, exprDollar[3].bytes) + exprVAL.BytesFilter = log.NewBytesLabelFilter(log.LabelFilterGreaterThan, exprDollar[1].str, exprDollar[3].bytes) } case 90: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:261 +//line pkg/logql/expr.y:260 { - exprVAL.BytesFilter = labelfilter.NewBytes(labelfilter.FilterGreaterThanOrEqual, exprDollar[1].str, exprDollar[3].bytes) + exprVAL.BytesFilter = log.NewBytesLabelFilter(log.LabelFilterGreaterThanOrEqual, exprDollar[1].str, exprDollar[3].bytes) } case 91: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:262 +//line pkg/logql/expr.y:261 { - exprVAL.BytesFilter = labelfilter.NewBytes(labelfilter.FilterLesserThan, exprDollar[1].str, exprDollar[3].bytes) + exprVAL.BytesFilter = log.NewBytesLabelFilter(log.LabelFilterLesserThan, exprDollar[1].str, exprDollar[3].bytes) } case 92: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:263 +//line pkg/logql/expr.y:262 { - exprVAL.BytesFilter = labelfilter.NewBytes(labelfilter.FilterLesserThanOrEqual, exprDollar[1].str, exprDollar[3].bytes) + exprVAL.BytesFilter = log.NewBytesLabelFilter(log.LabelFilterLesserThanOrEqual, exprDollar[1].str, exprDollar[3].bytes) } case 93: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:264 +//line pkg/logql/expr.y:263 { - exprVAL.BytesFilter = labelfilter.NewBytes(labelfilter.FilterNotEqual, exprDollar[1].str, exprDollar[3].bytes) + exprVAL.BytesFilter = log.NewBytesLabelFilter(log.LabelFilterNotEqual, exprDollar[1].str, exprDollar[3].bytes) } case 94: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:265 +//line pkg/logql/expr.y:264 { - exprVAL.BytesFilter = labelfilter.NewBytes(labelfilter.FilterEqual, exprDollar[1].str, exprDollar[3].bytes) + exprVAL.BytesFilter = log.NewBytesLabelFilter(log.LabelFilterEqual, exprDollar[1].str, exprDollar[3].bytes) } case 95: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:266 +//line pkg/logql/expr.y:265 { - exprVAL.BytesFilter = labelfilter.NewBytes(labelfilter.FilterEqual, exprDollar[1].str, exprDollar[3].bytes) + exprVAL.BytesFilter = log.NewBytesLabelFilter(log.LabelFilterEqual, exprDollar[1].str, exprDollar[3].bytes) } 
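+	// Reader's note on this generated file: each `case N` in this switch
+	// corresponds to grammar production N in expr.y, exprDollar[i] plays the
+	// role of $i in that rule's action, and the //line directives map each
+	// generated body back to its line in the grammar source.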
case 96: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:270 +//line pkg/logql/expr.y:269 { - exprVAL.NumberFilter = labelfilter.NewNumeric(labelfilter.FilterGreaterThan, exprDollar[1].str, mustNewFloat(exprDollar[3].str)) + exprVAL.NumberFilter = log.NewNumericLabelFilter(log.LabelFilterGreaterThan, exprDollar[1].str, mustNewFloat(exprDollar[3].str)) } case 97: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:271 +//line pkg/logql/expr.y:270 { - exprVAL.NumberFilter = labelfilter.NewNumeric(labelfilter.FilterGreaterThanOrEqual, exprDollar[1].str, mustNewFloat(exprDollar[3].str)) + exprVAL.NumberFilter = log.NewNumericLabelFilter(log.LabelFilterGreaterThanOrEqual, exprDollar[1].str, mustNewFloat(exprDollar[3].str)) } case 98: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:272 +//line pkg/logql/expr.y:271 { - exprVAL.NumberFilter = labelfilter.NewNumeric(labelfilter.FilterLesserThan, exprDollar[1].str, mustNewFloat(exprDollar[3].str)) + exprVAL.NumberFilter = log.NewNumericLabelFilter(log.LabelFilterLesserThan, exprDollar[1].str, mustNewFloat(exprDollar[3].str)) } case 99: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:273 +//line pkg/logql/expr.y:272 { - exprVAL.NumberFilter = labelfilter.NewNumeric(labelfilter.FilterLesserThanOrEqual, exprDollar[1].str, mustNewFloat(exprDollar[3].str)) + exprVAL.NumberFilter = log.NewNumericLabelFilter(log.LabelFilterLesserThanOrEqual, exprDollar[1].str, mustNewFloat(exprDollar[3].str)) } case 100: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:274 +//line pkg/logql/expr.y:273 { - exprVAL.NumberFilter = labelfilter.NewNumeric(labelfilter.FilterNotEqual, exprDollar[1].str, mustNewFloat(exprDollar[3].str)) + exprVAL.NumberFilter = log.NewNumericLabelFilter(log.LabelFilterNotEqual, exprDollar[1].str, mustNewFloat(exprDollar[3].str)) } case 101: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:275 +//line pkg/logql/expr.y:274 { - exprVAL.NumberFilter = labelfilter.NewNumeric(labelfilter.FilterEqual, exprDollar[1].str, mustNewFloat(exprDollar[3].str)) + exprVAL.NumberFilter = log.NewNumericLabelFilter(log.LabelFilterEqual, exprDollar[1].str, mustNewFloat(exprDollar[3].str)) } case 102: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:276 +//line pkg/logql/expr.y:275 { - exprVAL.NumberFilter = labelfilter.NewNumeric(labelfilter.FilterEqual, exprDollar[1].str, mustNewFloat(exprDollar[3].str)) + exprVAL.NumberFilter = log.NewNumericLabelFilter(log.LabelFilterEqual, exprDollar[1].str, mustNewFloat(exprDollar[3].str)) } case 103: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:282 +//line pkg/logql/expr.y:281 { exprVAL.BinOpExpr = mustNewBinOpExpr("or", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } case 104: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:283 +//line pkg/logql/expr.y:282 { exprVAL.BinOpExpr = mustNewBinOpExpr("and", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } case 105: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:284 +//line pkg/logql/expr.y:283 { exprVAL.BinOpExpr = mustNewBinOpExpr("unless", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } case 106: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:285 +//line pkg/logql/expr.y:284 { exprVAL.BinOpExpr = mustNewBinOpExpr("+", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } case 107: exprDollar = exprS[exprpt-4 : 
exprpt+1] -//line pkg/logql/expr.y:286 +//line pkg/logql/expr.y:285 { exprVAL.BinOpExpr = mustNewBinOpExpr("-", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } case 108: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:287 +//line pkg/logql/expr.y:286 { exprVAL.BinOpExpr = mustNewBinOpExpr("*", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } case 109: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:288 +//line pkg/logql/expr.y:287 { exprVAL.BinOpExpr = mustNewBinOpExpr("/", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } case 110: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:289 +//line pkg/logql/expr.y:288 { exprVAL.BinOpExpr = mustNewBinOpExpr("%", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } case 111: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:290 +//line pkg/logql/expr.y:289 { exprVAL.BinOpExpr = mustNewBinOpExpr("^", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } case 112: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:291 +//line pkg/logql/expr.y:290 { exprVAL.BinOpExpr = mustNewBinOpExpr("==", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } case 113: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:292 +//line pkg/logql/expr.y:291 { exprVAL.BinOpExpr = mustNewBinOpExpr("!=", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } case 114: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:293 +//line pkg/logql/expr.y:292 { exprVAL.BinOpExpr = mustNewBinOpExpr(">", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } case 115: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:294 +//line pkg/logql/expr.y:293 { exprVAL.BinOpExpr = mustNewBinOpExpr(">=", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } case 116: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:295 +//line pkg/logql/expr.y:294 { exprVAL.BinOpExpr = mustNewBinOpExpr("<", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } case 117: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:296 +//line pkg/logql/expr.y:295 { exprVAL.BinOpExpr = mustNewBinOpExpr("<=", exprDollar[3].BinOpModifier, exprDollar[1].Expr, exprDollar[4].Expr) } case 118: exprDollar = exprS[exprpt-0 : exprpt+1] -//line pkg/logql/expr.y:300 +//line pkg/logql/expr.y:299 { exprVAL.BinOpModifier = BinOpOptions{} } case 119: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:301 +//line pkg/logql/expr.y:300 { exprVAL.BinOpModifier = BinOpOptions{ReturnBool: true} } case 120: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:305 +//line pkg/logql/expr.y:304 { exprVAL.LiteralExpr = mustNewLiteralExpr(exprDollar[1].str, false) } case 121: exprDollar = exprS[exprpt-2 : exprpt+1] -//line pkg/logql/expr.y:306 +//line pkg/logql/expr.y:305 { exprVAL.LiteralExpr = mustNewLiteralExpr(exprDollar[2].str, false) } case 122: exprDollar = exprS[exprpt-2 : exprpt+1] -//line pkg/logql/expr.y:307 +//line pkg/logql/expr.y:306 { exprVAL.LiteralExpr = mustNewLiteralExpr(exprDollar[2].str, true) } case 123: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:311 +//line pkg/logql/expr.y:310 { exprVAL.VectorOp = OpTypeSum } case 124: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:312 +//line pkg/logql/expr.y:311 { exprVAL.VectorOp = OpTypeAvg } 
case 125: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:313 +//line pkg/logql/expr.y:312 { exprVAL.VectorOp = OpTypeCount } case 126: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:314 +//line pkg/logql/expr.y:313 { exprVAL.VectorOp = OpTypeMax } case 127: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:315 +//line pkg/logql/expr.y:314 { exprVAL.VectorOp = OpTypeMin } case 128: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:316 +//line pkg/logql/expr.y:315 { exprVAL.VectorOp = OpTypeStddev } case 129: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:317 +//line pkg/logql/expr.y:316 { exprVAL.VectorOp = OpTypeStdvar } case 130: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:318 +//line pkg/logql/expr.y:317 { exprVAL.VectorOp = OpTypeBottomK } case 131: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:319 +//line pkg/logql/expr.y:318 { exprVAL.VectorOp = OpTypeTopK } case 132: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:323 +//line pkg/logql/expr.y:322 { exprVAL.RangeOp = OpRangeTypeCount } case 133: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:324 +//line pkg/logql/expr.y:323 { exprVAL.RangeOp = OpRangeTypeRate } case 134: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:325 +//line pkg/logql/expr.y:324 { exprVAL.RangeOp = OpRangeTypeBytes } case 135: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:326 +//line pkg/logql/expr.y:325 { exprVAL.RangeOp = OpRangeTypeBytesRate } case 136: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:327 +//line pkg/logql/expr.y:326 { exprVAL.RangeOp = OpRangeTypeAvg } case 137: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:328 +//line pkg/logql/expr.y:327 { exprVAL.RangeOp = OpRangeTypeSum } case 138: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:329 +//line pkg/logql/expr.y:328 { exprVAL.RangeOp = OpRangeTypeMin } case 139: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:330 +//line pkg/logql/expr.y:329 { exprVAL.RangeOp = OpRangeTypeMax } case 140: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:331 +//line pkg/logql/expr.y:330 { exprVAL.RangeOp = OpRangeTypeStdvar } case 141: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:332 +//line pkg/logql/expr.y:331 { exprVAL.RangeOp = OpRangeTypeStddev } case 142: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:333 +//line pkg/logql/expr.y:332 { exprVAL.RangeOp = OpRangeTypeQuantile } case 143: exprDollar = exprS[exprpt-1 : exprpt+1] -//line pkg/logql/expr.y:338 +//line pkg/logql/expr.y:337 { exprVAL.Labels = []string{exprDollar[1].str} } case 144: exprDollar = exprS[exprpt-3 : exprpt+1] -//line pkg/logql/expr.y:339 +//line pkg/logql/expr.y:338 { exprVAL.Labels = append(exprDollar[1].Labels, exprDollar[3].str) } case 145: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:343 +//line pkg/logql/expr.y:342 { exprVAL.Grouping = &grouping{without: false, groups: exprDollar[3].Labels} } case 146: exprDollar = exprS[exprpt-4 : exprpt+1] -//line pkg/logql/expr.y:344 +//line pkg/logql/expr.y:343 { exprVAL.Grouping = &grouping{without: true, groups: exprDollar[3].Labels} } diff --git a/pkg/logql/functions.go b/pkg/logql/functions.go index b5c4d77ebf48d..d70b1a2c94356 100644 --- a/pkg/logql/functions.go +++ b/pkg/logql/functions.go @@ -6,23 +6,44 @@ import ( "sort" "time" + 
"github.com/grafana/loki/pkg/logql/log" "github.com/prometheus/prometheus/promql" ) const unsupportedErr = "unsupported range vector aggregation operation: %s" -func (r rangeAggregationExpr) Extractor() (SampleExtractor, error) { +func (r rangeAggregationExpr) Extractor() (log.SampleExtractor, error) { if err := r.validate(); err != nil { return nil, err } + stages := log.MultiStage{} + if p, ok := r.left.left.(*pipelineExpr); ok { + // if the expression is a pipeline then take all stages into account first. + st, err := p.pipeline.stages() + if err != nil { + return nil, err + } + stages = st + } + // unwrap...means we want to extract metrics from labels. if r.left.unwrap != nil { - return newLabelSampleExtractor(r.left.unwrap.identifier, r.left.unwrap.operation, r.left.unwrap.postFilters, r.grouping), nil + var convOp string + var groups []string + var without bool + switch r.left.unwrap.operation { + case OpConvDuration, OpConvDurationSeconds: + convOp = log.ConvertDuration + default: + convOp = log.ConvertFloat + } + return stages.WithLabelExtractor(r.left.unwrap.identifier, convOp, groups, without, log.ReduceAndLabelFilter(r.left.unwrap.postFilters)) } + // otherwise we extract metrics from the log line. switch r.operation { case OpRangeTypeRate, OpRangeTypeCount: - return ExtractCount, nil + return stages.WithLineExtractor(log.CountExtractor) case OpRangeTypeBytes, OpRangeTypeBytesRate: - return ExtractBytes, nil + return stages.WithLineExtractor(log.BytesExtractor) default: return nil, fmt.Errorf(unsupportedErr, r.operation) } diff --git a/pkg/logql/log/error.go b/pkg/logql/log/error.go index b0bf9442e47fd..44a784009f86b 100644 --- a/pkg/logql/log/error.go +++ b/pkg/logql/log/error.go @@ -4,7 +4,7 @@ var ( errJSON = "JSONParser" errLogfmt = "LogfmtParser" errSampleExtraction = "SampleExtraction" - errFilter = "Filter" + errLabelFilter = "LabelFilter" errorLabel = "__error__" ) diff --git a/pkg/logql/log/filter.go b/pkg/logql/log/filter.go index 95bea98aba367..79c51233b7db4 100644 --- a/pkg/logql/log/filter.go +++ b/pkg/logql/log/filter.go @@ -38,6 +38,12 @@ func (n notFilter) Filter(line []byte) bool { return !n.Filterer.Filter(line) } +func (n notFilter) ToStage() Stage { + return StageFunc(func(line []byte, lbs Labels) ([]byte, bool) { + return line, n.Filter(line) + }) +} + // newNotFilter creates a new filter which matches only if the base filter doesn't match. // If the base filter is a `or` it will recursively simplify with `and` operations. func newNotFilter(base Filterer) Filterer { diff --git a/pkg/logql/log/label_filter.go b/pkg/logql/log/label_filter.go new file mode 100644 index 0000000000000..13b71d781c3a6 --- /dev/null +++ b/pkg/logql/log/label_filter.go @@ -0,0 +1,301 @@ +package log + +import ( + "fmt" + "strconv" + "strings" + "time" + + "github.com/dustin/go-humanize" + "github.com/prometheus/prometheus/pkg/labels" +) + +var ( + _ LabelFilterer = &BinaryLabelFilter{} + _ LabelFilterer = &BytesLabelFilter{} + _ LabelFilterer = &DurationLabelFilter{} + _ LabelFilterer = &NumericLabelFilter{} + _ LabelFilterer = &StringLabelFilter{} + + NoopLabelFilter = noopLabelFilter{} +) + +// LabelFilterType is an enum for label filtering types. +type LabelFilterType int + +// Possible LabelFilterType. 
+const (
+	LabelFilterEqual LabelFilterType = iota
+	LabelFilterNotEqual
+	LabelFilterGreaterThan
+	LabelFilterGreaterThanOrEqual
+	LabelFilterLesserThan
+	LabelFilterLesserThanOrEqual
+)
+
+func (f LabelFilterType) String() string {
+	switch f {
+	case LabelFilterEqual:
+		return "=="
+	case LabelFilterNotEqual:
+		return "!="
+	case LabelFilterGreaterThan:
+		return ">"
+	case LabelFilterGreaterThanOrEqual:
+		return ">="
+	case LabelFilterLesserThan:
+		return "<"
+	case LabelFilterLesserThanOrEqual:
+		return "<="
+	default:
+		return ""
+	}
+}
+
+type LabelFilterer interface {
+	Stage
+	fmt.Stringer
+}
+
+type BinaryLabelFilter struct {
+	Left  LabelFilterer
+	Right LabelFilterer
+	and   bool
+}
+
+func NewAndLabelFilter(left LabelFilterer, right LabelFilterer) *BinaryLabelFilter {
+	return &BinaryLabelFilter{
+		Left:  left,
+		Right: right,
+		and:   true,
+	}
+}
+
+func NewOrLabelFilter(left LabelFilterer, right LabelFilterer) *BinaryLabelFilter {
+	return &BinaryLabelFilter{
+		Left:  left,
+		Right: right,
+	}
+}
+
+// Process evaluates the left filter first: an `or` short-circuits as soon as the
+// left leg passes, while an `and` always runs both legs and requires both to pass.
+func (b *BinaryLabelFilter) Process(line []byte, lbs Labels) ([]byte, bool) {
+	line, lok := b.Left.Process(line, lbs)
+	if !b.and && lok {
+		return line, true
+	}
+	line, rok := b.Right.Process(line, lbs)
+	if !b.and {
+		return line, lok || rok
+	}
+	return line, lok && rok
+}
+
+func (b *BinaryLabelFilter) String() string {
+	var sb strings.Builder
+	sb.WriteString("( ")
+	sb.WriteString(b.Left.String())
+	if b.and {
+		sb.WriteString(" , ")
+	} else {
+		sb.WriteString(" or ")
+	}
+	sb.WriteString(b.Right.String())
+	sb.WriteString(" )")
+	return sb.String()
+}
+
+type noopLabelFilter struct{}
+
+func (noopLabelFilter) String() string { return "" }
+func (noopLabelFilter) Process(line []byte, lbs Labels) ([]byte, bool) { return line, true }
+
+// ReduceAndLabelFilter folds the given filters into a left-nested chain of ANDs.
+func ReduceAndLabelFilter(filters []LabelFilterer) LabelFilterer {
+	if len(filters) == 0 {
+		return NoopLabelFilter
+	}
+	result := filters[0]
+	for _, f := range filters[1:] {
+		result = NewAndLabelFilter(result, f)
+	}
+	return result
+}
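+
+// Illustrative sketch of how the pieces above compose (an editorial example,
+// assuming only this package's Labels map type and the constructors defined here):
+//
+//	filters := []LabelFilterer{
+//		NewNumericLabelFilter(LabelFilterGreaterThanOrEqual, "status", 500),
+//		NewDurationLabelFilter(LabelFilterGreaterThan, "duration", time.Second),
+//	}
+//	combined := ReduceAndLabelFilter(filters)
+//	// combined.String() == "( status>=500 , duration>1s )"
+//	_, ok := combined.Process(nil, Labels{"status": "503", "duration": "2s"})
+//	// ok is true: both legs pass, so the AND keeps the line.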
+
+type BytesLabelFilter struct {
+	Name  string
+	Value uint64
+	Type  LabelFilterType
+}
+
+func NewBytesLabelFilter(t LabelFilterType, name string, b uint64) *BytesLabelFilter {
+	return &BytesLabelFilter{
+		Name:  name,
+		Type:  t,
+		Value: b,
+	}
+}
+
+func (d *BytesLabelFilter) Process(line []byte, lbs Labels) ([]byte, bool) {
+	if lbs.HasError() {
+		// if there's an error only the string matchers can filter it out.
+		return line, true
+	}
+	v, ok := lbs[d.Name]
+	if !ok {
+		// we have not found this label.
+		return line, false
+	}
+	value, err := humanize.ParseBytes(v)
+	if err != nil {
+		lbs.SetError(errLabelFilter)
+		return line, true
+	}
+	switch d.Type {
+	case LabelFilterEqual:
+		return line, value == d.Value
+	case LabelFilterNotEqual:
+		return line, value != d.Value
+	case LabelFilterGreaterThan:
+		return line, value > d.Value
+	case LabelFilterGreaterThanOrEqual:
+		return line, value >= d.Value
+	case LabelFilterLesserThan:
+		return line, value < d.Value
+	case LabelFilterLesserThanOrEqual:
+		return line, value <= d.Value
+	default:
+		lbs.SetError(errLabelFilter)
+		return line, true
+	}
+}
+
+func (d *BytesLabelFilter) String() string {
+	return fmt.Sprintf("%s%s%d", d.Name, d.Type, d.Value)
+}
+
+type DurationLabelFilter struct {
+	Name  string
+	Value time.Duration
+	Type  LabelFilterType
+}
+
+func NewDurationLabelFilter(t LabelFilterType, name string, d time.Duration) *DurationLabelFilter {
+	return &DurationLabelFilter{
+		Name:  name,
+		Type:  t,
+		Value: d,
+	}
+}
+
+func (d *DurationLabelFilter) Process(line []byte, lbs Labels) ([]byte, bool) {
+	if lbs.HasError() {
+		// if there's an error only the string matchers can filter it out.
+		return line, true
+	}
+	v, ok := lbs[d.Name]
+	if !ok {
+		// we have not found this label.
+		return line, false
+	}
+	value, err := time.ParseDuration(v)
+	if err != nil {
+		lbs.SetError(errLabelFilter)
+		return line, true
+	}
+	switch d.Type {
+	case LabelFilterEqual:
+		return line, value == d.Value
+	case LabelFilterNotEqual:
+		return line, value != d.Value
+	case LabelFilterGreaterThan:
+		return line, value > d.Value
+	case LabelFilterGreaterThanOrEqual:
+		return line, value >= d.Value
+	case LabelFilterLesserThan:
+		return line, value < d.Value
+	case LabelFilterLesserThanOrEqual:
+		return line, value <= d.Value
+	default:
+		lbs.SetError(errLabelFilter)
+		return line, true
+	}
+}
+
+func (d *DurationLabelFilter) String() string {
+	return fmt.Sprintf("%s%s%s", d.Name, d.Type, d.Value)
+}
+
+type NumericLabelFilter struct {
+	Name  string
+	Value float64
+	Type  LabelFilterType
+}
+
+func NewNumericLabelFilter(t LabelFilterType, name string, v float64) *NumericLabelFilter {
+	return &NumericLabelFilter{
+		Name:  name,
+		Type:  t,
+		Value: v,
+	}
+}
+
+func (n *NumericLabelFilter) Process(line []byte, lbs Labels) ([]byte, bool) {
+	if lbs.HasError() {
+		// if there's an error only the string matchers can filter it out.
+		return line, true
+	}
+	v, ok := lbs[n.Name]
+	if !ok {
+		// we have not found this label.
+ return line, false + } + value, err := strconv.ParseFloat(v, 64) + if err != nil { + lbs.SetError(errLabelFilter) + return line, true + } + switch n.Type { + case LabelFilterEqual: + return line, value == n.Value + case LabelFilterNotEqual: + return line, value != n.Value + case LabelFilterGreaterThan: + return line, value > n.Value + case LabelFilterGreaterThanOrEqual: + return line, value >= n.Value + case LabelFilterLesserThan: + return line, value < n.Value + case LabelFilterLesserThanOrEqual: + return line, value <= n.Value + default: + lbs.SetError(errLabelFilter) + return line, true + } + +} + +func (n *NumericLabelFilter) String() string { + return fmt.Sprintf("%s%s%s", n.Name, n.Type, strconv.FormatFloat(n.Value, 'f', -1, 64)) +} + +type StringLabelFilter struct { + *labels.Matcher +} + +func NewStringLabelFilter(m *labels.Matcher) *StringLabelFilter { + return &StringLabelFilter{ + Matcher: m, + } +} + +func (s *StringLabelFilter) Process(line []byte, lbs Labels) ([]byte, bool) { + for k, v := range lbs { + if k == s.Name { + return line, s.Matches(v) + } + } + return line, false +} diff --git a/pkg/logql/log/label_filter_test.go b/pkg/logql/log/label_filter_test.go new file mode 100644 index 0000000000000..6bdd5f3c5f1c6 --- /dev/null +++ b/pkg/logql/log/label_filter_test.go @@ -0,0 +1,155 @@ +package log + +import ( + "testing" + "time" + + "github.com/prometheus/prometheus/pkg/labels" + "github.com/stretchr/testify/require" +) + +func TestBinary_Filter(t *testing.T) { + + tests := []struct { + f *BinaryLabelFilter + lbs Labels + want bool + wantLbs Labels + }{ + { + NewAndLabelFilter(NewNumericLabelFilter(LabelFilterEqual, "foo", 5), NewDurationLabelFilter(LabelFilterEqual, "bar", 1*time.Second)), + Labels{"foo": "5", "bar": "1s"}, + true, + Labels{"foo": "5", "bar": "1s"}, + }, + { + NewAndLabelFilter(NewNumericLabelFilter(LabelFilterEqual, "foo", 5), NewBytesLabelFilter(LabelFilterEqual, "bar", 42)), + Labels{"foo": "5", "bar": "42B"}, + true, + Labels{"foo": "5", "bar": "42B"}, + }, + { + NewAndLabelFilter( + NewNumericLabelFilter(LabelFilterEqual, "foo", 5), + NewDurationLabelFilter(LabelFilterEqual, "bar", 1*time.Second), + ), + Labels{"foo": "6", "bar": "1s"}, + false, + Labels{"foo": "6", "bar": "1s"}, + }, + { + NewAndLabelFilter( + NewNumericLabelFilter(LabelFilterEqual, "foo", 5), + NewDurationLabelFilter(LabelFilterEqual, "bar", 1*time.Second), + ), + Labels{"foo": "5", "bar": "2s"}, + false, + Labels{"foo": "5", "bar": "2s"}, + }, + { + NewAndLabelFilter( + NewStringLabelFilter(labels.MustNewMatcher(labels.MatchEqual, "foo", "5")), + NewDurationLabelFilter(LabelFilterEqual, "bar", 1*time.Second), + ), + Labels{"foo": "5", "bar": "1s"}, + true, + Labels{"foo": "5", "bar": "1s"}, + }, + { + NewAndLabelFilter( + NewStringLabelFilter(labels.MustNewMatcher(labels.MatchEqual, "foo", "5")), + NewDurationLabelFilter(LabelFilterEqual, "bar", 1*time.Second), + ), + Labels{"foo": "6", "bar": "1s"}, + false, + Labels{"foo": "6", "bar": "1s"}, + }, + { + NewAndLabelFilter( + NewOrLabelFilter( + NewDurationLabelFilter(LabelFilterGreaterThan, "duration", 1*time.Second), + NewNumericLabelFilter(LabelFilterNotEqual, "status", 200), + ), + NewStringLabelFilter(labels.MustNewMatcher(labels.MatchNotEqual, "method", "POST")), + ), + Labels{ + "duration": "2s", + "status": "200", + "method": "GET", + }, + true, + Labels{ + "duration": "2s", + "status": "200", + "method": "GET", + }, + }, + { + NewAndLabelFilter( + NewOrLabelFilter( + NewDurationLabelFilter(LabelFilterGreaterThan, 
"duration", 1*time.Second), + NewNumericLabelFilter(LabelFilterNotEqual, "status", 200), + ), + NewStringLabelFilter(labels.MustNewMatcher(labels.MatchNotEqual, "method", "POST")), + ), + Labels{ + "duration": "2s", + "status": "200", + "method": "POST", + }, + false, + Labels{ + "duration": "2s", + "status": "200", + "method": "POST", + }, + }, + { + NewAndLabelFilter( + NewOrLabelFilter( + NewDurationLabelFilter(LabelFilterGreaterThan, "duration", 1*time.Second), + NewNumericLabelFilter(LabelFilterNotEqual, "status", 200), + ), + NewStringLabelFilter(labels.MustNewMatcher(labels.MatchNotEqual, "method", "POST")), + ), + Labels{ + "duration": "2s", + "status": "500", + "method": "POST", + }, + false, + Labels{ + "duration": "2s", + "status": "500", + "method": "POST", + }, + }, + { + NewAndLabelFilter( + NewOrLabelFilter( + NewDurationLabelFilter(LabelFilterGreaterThan, "duration", 3*time.Second), + NewNumericLabelFilter(LabelFilterNotEqual, "status", 200), + ), + NewStringLabelFilter(labels.MustNewMatcher(labels.MatchNotEqual, "method", "POST")), + ), + Labels{ + "duration": "2s", + "status": "200", + "method": "POST", + }, + false, + Labels{ + "duration": "2s", + "status": "200", + "method": "POST", + }, + }, + } + for _, tt := range tests { + t.Run(tt.f.String(), func(t *testing.T) { + _, got := tt.f.Process(nil, tt.lbs) + require.Equal(t, tt.want, got) + require.Equal(t, tt.wantLbs, tt.lbs) + }) + } +} diff --git a/pkg/logql/log/labelfilter/bytes.go b/pkg/logql/log/labelfilter/bytes.go deleted file mode 100644 index 86337fa4be2e8..0000000000000 --- a/pkg/logql/log/labelfilter/bytes.go +++ /dev/null @@ -1,63 +0,0 @@ -package labelfilter - -import ( - "fmt" - - "github.com/dustin/go-humanize" - "github.com/grafana/loki/pkg/logql/log" -) - -type Bytes struct { - Name string - Value uint64 - Type FilterType -} - -func NewBytes(t FilterType, name string, b uint64) *Bytes { - return &Bytes{ - Name: name, - Type: t, - Value: b, - } -} - -func (d *Bytes) Process(line []byte, lbs log.Labels) ([]byte, bool) { - if lbs.HasError() { - // if there's an error only the string matchers can filter it out. - return line, true - } - for k, v := range lbs { - if k == d.Name { - value, err := humanize.ParseBytes(v) - if err != nil { - lbs.SetError("LabelFilterError") - return line, true - } - switch d.Type { - case FilterEqual: - return line, value == d.Value - case FilterNotEqual: - return line, value != d.Value - case FilterGreaterThan: - return line, value > d.Value - case FilterGreaterThanOrEqual: - return line, value >= d.Value - case FilterLesserThan: - return line, value < d.Value - case FilterLesserThanOrEqual: - return line, value <= d.Value - default: - if err != nil { - lbs.SetError("LabelFilterError") - return line, true - } - } - } - } - // we have not found this label. 
- return line, false -} - -func (d *Bytes) String() string { - return fmt.Sprintf("%s%s%d", d.Name, d.Type, d.Value) -} diff --git a/pkg/logql/log/labelfilter/duration.go b/pkg/logql/log/labelfilter/duration.go deleted file mode 100644 index ff4cea0826a5c..0000000000000 --- a/pkg/logql/log/labelfilter/duration.go +++ /dev/null @@ -1,61 +0,0 @@ -package labelfilter - -import ( - "fmt" - "time" - - "github.com/grafana/loki/pkg/logql/log" -) - -type Duration struct { - Name string - Value time.Duration - Type FilterType -} - -func NewDuration(t FilterType, name string, d time.Duration) *Duration { - return &Duration{ - Name: name, - Type: t, - Value: d, - } -} - -func (d *Duration) Process(line []byte, lbs log.Labels) ([]byte, bool) { - if lbs.HasError() { - // if there's an error only the string matchers can filter out. - return line, true - } - for k, v := range lbs { - if k == d.Name { - value, err := time.ParseDuration(v) - if err != nil { - lbs.SetError("LabelFilterError") - return line, true - } - switch d.Type { - case FilterEqual: - return line, value == d.Value - case FilterNotEqual: - return line, value != d.Value - case FilterGreaterThan: - return line, value > d.Value - case FilterGreaterThanOrEqual: - return line, value >= d.Value - case FilterLesserThan: - return line, value < d.Value - case FilterLesserThanOrEqual: - return line, value <= d.Value - default: - lbs.SetError("LabelFilterError") - return line, true - } - } - } - // we have not found this label. - return line, false -} - -func (d *Duration) String() string { - return fmt.Sprintf("%s%s%s", d.Name, d.Type, d.Value) -} diff --git a/pkg/logql/log/labelfilter/filter.go b/pkg/logql/log/labelfilter/filter.go deleted file mode 100644 index 8fdd144fdeb18..0000000000000 --- a/pkg/logql/log/labelfilter/filter.go +++ /dev/null @@ -1,118 +0,0 @@ -package labelfilter - -import ( - "fmt" - "strings" - - "github.com/grafana/loki/pkg/logql/log" -) - -var ( - _ Filterer = &Binary{} - _ Filterer = &Bytes{} - _ Filterer = &Duration{} - _ Filterer = &String{} - _ Filterer = &Numeric{} - - Noop = noopFilter{} -) - -// FilterType is an enum for label filtering types. -type FilterType int - -func (f FilterType) String() string { - switch f { - case FilterEqual: - return "==" - case FilterNotEqual: - return "!=" - case FilterGreaterThan: - return ">" - case FilterGreaterThanOrEqual: - return ">=" - case FilterLesserThan: - return "<" - case FilterLesserThanOrEqual: - return "<=" - default: - return "" - } -} - -// Possible FilterTypes. 
-const ( - FilterEqual FilterType = iota - FilterNotEqual - FilterGreaterThan - FilterGreaterThanOrEqual - FilterLesserThan - FilterLesserThanOrEqual -) - -type Filterer interface { - log.Stage - fmt.Stringer -} - -type Binary struct { - Left Filterer - Right Filterer - and bool -} - -func NewAnd(left Filterer, right Filterer) *Binary { - return &Binary{ - Left: left, - Right: right, - and: true, - } -} - -func NewOr(left Filterer, right Filterer) *Binary { - return &Binary{ - Left: left, - Right: right, - } -} - -func (b *Binary) Process(line []byte, lbs log.Labels) ([]byte, bool) { - line, lok := b.Left.Process(line, lbs) - if !b.and && lok { - return line, true - } - line, rok := b.Right.Process(line, lbs) - if !b.and { - return line, lok || rok - } - return line, lok && rok -} - -func (b *Binary) String() string { - var sb strings.Builder - sb.WriteString("( ") - sb.WriteString(b.Left.String()) - if b.and { - sb.WriteString(" , ") - } else { - sb.WriteString(" or ") - } - sb.WriteString(b.Right.String()) - sb.WriteString(" )") - return sb.String() -} - -type noopFilter struct{} - -func (noopFilter) String() string { return "" } -func (noopFilter) Process(line []byte, lbs log.Labels) ([]byte, bool) { return line, true } - -func ReduceAnd(filters []Filterer) Filterer { - if len(filters) == 0 { - return Noop - } - result := filters[0] - for _, f := range filters[0:] { - result = NewAnd(result, f) - } - return result -} diff --git a/pkg/logql/log/labelfilter/filter_test.go b/pkg/logql/log/labelfilter/filter_test.go deleted file mode 100644 index 33b7179accef1..0000000000000 --- a/pkg/logql/log/labelfilter/filter_test.go +++ /dev/null @@ -1,157 +0,0 @@ -package labelfilter - -import ( - "testing" - "time" - - "github.com/prometheus/prometheus/pkg/labels" - "github.com/stretchr/testify/require" - - "github.com/grafana/loki/pkg/logql/log" -) - -func TestBinary_Filter(t *testing.T) { - - tests := []struct { - f *Binary - lbs log.Labels - want bool - wantLbs log.Labels - }{ - { - NewAnd(NewNumeric(FilterEqual, "foo", 5), NewDuration(FilterEqual, "bar", 1*time.Second)), - log.Labels{"foo": "5", "bar": "1s"}, - true, - log.Labels{"foo": "5", "bar": "1s"}, - }, - { - NewAnd(NewNumeric(FilterEqual, "foo", 5), NewBytes(FilterEqual, "bar", 42)), - log.Labels{"foo": "5", "bar": "42B"}, - true, - log.Labels{"foo": "5", "bar": "42B"}, - }, - { - NewAnd( - NewNumeric(FilterEqual, "foo", 5), - NewDuration(FilterEqual, "bar", 1*time.Second), - ), - log.Labels{"foo": "6", "bar": "1s"}, - false, - log.Labels{"foo": "6", "bar": "1s"}, - }, - { - NewAnd( - NewNumeric(FilterEqual, "foo", 5), - NewDuration(FilterEqual, "bar", 1*time.Second), - ), - log.Labels{"foo": "5", "bar": "2s"}, - false, - log.Labels{"foo": "5", "bar": "2s"}, - }, - { - NewAnd( - NewString(labels.MustNewMatcher(labels.MatchEqual, "foo", "5")), - NewDuration(FilterEqual, "bar", 1*time.Second), - ), - log.Labels{"foo": "5", "bar": "1s"}, - true, - log.Labels{"foo": "5", "bar": "1s"}, - }, - { - NewAnd( - NewString(labels.MustNewMatcher(labels.MatchEqual, "foo", "5")), - NewDuration(FilterEqual, "bar", 1*time.Second), - ), - log.Labels{"foo": "6", "bar": "1s"}, - false, - log.Labels{"foo": "6", "bar": "1s"}, - }, - { - NewAnd( - NewOr( - NewDuration(FilterGreaterThan, "duration", 1*time.Second), - NewNumeric(FilterNotEqual, "status", 200), - ), - NewString(labels.MustNewMatcher(labels.MatchNotEqual, "method", "POST")), - ), - log.Labels{ - "duration": "2s", - "status": "200", - "method": "GET", - }, - true, - log.Labels{ - "duration": "2s", - 
"status": "200", - "method": "GET", - }, - }, - { - NewAnd( - NewOr( - NewDuration(FilterGreaterThan, "duration", 1*time.Second), - NewNumeric(FilterNotEqual, "status", 200), - ), - NewString(labels.MustNewMatcher(labels.MatchNotEqual, "method", "POST")), - ), - log.Labels{ - "duration": "2s", - "status": "200", - "method": "POST", - }, - false, - log.Labels{ - "duration": "2s", - "status": "200", - "method": "POST", - }, - }, - { - NewAnd( - NewOr( - NewDuration(FilterGreaterThan, "duration", 1*time.Second), - NewNumeric(FilterNotEqual, "status", 200), - ), - NewString(labels.MustNewMatcher(labels.MatchNotEqual, "method", "POST")), - ), - log.Labels{ - "duration": "2s", - "status": "500", - "method": "POST", - }, - false, - log.Labels{ - "duration": "2s", - "status": "500", - "method": "POST", - }, - }, - { - NewAnd( - NewOr( - NewDuration(FilterGreaterThan, "duration", 3*time.Second), - NewNumeric(FilterNotEqual, "status", 200), - ), - NewString(labels.MustNewMatcher(labels.MatchNotEqual, "method", "POST")), - ), - log.Labels{ - "duration": "2s", - "status": "200", - "method": "POST", - }, - false, - log.Labels{ - "duration": "2s", - "status": "200", - "method": "POST", - }, - }, - } - for _, tt := range tests { - t.Run(tt.f.String(), func(t *testing.T) { - _, got := tt.f.Process(nil, tt.lbs) - require.Equal(t, tt.want, got) - require.Equal(t, tt.wantLbs, tt.lbs) - }) - } -} diff --git a/pkg/logql/log/labelfilter/number.go b/pkg/logql/log/labelfilter/number.go deleted file mode 100644 index 6a61d3ebcea89..0000000000000 --- a/pkg/logql/log/labelfilter/number.go +++ /dev/null @@ -1,60 +0,0 @@ -package labelfilter - -import ( - "fmt" - "strconv" - - "github.com/grafana/loki/pkg/logql/log" -) - -type Numeric struct { - Name string - Value float64 - Type FilterType -} - -func NewNumeric(t FilterType, name string, v float64) *Numeric { - return &Numeric{ - Name: name, - Type: t, - Value: v, - } -} - -func (n *Numeric) Process(line []byte, lbs log.Labels) ([]byte, bool) { - if lbs.HasError() { - // if there's an error only the string matchers can filter out. 
- return line, true - } - for k, v := range lbs { - if k == n.Name { - value, err := strconv.ParseFloat(v, 64) - if err != nil { - lbs.SetError("LabelFilterError") - return line, true - } - switch n.Type { - case FilterEqual: - return line, value == n.Value - case FilterNotEqual: - return line, value != n.Value - case FilterGreaterThan: - return line, value > n.Value - case FilterGreaterThanOrEqual: - return line, value >= n.Value - case FilterLesserThan: - return line, value < n.Value - case FilterLesserThanOrEqual: - return line, value <= n.Value - default: - lbs.SetError("LabelFilterError") - return line, true - } - } - } - return line, false -} - -func (n *Numeric) String() string { - return fmt.Sprintf("%s%s%s", n.Name, n.Type, strconv.FormatFloat(n.Value, 'f', -1, 64)) -} diff --git a/pkg/logql/log/labelfilter/string.go b/pkg/logql/log/labelfilter/string.go deleted file mode 100644 index 1a168edfbc0a1..0000000000000 --- a/pkg/logql/log/labelfilter/string.go +++ /dev/null @@ -1,26 +0,0 @@ -package labelfilter - -import ( - "github.com/grafana/loki/pkg/logql/log" - - "github.com/prometheus/prometheus/pkg/labels" -) - -type String struct { - *labels.Matcher -} - -func NewString(m *labels.Matcher) *String { - return &String{ - Matcher: m, - } -} - -func (s *String) Process(line []byte, lbs log.Labels) ([]byte, bool) { - for k, v := range lbs { - if k == s.Name { - return line, s.Matches(v) - } - } - return line, false -} diff --git a/pkg/logql/log/metrics_extraction.go b/pkg/logql/log/metrics_extraction.go new file mode 100644 index 0000000000000..57695eaf5e186 --- /dev/null +++ b/pkg/logql/log/metrics_extraction.go @@ -0,0 +1,148 @@ +package log + +import ( + "strconv" + "time" + + "github.com/pkg/errors" + "github.com/prometheus/prometheus/pkg/labels" +) + +type SampleExtractor interface { + Process(line []byte, lbs labels.Labels) (float64, labels.Labels, bool) +} + +type SampleExtractorFunc func(line []byte, lbs labels.Labels) (float64, labels.Labels, bool) + +func (fn SampleExtractorFunc) Process(line []byte, lbs labels.Labels) (float64, labels.Labels, bool) { + return fn(line, lbs) +} + +type LineExtractor func([]byte) float64 + +func (l LineExtractor) ToSampleExtractor() SampleExtractor { + return SampleExtractorFunc(func(line []byte, lbs labels.Labels) (float64, labels.Labels, bool) { + return l(line), lbs, true + }) +} + +var ( + CountExtractor LineExtractor = func(line []byte) float64 { return 1. 
}
+	BytesExtractor LineExtractor = func(line []byte) float64 { return float64(len(line)) }
+)
+
+type lineSampleExtractor struct {
+	Stage
+	LineExtractor
+}
+
+func (l lineSampleExtractor) Process(line []byte, lbs labels.Labels) (float64, labels.Labels, bool) {
+	labelmap := lbs.Map()
+	line, ok := l.Stage.Process(line, labelmap)
+	if !ok {
+		return 0, nil, false
+	}
+	return l.LineExtractor(line), labels.FromMap(labelmap), true
+}
+
+func (m MultiStage) WithLineExtractor(ex LineExtractor) (SampleExtractor, error) {
+	if len(m) == 0 {
+		return ex.ToSampleExtractor(), nil
+	}
+	return lineSampleExtractor{Stage: m.Reduce(), LineExtractor: ex}, nil
+}
+
+type conversionFn func(value string) (float64, error)
+
+type labelSampleExtractor struct {
+	preStage   Stage
+	postFilter Stage
+
+	labelName    string
+	conversionFn conversionFn
+	groups       []string
+	without      bool
+}
+
+const (
+	ConvertDuration = "duration"
+	ConvertFloat    = "float"
+)
+
+func (m MultiStage) WithLabelExtractor(
+	labelName, conversion string,
+	groups []string, without bool,
+	postFilter Stage,
+) (SampleExtractor, error) {
+	var convFn conversionFn
+	switch conversion {
+	case ConvertDuration:
+		convFn = convertDuration
+	case ConvertFloat:
+		convFn = convertFloat
+	default:
+		return nil, errors.Errorf("unsupported conversion operation %s", conversion)
+	}
+	return &labelSampleExtractor{
+		preStage:     m.Reduce(),
+		conversionFn: convFn,
+		groups:       groups,
+		labelName:    labelName,
+		postFilter:   postFilter,
+		without:      without,
+	}, nil
+}
+
+func (l *labelSampleExtractor) Process(line []byte, lbs labels.Labels) (float64, labels.Labels, bool) {
+	// apply the pre-stage pipeline first
+	labelmap := Labels(lbs.Map())
+	line, ok := l.preStage.Process(line, labelmap)
+	if !ok {
+		return 0, nil, false
+	}
+	// convert the label value into a sample value
+	var v float64
+	stringValue := labelmap[l.labelName]
+	if stringValue == "" {
+		labelmap.SetError(errSampleExtraction)
+	} else {
+		var err error
+		v, err = l.conversionFn(stringValue)
+		if err != nil {
+			labelmap.SetError(errSampleExtraction)
+		}
+	}
+	// apply post filters
+	_, ok = l.postFilter.Process(line, labelmap)
+	if !ok {
+		return 0, nil, false
+	}
+	if labelmap.HasError() {
+		// We still have an error after post filtering.
+		// Return now, before applying grouping, otherwise the error might get lost.
+		return v, labels.FromMap(labelmap), true
+	}
+	return v, l.groupLabels(labels.FromMap(labelmap)), true
+}
+
+func (l *labelSampleExtractor) groupLabels(lbs labels.Labels) labels.Labels {
+	if l.groups != nil {
+		if l.without {
+			return lbs.WithoutLabels(append(l.groups, l.labelName)...)
+		}
+		return lbs.WithLabels(l.groups...)
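To make the new label extractor concrete, here is a usage sketch mirroring the "convert duration with" test case further down: convert the `duration` label to seconds and keep only `foo`. It assumes only APIs visible in this patch, including `NoopStage`:

```go
// Sketch: extract `duration` as a float64 of seconds, grouped by `foo`.
ex, err := MultiStage{}.WithLabelExtractor(
	"duration", ConvertDuration, []string{"foo"}, false, NoopStage,
)
if err != nil {
	panic(err)
}
v, out, ok := ex.Process(nil, labels.FromStrings("duration", "250ms", "foo", "bar"))
fmt.Println(v, out, ok) // 0.25 {foo="bar"} true
```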
+ } + return lbs.WithoutLabels(l.labelName) +} + +func convertFloat(v string) (float64, error) { + return strconv.ParseFloat(v, 64) +} + +func convertDuration(v string) (float64, error) { + d, err := time.ParseDuration(v) + if err != nil { + return 0, err + } + return d.Seconds(), nil +} diff --git a/pkg/logql/log/series_extractor_test.go b/pkg/logql/log/metrics_extraction_test.go similarity index 60% rename from pkg/logql/log/series_extractor_test.go rename to pkg/logql/log/metrics_extraction_test.go index 9f949a00432e7..b81a54bfc1301 100644 --- a/pkg/logql/log/series_extractor_test.go +++ b/pkg/logql/log/metrics_extraction_test.go @@ -1,4 +1,4 @@ -package logql +package log import ( "sort" @@ -11,7 +11,7 @@ import ( func Test_labelSampleExtractor_Extract(t *testing.T) { tests := []struct { name string - ex *labelSampleExtractor + ex SampleExtractor in labels.Labels want float64 wantLbs labels.Labels @@ -19,7 +19,9 @@ func Test_labelSampleExtractor_Extract(t *testing.T) { }{ { "convert float", - newLabelSampleExtractor("foo", "", nil, nil), + mustSampleExtractor(MultiStage{}.WithLabelExtractor( + "foo", ConvertFloat, nil, false, NoopStage, + )), labels.Labels{labels.Label{Name: "foo", Value: "15.0"}}, 15, labels.Labels{}, @@ -27,11 +29,9 @@ func Test_labelSampleExtractor_Extract(t *testing.T) { }, { "convert float without", - newLabelSampleExtractor("foo", - "", - nil, - &grouping{without: true, groups: []string{"bar", "buzz"}}, - ), + mustSampleExtractor(MultiStage{}.WithLabelExtractor( + "foo", ConvertFloat, []string{"bar", "buzz"}, true, NoopStage, + )), labels.Labels{ {Name: "foo", Value: "10"}, {Name: "bar", Value: "foo"}, @@ -46,11 +46,9 @@ func Test_labelSampleExtractor_Extract(t *testing.T) { }, { "convert float with", - newLabelSampleExtractor("foo", - "", - nil, - &grouping{without: false, groups: []string{"bar", "buzz"}}, - ), + mustSampleExtractor(MultiStage{}.WithLabelExtractor( + "foo", ConvertFloat, []string{"bar", "buzz"}, false, NoopStage, + )), labels.Labels{ {Name: "foo", Value: "0.6"}, {Name: "bar", Value: "foo"}, @@ -66,11 +64,9 @@ func Test_labelSampleExtractor_Extract(t *testing.T) { }, { "convert duration with", - newLabelSampleExtractor("foo", - OpConvDuration, - nil, - &grouping{without: false, groups: []string{"bar", "buzz"}}, - ), + mustSampleExtractor(MultiStage{}.WithLabelExtractor( + "foo", ConvertDuration, []string{"bar", "buzz"}, false, NoopStage, + )), labels.Labels{ {Name: "foo", Value: "500ms"}, {Name: "bar", Value: "foo"}, @@ -84,34 +80,21 @@ func Test_labelSampleExtractor_Extract(t *testing.T) { }, true, }, - { - "convert duration_seconds with", - newLabelSampleExtractor("foo", - OpConvDurationSeconds, - nil, - &grouping{without: false, groups: []string{"bar", "buzz"}}, - ), - labels.Labels{ - {Name: "foo", Value: "250ms"}, - {Name: "bar", Value: "foo"}, - {Name: "buzz", Value: "blip"}, - {Name: "namespace", Value: "dev"}, - }, - 0.25, - labels.Labels{ - {Name: "bar", Value: "foo"}, - {Name: "buzz", Value: "blip"}, - }, - true, - }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { sort.Sort(tt.in) - ok, outval, outlbs := tt.ex.Extract([]byte(""), tt.in) + outval, outlbs, ok := tt.ex.Process([]byte(""), tt.in) require.Equal(t, tt.wantOk, ok) require.Equal(t, tt.want, outval) require.Equal(t, tt.wantLbs, outlbs) }) } } + +func mustSampleExtractor(ex SampleExtractor, err error) SampleExtractor { + if err != nil { + panic(err) + } + return ex +} diff --git a/pkg/logql/log/pipeline.go b/pkg/logql/log/pipeline.go index 
c5d1bbc3cb898..75a2cbddc88f9 100644 --- a/pkg/logql/log/pipeline.go +++ b/pkg/logql/log/pipeline.go @@ -40,14 +40,30 @@ type MultiStage []Stage func (m MultiStage) Process(line []byte, lbs labels.Labels) ([]byte, labels.Labels, bool) { var ok bool if len(m) == 0 { - return line, lbs, ok + return line, lbs, true } labelmap := lbs.Map() for _, p := range m { line, ok = p.Process(line, labelmap) if !ok { - return line, labels.FromMap(labelmap), ok + return nil, nil, false } } - return line, labels.FromMap(labelmap), ok + return line, labels.FromMap(labelmap), true +} + +func (m MultiStage) Reduce() Stage { + if len(m) == 0 { + return NoopStage + } + return StageFunc(func(line []byte, lbs Labels) ([]byte, bool) { + var ok bool + for _, p := range m { + line, ok = p.Process(line, lbs) + if !ok { + return nil, false + } + } + return line, true + }) } diff --git a/pkg/logql/log/series_extractor.go b/pkg/logql/log/series_extractor.go deleted file mode 100644 index 96ebdf0212e6e..0000000000000 --- a/pkg/logql/log/series_extractor.go +++ /dev/null @@ -1,96 +0,0 @@ -package logql - -import ( - "strconv" - "time" - - "github.com/grafana/loki/pkg/logql/log/labelfilter" - - "github.com/prometheus/prometheus/pkg/labels" -) - -var ( - ExtractBytes = bytesSampleExtractor{} - ExtractCount = countSampleExtractor{} -) - -// SampleExtractor transforms a log entry into a sample. -// In case of failure the second return value will be false. -type SampleExtractor interface { - Extract(line []byte, lbs labels.Labels) (bool, float64, labels.Labels) -} - -type countSampleExtractor struct{} - -func (countSampleExtractor) Extract(line []byte, lbs labels.Labels) (bool, float64, labels.Labels) { - return true, 1., lbs -} - -type bytesSampleExtractor struct{} - -func (bytesSampleExtractor) Extract(line []byte, lbs labels.Labels) (bool, float64, labels.Labels) { - return true, float64(len(line)), lbs -} - -type labelSampleExtractor struct { - labelName string - gr *grouping - postFilter labelfilter.Filterer - conversion string // the sample conversion operation to attempt - - builder *labels.Builder -} - -func newLabelSampleExtractor(labelName, conversion string, postFilters []labelfilter.Filterer, gr *grouping) *labelSampleExtractor { - return &labelSampleExtractor{ - labelName: labelName, - conversion: conversion, - gr: gr, - postFilter: labelfilter.ReduceAnd(postFilters), - builder: labels.NewBuilder(nil), - } -} - -func (l *labelSampleExtractor) Extract(_ []byte, lbs labels.Labels) (bool, float64, labels.Labels) { - stringValue := lbs.Get(l.labelName) - l.builder.Reset(lbs) - if stringValue == "" { - l.builder.Set(errorLabel, errSampleExtraction) - return true, 0, lbs - } - var f float64 - var err error - switch l.conversion { - case OpConvDuration, OpConvDurationSeconds: - f, err = convertDuration(stringValue) - default: - f, err = convertFloat(stringValue) - } - if err != nil { - l.builder.Set(errorLabel, errSampleExtraction) - return true, 0, lbs - } - return true, f, l.groupLabels(lbs) -} - -func (l *labelSampleExtractor) groupLabels(lbs labels.Labels) labels.Labels { - if l.gr != nil { - if l.gr.without { - return lbs.WithoutLabels(append(l.gr.groups, l.labelName)...) - } - return lbs.WithLabels(l.gr.groups...) 
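The `pipeline.go` hunk above also fixes a zero-value bug: an empty `MultiStage` used to return the uninitialized `ok` (`false`), silently dropping every line; it now passes lines through untouched. A small sketch of the corrected behaviour:

```go
// Sketch: an empty pipeline is a no-op that keeps the line.
line, out, ok := log.MultiStage{}.Process([]byte("hello"), labels.FromStrings("app", "foo"))
fmt.Println(string(line), out, ok) // hello {app="foo"} true
```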
- } - return lbs.WithoutLabels(l.labelName) -} - -func convertFloat(v string) (float64, error) { - return strconv.ParseFloat(v, 64) -} - -func convertDuration(v string) (float64, error) { - d, err := time.ParseDuration(v) - if err != nil { - return 0, err - } - return d.Seconds(), nil -} diff --git a/pkg/logql/parser_test.go b/pkg/logql/parser_test.go index 9826e1b34088b..ae212872931c0 100644 --- a/pkg/logql/parser_test.go +++ b/pkg/logql/parser_test.go @@ -9,10 +9,10 @@ import ( "github.com/prometheus/prometheus/pkg/labels" "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/logql/log/labelfilter" + "github.com/grafana/loki/pkg/logql/log" ) -func newString(s string) *string { +func NewStringLabelFilter(s string) *string { return &s } @@ -22,922 +22,922 @@ func TestParse(t *testing.T) { exp Expr err error }{ - { - // raw string - in: "count_over_time({foo=~`bar\\w+`}[12h] |~ `error\\`)", - exp: &rangeAggregationExpr{ - operation: "count_over_time", - left: &logRange{ - left: &pipelineExpr{ - pipeline: MultiPipelineExpr{ - newLineFilterExpr(nil, labels.MatchRegexp, "error\\"), - }, - left: &matchersExpr{ - matchers: []*labels.Matcher{ - mustNewMatcher(labels.MatchRegexp, "foo", "bar\\w+"), - }, - }, - }, - interval: 12 * time.Hour, - }, - }, - }, - { - // test [12h] before filter expr - in: `count_over_time({foo="bar"}[12h] |= "error")`, - exp: &rangeAggregationExpr{ - operation: "count_over_time", - left: &logRange{ - left: newPipelineExpr( - newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "foo", Value: "bar"}}), - MultiPipelineExpr{ - newLineFilterExpr(nil, labels.MatchEqual, "error"), - }, - ), - interval: 12 * time.Hour, - }, - }, - }, - { - // test [12h] after filter expr - in: `count_over_time({foo="bar"} |= "error" [12h])`, - exp: &rangeAggregationExpr{ - operation: "count_over_time", - left: &logRange{ - left: newPipelineExpr( - newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "foo", Value: "bar"}}), - MultiPipelineExpr{newLineFilterExpr(nil, labels.MatchEqual, "error")}, - ), - interval: 12 * time.Hour, - }, - }, - }, - { - in: `{foo="bar"}`, - exp: &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}}, - }, - { - in: `{ foo = "bar" }`, - exp: &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}}, - }, - { - in: `{ foo != "bar" }`, - exp: &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchNotEqual, "foo", "bar")}}, - }, - { - in: `{ foo =~ "bar" }`, - exp: &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchRegexp, "foo", "bar")}}, - }, - { - in: `{ foo !~ "bar" }`, - exp: &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchNotRegexp, "foo", "bar")}}, - }, - { - in: `count_over_time({ foo !~ "bar" }[12m])`, - exp: &rangeAggregationExpr{ - left: &logRange{ - left: &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchNotRegexp, "foo", "bar")}}, - interval: 12 * time.Minute, - }, - operation: "count_over_time", - }, - }, - { - in: `bytes_over_time({ foo !~ "bar" }[12m])`, - exp: &rangeAggregationExpr{ - left: &logRange{ - left: &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchNotRegexp, "foo", "bar")}}, - interval: 12 * time.Minute, - }, - operation: OpRangeTypeBytes, - }, - }, - { - in: `bytes_rate({ foo !~ "bar" }[12m])`, - exp: &rangeAggregationExpr{ - left: &logRange{ - left: &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchNotRegexp, "foo", "bar")}}, 
- interval: 12 * time.Minute, - }, - operation: OpRangeTypeBytesRate, - }, - }, - { - in: `rate({ foo !~ "bar" }[5h])`, - exp: &rangeAggregationExpr{ - left: &logRange{ - left: &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchNotRegexp, "foo", "bar")}}, - interval: 5 * time.Hour, - }, - operation: "rate", - }, - }, - { - in: `rate({ foo !~ "bar" }[5d])`, - exp: &rangeAggregationExpr{ - left: &logRange{ - left: &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchNotRegexp, "foo", "bar")}}, - interval: 5 * 24 * time.Hour, - }, - operation: "rate", - }, - }, - { - in: `count_over_time({ foo !~ "bar" }[1w])`, - exp: &rangeAggregationExpr{ - left: &logRange{ - left: &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchNotRegexp, "foo", "bar")}}, - interval: 7 * 24 * time.Hour, - }, - operation: "count_over_time", - }, - }, - { - in: `sum(rate({ foo !~ "bar" }[5h]))`, - exp: mustNewVectorAggregationExpr(&rangeAggregationExpr{ - left: &logRange{ - left: &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchNotRegexp, "foo", "bar")}}, - interval: 5 * time.Hour, - }, - operation: "rate", - }, "sum", nil, nil), - }, - { - in: `sum(rate({ foo !~ "bar" }[1y]))`, - exp: mustNewVectorAggregationExpr(&rangeAggregationExpr{ - left: &logRange{ - left: &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchNotRegexp, "foo", "bar")}}, - interval: 365 * 24 * time.Hour, - }, - operation: "rate", - }, "sum", nil, nil), - }, - { - in: `avg(count_over_time({ foo !~ "bar" }[5h])) by (bar,foo)`, - exp: mustNewVectorAggregationExpr(&rangeAggregationExpr{ - left: &logRange{ - left: &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchNotRegexp, "foo", "bar")}}, - interval: 5 * time.Hour, - }, - operation: "count_over_time", - }, "avg", &grouping{ - without: false, - groups: []string{"bar", "foo"}, - }, nil), - }, - { - in: `max without (bar) (count_over_time({ foo !~ "bar" }[5h]))`, - exp: mustNewVectorAggregationExpr(&rangeAggregationExpr{ - left: &logRange{ - left: &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchNotRegexp, "foo", "bar")}}, - interval: 5 * time.Hour, - }, - operation: "count_over_time", - }, "max", &grouping{ - without: true, - groups: []string{"bar"}, - }, nil), - }, - { - in: `topk(10,count_over_time({ foo !~ "bar" }[5h])) without (bar)`, - exp: mustNewVectorAggregationExpr(&rangeAggregationExpr{ - left: &logRange{ - left: &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchNotRegexp, "foo", "bar")}}, - interval: 5 * time.Hour, - }, - operation: "count_over_time", - }, "topk", &grouping{ - without: true, - groups: []string{"bar"}, - }, newString("10")), - }, - { - in: `bottomk(30 ,sum(rate({ foo !~ "bar" }[5h])) by (foo))`, - exp: mustNewVectorAggregationExpr(mustNewVectorAggregationExpr(&rangeAggregationExpr{ - left: &logRange{ - left: &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchNotRegexp, "foo", "bar")}}, - interval: 5 * time.Hour, - }, - operation: "rate", - }, "sum", &grouping{ - groups: []string{"foo"}, - without: false, - }, nil), "bottomk", nil, - newString("30")), - }, - { - in: `max( sum(count_over_time({ foo !~ "bar" }[5h])) without (foo,bar) ) by (foo)`, - exp: mustNewVectorAggregationExpr(mustNewVectorAggregationExpr(&rangeAggregationExpr{ - left: &logRange{ - left: &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchNotRegexp, "foo", "bar")}}, - interval: 5 * time.Hour, - }, - operation: "count_over_time", - 
}, "sum", &grouping{ - groups: []string{"foo", "bar"}, - without: true, - }, nil), "max", &grouping{ - groups: []string{"foo"}, - without: false, - }, nil), - }, - { - in: `unk({ foo !~ "bar" }[5m])`, - err: ParseError{ - msg: "syntax error: unexpected IDENTIFIER", - line: 1, - col: 1, - }, - }, - { - in: `rate({ foo !~ "bar" }[5minutes])`, - err: ParseError{ - msg: `not a valid duration string: "5minutes"`, - line: 0, - col: 22, - }, - }, - { - in: `rate({ foo !~ "bar" }[5)`, - err: ParseError{ - msg: "missing closing ']' in duration", - line: 0, - col: 22, - }, - }, - { - in: `min({ foo !~ "bar" }[5m])`, - err: ParseError{ - msg: "syntax error: unexpected RANGE", - line: 0, - col: 21, - }, - }, - { - in: `sum(3 ,count_over_time({ foo !~ "bar" }[5h]))`, - err: ParseError{ - msg: "unsupported parameter for operation sum(3,", - line: 0, - col: 0, - }, - }, - { - in: `topk(count_over_time({ foo !~ "bar" }[5h]))`, - err: ParseError{ - msg: "parameter required for operation topk", - line: 0, - col: 0, - }, - }, - { - in: `bottomk(he,count_over_time({ foo !~ "bar" }[5h]))`, - err: ParseError{ - msg: "syntax error: unexpected IDENTIFIER", - line: 1, - col: 9, - }, - }, - { - in: `bottomk(1.2,count_over_time({ foo !~ "bar" }[5h]))`, - err: ParseError{ - msg: "invalid parameter bottomk(1.2,", - line: 0, - col: 0, - }, - }, - { - in: `stddev({ foo !~ "bar" })`, - err: ParseError{ - msg: "syntax error: unexpected )", - line: 1, - col: 24, - }, - }, - { - in: `{ foo = "bar", bar != "baz" }`, - exp: &matchersExpr{matchers: []*labels.Matcher{ - mustNewMatcher(labels.MatchEqual, "foo", "bar"), - mustNewMatcher(labels.MatchNotEqual, "bar", "baz"), - }}, - }, - { - in: `{foo="bar"} |= "baz"`, - exp: newPipelineExpr( - newMatcherExpr([]*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}), - MultiPipelineExpr{newLineFilterExpr(nil, labels.MatchEqual, "baz")}, - ), - }, - { - in: `{foo="bar"} |= "baz" |~ "blip" != "flip" !~ "flap"`, - exp: newPipelineExpr( - newMatcherExpr([]*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}), - MultiPipelineExpr{ - newLineFilterExpr( - newLineFilterExpr( - newLineFilterExpr( - newLineFilterExpr(nil, labels.MatchEqual, "baz"), - labels.MatchRegexp, "blip"), - labels.MatchNotEqual, "flip"), - labels.MatchNotRegexp, "flap"), - }, - ), - }, - { - in: `count_over_time(({foo="bar"} |= "baz" |~ "blip" != "flip" !~ "flap")[5m])`, - exp: newRangeAggregationExpr( - &logRange{ - left: newPipelineExpr( - newMatcherExpr([]*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}), - MultiPipelineExpr{ - newLineFilterExpr( - newLineFilterExpr( - newLineFilterExpr( - newLineFilterExpr(nil, labels.MatchEqual, "baz"), - labels.MatchRegexp, "blip"), - labels.MatchNotEqual, "flip"), - labels.MatchNotRegexp, "flap"), - }, - ), - interval: 5 * time.Minute, - }, OpRangeTypeCount, nil, nil), - }, - { - in: `bytes_over_time(({foo="bar"} |= "baz" |~ "blip" != "flip" !~ "flap")[5m])`, - exp: newRangeAggregationExpr( - &logRange{ - left: newPipelineExpr( - newMatcherExpr([]*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}), - MultiPipelineExpr{ - newLineFilterExpr( - newLineFilterExpr( - newLineFilterExpr( - newLineFilterExpr(nil, labels.MatchEqual, "baz"), - labels.MatchRegexp, "blip"), - labels.MatchNotEqual, "flip"), - labels.MatchNotRegexp, "flap"), - }, - ), - interval: 5 * time.Minute, - }, OpRangeTypeBytes, nil, nil), - }, - { - in: `sum(count_over_time(({foo="bar"} |= "baz" |~ "blip" != "flip" !~ "flap")[5m])) by (foo)`, - exp: 
mustNewVectorAggregationExpr(newRangeAggregationExpr( - &logRange{ - left: newPipelineExpr( - newMatcherExpr([]*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}), - MultiPipelineExpr{ - newLineFilterExpr( - newLineFilterExpr( - newLineFilterExpr( - newLineFilterExpr(nil, labels.MatchEqual, "baz"), - labels.MatchRegexp, "blip"), - labels.MatchNotEqual, "flip"), - labels.MatchNotRegexp, "flap"), - }, - ), - interval: 5 * time.Minute, - }, OpRangeTypeCount, nil, nil), - "sum", - &grouping{ - without: false, - groups: []string{"foo"}, - }, - nil), - }, - { - in: `sum(bytes_rate(({foo="bar"} |= "baz" |~ "blip" != "flip" !~ "flap")[5m])) by (foo)`, - exp: mustNewVectorAggregationExpr(newRangeAggregationExpr( - &logRange{ - left: newPipelineExpr( - newMatcherExpr([]*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}), - MultiPipelineExpr{ - newLineFilterExpr( - newLineFilterExpr( - newLineFilterExpr( - newLineFilterExpr(nil, labels.MatchEqual, "baz"), - labels.MatchRegexp, "blip"), - labels.MatchNotEqual, "flip"), - labels.MatchNotRegexp, "flap"), - }, - ), - interval: 5 * time.Minute, - }, OpRangeTypeBytesRate, nil, nil), - "sum", - &grouping{ - without: false, - groups: []string{"foo"}, - }, - nil), - }, - { - in: `topk(5,count_over_time(({foo="bar"} |= "baz" |~ "blip" != "flip" !~ "flap")[5m])) without (foo)`, - exp: mustNewVectorAggregationExpr(newRangeAggregationExpr( - &logRange{ - left: newPipelineExpr( - newMatcherExpr([]*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}), - MultiPipelineExpr{ - newLineFilterExpr( - newLineFilterExpr( - newLineFilterExpr( - newLineFilterExpr(nil, labels.MatchEqual, "baz"), - labels.MatchRegexp, "blip"), - labels.MatchNotEqual, "flip"), - labels.MatchNotRegexp, "flap"), - }, - ), - interval: 5 * time.Minute, - }, OpRangeTypeCount, nil, nil), - "topk", - &grouping{ - without: true, - groups: []string{"foo"}, - }, - newString("5")), - }, - { - in: `topk(5,sum(rate(({foo="bar"} |= "baz" |~ "blip" != "flip" !~ "flap")[5m])) by (app))`, - exp: mustNewVectorAggregationExpr( - mustNewVectorAggregationExpr( - newRangeAggregationExpr( - &logRange{ - left: newPipelineExpr( - newMatcherExpr([]*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}), - MultiPipelineExpr{ - newLineFilterExpr( - newLineFilterExpr( - newLineFilterExpr( - newLineFilterExpr(nil, labels.MatchEqual, "baz"), - labels.MatchRegexp, "blip"), - labels.MatchNotEqual, "flip"), - labels.MatchNotRegexp, "flap"), - }, - ), - interval: 5 * time.Minute, - }, OpRangeTypeRate, nil, nil), - "sum", - &grouping{ - without: false, - groups: []string{"app"}, - }, - nil), - "topk", - nil, - newString("5")), - }, - { - in: `count_over_time({foo="bar"}[5m] |= "baz" |~ "blip" != "flip" !~ "flap")`, - exp: newRangeAggregationExpr( - &logRange{ - left: newPipelineExpr( - newMatcherExpr([]*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}), - MultiPipelineExpr{ - newLineFilterExpr( - newLineFilterExpr( - newLineFilterExpr( - newLineFilterExpr(nil, labels.MatchEqual, "baz"), - labels.MatchRegexp, "blip"), - labels.MatchNotEqual, "flip"), - labels.MatchNotRegexp, "flap"), - }, - ), - interval: 5 * time.Minute, - }, OpRangeTypeCount, nil, nil), - }, - { - in: `sum(count_over_time({foo="bar"}[5m] |= "baz" |~ "blip" != "flip" !~ "flap")) by (foo)`, - exp: mustNewVectorAggregationExpr(newRangeAggregationExpr( - &logRange{ - left: newPipelineExpr( - newMatcherExpr([]*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}), - MultiPipelineExpr{ - 
newLineFilterExpr( - newLineFilterExpr( - newLineFilterExpr( - newLineFilterExpr(nil, labels.MatchEqual, "baz"), - labels.MatchRegexp, "blip"), - labels.MatchNotEqual, "flip"), - labels.MatchNotRegexp, "flap"), - }, - ), - interval: 5 * time.Minute, - }, OpRangeTypeCount, nil, nil), - "sum", - &grouping{ - without: false, - groups: []string{"foo"}, - }, - nil), - }, - { - in: `topk(5,count_over_time({foo="bar"}[5m] |= "baz" |~ "blip" != "flip" !~ "flap")) without (foo)`, - exp: mustNewVectorAggregationExpr(newRangeAggregationExpr( - &logRange{ - left: newPipelineExpr( - newMatcherExpr([]*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}), - MultiPipelineExpr{ - newLineFilterExpr( - newLineFilterExpr( - newLineFilterExpr( - newLineFilterExpr(nil, labels.MatchEqual, "baz"), - labels.MatchRegexp, "blip"), - labels.MatchNotEqual, "flip"), - labels.MatchNotRegexp, "flap"), - }, - ), - interval: 5 * time.Minute, - }, OpRangeTypeCount, nil, nil), - "topk", - &grouping{ - without: true, - groups: []string{"foo"}, - }, - newString("5")), - }, - { - in: `topk(5,sum(rate({foo="bar"}[5m] |= "baz" |~ "blip" != "flip" !~ "flap")) by (app))`, - exp: mustNewVectorAggregationExpr( - mustNewVectorAggregationExpr( - newRangeAggregationExpr( - &logRange{ - left: newPipelineExpr( - newMatcherExpr([]*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}), - MultiPipelineExpr{ - newLineFilterExpr( - newLineFilterExpr( - newLineFilterExpr( - newLineFilterExpr(nil, labels.MatchEqual, "baz"), - labels.MatchRegexp, "blip"), - labels.MatchNotEqual, "flip"), - labels.MatchNotRegexp, "flap"), - }, - ), - interval: 5 * time.Minute, - }, OpRangeTypeRate, nil, nil), - "sum", - &grouping{ - without: false, - groups: []string{"app"}, - }, - nil), - "topk", - nil, - newString("5")), - }, - { - in: `{foo="bar}`, - err: ParseError{ - msg: "literal not terminated", - line: 1, - col: 6, - }, - }, - { - in: `{foo="bar"`, - err: ParseError{ - msg: "syntax error: unexpected $end, expecting } or ,", - line: 1, - col: 11, - }, - }, + // { + // // raw string + // in: "count_over_time({foo=~`bar\\w+`}[12h] |~ `error\\`)", + // exp: &rangeAggregationExpr{ + // operation: "count_over_time", + // left: &logRange{ + // left: &pipelineExpr{ + // pipeline: MultiStageExpr{ + // newLineFilterExpr(nil, labels.MatchRegexp, "error\\"), + // }, + // left: &matchersExpr{ + // matchers: []*labels.Matcher{ + // mustNewMatcher(labels.MatchRegexp, "foo", "bar\\w+"), + // }, + // }, + // }, + // interval: 12 * time.Hour, + // }, + // }, + // }, + // { + // // test [12h] before filter expr + // in: `count_over_time({foo="bar"}[12h] |= "error")`, + // exp: &rangeAggregationExpr{ + // operation: "count_over_time", + // left: &logRange{ + // left: newPipelineExpr( + // newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "foo", Value: "bar"}}), + // MultiStageExpr{ + // newLineFilterExpr(nil, labels.MatchEqual, "error"), + // }, + // ), + // interval: 12 * time.Hour, + // }, + // }, + // }, + // { + // // test [12h] after filter expr + // in: `count_over_time({foo="bar"} |= "error" [12h])`, + // exp: &rangeAggregationExpr{ + // operation: "count_over_time", + // left: &logRange{ + // left: newPipelineExpr( + // newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "foo", Value: "bar"}}), + // MultiStageExpr{newLineFilterExpr(nil, labels.MatchEqual, "error")}, + // ), + // interval: 12 * time.Hour, + // }, + // }, + // }, + // { + // in: `{foo="bar"}`, + // exp: &matchersExpr{matchers: 
[]*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}}, + // }, + // { + // in: `{ foo = "bar" }`, + // exp: &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}}, + // }, + // { + // in: `{ foo != "bar" }`, + // exp: &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchNotEqual, "foo", "bar")}}, + // }, + // { + // in: `{ foo =~ "bar" }`, + // exp: &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchRegexp, "foo", "bar")}}, + // }, + // { + // in: `{ foo !~ "bar" }`, + // exp: &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchNotRegexp, "foo", "bar")}}, + // }, + // { + // in: `count_over_time({ foo !~ "bar" }[12m])`, + // exp: &rangeAggregationExpr{ + // left: &logRange{ + // left: &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchNotRegexp, "foo", "bar")}}, + // interval: 12 * time.Minute, + // }, + // operation: "count_over_time", + // }, + // }, + // { + // in: `bytes_over_time({ foo !~ "bar" }[12m])`, + // exp: &rangeAggregationExpr{ + // left: &logRange{ + // left: &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchNotRegexp, "foo", "bar")}}, + // interval: 12 * time.Minute, + // }, + // operation: OpRangeTypeBytes, + // }, + // }, + // { + // in: `bytes_rate({ foo !~ "bar" }[12m])`, + // exp: &rangeAggregationExpr{ + // left: &logRange{ + // left: &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchNotRegexp, "foo", "bar")}}, + // interval: 12 * time.Minute, + // }, + // operation: OpRangeTypeBytesRate, + // }, + // }, + // { + // in: `rate({ foo !~ "bar" }[5h])`, + // exp: &rangeAggregationExpr{ + // left: &logRange{ + // left: &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchNotRegexp, "foo", "bar")}}, + // interval: 5 * time.Hour, + // }, + // operation: "rate", + // }, + // }, + // { + // in: `rate({ foo !~ "bar" }[5d])`, + // exp: &rangeAggregationExpr{ + // left: &logRange{ + // left: &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchNotRegexp, "foo", "bar")}}, + // interval: 5 * 24 * time.Hour, + // }, + // operation: "rate", + // }, + // }, + // { + // in: `count_over_time({ foo !~ "bar" }[1w])`, + // exp: &rangeAggregationExpr{ + // left: &logRange{ + // left: &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchNotRegexp, "foo", "bar")}}, + // interval: 7 * 24 * time.Hour, + // }, + // operation: "count_over_time", + // }, + // }, + // { + // in: `sum(rate({ foo !~ "bar" }[5h]))`, + // exp: mustNewVectorAggregationExpr(&rangeAggregationExpr{ + // left: &logRange{ + // left: &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchNotRegexp, "foo", "bar")}}, + // interval: 5 * time.Hour, + // }, + // operation: "rate", + // }, "sum", nil, nil), + // }, + // { + // in: `sum(rate({ foo !~ "bar" }[1y]))`, + // exp: mustNewVectorAggregationExpr(&rangeAggregationExpr{ + // left: &logRange{ + // left: &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchNotRegexp, "foo", "bar")}}, + // interval: 365 * 24 * time.Hour, + // }, + // operation: "rate", + // }, "sum", nil, nil), + // }, + // { + // in: `avg(count_over_time({ foo !~ "bar" }[5h])) by (bar,foo)`, + // exp: mustNewVectorAggregationExpr(&rangeAggregationExpr{ + // left: &logRange{ + // left: &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchNotRegexp, "foo", "bar")}}, + // interval: 5 * time.Hour, + // }, + // operation: "count_over_time", + // }, "avg", &grouping{ + 
// without: false, + // groups: []string{"bar", "foo"}, + // }, nil), + // }, + // { + // in: `max without (bar) (count_over_time({ foo !~ "bar" }[5h]))`, + // exp: mustNewVectorAggregationExpr(&rangeAggregationExpr{ + // left: &logRange{ + // left: &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchNotRegexp, "foo", "bar")}}, + // interval: 5 * time.Hour, + // }, + // operation: "count_over_time", + // }, "max", &grouping{ + // without: true, + // groups: []string{"bar"}, + // }, nil), + // }, + // { + // in: `topk(10,count_over_time({ foo !~ "bar" }[5h])) without (bar)`, + // exp: mustNewVectorAggregationExpr(&rangeAggregationExpr{ + // left: &logRange{ + // left: &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchNotRegexp, "foo", "bar")}}, + // interval: 5 * time.Hour, + // }, + // operation: "count_over_time", + // }, "topk", &grouping{ + // without: true, + // groups: []string{"bar"}, + // }, NewStringLabelFilter("10")), + // }, + // { + // in: `bottomk(30 ,sum(rate({ foo !~ "bar" }[5h])) by (foo))`, + // exp: mustNewVectorAggregationExpr(mustNewVectorAggregationExpr(&rangeAggregationExpr{ + // left: &logRange{ + // left: &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchNotRegexp, "foo", "bar")}}, + // interval: 5 * time.Hour, + // }, + // operation: "rate", + // }, "sum", &grouping{ + // groups: []string{"foo"}, + // without: false, + // }, nil), "bottomk", nil, + // NewStringLabelFilter("30")), + // }, + // { + // in: `max( sum(count_over_time({ foo !~ "bar" }[5h])) without (foo,bar) ) by (foo)`, + // exp: mustNewVectorAggregationExpr(mustNewVectorAggregationExpr(&rangeAggregationExpr{ + // left: &logRange{ + // left: &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchNotRegexp, "foo", "bar")}}, + // interval: 5 * time.Hour, + // }, + // operation: "count_over_time", + // }, "sum", &grouping{ + // groups: []string{"foo", "bar"}, + // without: true, + // }, nil), "max", &grouping{ + // groups: []string{"foo"}, + // without: false, + // }, nil), + // }, + // { + // in: `unk({ foo !~ "bar" }[5m])`, + // err: ParseError{ + // msg: "syntax error: unexpected IDENTIFIER", + // line: 1, + // col: 1, + // }, + // }, + // { + // in: `rate({ foo !~ "bar" }[5minutes])`, + // err: ParseError{ + // msg: `not a valid duration string: "5minutes"`, + // line: 0, + // col: 22, + // }, + // }, + // { + // in: `rate({ foo !~ "bar" }[5)`, + // err: ParseError{ + // msg: "missing closing ']' in duration", + // line: 0, + // col: 22, + // }, + // }, + // { + // in: `min({ foo !~ "bar" }[5m])`, + // err: ParseError{ + // msg: "syntax error: unexpected RANGE", + // line: 0, + // col: 21, + // }, + // }, + // { + // in: `sum(3 ,count_over_time({ foo !~ "bar" }[5h]))`, + // err: ParseError{ + // msg: "unsupported parameter for operation sum(3,", + // line: 0, + // col: 0, + // }, + // }, + // { + // in: `topk(count_over_time({ foo !~ "bar" }[5h]))`, + // err: ParseError{ + // msg: "parameter required for operation topk", + // line: 0, + // col: 0, + // }, + // }, + // { + // in: `bottomk(he,count_over_time({ foo !~ "bar" }[5h]))`, + // err: ParseError{ + // msg: "syntax error: unexpected IDENTIFIER", + // line: 1, + // col: 9, + // }, + // }, + // { + // in: `bottomk(1.2,count_over_time({ foo !~ "bar" }[5h]))`, + // err: ParseError{ + // msg: "invalid parameter bottomk(1.2,", + // line: 0, + // col: 0, + // }, + // }, + // { + // in: `stddev({ foo !~ "bar" })`, + // err: ParseError{ + // msg: "syntax error: unexpected )", + // line: 1, 
+ // col: 24, + // }, + // }, + // { + // in: `{ foo = "bar", bar != "baz" }`, + // exp: &matchersExpr{matchers: []*labels.Matcher{ + // mustNewMatcher(labels.MatchEqual, "foo", "bar"), + // mustNewMatcher(labels.MatchNotEqual, "bar", "baz"), + // }}, + // }, + // { + // in: `{foo="bar"} |= "baz"`, + // exp: newPipelineExpr( + // newMatcherExpr([]*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}), + // MultiStageExpr{newLineFilterExpr(nil, labels.MatchEqual, "baz")}, + // ), + // }, + // { + // in: `{foo="bar"} |= "baz" |~ "blip" != "flip" !~ "flap"`, + // exp: newPipelineExpr( + // newMatcherExpr([]*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}), + // MultiStageExpr{ + // newLineFilterExpr( + // newLineFilterExpr( + // newLineFilterExpr( + // newLineFilterExpr(nil, labels.MatchEqual, "baz"), + // labels.MatchRegexp, "blip"), + // labels.MatchNotEqual, "flip"), + // labels.MatchNotRegexp, "flap"), + // }, + // ), + // }, + // { + // in: `count_over_time(({foo="bar"} |= "baz" |~ "blip" != "flip" !~ "flap")[5m])`, + // exp: newRangeAggregationExpr( + // &logRange{ + // left: newPipelineExpr( + // newMatcherExpr([]*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}), + // MultiStageExpr{ + // newLineFilterExpr( + // newLineFilterExpr( + // newLineFilterExpr( + // newLineFilterExpr(nil, labels.MatchEqual, "baz"), + // labels.MatchRegexp, "blip"), + // labels.MatchNotEqual, "flip"), + // labels.MatchNotRegexp, "flap"), + // }, + // ), + // interval: 5 * time.Minute, + // }, OpRangeTypeCount, nil, nil), + // }, + // { + // in: `bytes_over_time(({foo="bar"} |= "baz" |~ "blip" != "flip" !~ "flap")[5m])`, + // exp: newRangeAggregationExpr( + // &logRange{ + // left: newPipelineExpr( + // newMatcherExpr([]*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}), + // MultiStageExpr{ + // newLineFilterExpr( + // newLineFilterExpr( + // newLineFilterExpr( + // newLineFilterExpr(nil, labels.MatchEqual, "baz"), + // labels.MatchRegexp, "blip"), + // labels.MatchNotEqual, "flip"), + // labels.MatchNotRegexp, "flap"), + // }, + // ), + // interval: 5 * time.Minute, + // }, OpRangeTypeBytes, nil, nil), + // }, + // { + // in: `sum(count_over_time(({foo="bar"} |= "baz" |~ "blip" != "flip" !~ "flap")[5m])) by (foo)`, + // exp: mustNewVectorAggregationExpr(newRangeAggregationExpr( + // &logRange{ + // left: newPipelineExpr( + // newMatcherExpr([]*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}), + // MultiStageExpr{ + // newLineFilterExpr( + // newLineFilterExpr( + // newLineFilterExpr( + // newLineFilterExpr(nil, labels.MatchEqual, "baz"), + // labels.MatchRegexp, "blip"), + // labels.MatchNotEqual, "flip"), + // labels.MatchNotRegexp, "flap"), + // }, + // ), + // interval: 5 * time.Minute, + // }, OpRangeTypeCount, nil, nil), + // "sum", + // &grouping{ + // without: false, + // groups: []string{"foo"}, + // }, + // nil), + // }, + // { + // in: `sum(bytes_rate(({foo="bar"} |= "baz" |~ "blip" != "flip" !~ "flap")[5m])) by (foo)`, + // exp: mustNewVectorAggregationExpr(newRangeAggregationExpr( + // &logRange{ + // left: newPipelineExpr( + // newMatcherExpr([]*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}), + // MultiStageExpr{ + // newLineFilterExpr( + // newLineFilterExpr( + // newLineFilterExpr( + // newLineFilterExpr(nil, labels.MatchEqual, "baz"), + // labels.MatchRegexp, "blip"), + // labels.MatchNotEqual, "flip"), + // labels.MatchNotRegexp, "flap"), + // }, + // ), + // interval: 5 * time.Minute, + // }, 
OpRangeTypeBytesRate, nil, nil), + // "sum", + // &grouping{ + // without: false, + // groups: []string{"foo"}, + // }, + // nil), + // }, + // { + // in: `topk(5,count_over_time(({foo="bar"} |= "baz" |~ "blip" != "flip" !~ "flap")[5m])) without (foo)`, + // exp: mustNewVectorAggregationExpr(newRangeAggregationExpr( + // &logRange{ + // left: newPipelineExpr( + // newMatcherExpr([]*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}), + // MultiStageExpr{ + // newLineFilterExpr( + // newLineFilterExpr( + // newLineFilterExpr( + // newLineFilterExpr(nil, labels.MatchEqual, "baz"), + // labels.MatchRegexp, "blip"), + // labels.MatchNotEqual, "flip"), + // labels.MatchNotRegexp, "flap"), + // }, + // ), + // interval: 5 * time.Minute, + // }, OpRangeTypeCount, nil, nil), + // "topk", + // &grouping{ + // without: true, + // groups: []string{"foo"}, + // }, + // NewStringLabelFilter("5")), + // }, + // { + // in: `topk(5,sum(rate(({foo="bar"} |= "baz" |~ "blip" != "flip" !~ "flap")[5m])) by (app))`, + // exp: mustNewVectorAggregationExpr( + // mustNewVectorAggregationExpr( + // newRangeAggregationExpr( + // &logRange{ + // left: newPipelineExpr( + // newMatcherExpr([]*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}), + // MultiStageExpr{ + // newLineFilterExpr( + // newLineFilterExpr( + // newLineFilterExpr( + // newLineFilterExpr(nil, labels.MatchEqual, "baz"), + // labels.MatchRegexp, "blip"), + // labels.MatchNotEqual, "flip"), + // labels.MatchNotRegexp, "flap"), + // }, + // ), + // interval: 5 * time.Minute, + // }, OpRangeTypeRate, nil, nil), + // "sum", + // &grouping{ + // without: false, + // groups: []string{"app"}, + // }, + // nil), + // "topk", + // nil, + // NewStringLabelFilter("5")), + // }, + // { + // in: `count_over_time({foo="bar"}[5m] |= "baz" |~ "blip" != "flip" !~ "flap")`, + // exp: newRangeAggregationExpr( + // &logRange{ + // left: newPipelineExpr( + // newMatcherExpr([]*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}), + // MultiStageExpr{ + // newLineFilterExpr( + // newLineFilterExpr( + // newLineFilterExpr( + // newLineFilterExpr(nil, labels.MatchEqual, "baz"), + // labels.MatchRegexp, "blip"), + // labels.MatchNotEqual, "flip"), + // labels.MatchNotRegexp, "flap"), + // }, + // ), + // interval: 5 * time.Minute, + // }, OpRangeTypeCount, nil, nil), + // }, + // { + // in: `sum(count_over_time({foo="bar"}[5m] |= "baz" |~ "blip" != "flip" !~ "flap")) by (foo)`, + // exp: mustNewVectorAggregationExpr(newRangeAggregationExpr( + // &logRange{ + // left: newPipelineExpr( + // newMatcherExpr([]*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}), + // MultiStageExpr{ + // newLineFilterExpr( + // newLineFilterExpr( + // newLineFilterExpr( + // newLineFilterExpr(nil, labels.MatchEqual, "baz"), + // labels.MatchRegexp, "blip"), + // labels.MatchNotEqual, "flip"), + // labels.MatchNotRegexp, "flap"), + // }, + // ), + // interval: 5 * time.Minute, + // }, OpRangeTypeCount, nil, nil), + // "sum", + // &grouping{ + // without: false, + // groups: []string{"foo"}, + // }, + // nil), + // }, + // { + // in: `topk(5,count_over_time({foo="bar"}[5m] |= "baz" |~ "blip" != "flip" !~ "flap")) without (foo)`, + // exp: mustNewVectorAggregationExpr(newRangeAggregationExpr( + // &logRange{ + // left: newPipelineExpr( + // newMatcherExpr([]*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}), + // MultiStageExpr{ + // newLineFilterExpr( + // newLineFilterExpr( + // newLineFilterExpr( + // newLineFilterExpr(nil, 
labels.MatchEqual, "baz"), + // labels.MatchRegexp, "blip"), + // labels.MatchNotEqual, "flip"), + // labels.MatchNotRegexp, "flap"), + // }, + // ), + // interval: 5 * time.Minute, + // }, OpRangeTypeCount, nil, nil), + // "topk", + // &grouping{ + // without: true, + // groups: []string{"foo"}, + // }, + // NewStringLabelFilter("5")), + // }, + // { + // in: `topk(5,sum(rate({foo="bar"}[5m] |= "baz" |~ "blip" != "flip" !~ "flap")) by (app))`, + // exp: mustNewVectorAggregationExpr( + // mustNewVectorAggregationExpr( + // newRangeAggregationExpr( + // &logRange{ + // left: newPipelineExpr( + // newMatcherExpr([]*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}), + // MultiStageExpr{ + // newLineFilterExpr( + // newLineFilterExpr( + // newLineFilterExpr( + // newLineFilterExpr(nil, labels.MatchEqual, "baz"), + // labels.MatchRegexp, "blip"), + // labels.MatchNotEqual, "flip"), + // labels.MatchNotRegexp, "flap"), + // }, + // ), + // interval: 5 * time.Minute, + // }, OpRangeTypeRate, nil, nil), + // "sum", + // &grouping{ + // without: false, + // groups: []string{"app"}, + // }, + // nil), + // "topk", + // nil, + // NewStringLabelFilter("5")), + // }, + // { + // in: `{foo="bar}`, + // err: ParseError{ + // msg: "literal not terminated", + // line: 1, + // col: 6, + // }, + // }, + // { + // in: `{foo="bar"`, + // err: ParseError{ + // msg: "syntax error: unexpected $end, expecting } or ,", + // line: 1, + // col: 11, + // }, + // }, - { - in: `{foo="bar"} |~`, - err: ParseError{ - msg: "syntax error: unexpected $end, expecting STRING", - line: 1, - col: 15, - }, - }, + // { + // in: `{foo="bar"} |~`, + // err: ParseError{ + // msg: "syntax error: unexpected $end, expecting STRING", + // line: 1, + // col: 15, + // }, + // }, - { - in: `{foo="bar"} "foo"`, - err: ParseError{ - msg: "syntax error: unexpected STRING", - line: 1, - col: 13, - }, - }, - { - in: `{foo="bar"} foo`, - err: ParseError{ - msg: "syntax error: unexpected IDENTIFIER", - line: 1, - col: 13, - }, - }, - { - // require left associativity - in: ` - sum(count_over_time({foo="bar"}[5m])) by (foo) / - sum(count_over_time({foo="bar"}[5m])) by (foo) / - sum(count_over_time({foo="bar"}[5m])) by (foo) - `, - exp: mustNewBinOpExpr( - OpTypeDiv, - BinOpOptions{}, - mustNewBinOpExpr( - OpTypeDiv, - BinOpOptions{}, - mustNewVectorAggregationExpr(newRangeAggregationExpr( - &logRange{ - left: &matchersExpr{ - matchers: []*labels.Matcher{ - mustNewMatcher(labels.MatchEqual, "foo", "bar"), - }, - }, - interval: 5 * time.Minute, - }, OpRangeTypeCount, nil, nil), - "sum", - &grouping{ - without: false, - groups: []string{"foo"}, - }, - nil, - ), - mustNewVectorAggregationExpr(newRangeAggregationExpr( - &logRange{ - left: &matchersExpr{ - matchers: []*labels.Matcher{ - mustNewMatcher(labels.MatchEqual, "foo", "bar"), - }, - }, - interval: 5 * time.Minute, - }, OpRangeTypeCount, nil, nil), - "sum", - &grouping{ - without: false, - groups: []string{"foo"}, - }, - nil, - ), - ), - mustNewVectorAggregationExpr(newRangeAggregationExpr( - &logRange{ - left: &matchersExpr{ - matchers: []*labels.Matcher{ - mustNewMatcher(labels.MatchEqual, "foo", "bar"), - }, - }, - interval: 5 * time.Minute, - }, OpRangeTypeCount, nil, nil), - "sum", - &grouping{ - without: false, - groups: []string{"foo"}, - }, - nil, - ), - ), - }, - { - in: ` - sum(count_over_time({foo="bar"}[5m])) by (foo) ^ - sum(count_over_time({foo="bar"}[5m])) by (foo) / - sum(count_over_time({foo="bar"}[5m])) by (foo) - `, - exp: mustNewBinOpExpr( - OpTypeDiv, - 
BinOpOptions{}, - mustNewBinOpExpr( - OpTypePow, - BinOpOptions{}, - mustNewVectorAggregationExpr(newRangeAggregationExpr( - &logRange{ - left: &matchersExpr{ - matchers: []*labels.Matcher{ - mustNewMatcher(labels.MatchEqual, "foo", "bar"), - }, - }, - interval: 5 * time.Minute, - }, OpRangeTypeCount, nil, nil), - "sum", - &grouping{ - without: false, - groups: []string{"foo"}, - }, - nil, - ), - mustNewVectorAggregationExpr(newRangeAggregationExpr( - &logRange{ - left: &matchersExpr{ - matchers: []*labels.Matcher{ - mustNewMatcher(labels.MatchEqual, "foo", "bar"), - }, - }, - interval: 5 * time.Minute, - }, OpRangeTypeCount, nil, nil), - "sum", - &grouping{ - without: false, - groups: []string{"foo"}, - }, - nil, - ), - ), - mustNewVectorAggregationExpr(newRangeAggregationExpr( - &logRange{ - left: &matchersExpr{ - matchers: []*labels.Matcher{ - mustNewMatcher(labels.MatchEqual, "foo", "bar"), - }, - }, - interval: 5 * time.Minute, - }, OpRangeTypeCount, nil, nil), - "sum", - &grouping{ - without: false, - groups: []string{"foo"}, - }, - nil, - ), - ), - }, - { - // operator precedence before left associativity - in: ` - sum(count_over_time({foo="bar"}[5m])) by (foo) + - sum(count_over_time({foo="bar"}[5m])) by (foo) / - sum(count_over_time({foo="bar"}[5m])) by (foo) - `, - exp: mustNewBinOpExpr( - OpTypeAdd, - BinOpOptions{}, - mustNewVectorAggregationExpr(newRangeAggregationExpr( - &logRange{ - left: &matchersExpr{ - matchers: []*labels.Matcher{ - mustNewMatcher(labels.MatchEqual, "foo", "bar"), - }, - }, - interval: 5 * time.Minute, - }, OpRangeTypeCount, nil, nil), - "sum", - &grouping{ - without: false, - groups: []string{"foo"}, - }, - nil, - ), - mustNewBinOpExpr( - OpTypeDiv, - BinOpOptions{}, - mustNewVectorAggregationExpr(newRangeAggregationExpr( - &logRange{ - left: &matchersExpr{ - matchers: []*labels.Matcher{ - mustNewMatcher(labels.MatchEqual, "foo", "bar"), - }, - }, - interval: 5 * time.Minute, - }, OpRangeTypeCount, nil, nil), - "sum", - &grouping{ - without: false, - groups: []string{"foo"}, - }, - nil, - ), - mustNewVectorAggregationExpr(newRangeAggregationExpr( - &logRange{ - left: &matchersExpr{ - matchers: []*labels.Matcher{ - mustNewMatcher(labels.MatchEqual, "foo", "bar"), - }, - }, - interval: 5 * time.Minute, - }, OpRangeTypeCount, nil, nil), - "sum", - &grouping{ - without: false, - groups: []string{"foo"}, - }, - nil, - ), - ), - ), - }, - { - in: `sum by (job) ( - count_over_time({namespace="tns"} |= "level=error"[5m]) - / - count_over_time({namespace="tns"}[5m]) - )`, - exp: mustNewVectorAggregationExpr( - mustNewBinOpExpr(OpTypeDiv, - BinOpOptions{}, - newRangeAggregationExpr( - &logRange{ - left: newPipelineExpr( - newMatcherExpr([]*labels.Matcher{ - mustNewMatcher(labels.MatchEqual, "namespace", "tns"), - }), - MultiPipelineExpr{ - newLineFilterExpr(nil, labels.MatchEqual, "level=error"), - }), - interval: 5 * time.Minute, - }, OpRangeTypeCount, nil, nil), - newRangeAggregationExpr( - &logRange{ - left: &matchersExpr{ - matchers: []*labels.Matcher{ - mustNewMatcher(labels.MatchEqual, "namespace", "tns"), - }, - }, - interval: 5 * time.Minute, - }, OpRangeTypeCount, nil, nil)), OpTypeSum, &grouping{groups: []string{"job"}}, nil), - }, - { - in: `sum by (job) ( - count_over_time({namespace="tns"} |= "level=error"[5m]) - / - count_over_time({namespace="tns"}[5m]) - ) * 100`, - exp: mustNewBinOpExpr(OpTypeMul, BinOpOptions{}, mustNewVectorAggregationExpr( - mustNewBinOpExpr(OpTypeDiv, - BinOpOptions{}, - newRangeAggregationExpr( - &logRange{ - left: 
newPipelineExpr( - newMatcherExpr([]*labels.Matcher{ - mustNewMatcher(labels.MatchEqual, "namespace", "tns"), - }), - MultiPipelineExpr{ - newLineFilterExpr(nil, labels.MatchEqual, "level=error"), - }), - interval: 5 * time.Minute, - }, OpRangeTypeCount, nil, nil), - newRangeAggregationExpr( - &logRange{ - left: &matchersExpr{ - matchers: []*labels.Matcher{ - mustNewMatcher(labels.MatchEqual, "namespace", "tns"), - }, - }, - interval: 5 * time.Minute, - }, OpRangeTypeCount, nil, nil)), OpTypeSum, &grouping{groups: []string{"job"}}, nil), - mustNewLiteralExpr("100", false), - ), - }, - { - // reduces binop with two literalExprs - in: `sum(count_over_time({foo="bar"}[5m])) by (foo) + 1 / 2`, - exp: mustNewBinOpExpr( - OpTypeAdd, - BinOpOptions{}, - mustNewVectorAggregationExpr( - newRangeAggregationExpr( - &logRange{ - left: &matchersExpr{ - matchers: []*labels.Matcher{ - mustNewMatcher(labels.MatchEqual, "foo", "bar"), - }, - }, - interval: 5 * time.Minute, - }, OpRangeTypeCount, nil, nil), - "sum", - &grouping{ - without: false, - groups: []string{"foo"}, - }, - nil, - ), - &literalExpr{value: 0.5}, - ), - }, - { - // test signs - in: `1 + -2 / 1`, - exp: mustNewBinOpExpr( - OpTypeAdd, - BinOpOptions{}, - &literalExpr{value: 1}, - mustNewBinOpExpr(OpTypeDiv, BinOpOptions{}, &literalExpr{value: -2}, &literalExpr{value: 1}), - ), - }, - { - // test signs/ops with equal associativity - in: `1 + 1 - -1`, - exp: mustNewBinOpExpr( - OpTypeSub, - BinOpOptions{}, - mustNewBinOpExpr(OpTypeAdd, BinOpOptions{}, &literalExpr{value: 1}, &literalExpr{value: 1}), - &literalExpr{value: -1}, - ), - }, + // { + // in: `{foo="bar"} "foo"`, + // err: ParseError{ + // msg: "syntax error: unexpected STRING", + // line: 1, + // col: 13, + // }, + // }, + // { + // in: `{foo="bar"} foo`, + // err: ParseError{ + // msg: "syntax error: unexpected IDENTIFIER", + // line: 1, + // col: 13, + // }, + // }, + // { + // // require left associativity + // in: ` + // sum(count_over_time({foo="bar"}[5m])) by (foo) / + // sum(count_over_time({foo="bar"}[5m])) by (foo) / + // sum(count_over_time({foo="bar"}[5m])) by (foo) + // `, + // exp: mustNewBinOpExpr( + // OpTypeDiv, + // BinOpOptions{}, + // mustNewBinOpExpr( + // OpTypeDiv, + // BinOpOptions{}, + // mustNewVectorAggregationExpr(newRangeAggregationExpr( + // &logRange{ + // left: &matchersExpr{ + // matchers: []*labels.Matcher{ + // mustNewMatcher(labels.MatchEqual, "foo", "bar"), + // }, + // }, + // interval: 5 * time.Minute, + // }, OpRangeTypeCount, nil, nil), + // "sum", + // &grouping{ + // without: false, + // groups: []string{"foo"}, + // }, + // nil, + // ), + // mustNewVectorAggregationExpr(newRangeAggregationExpr( + // &logRange{ + // left: &matchersExpr{ + // matchers: []*labels.Matcher{ + // mustNewMatcher(labels.MatchEqual, "foo", "bar"), + // }, + // }, + // interval: 5 * time.Minute, + // }, OpRangeTypeCount, nil, nil), + // "sum", + // &grouping{ + // without: false, + // groups: []string{"foo"}, + // }, + // nil, + // ), + // ), + // mustNewVectorAggregationExpr(newRangeAggregationExpr( + // &logRange{ + // left: &matchersExpr{ + // matchers: []*labels.Matcher{ + // mustNewMatcher(labels.MatchEqual, "foo", "bar"), + // }, + // }, + // interval: 5 * time.Minute, + // }, OpRangeTypeCount, nil, nil), + // "sum", + // &grouping{ + // without: false, + // groups: []string{"foo"}, + // }, + // nil, + // ), + // ), + // }, + // { + // in: ` + // sum(count_over_time({foo="bar"}[5m])) by (foo) ^ + // sum(count_over_time({foo="bar"}[5m])) by (foo) / + // 
sum(count_over_time({foo="bar"}[5m])) by (foo) + // `, + // exp: mustNewBinOpExpr( + // OpTypeDiv, + // BinOpOptions{}, + // mustNewBinOpExpr( + // OpTypePow, + // BinOpOptions{}, + // mustNewVectorAggregationExpr(newRangeAggregationExpr( + // &logRange{ + // left: &matchersExpr{ + // matchers: []*labels.Matcher{ + // mustNewMatcher(labels.MatchEqual, "foo", "bar"), + // }, + // }, + // interval: 5 * time.Minute, + // }, OpRangeTypeCount, nil, nil), + // "sum", + // &grouping{ + // without: false, + // groups: []string{"foo"}, + // }, + // nil, + // ), + // mustNewVectorAggregationExpr(newRangeAggregationExpr( + // &logRange{ + // left: &matchersExpr{ + // matchers: []*labels.Matcher{ + // mustNewMatcher(labels.MatchEqual, "foo", "bar"), + // }, + // }, + // interval: 5 * time.Minute, + // }, OpRangeTypeCount, nil, nil), + // "sum", + // &grouping{ + // without: false, + // groups: []string{"foo"}, + // }, + // nil, + // ), + // ), + // mustNewVectorAggregationExpr(newRangeAggregationExpr( + // &logRange{ + // left: &matchersExpr{ + // matchers: []*labels.Matcher{ + // mustNewMatcher(labels.MatchEqual, "foo", "bar"), + // }, + // }, + // interval: 5 * time.Minute, + // }, OpRangeTypeCount, nil, nil), + // "sum", + // &grouping{ + // without: false, + // groups: []string{"foo"}, + // }, + // nil, + // ), + // ), + // }, + // { + // // operator precedence before left associativity + // in: ` + // sum(count_over_time({foo="bar"}[5m])) by (foo) + + // sum(count_over_time({foo="bar"}[5m])) by (foo) / + // sum(count_over_time({foo="bar"}[5m])) by (foo) + // `, + // exp: mustNewBinOpExpr( + // OpTypeAdd, + // BinOpOptions{}, + // mustNewVectorAggregationExpr(newRangeAggregationExpr( + // &logRange{ + // left: &matchersExpr{ + // matchers: []*labels.Matcher{ + // mustNewMatcher(labels.MatchEqual, "foo", "bar"), + // }, + // }, + // interval: 5 * time.Minute, + // }, OpRangeTypeCount, nil, nil), + // "sum", + // &grouping{ + // without: false, + // groups: []string{"foo"}, + // }, + // nil, + // ), + // mustNewBinOpExpr( + // OpTypeDiv, + // BinOpOptions{}, + // mustNewVectorAggregationExpr(newRangeAggregationExpr( + // &logRange{ + // left: &matchersExpr{ + // matchers: []*labels.Matcher{ + // mustNewMatcher(labels.MatchEqual, "foo", "bar"), + // }, + // }, + // interval: 5 * time.Minute, + // }, OpRangeTypeCount, nil, nil), + // "sum", + // &grouping{ + // without: false, + // groups: []string{"foo"}, + // }, + // nil, + // ), + // mustNewVectorAggregationExpr(newRangeAggregationExpr( + // &logRange{ + // left: &matchersExpr{ + // matchers: []*labels.Matcher{ + // mustNewMatcher(labels.MatchEqual, "foo", "bar"), + // }, + // }, + // interval: 5 * time.Minute, + // }, OpRangeTypeCount, nil, nil), + // "sum", + // &grouping{ + // without: false, + // groups: []string{"foo"}, + // }, + // nil, + // ), + // ), + // ), + // }, + // { + // in: `sum by (job) ( + // count_over_time({namespace="tns"} |= "level=error"[5m]) + // / + // count_over_time({namespace="tns"}[5m]) + // )`, + // exp: mustNewVectorAggregationExpr( + // mustNewBinOpExpr(OpTypeDiv, + // BinOpOptions{}, + // newRangeAggregationExpr( + // &logRange{ + // left: newPipelineExpr( + // newMatcherExpr([]*labels.Matcher{ + // mustNewMatcher(labels.MatchEqual, "namespace", "tns"), + // }), + // MultiStageExpr{ + // newLineFilterExpr(nil, labels.MatchEqual, "level=error"), + // }), + // interval: 5 * time.Minute, + // }, OpRangeTypeCount, nil, nil), + // newRangeAggregationExpr( + // &logRange{ + // left: &matchersExpr{ + // matchers: 
[]*labels.Matcher{ + // mustNewMatcher(labels.MatchEqual, "namespace", "tns"), + // }, + // }, + // interval: 5 * time.Minute, + // }, OpRangeTypeCount, nil, nil)), OpTypeSum, &grouping{groups: []string{"job"}}, nil), + // }, + // { + // in: `sum by (job) ( + // count_over_time({namespace="tns"} |= "level=error"[5m]) + // / + // count_over_time({namespace="tns"}[5m]) + // ) * 100`, + // exp: mustNewBinOpExpr(OpTypeMul, BinOpOptions{}, mustNewVectorAggregationExpr( + // mustNewBinOpExpr(OpTypeDiv, + // BinOpOptions{}, + // newRangeAggregationExpr( + // &logRange{ + // left: newPipelineExpr( + // newMatcherExpr([]*labels.Matcher{ + // mustNewMatcher(labels.MatchEqual, "namespace", "tns"), + // }), + // MultiStageExpr{ + // newLineFilterExpr(nil, labels.MatchEqual, "level=error"), + // }), + // interval: 5 * time.Minute, + // }, OpRangeTypeCount, nil, nil), + // newRangeAggregationExpr( + // &logRange{ + // left: &matchersExpr{ + // matchers: []*labels.Matcher{ + // mustNewMatcher(labels.MatchEqual, "namespace", "tns"), + // }, + // }, + // interval: 5 * time.Minute, + // }, OpRangeTypeCount, nil, nil)), OpTypeSum, &grouping{groups: []string{"job"}}, nil), + // mustNewLiteralExpr("100", false), + // ), + // }, + // { + // // reduces binop with two literalExprs + // in: `sum(count_over_time({foo="bar"}[5m])) by (foo) + 1 / 2`, + // exp: mustNewBinOpExpr( + // OpTypeAdd, + // BinOpOptions{}, + // mustNewVectorAggregationExpr( + // newRangeAggregationExpr( + // &logRange{ + // left: &matchersExpr{ + // matchers: []*labels.Matcher{ + // mustNewMatcher(labels.MatchEqual, "foo", "bar"), + // }, + // }, + // interval: 5 * time.Minute, + // }, OpRangeTypeCount, nil, nil), + // "sum", + // &grouping{ + // without: false, + // groups: []string{"foo"}, + // }, + // nil, + // ), + // &literalExpr{value: 0.5}, + // ), + // }, + // { + // // test signs + // in: `1 + -2 / 1`, + // exp: mustNewBinOpExpr( + // OpTypeAdd, + // BinOpOptions{}, + // &literalExpr{value: 1}, + // mustNewBinOpExpr(OpTypeDiv, BinOpOptions{}, &literalExpr{value: -2}, &literalExpr{value: 1}), + // ), + // }, + // { + // // test signs/ops with equal associativity + // in: `1 + 1 - -1`, + // exp: mustNewBinOpExpr( + // OpTypeSub, + // BinOpOptions{}, + // mustNewBinOpExpr(OpTypeAdd, BinOpOptions{}, &literalExpr{value: 1}, &literalExpr{value: 1}), + // &literalExpr{value: -1}, + // ), + // }, { in: `{app="foo"} |= "bar" | json | latency >= 250ms or ( status_code < 500 and status_code > 200)`, exp: &pipelineExpr{ left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}), - pipeline: MultiPipelineExpr{ + pipeline: MultiStageExpr{ newLineFilterExpr(nil, labels.MatchEqual, "bar"), newLabelParserExpr(OpParserTypeJSON, ""), &labelFilterExpr{ - Filterer: labelfilter.NewOr( - labelfilter.NewDuration(labelfilter.FilterGreaterThanOrEqual, "latency", 250*time.Millisecond), - labelfilter.NewAnd( - labelfilter.NewNumeric(labelfilter.FilterLesserThan, "status_code", 500.0), - labelfilter.NewNumeric(labelfilter.FilterGreaterThan, "status_code", 200.0), + LabelFilterer: log.NewOrLabelFilter( + log.NewDurationLabelFilter(log.LabelFilterGreaterThanOrEqual, "latency", 250*time.Millisecond), + log.NewAndLabelFilter( + log.NewNumericLabelFilter(log.LabelFilterLesserThan, "status_code", 500.0), + log.NewNumericLabelFilter(log.LabelFilterGreaterThan, "status_code", 200.0), ), ), }, @@ -948,16 +948,16 @@ func TestParse(t *testing.T) { in: `{app="foo"} |= "bar" | json | (duration > 1s or status!= 200) and method!="POST"`, exp: 
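// The cases from here on exercise the label-filter constructors that this
// patch moves from the labelfilter package into pkg/logql/log. As a minimal
// sketch, using only the exported names visible in this diff, the filter
// `(duration > 1s or status != 200) and method != "POST"` from the case above
// composes and evaluates like so:
//
//	f := log.NewAndLabelFilter(
//		log.NewOrLabelFilter(
//			log.NewDurationLabelFilter(log.LabelFilterGreaterThan, "duration", 1*time.Second),
//			log.NewNumericLabelFilter(log.LabelFilterNotEqual, "status", 200.0),
//		),
//		log.NewStringLabelFilter(mustNewMatcher(labels.MatchNotEqual, "method", "POST")),
//	)
//	// Process reports whether the current label set passes the filter.
//	_, ok := f.Process(nil, log.Labels{"duration": "1.5s", "status": "500", "method": "GET"})
//	// ok == true: duration > 1s, status != 200, and method != "POST".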
&pipelineExpr{ left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}), - pipeline: MultiPipelineExpr{ + pipeline: MultiStageExpr{ newLineFilterExpr(nil, labels.MatchEqual, "bar"), newLabelParserExpr(OpParserTypeJSON, ""), &labelFilterExpr{ - Filterer: labelfilter.NewAnd( - labelfilter.NewOr( - labelfilter.NewDuration(labelfilter.FilterGreaterThan, "duration", 1*time.Second), - labelfilter.NewNumeric(labelfilter.FilterNotEqual, "status", 200.0), + LabelFilterer: log.NewAndLabelFilter( + log.NewOrLabelFilter( + log.NewDurationLabelFilter(log.LabelFilterGreaterThan, "duration", 1*time.Second), + log.NewNumericLabelFilter(log.LabelFilterNotEqual, "status", 200.0), ), - labelfilter.NewString(mustNewMatcher(labels.MatchNotEqual, "method", "POST")), + log.NewStringLabelFilter(mustNewMatcher(labels.MatchNotEqual, "method", "POST")), ), }, }, @@ -967,16 +967,16 @@ func TestParse(t *testing.T) { in: `{app="foo"} |= "bar" | json | ( status_code < 500 and status_code > 200) or latency >= 250ms `, exp: &pipelineExpr{ left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}), - pipeline: MultiPipelineExpr{ + pipeline: MultiStageExpr{ newLineFilterExpr(nil, labels.MatchEqual, "bar"), newLabelParserExpr(OpParserTypeJSON, ""), &labelFilterExpr{ - Filterer: labelfilter.NewOr( - labelfilter.NewAnd( - labelfilter.NewNumeric(labelfilter.FilterLesserThan, "status_code", 500.0), - labelfilter.NewNumeric(labelfilter.FilterGreaterThan, "status_code", 200.0), + LabelFilterer: log.NewOrLabelFilter( + log.NewAndLabelFilter( + log.NewNumericLabelFilter(log.LabelFilterLesserThan, "status_code", 500.0), + log.NewNumericLabelFilter(log.LabelFilterGreaterThan, "status_code", 200.0), ), - labelfilter.NewDuration(labelfilter.FilterGreaterThanOrEqual, "latency", 250*time.Millisecond), + log.NewDurationLabelFilter(log.LabelFilterGreaterThanOrEqual, "latency", 250*time.Millisecond), ), }, }, @@ -986,16 +986,16 @@ func TestParse(t *testing.T) { in: `{app="foo"} |= "bar" | json | ( status_code < 500 or status_code > 200) and latency >= 250ms `, exp: &pipelineExpr{ left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}), - pipeline: MultiPipelineExpr{ + pipeline: MultiStageExpr{ newLineFilterExpr(nil, labels.MatchEqual, "bar"), newLabelParserExpr(OpParserTypeJSON, ""), &labelFilterExpr{ - Filterer: labelfilter.NewAnd( - labelfilter.NewOr( - labelfilter.NewNumeric(labelfilter.FilterLesserThan, "status_code", 500.0), - labelfilter.NewNumeric(labelfilter.FilterGreaterThan, "status_code", 200.0), + LabelFilterer: log.NewAndLabelFilter( + log.NewOrLabelFilter( + log.NewNumericLabelFilter(log.LabelFilterLesserThan, "status_code", 500.0), + log.NewNumericLabelFilter(log.LabelFilterGreaterThan, "status_code", 200.0), ), - labelfilter.NewDuration(labelfilter.FilterGreaterThanOrEqual, "latency", 250*time.Millisecond), + log.NewDurationLabelFilter(log.LabelFilterGreaterThanOrEqual, "latency", 250*time.Millisecond), ), }, }, @@ -1005,15 +1005,15 @@ func TestParse(t *testing.T) { in: `{app="foo"} |= "bar" | json | status_code < 500 or status_code > 200 and latency >= 250ms `, exp: &pipelineExpr{ left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}), - pipeline: MultiPipelineExpr{ + pipeline: MultiStageExpr{ newLineFilterExpr(nil, labels.MatchEqual, "bar"), newLabelParserExpr(OpParserTypeJSON, ""), &labelFilterExpr{ - Filterer: labelfilter.NewOr( - 
labelfilter.NewNumeric(labelfilter.FilterLesserThan, "status_code", 500.0), - labelfilter.NewAnd( - labelfilter.NewNumeric(labelfilter.FilterGreaterThan, "status_code", 200.0), - labelfilter.NewDuration(labelfilter.FilterGreaterThanOrEqual, "latency", 250*time.Millisecond), + LabelFilterer: log.NewOrLabelFilter( + log.NewNumericLabelFilter(log.LabelFilterLesserThan, "status_code", 500.0), + log.NewAndLabelFilter( + log.NewNumericLabelFilter(log.LabelFilterGreaterThan, "status_code", 200.0), + log.NewDurationLabelFilter(log.LabelFilterGreaterThanOrEqual, "latency", 250*time.Millisecond), ), ), }, @@ -1025,26 +1025,26 @@ func TestParse(t *testing.T) { | foo="bar" buzz!="blip", blop=~"boop" or fuzz==5`, exp: &pipelineExpr{ left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}), - pipeline: MultiPipelineExpr{ + pipeline: MultiStageExpr{ newLineFilterExpr(nil, labels.MatchEqual, "bar"), newLabelParserExpr(OpParserTypeJSON, ""), &labelFilterExpr{ - Filterer: labelfilter.NewOr( - labelfilter.NewDuration(labelfilter.FilterGreaterThanOrEqual, "latency", 250*time.Millisecond), - labelfilter.NewAnd( - labelfilter.NewNumeric(labelfilter.FilterLesserThan, "status_code", 500.0), - labelfilter.NewNumeric(labelfilter.FilterGreaterThan, "status_code", 200.0), + LabelFilterer: log.NewOrLabelFilter( + log.NewDurationLabelFilter(log.LabelFilterGreaterThanOrEqual, "latency", 250*time.Millisecond), + log.NewAndLabelFilter( + log.NewNumericLabelFilter(log.LabelFilterLesserThan, "status_code", 500.0), + log.NewNumericLabelFilter(log.LabelFilterGreaterThan, "status_code", 200.0), ), ), }, &labelFilterExpr{ - Filterer: labelfilter.NewAnd( - labelfilter.NewString(mustNewMatcher(labels.MatchEqual, "foo", "bar")), - labelfilter.NewAnd( - labelfilter.NewString(mustNewMatcher(labels.MatchNotEqual, "buzz", "blip")), - labelfilter.NewOr( - labelfilter.NewString(mustNewMatcher(labels.MatchRegexp, "blop", "boop")), - labelfilter.NewNumeric(labelfilter.FilterEqual, "fuzz", 5), + LabelFilterer: log.NewAndLabelFilter( + log.NewStringLabelFilter(mustNewMatcher(labels.MatchEqual, "foo", "bar")), + log.NewAndLabelFilter( + log.NewStringLabelFilter(mustNewMatcher(labels.MatchNotEqual, "buzz", "blip")), + log.NewOrLabelFilter( + log.NewStringLabelFilter(mustNewMatcher(labels.MatchRegexp, "blop", "boop")), + log.NewNumericLabelFilter(log.LabelFilterEqual, "fuzz", 5), ), ), ), @@ -1056,7 +1056,7 @@ func TestParse(t *testing.T) { in: `{app="foo"} |= "bar" | line_format "blip{{ .foo }}blop"`, exp: &pipelineExpr{ left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}), - pipeline: MultiPipelineExpr{ + pipeline: MultiStageExpr{ newLineFilterExpr(nil, labels.MatchEqual, "bar"), newLineFmtExpr("blip{{ .foo }}blop"), }, @@ -1067,15 +1067,15 @@ func TestParse(t *testing.T) { | line_format "blip{{ .foo }}blop {{.status_code}}"`, exp: &pipelineExpr{ left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}), - pipeline: MultiPipelineExpr{ + pipeline: MultiStageExpr{ newLineFilterExpr(nil, labels.MatchEqual, "bar"), newLabelParserExpr(OpParserTypeJSON, ""), &labelFilterExpr{ - Filterer: labelfilter.NewOr( - labelfilter.NewDuration(labelfilter.FilterGreaterThanOrEqual, "latency", 250*time.Millisecond), - labelfilter.NewAnd( - labelfilter.NewNumeric(labelfilter.FilterLesserThan, "status_code", 500.0), - labelfilter.NewNumeric(labelfilter.FilterGreaterThan, "status_code", 200.0), + LabelFilterer: log.NewOrLabelFilter( + 
log.NewDurationLabelFilter(log.LabelFilterGreaterThanOrEqual, "latency", 250*time.Millisecond), + log.NewAndLabelFilter( + log.NewNumericLabelFilter(log.LabelFilterLesserThan, "status_code", 500.0), + log.NewNumericLabelFilter(log.LabelFilterGreaterThan, "status_code", 200.0), ), ), }, @@ -1088,22 +1088,22 @@ func TestParse(t *testing.T) { | line_format "blip{{ .foo }}blop {{.status_code}}" | label_format foo=bar,status_code="buzz{{.bar}}"`, exp: &pipelineExpr{ left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}), - pipeline: MultiPipelineExpr{ + pipeline: MultiStageExpr{ newLineFilterExpr(nil, labels.MatchEqual, "bar"), newLabelParserExpr(OpParserTypeJSON, ""), &labelFilterExpr{ - Filterer: labelfilter.NewOr( - labelfilter.NewDuration(labelfilter.FilterGreaterThanOrEqual, "latency", 250*time.Millisecond), - labelfilter.NewAnd( - labelfilter.NewNumeric(labelfilter.FilterLesserThan, "status_code", 500.0), - labelfilter.NewNumeric(labelfilter.FilterGreaterThan, "status_code", 200.0), + LabelFilterer: log.NewOrLabelFilter( + log.NewDurationLabelFilter(log.LabelFilterGreaterThanOrEqual, "latency", 250*time.Millisecond), + log.NewAndLabelFilter( + log.NewNumericLabelFilter(log.LabelFilterLesserThan, "status_code", 500.0), + log.NewNumericLabelFilter(log.LabelFilterGreaterThan, "status_code", 200.0), ), ), }, newLineFmtExpr("blip{{ .foo }}blop {{.status_code}}"), - newLabelFmtExpr([]labelFmt{ - newRenameLabelFmt("foo", "bar"), - newTemplateLabelFmt("status_code", "buzz{{.bar}}"), + newLabelFmtExpr([]log.LabelFmt{ + log.NewRenameLabelFmt("foo", "bar"), + log.NewTemplateLabelFmt("status_code", "buzz{{.bar}}"), }), }, }, @@ -1114,22 +1114,22 @@ func TestParse(t *testing.T) { exp: newRangeAggregationExpr( newLogRange(&pipelineExpr{ left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}), - pipeline: MultiPipelineExpr{ + pipeline: MultiStageExpr{ newLineFilterExpr(nil, labels.MatchEqual, "bar"), newLabelParserExpr(OpParserTypeJSON, ""), &labelFilterExpr{ - Filterer: labelfilter.NewOr( - labelfilter.NewDuration(labelfilter.FilterGreaterThanOrEqual, "latency", 250*time.Millisecond), - labelfilter.NewAnd( - labelfilter.NewNumeric(labelfilter.FilterLesserThan, "status_code", 500.0), - labelfilter.NewNumeric(labelfilter.FilterGreaterThan, "status_code", 200.0), + LabelFilterer: log.NewOrLabelFilter( + log.NewDurationLabelFilter(log.LabelFilterGreaterThanOrEqual, "latency", 250*time.Millisecond), + log.NewAndLabelFilter( + log.NewNumericLabelFilter(log.LabelFilterLesserThan, "status_code", 500.0), + log.NewNumericLabelFilter(log.LabelFilterGreaterThan, "status_code", 200.0), ), ), }, newLineFmtExpr("blip{{ .foo }}blop {{.status_code}}"), - newLabelFmtExpr([]labelFmt{ - newRenameLabelFmt("foo", "bar"), - newTemplateLabelFmt("status_code", "buzz{{.bar}}"), + newLabelFmtExpr([]log.LabelFmt{ + log.NewRenameLabelFmt("foo", "bar"), + log.NewTemplateLabelFmt("status_code", "buzz{{.bar}}"), }), }, }, @@ -1154,15 +1154,15 @@ func TestParse(t *testing.T) { in: `{app="foo"} |= "bar" | json | status_code < 500 or status_code > 200 and size >= 2.5KiB `, exp: &pipelineExpr{ left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}), - pipeline: MultiPipelineExpr{ + pipeline: MultiStageExpr{ newLineFilterExpr(nil, labels.MatchEqual, "bar"), newLabelParserExpr(OpParserTypeJSON, ""), &labelFilterExpr{ - Filterer: labelfilter.NewOr( - labelfilter.NewNumeric(labelfilter.FilterLesserThan, "status_code", 500.0), - 
labelfilter.NewAnd( - labelfilter.NewNumeric(labelfilter.FilterGreaterThan, "status_code", 200.0), - labelfilter.NewBytes(labelfilter.FilterGreaterThanOrEqual, "size", 2560), + LabelFilterer: log.NewOrLabelFilter( + log.NewNumericLabelFilter(log.LabelFilterLesserThan, "status_code", 500.0), + log.NewAndLabelFilter( + log.NewNumericLabelFilter(log.LabelFilterGreaterThan, "status_code", 200.0), + log.NewBytesLabelFilter(log.LabelFilterGreaterThanOrEqual, "size", 2560), ), ), }, @@ -1175,22 +1175,22 @@ func TestParse(t *testing.T) { exp: newRangeAggregationExpr( newLogRange(&pipelineExpr{ left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}), - pipeline: MultiPipelineExpr{ + pipeline: MultiStageExpr{ newLineFilterExpr(nil, labels.MatchEqual, "bar"), newLabelParserExpr(OpParserTypeJSON, ""), &labelFilterExpr{ - Filterer: labelfilter.NewOr( - labelfilter.NewDuration(labelfilter.FilterGreaterThanOrEqual, "latency", 250*time.Millisecond), - labelfilter.NewAnd( - labelfilter.NewNumeric(labelfilter.FilterLesserThan, "status_code", 500.0), - labelfilter.NewNumeric(labelfilter.FilterGreaterThan, "status_code", 200.0), + LabelFilterer: log.NewOrLabelFilter( + log.NewDurationLabelFilter(log.LabelFilterGreaterThanOrEqual, "latency", 250*time.Millisecond), + log.NewAndLabelFilter( + log.NewNumericLabelFilter(log.LabelFilterLesserThan, "status_code", 500.0), + log.NewNumericLabelFilter(log.LabelFilterGreaterThan, "status_code", 200.0), ), ), }, newLineFmtExpr("blip{{ .foo }}blop {{.status_code}}"), - newLabelFmtExpr([]labelFmt{ - newRenameLabelFmt("foo", "bar"), - newTemplateLabelFmt("status_code", "buzz{{.bar}}"), + newLabelFmtExpr([]log.LabelFmt{ + log.NewRenameLabelFmt("foo", "bar"), + log.NewTemplateLabelFmt("status_code", "buzz{{.bar}}"), }), }, }, @@ -1204,22 +1204,22 @@ func TestParse(t *testing.T) { exp: newRangeAggregationExpr( newLogRange(&pipelineExpr{ left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}), - pipeline: MultiPipelineExpr{ + pipeline: MultiStageExpr{ newLineFilterExpr(nil, labels.MatchEqual, "bar"), newLabelParserExpr(OpParserTypeJSON, ""), &labelFilterExpr{ - Filterer: labelfilter.NewOr( - labelfilter.NewDuration(labelfilter.FilterGreaterThanOrEqual, "latency", 250*time.Millisecond), - labelfilter.NewAnd( - labelfilter.NewNumeric(labelfilter.FilterLesserThan, "status_code", 500.0), - labelfilter.NewNumeric(labelfilter.FilterGreaterThan, "status_code", 200.0), + LabelFilterer: log.NewOrLabelFilter( + log.NewDurationLabelFilter(log.LabelFilterGreaterThanOrEqual, "latency", 250*time.Millisecond), + log.NewAndLabelFilter( + log.NewNumericLabelFilter(log.LabelFilterLesserThan, "status_code", 500.0), + log.NewNumericLabelFilter(log.LabelFilterGreaterThan, "status_code", 200.0), ), ), }, newLineFmtExpr("blip{{ .foo }}blop {{.status_code}}"), - newLabelFmtExpr([]labelFmt{ - newRenameLabelFmt("foo", "bar"), - newTemplateLabelFmt("status_code", "buzz{{.bar}}"), + newLabelFmtExpr([]log.LabelFmt{ + log.NewRenameLabelFmt("foo", "bar"), + log.NewTemplateLabelFmt("status_code", "buzz{{.bar}}"), }), }, }, @@ -1233,13 +1233,13 @@ func TestParse(t *testing.T) { exp: newRangeAggregationExpr( newLogRange(&pipelineExpr{ left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "namespace", Value: "tns"}}), - pipeline: MultiPipelineExpr{ + pipeline: MultiStageExpr{ newLineFilterExpr(nil, labels.MatchEqual, "level=error"), newLabelParserExpr(OpParserTypeJSON, ""), &labelFilterExpr{ - Filterer: 
labelfilter.NewAnd( - labelfilter.NewNumeric(labelfilter.FilterGreaterThanOrEqual, "foo", 5), - labelfilter.NewDuration(labelfilter.FilterLesserThan, "bar", 25*time.Millisecond), + LabelFilterer: log.NewAndLabelFilter( + log.NewNumericLabelFilter(log.LabelFilterGreaterThanOrEqual, "foo", 5), + log.NewDurationLabelFilter(log.LabelFilterLesserThan, "bar", 25*time.Millisecond), ), }, }, @@ -1254,13 +1254,13 @@ func TestParse(t *testing.T) { exp: newRangeAggregationExpr( newLogRange(&pipelineExpr{ left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "namespace", Value: "tns"}}), - pipeline: MultiPipelineExpr{ + pipeline: MultiStageExpr{ newLineFilterExpr(nil, labels.MatchEqual, "level=error"), newLabelParserExpr(OpParserTypeJSON, ""), &labelFilterExpr{ - Filterer: labelfilter.NewAnd( - labelfilter.NewNumeric(labelfilter.FilterEqual, "foo", 5), - labelfilter.NewDuration(labelfilter.FilterLesserThan, "bar", 25*time.Millisecond), + LabelFilterer: log.NewAndLabelFilter( + log.NewNumericLabelFilter(log.LabelFilterEqual, "foo", 5), + log.NewDurationLabelFilter(log.LabelFilterLesserThan, "bar", 25*time.Millisecond), ), }, }, @@ -1275,7 +1275,7 @@ func TestParse(t *testing.T) { exp: newRangeAggregationExpr( newLogRange(&pipelineExpr{ left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}), - pipeline: MultiPipelineExpr{ + pipeline: MultiStageExpr{ newLineFilterExpr(nil, labels.MatchEqual, "bar"), }, }, @@ -1300,22 +1300,22 @@ func TestParse(t *testing.T) { exp: newRangeAggregationExpr( newLogRange(&pipelineExpr{ left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}), - pipeline: MultiPipelineExpr{ + pipeline: MultiStageExpr{ newLineFilterExpr(nil, labels.MatchEqual, "bar"), newLabelParserExpr(OpParserTypeJSON, ""), &labelFilterExpr{ - Filterer: labelfilter.NewOr( - labelfilter.NewDuration(labelfilter.FilterGreaterThanOrEqual, "latency", 250*time.Millisecond), - labelfilter.NewAnd( - labelfilter.NewNumeric(labelfilter.FilterLesserThan, "status_code", 500.0), - labelfilter.NewNumeric(labelfilter.FilterGreaterThan, "status_code", 200.0), + LabelFilterer: log.NewOrLabelFilter( + log.NewDurationLabelFilter(log.LabelFilterGreaterThanOrEqual, "latency", 250*time.Millisecond), + log.NewAndLabelFilter( + log.NewNumericLabelFilter(log.LabelFilterLesserThan, "status_code", 500.0), + log.NewNumericLabelFilter(log.LabelFilterGreaterThan, "status_code", 200.0), ), ), }, newLineFmtExpr("blip{{ .foo }}blop {{.status_code}}"), - newLabelFmtExpr([]labelFmt{ - newRenameLabelFmt("foo", "bar"), - newTemplateLabelFmt("status_code", "buzz{{.bar}}"), + newLabelFmtExpr([]log.LabelFmt{ + log.NewRenameLabelFmt("foo", "bar"), + log.NewTemplateLabelFmt("status_code", "buzz{{.bar}}"), }), }, }, @@ -1330,28 +1330,28 @@ func TestParse(t *testing.T) { exp: newRangeAggregationExpr( newLogRange(&pipelineExpr{ left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}), - pipeline: MultiPipelineExpr{ + pipeline: MultiStageExpr{ newLineFilterExpr(nil, labels.MatchEqual, "bar"), newLabelParserExpr(OpParserTypeJSON, ""), &labelFilterExpr{ - Filterer: labelfilter.NewOr( - labelfilter.NewDuration(labelfilter.FilterGreaterThanOrEqual, "latency", 250*time.Millisecond), - labelfilter.NewAnd( - labelfilter.NewNumeric(labelfilter.FilterLesserThan, "status_code", 500.0), - labelfilter.NewNumeric(labelfilter.FilterGreaterThan, "status_code", 200.0), + LabelFilterer: log.NewOrLabelFilter( + 
log.NewDurationLabelFilter(log.LabelFilterGreaterThanOrEqual, "latency", 250*time.Millisecond), + log.NewAndLabelFilter( + log.NewNumericLabelFilter(log.LabelFilterLesserThan, "status_code", 500.0), + log.NewNumericLabelFilter(log.LabelFilterGreaterThan, "status_code", 200.0), ), ), }, newLineFmtExpr("blip{{ .foo }}blop {{.status_code}}"), - newLabelFmtExpr([]labelFmt{ - newRenameLabelFmt("foo", "bar"), - newTemplateLabelFmt("status_code", "buzz{{.bar}}"), + newLabelFmtExpr([]log.LabelFmt{ + log.NewRenameLabelFmt("foo", "bar"), + log.NewTemplateLabelFmt("status_code", "buzz{{.bar}}"), }), }, }, 5*time.Minute, newUnwrapExpr("foo", "")), - OpRangeTypeQuantile, nil, newString("0.99998"), + OpRangeTypeQuantile, nil, NewStringLabelFilter("0.99998"), ), }, { @@ -1360,28 +1360,28 @@ func TestParse(t *testing.T) { exp: newRangeAggregationExpr( newLogRange(&pipelineExpr{ left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}), - pipeline: MultiPipelineExpr{ + pipeline: MultiStageExpr{ newLineFilterExpr(nil, labels.MatchEqual, "bar"), newLabelParserExpr(OpParserTypeJSON, ""), &labelFilterExpr{ - Filterer: labelfilter.NewOr( - labelfilter.NewDuration(labelfilter.FilterGreaterThanOrEqual, "latency", 250*time.Millisecond), - labelfilter.NewAnd( - labelfilter.NewNumeric(labelfilter.FilterLesserThan, "status_code", 500.0), - labelfilter.NewNumeric(labelfilter.FilterGreaterThan, "status_code", 200.0), + LabelFilterer: log.NewOrLabelFilter( + log.NewDurationLabelFilter(log.LabelFilterGreaterThanOrEqual, "latency", 250*time.Millisecond), + log.NewAndLabelFilter( + log.NewNumericLabelFilter(log.LabelFilterLesserThan, "status_code", 500.0), + log.NewNumericLabelFilter(log.LabelFilterGreaterThan, "status_code", 200.0), ), ), }, newLineFmtExpr("blip{{ .foo }}blop {{.status_code}}"), - newLabelFmtExpr([]labelFmt{ - newRenameLabelFmt("foo", "bar"), - newTemplateLabelFmt("status_code", "buzz{{.bar}}"), + newLabelFmtExpr([]log.LabelFmt{ + log.NewRenameLabelFmt("foo", "bar"), + log.NewTemplateLabelFmt("status_code", "buzz{{.bar}}"), }), }, }, 5*time.Minute, newUnwrapExpr("foo", "")), - OpRangeTypeQuantile, &grouping{without: false, groups: []string{"namespace", "instance"}}, newString("0.99998"), + OpRangeTypeQuantile, &grouping{without: false, groups: []string{"namespace", "instance"}}, NewStringLabelFilter("0.99998"), ), }, { @@ -1394,28 +1394,28 @@ func TestParse(t *testing.T) { newRangeAggregationExpr( newLogRange(&pipelineExpr{ left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}), - pipeline: MultiPipelineExpr{ + pipeline: MultiStageExpr{ newLineFilterExpr(nil, labels.MatchEqual, "bar"), newLabelParserExpr(OpParserTypeJSON, ""), &labelFilterExpr{ - Filterer: labelfilter.NewOr( - labelfilter.NewDuration(labelfilter.FilterGreaterThanOrEqual, "latency", 250*time.Millisecond), - labelfilter.NewAnd( - labelfilter.NewNumeric(labelfilter.FilterLesserThan, "status_code", 500.0), - labelfilter.NewNumeric(labelfilter.FilterGreaterThan, "status_code", 200.0), + LabelFilterer: log.NewOrLabelFilter( + log.NewDurationLabelFilter(log.LabelFilterGreaterThanOrEqual, "latency", 250*time.Millisecond), + log.NewAndLabelFilter( + log.NewNumericLabelFilter(log.LabelFilterLesserThan, "status_code", 500.0), + log.NewNumericLabelFilter(log.LabelFilterGreaterThan, "status_code", 200.0), ), ), }, newLineFmtExpr("blip{{ .foo }}blop {{.status_code}}"), - newLabelFmtExpr([]labelFmt{ - newRenameLabelFmt("foo", "bar"), - newTemplateLabelFmt("status_code", 
"buzz{{.bar}}"), + newLabelFmtExpr([]log.LabelFmt{ + log.NewRenameLabelFmt("foo", "bar"), + log.NewTemplateLabelFmt("status_code", "buzz{{.bar}}"), }), }, }, 5*time.Minute, newUnwrapExpr("foo", "")), - OpRangeTypeQuantile, &grouping{without: false, groups: []string{"namespace", "instance"}}, newString("0.99998"), + OpRangeTypeQuantile, &grouping{without: false, groups: []string{"namespace", "instance"}}, NewStringLabelFilter("0.99998"), ), OpTypeSum, &grouping{without: true, groups: []string{"foo"}}, @@ -1432,28 +1432,28 @@ func TestParse(t *testing.T) { newRangeAggregationExpr( newLogRange(&pipelineExpr{ left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}), - pipeline: MultiPipelineExpr{ + pipeline: MultiStageExpr{ newLineFilterExpr(nil, labels.MatchEqual, "bar"), newLabelParserExpr(OpParserTypeJSON, ""), &labelFilterExpr{ - Filterer: labelfilter.NewOr( - labelfilter.NewDuration(labelfilter.FilterGreaterThanOrEqual, "latency", 250*time.Millisecond), - labelfilter.NewAnd( - labelfilter.NewNumeric(labelfilter.FilterLesserThan, "status_code", 500.0), - labelfilter.NewNumeric(labelfilter.FilterGreaterThan, "status_code", 200.0), + LabelFilterer: log.NewOrLabelFilter( + log.NewDurationLabelFilter(log.LabelFilterGreaterThanOrEqual, "latency", 250*time.Millisecond), + log.NewAndLabelFilter( + log.NewNumericLabelFilter(log.LabelFilterLesserThan, "status_code", 500.0), + log.NewNumericLabelFilter(log.LabelFilterGreaterThan, "status_code", 200.0), ), ), }, newLineFmtExpr("blip{{ .foo }}blop {{.status_code}}"), - newLabelFmtExpr([]labelFmt{ - newRenameLabelFmt("foo", "bar"), - newTemplateLabelFmt("status_code", "buzz{{.bar}}"), + newLabelFmtExpr([]log.LabelFmt{ + log.NewRenameLabelFmt("foo", "bar"), + log.NewTemplateLabelFmt("status_code", "buzz{{.bar}}"), }), }, }, 5*time.Minute, newUnwrapExpr("foo", OpConvDuration)), - OpRangeTypeQuantile, &grouping{without: false, groups: []string{"namespace", "instance"}}, newString("0.99998"), + OpRangeTypeQuantile, &grouping{without: false, groups: []string{"namespace", "instance"}}, NewStringLabelFilter("0.99998"), ), OpTypeSum, &grouping{without: true, groups: []string{"foo"}}, @@ -1470,28 +1470,28 @@ func TestParse(t *testing.T) { newRangeAggregationExpr( newLogRange(&pipelineExpr{ left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}), - pipeline: MultiPipelineExpr{ + pipeline: MultiStageExpr{ newLineFilterExpr(nil, labels.MatchEqual, "bar"), newLabelParserExpr(OpParserTypeJSON, ""), &labelFilterExpr{ - Filterer: labelfilter.NewOr( - labelfilter.NewDuration(labelfilter.FilterGreaterThanOrEqual, "latency", 250*time.Millisecond), - labelfilter.NewAnd( - labelfilter.NewNumeric(labelfilter.FilterLesserThan, "status_code", 500.0), - labelfilter.NewNumeric(labelfilter.FilterGreaterThan, "status_code", 200.0), + LabelFilterer: log.NewOrLabelFilter( + log.NewDurationLabelFilter(log.LabelFilterGreaterThanOrEqual, "latency", 250*time.Millisecond), + log.NewAndLabelFilter( + log.NewNumericLabelFilter(log.LabelFilterLesserThan, "status_code", 500.0), + log.NewNumericLabelFilter(log.LabelFilterGreaterThan, "status_code", 200.0), ), ), }, newLineFmtExpr("blip{{ .foo }}blop {{.status_code}}"), - newLabelFmtExpr([]labelFmt{ - newRenameLabelFmt("foo", "bar"), - newTemplateLabelFmt("status_code", "buzz{{.bar}}"), + newLabelFmtExpr([]log.LabelFmt{ + log.NewRenameLabelFmt("foo", "bar"), + log.NewTemplateLabelFmt("status_code", "buzz{{.bar}}"), }), }, }, 5*time.Minute, newUnwrapExpr("foo", 
OpConvDuration)), - OpRangeTypeQuantile, &grouping{without: false, groups: []string{"namespace", "instance"}}, newString(".99998"), + OpRangeTypeQuantile, &grouping{without: false, groups: []string{"namespace", "instance"}}, NewStringLabelFilter(".99998"), ), OpTypeSum, &grouping{without: true, groups: []string{"foo"}}, @@ -1508,28 +1508,28 @@ func TestParse(t *testing.T) { newRangeAggregationExpr( newLogRange(&pipelineExpr{ left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}), - pipeline: MultiPipelineExpr{ + pipeline: MultiStageExpr{ newLineFilterExpr(nil, labels.MatchEqual, "bar"), newLabelParserExpr(OpParserTypeJSON, ""), &labelFilterExpr{ - Filterer: labelfilter.NewOr( - labelfilter.NewDuration(labelfilter.FilterGreaterThanOrEqual, "latency", 250*time.Millisecond), - labelfilter.NewAnd( - labelfilter.NewNumeric(labelfilter.FilterLesserThan, "status_code", 500.0), - labelfilter.NewNumeric(labelfilter.FilterGreaterThan, "status_code", 200.0), + LabelFilterer: log.NewOrLabelFilter( + log.NewDurationLabelFilter(log.LabelFilterGreaterThanOrEqual, "latency", 250*time.Millisecond), + log.NewAndLabelFilter( + log.NewNumericLabelFilter(log.LabelFilterLesserThan, "status_code", 500.0), + log.NewNumericLabelFilter(log.LabelFilterGreaterThan, "status_code", 200.0), ), ), }, newLineFmtExpr("blip{{ .foo }}blop {{.status_code}}"), - newLabelFmtExpr([]labelFmt{ - newRenameLabelFmt("foo", "bar"), - newTemplateLabelFmt("status_code", "buzz{{.bar}}"), + newLabelFmtExpr([]log.LabelFmt{ + log.NewRenameLabelFmt("foo", "bar"), + log.NewTemplateLabelFmt("status_code", "buzz{{.bar}}"), }), }, }, 5*time.Minute, newUnwrapExpr("foo", OpConvDurationSeconds)), - OpRangeTypeQuantile, &grouping{without: false, groups: []string{"namespace", "instance"}}, newString(".99998"), + OpRangeTypeQuantile, &grouping{without: false, groups: []string{"namespace", "instance"}}, NewStringLabelFilter(".99998"), ), OpTypeSum, &grouping{without: true, groups: []string{"foo"}}, @@ -1546,32 +1546,32 @@ func TestParse(t *testing.T) { newRangeAggregationExpr( newLogRange(&pipelineExpr{ left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}), - pipeline: MultiPipelineExpr{ + pipeline: MultiStageExpr{ newLineFilterExpr(nil, labels.MatchEqual, "bar"), newLabelParserExpr(OpParserTypeJSON, ""), &labelFilterExpr{ - Filterer: labelfilter.NewOr( - labelfilter.NewDuration(labelfilter.FilterGreaterThanOrEqual, "latency", 250*time.Millisecond), - labelfilter.NewAnd( - labelfilter.NewNumeric(labelfilter.FilterLesserThan, "status_code", 500.0), - labelfilter.NewNumeric(labelfilter.FilterGreaterThan, "status_code", 200.0), + LabelFilterer: log.NewOrLabelFilter( + log.NewDurationLabelFilter(log.LabelFilterGreaterThanOrEqual, "latency", 250*time.Millisecond), + log.NewAndLabelFilter( + log.NewNumericLabelFilter(log.LabelFilterLesserThan, "status_code", 500.0), + log.NewNumericLabelFilter(log.LabelFilterGreaterThan, "status_code", 200.0), ), ), }, newLineFmtExpr("blip{{ .foo }}blop {{.status_code}}"), - newLabelFmtExpr([]labelFmt{ - newRenameLabelFmt("foo", "bar"), - newTemplateLabelFmt("status_code", "buzz{{.bar}}"), + newLabelFmtExpr([]log.LabelFmt{ + log.NewRenameLabelFmt("foo", "bar"), + log.NewTemplateLabelFmt("status_code", "buzz{{.bar}}"), }), }, }, 5*time.Minute, newUnwrapExpr("foo", "")), - OpRangeTypeQuantile, &grouping{without: false, groups: []string{"namespace", "instance"}}, newString("0.99998"), + OpRangeTypeQuantile, &grouping{without: false, groups: 
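// OpConvDuration and OpConvDurationSeconds are the conversion operations a
// query can attach to `unwrap`. The `in:` strings for these cases live in
// unchanged context lines and are not shown here, but assuming the documented
// LogQL unwrap syntax they have roughly this shape:
//
//	sum without (foo) (
//	  quantile_over_time(.99998,
//	    {app="foo"} |= "bar" | json | unwrap duration(foo) [5m]
//	  ) by (namespace,instance)
//	)
//
// where duration(foo) parses label values such as "1.5s" into a float number
// of seconds before the aggregation runs.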
[]string{"namespace", "instance"}}, NewStringLabelFilter("0.99998"), ), OpTypeTopK, nil, - newString("10"), + NewStringLabelFilter("10"), ), }, { @@ -1593,28 +1593,28 @@ func TestParse(t *testing.T) { newRangeAggregationExpr( newLogRange(&pipelineExpr{ left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}), - pipeline: MultiPipelineExpr{ + pipeline: MultiStageExpr{ newLineFilterExpr(nil, labels.MatchEqual, "bar"), newLabelParserExpr(OpParserTypeJSON, ""), &labelFilterExpr{ - Filterer: labelfilter.NewOr( - labelfilter.NewDuration(labelfilter.FilterGreaterThanOrEqual, "latency", 250*time.Millisecond), - labelfilter.NewAnd( - labelfilter.NewNumeric(labelfilter.FilterLesserThan, "status_code", 500.0), - labelfilter.NewNumeric(labelfilter.FilterGreaterThan, "status_code", 200.0), + LabelFilterer: log.NewOrLabelFilter( + log.NewDurationLabelFilter(log.LabelFilterGreaterThanOrEqual, "latency", 250*time.Millisecond), + log.NewAndLabelFilter( + log.NewNumericLabelFilter(log.LabelFilterLesserThan, "status_code", 500.0), + log.NewNumericLabelFilter(log.LabelFilterGreaterThan, "status_code", 200.0), ), ), }, newLineFmtExpr("blip{{ .foo }}blop {{.status_code}}"), - newLabelFmtExpr([]labelFmt{ - newRenameLabelFmt("foo", "bar"), - newTemplateLabelFmt("status_code", "buzz{{.bar}}"), + newLabelFmtExpr([]log.LabelFmt{ + log.NewRenameLabelFmt("foo", "bar"), + log.NewTemplateLabelFmt("status_code", "buzz{{.bar}}"), }), }, }, 5*time.Minute, newUnwrapExpr("foo", "")), - OpRangeTypeQuantile, &grouping{without: false, groups: []string{"namespace", "instance"}}, newString("0.99998"), + OpRangeTypeQuantile, &grouping{without: false, groups: []string{"namespace", "instance"}}, NewStringLabelFilter("0.99998"), ), OpTypeSum, &grouping{groups: []string{"foo", "bar"}}, @@ -1624,22 +1624,22 @@ func TestParse(t *testing.T) { newRangeAggregationExpr( newLogRange(&pipelineExpr{ left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}), - pipeline: MultiPipelineExpr{ + pipeline: MultiStageExpr{ newLineFilterExpr(nil, labels.MatchEqual, "bar"), newLabelParserExpr(OpParserTypeJSON, ""), &labelFilterExpr{ - Filterer: labelfilter.NewOr( - labelfilter.NewDuration(labelfilter.FilterGreaterThanOrEqual, "latency", 250*time.Millisecond), - labelfilter.NewAnd( - labelfilter.NewNumeric(labelfilter.FilterLesserThan, "status_code", 500.0), - labelfilter.NewNumeric(labelfilter.FilterGreaterThan, "status_code", 200.0), + LabelFilterer: log.NewOrLabelFilter( + log.NewDurationLabelFilter(log.LabelFilterGreaterThanOrEqual, "latency", 250*time.Millisecond), + log.NewAndLabelFilter( + log.NewNumericLabelFilter(log.LabelFilterLesserThan, "status_code", 500.0), + log.NewNumericLabelFilter(log.LabelFilterGreaterThan, "status_code", 200.0), ), ), }, newLineFmtExpr("blip{{ .foo }}blop {{.status_code}}"), - newLabelFmtExpr([]labelFmt{ - newRenameLabelFmt("foo", "bar"), - newTemplateLabelFmt("status_code", "buzz{{.bar}}"), + newLabelFmtExpr([]log.LabelFmt{ + log.NewRenameLabelFmt("foo", "bar"), + log.NewTemplateLabelFmt("status_code", "buzz{{.bar}}"), }), }, }, @@ -1932,7 +1932,7 @@ func TestIsParseError(t *testing.T) { } func Test_PipelineCombined(t *testing.T) { - query := `{job="cortex-ops/query-frontend"} |= "logging.go" | logfmt | line_format "{{.msg}}" | regexp "(?P<method>\\w+) (?P<path>[\\w|/]+) \\((?P<status>\\d+?)\\) (?P<duration>.*)" | (duration > 1s or status!=200) and method!="POST" | line_format "{{.duration}}|{{.method}}|{{.status}}"` + query := `{job="cortex-ops/query-frontend"} |= 
"logging.go" | logfmt | line_format "{{.msg}}" | regexp "(?P<method>\\w+) (?P<path>[\\w|/]+) \\((?P<status>\\d+?)\\) (?P<duration>.*)" | (duration > 1s or status!=200) and method="POST" | line_format "{{.duration}}|{{.method}}|{{.status}}"` expr, err := ParseLogSelector(query) require.Nil(t, err) @@ -1941,7 +1941,7 @@ func Test_PipelineCombined(t *testing.T) { require.Nil(t, err) _, lbs, ok := p.Process([]byte(`level=debug ts=2020-10-02T10:10:42.092268913Z caller=logging.go:66 traceID=a9d4d8a928d8db1 msg="POST /api/prom/api/v1/query_range (200) 1.5s"`), labels.Labels{}) - require.False(t, ok) + require.True(t, ok) require.Equal( t, labels.Labels{labels.Label{Name: "caller", Value: "logging.go:66"}, labels.Label{Name: "duration", Value: "1.5s"}, labels.Label{Name: "level", Value: "debug"}, labels.Label{Name: "method", Value: "POST"}, labels.Label{Name: "msg", Value: "POST /api/prom/api/v1/query_range (200) 1.5s"}, labels.Label{Name: "path", Value: "/api/prom/api/v1/query_range"}, labels.Label{Name: "status", Value: "200"}, labels.Label{Name: "traceID", Value: "a9d4d8a928d8db1"}, labels.Label{Name: "ts", Value: "2020-10-02T10:10:42.092268913Z"}}, diff --git a/pkg/logql/shardmapper_test.go b/pkg/logql/shardmapper_test.go index 745b63300c6dd..454fd71cc67cd 100644 --- a/pkg/logql/shardmapper_test.go +++ b/pkg/logql/shardmapper_test.go @@ -238,7 +238,7 @@ func TestMapping(t *testing.T) { }, LogSelectorExpr: newPipelineExpr( newMatcherExpr([]*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}), - MultiPipelineExpr{ + MultiStageExpr{ newLineFilterExpr(nil, labels.MatchEqual, "error"), }, ), @@ -251,7 +251,7 @@ func TestMapping(t *testing.T) { }, LogSelectorExpr: newPipelineExpr( newMatcherExpr([]*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}), - MultiPipelineExpr{ + MultiStageExpr{ newLineFilterExpr(nil, labels.MatchEqual, "error"), }, ), diff --git a/pkg/logql/test_utils.go b/pkg/logql/test_utils.go index 50d6eebc39dd1..8ac0534af7f65 100644 --- a/pkg/logql/test_utils.go +++ b/pkg/logql/test_utils.go @@ -3,7 +3,7 @@ package logql import ( "context" "fmt" - "log" + logger "log" "time" "github.com/cortexproject/cortex/pkg/querier/astmapper" @@ -12,6 +12,7 @@ import ( "github.com/grafana/loki/pkg/iter" "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/pkg/logql/log" ) func NewMockQuerier(shards int, streams []logproto.Stream) MockQuerier { @@ -89,7 +90,7 @@ outer: return iter.NewHeapIterator(ctx, streamIters, req.Direction), nil } -func processStream(in []logproto.Stream, pipeline Pipeline) []logproto.Stream { +func processStream(in []logproto.Stream, pipeline log.Pipeline) []logproto.Stream { resByStream := map[string]*logproto.Stream{} for _, stream := range in { @@ -116,16 +117,12 @@ func processStream(in []logproto.Stream, pipeline Pipeline) []logproto.Stream { return streams } -func processSeries(in []logproto.Stream, pipeline Pipeline, ex SampleExtractor) []logproto.Series { +func processSeries(in []logproto.Stream, ex log.SampleExtractor) []logproto.Series { resBySeries := map[string]*logproto.Series{} for _, stream := range in { for _, e := range stream.Entries { - if l, out, ok := pipeline.Process([]byte(e.Line), mustParseLabels(stream.Labels)); ok { - ok, f, lbs := ex.Extract(l, out) - if !ok { - continue - } + if f, lbs, ok := ex.Process([]byte(e.Line), mustParseLabels(stream.Labels)); ok { var s *logproto.Series var found bool s, found = resBySeries[lbs.String()] @@ -152,10 +149,7 @@ func (q MockQuerier) SelectSamples(ctx context.Context, req SelectSampleParams) if err != 
nil { return nil, err } - pipeline, err := selector.Pipeline() - if err != nil { - return nil, err - } + expr, err := req.Expr() if err != nil { return nil, err @@ -196,32 +190,7 @@ outer: matched = append(matched, stream) } - // apply the LineFilter - filtered := processSeries(matched, pipeline, extractor) - // for _, s := range matched { - // var samples []logproto.Sample - // for _, entry := range s.Entries { - // // todo(cyriltovena) - // // if filter == nil || filter.Filter([]byte(entry.Line)) { - // v, ok := extractor.Extract([]byte(entry.Line)) - // if !ok { - // continue - // } - // samples = append(samples, logproto.Sample{ - // Timestamp: entry.Timestamp.UnixNano(), - // Value: v, - // Hash: xxhash.Sum64([]byte(entry.Line)), - // }) - // // } - // } - - // if len(samples) > 0 { - // filtered = append(filtered, logproto.Series{ - // Labels: s.Labels, - // Samples: samples, - // }) - // } - // } + filtered := processSeries(matched, extractor) return iter.NewTimeRangedSampleIterator( iter.NewMultiSeriesIterator(ctx, filtered), @@ -298,7 +267,7 @@ func randomStreams(nStreams, nEntries, nShards int, labelNames []string) (stream func mustParseLabels(s string) labels.Labels { labels, err := parser.ParseMetric(s) if err != nil { - log.Fatalf("Failed to parse %s", s) + logger.Fatalf("Failed to parse %s", s) } return labels diff --git a/pkg/storage/batch.go b/pkg/storage/batch.go index 4a7920022e424..83462839cf764 100644 --- a/pkg/storage/batch.go +++ b/pkg/storage/batch.go @@ -448,7 +448,6 @@ type sampleBatchIterator struct { ctx context.Context metrics *ChunkMetrics matchers []*labels.Matcher - pipeline logql.Pipeline extractor logql.SampleExtractor } @@ -458,7 +457,6 @@ func newSampleBatchIterator( chunks []*LazyChunk, batchSize int, matchers []*labels.Matcher, - pipeline logql.Pipeline, extractor logql.SampleExtractor, start, end time.Time, ) (iter.SampleIterator, error) { @@ -469,7 +467,6 @@ func newSampleBatchIterator( samplebatch := &sampleBatchIterator{ matchers: matchers, - pipeline: pipeline, extractor: extractor, metrics: metrics, ctx: ctx, @@ -523,7 +520,7 @@ func (it *sampleBatchIterator) buildHeapIterator(chks [][]*LazyChunk, from, thro if !chks[i][j].IsValid { continue } - iterator, err := chks[i][j].SampleIterator(it.ctx, from, through, it.pipeline, it.extractor, nextChunk) + iterator, err := chks[i][j].SampleIterator(it.ctx, from, through, it.extractor, nextChunk) if err != nil { return nil, err } diff --git a/pkg/storage/batch_test.go b/pkg/storage/batch_test.go index 485b981fd89e1..8b3c39b2db4b4 100644 --- a/pkg/storage/batch_test.go +++ b/pkg/storage/batch_test.go @@ -1241,7 +1241,7 @@ func Test_newSampleBatchChunkIterator(t *testing.T) { for name, tt := range tests { tt := tt t.Run(name, func(t *testing.T) { - it, err := newSampleBatchIterator(context.Background(), NilMetrics, tt.chunks, tt.batchSize, newMatchers(tt.matchers), logql.NoopPipeline, logql.ExtractCount, tt.start, tt.end) + it, err := newSampleBatchIterator(context.Background(), NilMetrics, tt.chunks, tt.batchSize, newMatchers(tt.matchers), logql.ExtractCount, tt.start, tt.end) require.NoError(t, err) series, _, err := iter.ReadSampleBatch(it, 1000) _ = it.Close() diff --git a/pkg/storage/lazy_chunk.go b/pkg/storage/lazy_chunk.go index 6683f46f1f81b..67cc91b02c4b5 100644 --- a/pkg/storage/lazy_chunk.go +++ b/pkg/storage/lazy_chunk.go @@ -95,7 +95,6 @@ func (c *LazyChunk) Iterator( func (c *LazyChunk) SampleIterator( ctx context.Context, from, through time.Time, - pipeline logql.Pipeline, extractor 
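// Note the API consolidation running through the hunks above: the sample path
// no longer threads a logql.Pipeline alongside the extractor, because
// log.SampleExtractor.Process runs the whole pipeline and extracts the sample
// value in a single call. A minimal sketch of the new call shape, reusing the
// names from processSeries in this diff:
//
//	if f, lbs, ok := ex.Process([]byte(e.Line), mustParseLabels(stream.Labels)); ok {
//		// f is the extracted float64 sample, lbs the resulting label set;
//		// entries filtered out by the pipeline simply come back with ok == false.
//	}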
logql.SampleExtractor, nextChunk *LazyChunk, ) (iter.SampleIterator, error) { @@ -123,7 +122,7 @@ func (c *LazyChunk) SampleIterator( // if the block is overlapping cache it with the next chunk boundaries. if nextChunk != nil && IsBlockOverlapping(b, nextChunk, logproto.FORWARD) { // todo(cyriltovena) we can avoid dropping the metric name for each chunk since many chunks have the same metric/labelset. - it := newCachedSampleIterator(b.SampleIterator(ctx, dropLabels(c.Chunk.Metric, labels.MetricName), pipeline, extractor), b.Entries()) + it := newCachedSampleIterator(b.SampleIterator(ctx, dropLabels(c.Chunk.Metric, labels.MetricName), extractor), b.Entries()) its = append(its, it) if c.overlappingSampleBlocks == nil { c.overlappingSampleBlocks = make(map[int]*cachedSampleIterator) } @@ -135,7 +134,7 @@ func (c *LazyChunk) SampleIterator( delete(c.overlappingSampleBlocks, b.Offset()) } // non-overlapping blocks with the next chunk are not cached. - its = append(its, b.SampleIterator(ctx, dropLabels(c.Chunk.Metric, labels.MetricName), pipeline, extractor)) + its = append(its, b.SampleIterator(ctx, dropLabels(c.Chunk.Metric, labels.MetricName), extractor)) } // build the final iterator bound to the requested time range. diff --git a/pkg/storage/lazy_chunk_test.go b/pkg/storage/lazy_chunk_test.go index 1ec863bbc3a42..98b64c6417a03 100644 --- a/pkg/storage/lazy_chunk_test.go +++ b/pkg/storage/lazy_chunk_test.go @@ -177,7 +177,7 @@ func (f fakeBlock) MaxTime() int64 { return f.maxt } func (fakeBlock) Iterator(context.Context, labels.Labels, logql.Pipeline) iter.EntryIterator { return nil } -func (fakeBlock) SampleIterator(context.Context, labels.Labels, logql.Pipeline, logql.SampleExtractor) iter.SampleIterator { +func (fakeBlock) SampleIterator(context.Context, labels.Labels, logql.SampleExtractor) iter.SampleIterator { return nil } diff --git a/pkg/storage/store.go b/pkg/storage/store.go index 72a07701ce5f2..e5b500ad6ec5c 100644 --- a/pkg/storage/store.go +++ b/pkg/storage/store.go @@ -306,11 +306,6 @@ func (s *store) SelectSamples(ctx context.Context, req logql.SelectSampleParams) return nil, err } - pipeline, err := expr.Selector().Pipeline() - if err != nil { - return nil, err - } - extractor, err := expr.Extractor() if err != nil { return nil, err @@ -324,7 +319,7 @@ func (s *store) SelectSamples(ctx context.Context, req logql.SelectSampleParams) if len(lazyChunks) == 0 { return iter.NoopIterator, nil } - return newSampleBatchIterator(ctx, s.chunkMetrics, lazyChunks, s.cfg.MaxChunkBatchSize, matchers, pipeline, extractor, req.Start, req.End) + return newSampleBatchIterator(ctx, s.chunkMetrics, lazyChunks, s.cfg.MaxChunkBatchSize, matchers, extractor, req.Start, req.End) } func (s *store) GetSchemaConfigs() []chunk.PeriodConfig { From 1af9c14c8e14d5b5b6a2e3afffd491c5d419fb97 Mon Sep 17 00:00:00 2001 From: Cyril Tovena Date: Wed, 14 Oct 2020 23:27:44 +0200 Subject: [PATCH 35/45] Add error handling + fixes groupings and post filtering. 
Signed-off-by: Cyril Tovena --- pkg/logql/engine.go | 10 ++- pkg/logql/evaluator.go | 20 +++++- pkg/logql/functions.go | 10 ++- pkg/logql/log/error.go | 2 +- pkg/logql/log/fmt.go | 2 +- pkg/logql/log/fmt_test.go | 2 +- pkg/logql/log/label_filter.go | 12 ++-- pkg/logql/log/label_filter_test.go | 98 ++++++++++++++++++++++++++++- pkg/logql/log/labels.go | 4 +- pkg/logql/log/metrics_extraction.go | 3 +- pkg/logql/log/parser_test.go | 4 +- pkg/logql/parser_test.go | 30 +++++++++ 12 files changed, 173 insertions(+), 24 deletions(-) diff --git a/pkg/logql/engine.go b/pkg/logql/engine.go index 70547362faa0f..b5e063300f6f5 100644 --- a/pkg/logql/engine.go +++ b/pkg/logql/engine.go @@ -204,7 +204,9 @@ func (q *query) evalSample(ctx context.Context, expr SampleExpr) (parser.Value, seriesIndex := map[uint64]*promql.Series{} next, ts, vec := stepEvaluator.Next() - + if stepEvaluator.Error() != nil { + return nil, stepEvaluator.Error() + } if GetRangeType(q.params) == InstantType { sort.Slice(vec, func(i, j int) bool { return labels.Compare(vec[i].Metric, vec[j].Metric) < 0 }) return vec, nil @@ -237,6 +239,9 @@ func (q *query) evalSample(ctx context.Context, expr SampleExpr) (parser.Value, }) } next, ts, vec = stepEvaluator.Next() + if stepEvaluator.Error() != nil { + return nil, stepEvaluator.Error() + } } series := make([]promql.Series, 0, len(seriesIndex)) @@ -246,8 +251,7 @@ func (q *query) evalSample(ctx context.Context, expr SampleExpr) (parser.Value, result := promql.Matrix(series) sort.Sort(result) - err = stepEvaluator.Error() - return result, err + return result, stepEvaluator.Error() } func (q *query) evalLiteral(_ context.Context, expr *literalExpr) (parser.Value, error) { diff --git a/pkg/logql/evaluator.go b/pkg/logql/evaluator.go index 3b91c3bc70b4a..2d7e8eb8ba999 100644 --- a/pkg/logql/evaluator.go +++ b/pkg/logql/evaluator.go @@ -14,6 +14,7 @@ import ( "github.com/grafana/loki/pkg/iter" "github.com/grafana/loki/pkg/logproto" + "github.com/grafana/loki/pkg/logql/log" ) type QueryRangeType string @@ -383,7 +384,7 @@ func rangeAggEvaluator( if err != nil { return nil, err } - return rangeVectorEvaluator{ + return &rangeVectorEvaluator{ iter: newRangeVectorIterator( it, expr.left.interval.Nanoseconds(), @@ -397,20 +398,33 @@ func rangeAggEvaluator( type rangeVectorEvaluator struct { agg RangeVectorAggregator iter RangeVectorIterator + + err error } -func (r rangeVectorEvaluator) Next() (bool, int64, promql.Vector) { +func (r *rangeVectorEvaluator) Next() (bool, int64, promql.Vector) { next := r.iter.Next() if !next { return false, 0, promql.Vector{} } ts, vec := r.iter.At(r.agg) + for _, s := range vec { + if s.Metric.Has(log.ErrorLabel) { + r.err = errors.Errorf(s.Metric.Get(log.ErrorLabel)) + return false, 0, promql.Vector{} + } + } return true, ts, vec } func (r rangeVectorEvaluator) Close() error { return r.iter.Close() } -func (r rangeVectorEvaluator) Error() error { return r.iter.Error() } +func (r rangeVectorEvaluator) Error() error { + if r.err != nil { + return r.err + } + return r.iter.Error() +} // binOpExpr explicitly does not handle when both legs are literals as // it makes the type system simpler and these are reduced in mustNewBinOpExpr diff --git a/pkg/logql/functions.go b/pkg/logql/functions.go index d70b1a2c94356..27a87738bf2b6 100644 --- a/pkg/logql/functions.go +++ b/pkg/logql/functions.go @@ -36,7 +36,15 @@ func (r rangeAggregationExpr) Extractor() (log.SampleExtractor, error) { default: convOp = log.ConvertFloat } - return 
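// The change below also wires the aggregation grouping into the extractor:
// judging from this hunk, `groups` and `without` were previously left at
// their zero values instead of being filled from r.grouping, so a `by (...)`
// or `without (...)` clause on an unwrapped range aggregation was dropped at
// extraction time. With the nil check in place, a query such as
//
//	quantile_over_time(0.99998, {app="foo"} |= "bar" | json | unwrap foo [5m]) by (namespace,instance)
//
// (the shape used by the parser tests in this series) now forwards
// groups=["namespace","instance"] to WithLabelExtractor.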
stages.WithLabelExtractor(r.left.unwrap.identifier, convOp, groups, without, log.ReduceAndLabelFilter(r.left.unwrap.postFilters)) + if r.grouping != nil { + groups = r.grouping.groups + without = r.grouping.without + } + return stages.WithLabelExtractor( + r.left.unwrap.identifier, + convOp, groups, without, + log.ReduceAndLabelFilter(r.left.unwrap.postFilters), + ) } // otherwise we extract metrics from the log line. switch r.operation { diff --git a/pkg/logql/log/error.go b/pkg/logql/log/error.go index 44a784009f86b..c3754211e805f 100644 --- a/pkg/logql/log/error.go +++ b/pkg/logql/log/error.go @@ -6,5 +6,5 @@ var ( errSampleExtraction = "SampleExtraction" errLabelFilter = "LabelFilter" - errorLabel = "__error__" + ErrorLabel = "__error__" ) diff --git a/pkg/logql/log/fmt.go b/pkg/logql/log/fmt.go index 689f1cc1dec08..112734109012c 100644 --- a/pkg/logql/log/fmt.go +++ b/pkg/logql/log/fmt.go @@ -118,7 +118,7 @@ func validate(fmts []LabelFmt) error { // To avoid confusion we allow to have a label name only once per stage. uniqueLabelName := map[string]struct{}{} for _, f := range fmts { - if f.Name == errorLabel { + if f.Name == ErrorLabel { return fmt.Errorf("%s cannot be formatted", f.Name) } if _, ok := uniqueLabelName[f.Name]; ok { diff --git a/pkg/logql/log/fmt_test.go b/pkg/logql/log/fmt_test.go index bd87285aff6dc..ff04148a58ac4 100644 --- a/pkg/logql/log/fmt_test.go +++ b/pkg/logql/log/fmt_test.go @@ -111,7 +111,7 @@ func Test_validate(t *testing.T) { }{ {"no dup", []LabelFmt{NewRenameLabelFmt("foo", "bar"), NewRenameLabelFmt("bar", "foo")}, false}, {"dup", []LabelFmt{NewRenameLabelFmt("foo", "bar"), NewRenameLabelFmt("foo", "blip")}, true}, - {"no error", []LabelFmt{NewRenameLabelFmt(errorLabel, "bar")}, true}, + {"no error", []LabelFmt{NewRenameLabelFmt(ErrorLabel, "bar")}, true}, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { diff --git a/pkg/logql/log/label_filter.go b/pkg/logql/log/label_filter.go index 13b71d781c3a6..e67f27ecd7cc0 100644 --- a/pkg/logql/log/label_filter.go +++ b/pkg/logql/log/label_filter.go @@ -113,8 +113,11 @@ func ReduceAndLabelFilter(filters []LabelFilterer) LabelFilterer { if len(filters) == 0 { return NoopLabelFilter } + if len(filters) == 1 { + return filters[0] + } result := filters[0] - for _, f := range filters[0:] { + for _, f := range filters[1:] { result = NewAndLabelFilter(result, f) } return result @@ -292,10 +295,5 @@ func NewStringLabelFilter(m *labels.Matcher) *StringLabelFilter { } func (s *StringLabelFilter) Process(line []byte, lbs Labels) ([]byte, bool) { - for k, v := range lbs { - if k == s.Name { - return line, s.Matches(v) - } - } - return line, false + return line, s.Matches(lbs[s.Name]) } diff --git a/pkg/logql/log/label_filter_test.go b/pkg/logql/log/label_filter_test.go index 6bdd5f3c5f1c6..a8b70c385f539 100644 --- a/pkg/logql/log/label_filter_test.go +++ b/pkg/logql/log/label_filter_test.go @@ -1,6 +1,7 @@ package log import ( + "reflect" "testing" "time" @@ -11,7 +12,7 @@ import ( func TestBinary_Filter(t *testing.T) { tests := []struct { - f *BinaryLabelFilter + f LabelFilterer lbs Labels want bool wantLbs Labels @@ -144,6 +145,62 @@ func TestBinary_Filter(t *testing.T) { "method": "POST", }, }, + { + + NewStringLabelFilter(labels.MustNewMatcher(labels.MatchNotEqual, ErrorLabel, errJSON)), + Labels{ + ErrorLabel: errJSON, + "status": "200", + "method": "POST", + }, + false, + Labels{ + ErrorLabel: errJSON, + "status": "200", + "method": "POST", + }, + }, + { + + 
NewStringLabelFilter(labels.MustNewMatcher(labels.MatchNotRegexp, ErrorLabel, ".*")), + Labels{ + ErrorLabel: "foo", + "status": "200", + "method": "POST", + }, + false, + Labels{ + ErrorLabel: "foo", + "status": "200", + "method": "POST", + }, + }, + { + + NewStringLabelFilter(labels.MustNewMatcher(labels.MatchNotRegexp, ErrorLabel, ".*")), + Labels{ + "status": "200", + "method": "POST", + }, + true, + Labels{ + "status": "200", + "method": "POST", + }, + }, + { + + NewStringLabelFilter(labels.MustNewMatcher(labels.MatchNotEqual, ErrorLabel, errJSON)), + Labels{ + "status": "200", + "method": "POST", + }, + true, + Labels{ + "status": "200", + "method": "POST", + }, + }, } for _, tt := range tests { t.Run(tt.f.String(), func(t *testing.T) { @@ -153,3 +210,42 @@ func TestBinary_Filter(t *testing.T) { }) } } + +func TestReduceAndLabelFilter(t *testing.T) { + tests := []struct { + name string + filters []LabelFilterer + want LabelFilterer + }{ + {"empty", nil, NoopLabelFilter}, + {"1", []LabelFilterer{NewBytesLabelFilter(LabelFilterEqual, "foo", 5)}, NewBytesLabelFilter(LabelFilterEqual, "foo", 5)}, + {"2", + []LabelFilterer{ + NewBytesLabelFilter(LabelFilterEqual, "foo", 5), + NewBytesLabelFilter(LabelFilterGreaterThanOrEqual, "bar", 6), + }, + NewAndLabelFilter(NewBytesLabelFilter(LabelFilterEqual, "foo", 5), NewBytesLabelFilter(LabelFilterGreaterThanOrEqual, "bar", 6)), + }, + {"3", + []LabelFilterer{ + NewBytesLabelFilter(LabelFilterEqual, "foo", 5), + NewBytesLabelFilter(LabelFilterGreaterThanOrEqual, "bar", 6), + NewStringLabelFilter(labels.MustNewMatcher(labels.MatchEqual, "buzz", "bla")), + }, + NewAndLabelFilter( + NewAndLabelFilter( + NewBytesLabelFilter(LabelFilterEqual, "foo", 5), + NewBytesLabelFilter(LabelFilterGreaterThanOrEqual, "bar", 6), + ), + NewStringLabelFilter(labels.MustNewMatcher(labels.MatchEqual, "buzz", "bla")), + ), + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + if got := ReduceAndLabelFilter(tt.filters); !reflect.DeepEqual(got, tt.want) { + t.Errorf("ReduceAndLabelFilter() = %v, want %v", got, tt.want) + } + }) + } +} diff --git a/pkg/logql/log/labels.go b/pkg/logql/log/labels.go index b415bf6860dfc..b38dbf3efcecf 100644 --- a/pkg/logql/log/labels.go +++ b/pkg/logql/log/labels.go @@ -8,10 +8,10 @@ func (l Labels) Has(key string) bool { } func (l Labels) SetError(err string) { - l[errorLabel] = err + l[ErrorLabel] = err } func (l Labels) HasError() bool { - _, ok := l[errorLabel] + _, ok := l[ErrorLabel] return ok } diff --git a/pkg/logql/log/metrics_extraction.go b/pkg/logql/log/metrics_extraction.go index 57695eaf5e186..76a12e3fd4f07 100644 --- a/pkg/logql/log/metrics_extraction.go +++ b/pkg/logql/log/metrics_extraction.go @@ -113,8 +113,7 @@ func (l *labelSampleExtractor) Process(line []byte, lbs labels.Labels) (float64, } } // post filters - _, ok = l.postFilter.Process(line, labelmap) - if !ok { + if _, ok = l.postFilter.Process(line, labelmap); !ok { return 0, nil, false } if labelmap.HasError() { diff --git a/pkg/logql/log/parser_test.go b/pkg/logql/log/parser_test.go index ad32b6f2c0bf9..1e82e378d7de4 100644 --- a/pkg/logql/log/parser_test.go +++ b/pkg/logql/log/parser_test.go @@ -55,7 +55,7 @@ func Test_jsonParser_Parse(t *testing.T) { []byte(`{n}`), Labels{}, Labels{ - errorLabel: errJSON, + ErrorLabel: errJSON, }, }, { @@ -186,7 +186,7 @@ func Test_logfmtParser_Parse(t *testing.T) { }, Labels{ "foo": "bar", - errorLabel: errLogfmt, + ErrorLabel: errLogfmt, }, }, { diff --git a/pkg/logql/parser_test.go 
b/pkg/logql/parser_test.go index ae212872931c0..9858baaa94aa7 100644 --- a/pkg/logql/parser_test.go +++ b/pkg/logql/parser_test.go @@ -1384,6 +1384,36 @@ func TestParse(t *testing.T) { OpRangeTypeQuantile, &grouping{without: false, groups: []string{"namespace", "instance"}}, NewStringLabelFilter("0.99998"), ), }, + { + in: `quantile_over_time(0.99998,{app="foo"} |= "bar" | json | latency >= 250ms or ( status_code < 500 and status_code > 200) + | line_format "blip{{ .foo }}blop {{.status_code}}" | label_format foo=bar,status_code="buzz{{.bar}}" | unwrap foo | __error__ !~".*"[5m]) by (namespace,instance)`, + exp: newRangeAggregationExpr( + newLogRange(&pipelineExpr{ + left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}), + pipeline: MultiStageExpr{ + newLineFilterExpr(nil, labels.MatchEqual, "bar"), + newLabelParserExpr(OpParserTypeJSON, ""), + &labelFilterExpr{ + LabelFilterer: log.NewOrLabelFilter( + log.NewDurationLabelFilter(log.LabelFilterGreaterThanOrEqual, "latency", 250*time.Millisecond), + log.NewAndLabelFilter( + log.NewNumericLabelFilter(log.LabelFilterLesserThan, "status_code", 500.0), + log.NewNumericLabelFilter(log.LabelFilterGreaterThan, "status_code", 200.0), + ), + ), + }, + newLineFmtExpr("blip{{ .foo }}blop {{.status_code}}"), + newLabelFmtExpr([]log.LabelFmt{ + log.NewRenameLabelFmt("foo", "bar"), + log.NewTemplateLabelFmt("status_code", "buzz{{.bar}}"), + }), + }, + }, + 5*time.Minute, + newUnwrapExpr("foo", "").addPostFilter(log.NewStringLabelFilter(mustNewMatcher(labels.MatchNotRegexp, log.ErrorLabel, ".*")))), + OpRangeTypeQuantile, &grouping{without: false, groups: []string{"namespace", "instance"}}, NewStringLabelFilter("0.99998"), + ), + }, { in: `sum without (foo) ( quantile_over_time(0.99998,{app="foo"} |= "bar" | json | latency >= 250ms or ( status_code < 500 and status_code > 200) From 960ef5a78327a946945f68c1bb11bb3cfc51539d Mon Sep 17 00:00:00 2001 From: Cyril Tovena Date: Wed, 14 Oct 2020 23:57:53 +0200 Subject: [PATCH 36/45] 400 on pipeline errors. Signed-off-by: Cyril Tovena --- pkg/logql/engine.go | 2 +- pkg/logql/evaluator.go | 23 ++++++++++++++++++++++- pkg/logql/log/error.go | 8 ++++---- pkg/util/server/error.go | 2 +- 4 files changed, 28 insertions(+), 7 deletions(-) diff --git a/pkg/logql/engine.go b/pkg/logql/engine.go index b5e063300f6f5..af161d94f47ff 100644 --- a/pkg/logql/engine.go +++ b/pkg/logql/engine.go @@ -146,7 +146,7 @@ func (q *query) Exec(ctx context.Context) (Result, error) { status := "200" if err != nil { status = "500" - if IsParseError(err) { + if IsParseError(err) || IsPipelineError(err) { status = "400" } } diff --git a/pkg/logql/evaluator.go b/pkg/logql/evaluator.go index 2d7e8eb8ba999..09ad8307df5b1 100644 --- a/pkg/logql/evaluator.go +++ b/pkg/logql/evaluator.go @@ -402,6 +402,27 @@ type rangeVectorEvaluator struct { err error } +type pipelineError struct { + metric labels.Labels + errorType string +} + +func newPipelineErr(metric labels.Labels) *pipelineError { + return &pipelineError{ + metric: metric, + errorType: metric.Get(log.ErrorLabel), + } +} + +func (e pipelineError) Error() string { + return fmt.Sprintf("pipeline error: '%s' for series: '%s' use label filters to intentionally skip those errors. 
(e.g | __error__=\"\")", e.errorType, e.metric) } +func IsPipelineError(err error) bool { + _, ok := err.(*pipelineError) + return ok +} + func (r *rangeVectorEvaluator) Next() (bool, int64, promql.Vector) { next := r.iter.Next() if !next { @@ -410,7 +431,7 @@ func (r *rangeVectorEvaluator) Next() (bool, int64, promql.Vector) { ts, vec := r.iter.At(r.agg) for _, s := range vec { if s.Metric.Has(log.ErrorLabel) { - r.err = errors.Errorf(s.Metric.Get(log.ErrorLabel)) + r.err = newPipelineErr(s.Metric) return false, 0, promql.Vector{} } } diff --git a/pkg/logql/log/error.go b/pkg/logql/log/error.go index c3754211e805f..040627bc0f593 100644 --- a/pkg/logql/log/error.go +++ b/pkg/logql/log/error.go @@ -1,10 +1,10 @@ package log var ( - errJSON = "JSONParser" - errLogfmt = "LogfmtParser" - errSampleExtraction = "SampleExtraction" - errLabelFilter = "LabelFilter" + errJSON = "JSONParserErr" + errLogfmt = "LogfmtParserErr" + errSampleExtraction = "SampleExtractionErr" + errLabelFilter = "LabelFilterErr" ErrorLabel = "__error__" ) diff --git a/pkg/util/server/error.go b/pkg/util/server/error.go index 1c528669b2108..1d8487c92eef0 100644 --- a/pkg/util/server/error.go +++ b/pkg/util/server/error.go @@ -30,7 +30,7 @@ func WriteError(err error, w http.ResponseWriter) { http.Error(w, ErrDeadlineExceeded, http.StatusGatewayTimeout) case errors.As(err, &queryErr): http.Error(w, err.Error(), http.StatusBadRequest) - case logql.IsParseError(err): + case logql.IsParseError(err) || logql.IsPipelineError(err): http.Error(w, err.Error(), http.StatusBadRequest) default: if grpcErr, ok := httpgrpc.HTTPResponseFromError(err); ok { From de8346553ad5664ab043e42bf54dd58cd5b82cac Mon Sep 17 00:00:00 2001 From: Cyril Tovena Date: Thu, 15 Oct 2020 14:03:14 +0200 Subject: [PATCH 37/45] Fixes a race in the log pipeline. Signed-off-by: Cyril Tovena --- pkg/logql/log/logfmt/decode.go | 4 ++-- pkg/logql/log/parser.go | 21 +++++++++++---------- pkg/logql/log/parser_test.go | 11 ++++++++++- pkg/logql/log/pipeline.go | 3 +++ pkg/logql/parser_test.go | 5 +++-- 5 files changed, 29 insertions(+), 15 deletions(-) diff --git a/pkg/logql/log/logfmt/decode.go b/pkg/logql/log/logfmt/decode.go index 65cff00d1e8d0..24bc1f4aafa2f 100644 --- a/pkg/logql/log/logfmt/decode.go +++ b/pkg/logql/log/logfmt/decode.go @@ -20,8 +20,8 @@ type Decoder struct { // // The decoder introduces its own buffering and may read data from r beyond // the logfmt records requested. -func NewDecoder() *Decoder { - dec := &Decoder{} +func NewDecoder(line []byte) *Decoder { + dec := &Decoder{line: line} return dec } diff --git a/pkg/logql/log/parser.go b/pkg/logql/log/parser.go index 22b2bf69710de..1650b205ae082 100644 --- a/pkg/logql/log/parser.go +++ b/pkg/logql/log/parser.go @@ -138,23 +138,24 @@ func (r *regexpParser) Process(line []byte, lbs Labels) ([]byte, bool) { return line, true } -type logfmtParser struct { - dec *logfmt.Decoder -} +type logfmtParser struct{} func NewLogfmtParser() *logfmtParser { - return &logfmtParser{ - dec: logfmt.NewDecoder(), - } + return &logfmtParser{} } func (l *logfmtParser) Process(line []byte, lbs Labels) ([]byte, bool) { - l.dec.Reset(line) + // todo(cyriltovena): we should be using the same decoder for the whole query. + // However right now backward queries, because of the batch iterator that has a go loop, + // can run this method in parallel. This causes a race, e.g. it will reset to a new line while scanning for keyvals.
+ dec := logfmt.NewDecoder(line) add := addLabel(lbs) - for l.dec.ScanKeyval() { - add(string(l.dec.Key()), string(l.dec.Value())) + for dec.ScanKeyval() { + key := string(dec.Key()) + val := string(dec.Value()) + add(key, val) } - if l.dec.Err() != nil { + if dec.Err() != nil { lbs.SetError(errLogfmt) return line, true } diff --git a/pkg/logql/log/parser_test.go b/pkg/logql/log/parser_test.go index 1e82e378d7de4..18867b0d6fe5d 100644 --- a/pkg/logql/log/parser_test.go +++ b/pkg/logql/log/parser_test.go @@ -7,7 +7,6 @@ import ( ) func Test_jsonParser_Parse(t *testing.T) { - tests := []struct { name string line []byte @@ -260,6 +259,16 @@ func Test_logfmtParser_Parse(t *testing.T) { "test_dash": "foo", }, }, + { + "nil", + nil, + Labels{ + "foo": "bar", + }, + Labels{ + "foo": "bar", + }, + }, } p := NewLogfmtParser() for _, tt := range tests { diff --git a/pkg/logql/log/pipeline.go b/pkg/logql/log/pipeline.go index 75a2cbddc88f9..69e03f253636c 100644 --- a/pkg/logql/log/pipeline.go +++ b/pkg/logql/log/pipeline.go @@ -42,6 +42,9 @@ func (m MultiStage) Process(line []byte, lbs labels.Labels) ([]byte, labels.Labe if len(m) == 0 { return line, lbs, true } + // todo(cyriltovena): this should be deferred within a specific Labels type. + // Not all stages will need to access the labels map (e.g. line filter). + // This could optimize queries that use only those stages. labelmap := lbs.Map() for _, p := range m { line, ok = p.Process(line, labelmap) diff --git a/pkg/logql/parser_test.go b/pkg/logql/parser_test.go index 9858baaa94aa7..5749331faebeb 100644 --- a/pkg/logql/parser_test.go +++ b/pkg/logql/parser_test.go @@ -1962,7 +1962,7 @@ func TestIsParseError(t *testing.T) { } func Test_PipelineCombined(t *testing.T) { - query := `{job="cortex-ops/query-frontend"} |= "logging.go" | logfmt | line_format "{{.msg}}" | regexp "(?P\\w+) (?P[\\w|/]+) \\((?P\\d+?)\\) (?P.*)" | (duration > 1s or status!=200) and method="POST" | line_format "{{.duration}}|{{.method}}|{{.status}}"` + query := `{job="cortex-ops/query-frontend"} |= "logging.go" | logfmt | line_format "{{.msg}}" | regexp "(?P\\w+) (?P[\\w|/]+) \\((?P\\d+?)\\) (?P.*)" | (duration > 1s or status==200) and method="POST" | line_format "{{.duration}}|{{.method}}|{{.status}}"` expr, err := ParseLogSelector(query) require.Nil(t, err) @@ -1970,11 +1970,12 @@ func Test_PipelineCombined(t *testing.T) { p, err := expr.Pipeline() require.Nil(t, err) - _, lbs, ok := p.Process([]byte(`level=debug ts=2020-10-02T10:10:42.092268913Z caller=logging.go:66 traceID=a9d4d8a928d8db1 msg="POST /api/prom/api/v1/query_range (200) 1.5s"`), labels.Labels{}) + line, lbs, ok := p.Process([]byte(`level=debug ts=2020-10-02T10:10:42.092268913Z caller=logging.go:66 traceID=a9d4d8a928d8db1 msg="POST /api/prom/api/v1/query_range (200) 1.5s"`), labels.Labels{}) require.True(t, ok) require.Equal( t, labels.Labels{labels.Label{Name: "caller", Value: "logging.go:66"}, labels.Label{Name: "duration", Value: "1.5s"}, labels.Label{Name: "level", Value: "debug"}, labels.Label{Name: "method", Value: "POST"}, labels.Label{Name: "msg", Value: "POST /api/prom/api/v1/query_range (200) 1.5s"}, labels.Label{Name: "path", Value: "/api/prom/api/v1/query_range"}, labels.Label{Name: "status", Value: "200"}, labels.Label{Name: "traceID", Value: "a9d4d8a928d8db1"}, labels.Label{Name: "ts", Value: "2020-10-02T10:10:42.092268913Z"}}, lbs, ) + require.Equal(t, string([]byte(`1.5s|POST|200`)), string(line)) } From 87c4f00b64e83ad426300d9f66f9344dfc34a393 Mon Sep 17 00:00:00 2001 From: Cyril Tovena Date: 
Thu, 15 Oct 2020 17:20:51 +0200 Subject: [PATCH 38/45] Ensures the key is parsable and valid. Signed-off-by: Cyril Tovena --- pkg/logql/log/parser.go | 22 +++++++++++++++++----- pkg/logql/log/parser_test.go | 21 +++++++++++++++++++++ pkg/logql/marshal/labels_test.go | 32 ++++++++++++++++++++++++++++++++ pkg/logql/marshal/query.go | 3 ++- 4 files changed, 72 insertions(+), 6 deletions(-) create mode 100644 pkg/logql/marshal/labels_test.go diff --git a/pkg/logql/log/parser.go b/pkg/logql/log/parser.go index 1650b205ae082..64489efd422de 100644 --- a/pkg/logql/log/parser.go +++ b/pkg/logql/log/parser.go @@ -24,10 +24,6 @@ var ( _ Stage = &logfmtParser{} errMissingCapture = errors.New("at least one named capture must be supplied") - - underscore = "_" - point = "." - dash = "-" ) func addLabel(lbs Labels) func(key, value string) { @@ -38,7 +34,7 @@ func addLabel(lbs Labels) func(key, value string) { return } unique[key] = struct{}{} - key = strings.ReplaceAll(strings.ReplaceAll(key, point, underscore), dash, underscore) + key = sanitizeKey(key) if lbs.Has(key) { key = fmt.Sprintf("%s%s", key, duplicateSuffix) } @@ -46,6 +42,22 @@ func addLabel(lbs Labels) func(key, value string) { } } +func sanitizeKey(key string) string { + if len(key) == 0 { + return key + } + key = strings.TrimSpace(key) + if key[0] >= '0' && key[0] <= '9' { + key = "_" + key + } + return strings.Map(func(r rune) rune { + if (r >= 'a' && r <= 'z') || (r >= 'A' && r <= 'Z') || r == '_' || (r >= '0' && r <= '9') { + return r + } + return '_' + }, key) +} + type jsonParser struct{} func NewJSONParser() *jsonParser { diff --git a/pkg/logql/log/parser_test.go b/pkg/logql/log/parser_test.go index 18867b0d6fe5d..027dadfacc951 100644 --- a/pkg/logql/log/parser_test.go +++ b/pkg/logql/log/parser_test.go @@ -278,3 +278,24 @@ func Test_logfmtParser_Parse(t *testing.T) { }) } } + +func Test_sanitizeKey(t *testing.T) { + tests := []struct { + key string + want string + }{ + {"1", "_1"}, + {"1 1 1", "_1_1_1"}, + {"abc", "abc"}, + {"$a$bc", "_a_bc"}, + {"$a$bc", "_a_bc"}, + {" 1 1 1 \t", "_1_1_1"}, + } + for _, tt := range tests { + t.Run(tt.key, func(t *testing.T) { + if got := sanitizeKey(tt.key); got != tt.want { + t.Errorf("sanitizeKey() = %v, want %v", got, tt.want) + } + }) + } +} diff --git a/pkg/logql/marshal/labels_test.go b/pkg/logql/marshal/labels_test.go new file mode 100644 index 0000000000000..8095ec3a1a908 --- /dev/null +++ b/pkg/logql/marshal/labels_test.go @@ -0,0 +1,32 @@ +package marshal + +import ( + "reflect" + "testing" + + "github.com/grafana/loki/pkg/loghttp" +) + +func TestNewLabelSet(t *testing.T) { + + tests := []struct { + lbs string + want loghttp.LabelSet + wantErr bool + }{ + {`{1="foo"}`, nil, true}, + {`{_1="foo"}`, loghttp.LabelSet{"_1": "foo"}, false}, + } + for _, tt := range tests { + t.Run(tt.lbs, func(t *testing.T) { + got, err := NewLabelSet(tt.lbs) + if (err != nil) != tt.wantErr { + t.Errorf("NewLabelSet() error = %v, wantErr %v", err, tt.wantErr) + return + } + if !reflect.DeepEqual(got, tt.want) { + t.Errorf("NewLabelSet() = %v, want %v", got, tt.want) + } + }) + } +} diff --git a/pkg/logql/marshal/query.go b/pkg/logql/marshal/query.go index 645aef0e0fc39..6f449c7604b7a 100644 --- a/pkg/logql/marshal/query.go +++ b/pkg/logql/marshal/query.go @@ -3,6 +3,7 @@ package marshal import ( "fmt" + "github.com/pkg/errors" "github.com/prometheus/common/model" "github.com/prometheus/prometheus/pkg/labels" "github.com/prometheus/prometheus/promql" @@ -84,7 +85,7 @@ func NewStreams(s logql.Streams) 
(loghttp.Streams, error) { func NewStream(s logproto.Stream) (loghttp.Stream, error) { labels, err := NewLabelSet(s.Labels) if err != nil { - return loghttp.Stream{}, err + return loghttp.Stream{}, errors.Wrapf(err, "err while creating labelset for %s", s.Labels) } ret := loghttp.Stream{ From 50315b1a2c886cddf6cecfb3f40df855c2484e7f Mon Sep 17 00:00:00 2001 From: Cyril Tovena Date: Thu, 15 Oct 2020 21:36:47 +0200 Subject: [PATCH 39/45] Cleanup and code documentation. Signed-off-by: Cyril Tovena --- pkg/logql/ast.go | 7 ++++-- pkg/logql/evaluator.go | 9 ++++++- pkg/logql/log/error.go | 1 + pkg/logql/log/filter.go | 1 + pkg/logql/log/fmt.go | 33 +++++++++++++++---------- pkg/logql/log/fmt_test.go | 8 +++---- pkg/logql/log/label_filter.go | 14 +++++++++++ pkg/logql/log/label_filter_test.go | 4 ++-- pkg/logql/log/labels.go | 2 ++ pkg/logql/log/logfmt/decode.go | 1 + pkg/logql/log/metrics_extraction.go | 23 ++++++++++++------ pkg/logql/log/parser.go | 37 ++++++++++++++++------------- pkg/logql/log/parser_test.go | 2 +- pkg/logql/log/pipeline.go | 4 ++++ pkg/logql/parser_test.go | 4 ++-- 15 files changed, 103 insertions(+), 47 deletions(-) diff --git a/pkg/logql/ast.go b/pkg/logql/ast.go index 868f6794872b4..9a9ad82833739 100644 --- a/pkg/logql/ast.go +++ b/pkg/logql/ast.go @@ -80,7 +80,7 @@ type LogSelectorExpr interface { Expr } -// type alias for backward compatibility +// Type alias for backward compatibility type Pipeline = log.Pipeline type SampleExtractor = log.SampleExtractor @@ -89,16 +89,19 @@ var ( ExtractCount = log.CountExtractor.ToSampleExtractor() ) +// PipelineExpr is an expression defining a log pipeline. type PipelineExpr interface { Pipeline() (Pipeline, error) Expr } +// StageExpr is an expression defining a single step in a log pipeline type StageExpr interface { Stage() (log.Stage, error) Expr } +// MultiStageExpr is multiple stages which implement a PipelineExpr. type MultiStageExpr []StageExpr func (m MultiStageExpr) Pipeline() (log.Pipeline, error) { @@ -203,6 +206,7 @@ func (e *pipelineExpr) Pipeline() (log.Pipeline, error) { return e.pipeline.Pipeline() } +// HasFilter returns true if the pipeline contains a stage that can filter out lines. func (e *pipelineExpr) HasFilter() bool { for _, p := range e.pipeline { switch p.(type) { @@ -299,7 +303,6 @@ type labelParserExpr struct { } func newLabelParserExpr(op, param string) *labelParserExpr { - // todo(cyriltovena): we might want to pre-validate param here to fail fast. return &labelParserExpr{ op: op, param: param, diff --git a/pkg/logql/evaluator.go b/pkg/logql/evaluator.go index 09ad8307df5b1..0f3d803638b19 100644 --- a/pkg/logql/evaluator.go +++ b/pkg/logql/evaluator.go @@ -415,9 +415,15 @@ func newPipelineErr(metric labels.Labels) *pipelineError { } func (e pipelineError) Error() string { - return fmt.Sprintf("pipeline error: '%s' for series: '%s' use label filters to intentionally skip those errors. (e.g | __error__=\"\")", e.errorType, e.metric) + return fmt.Sprintf( + `pipeline error: '%s' for series: '%s'. + Use a label filter to intentionally skip this error. (e.g | __error__!="%s"). + To skip all potential errors you can match empty errors.(e.g __error__="") + The label filter can also be specified after unwrap. (e.g | unwrap latency | __error__="" )`, + e.errorType, e.metric, e.errorType) } +// IsPipelineError tells if the error is generated by a Pipeline. 
func IsPipelineError(err error) bool { _, ok := err.(*pipelineError) return ok @@ -430,6 +436,7 @@ func (r *rangeVectorEvaluator) Next() (bool, int64, promql.Vector) { } ts, vec := r.iter.At(r.agg) for _, s := range vec { + // Errors are not allowed in metrics. if s.Metric.Has(log.ErrorLabel) { r.err = newPipelineErr(s.Metric) return false, 0, promql.Vector{} diff --git a/pkg/logql/log/error.go b/pkg/logql/log/error.go index 040627bc0f593..04dd20ecdcd15 100644 --- a/pkg/logql/log/error.go +++ b/pkg/logql/log/error.go @@ -1,6 +1,7 @@ package log var ( + // Possible errors thrown by a log pipeline. errJSON = "JSONParserErr" errLogfmt = "LogfmtParserErr" errSampleExtraction = "SampleExtractionErr" diff --git a/pkg/logql/log/filter.go b/pkg/logql/log/filter.go index 79c51233b7db4..4bf0ff1e120b2 100644 --- a/pkg/logql/log/filter.go +++ b/pkg/logql/log/filter.go @@ -175,6 +175,7 @@ func (l containsFilter) String() string { return string(l.match) } +// newContainsFilter creates a contains filter that checks if a log line contains a match. func newContainsFilter(match []byte, caseInsensitive bool) Filterer { if len(match) == 0 { return TrueFilter diff --git a/pkg/logql/log/fmt.go b/pkg/logql/log/fmt.go index 112734109012c..4869e1df019b8 100644 --- a/pkg/logql/log/fmt.go +++ b/pkg/logql/log/fmt.go @@ -9,9 +9,10 @@ import ( ) var ( - _ Stage = &lineFormatter{} - _ Stage = &labelsFormatter{} + _ Stage = &LineFormatter{} + _ Stage = &LabelsFormatter{} + // Map of functions available to the text template engine. functionMap = template.FuncMap{ "ToLower": strings.ToLower, "ToUpper": strings.ToUpper, @@ -33,32 +34,34 @@ var ( } ) -type lineFormatter struct { +type LineFormatter struct { *template.Template buf *bytes.Buffer } -func NewFormatter(tmpl string) (*lineFormatter, error) { +// NewFormatter creates a new log line formatter from a given text template. +func NewFormatter(tmpl string) (*LineFormatter, error) { t, err := template.New("line").Option("missingkey=zero").Funcs(functionMap).Parse(tmpl) if err != nil { return nil, fmt.Errorf("invalid line template: %s", err) } - return &lineFormatter{ + return &LineFormatter{ Template: t, buf: bytes.NewBuffer(make([]byte, 4096)), }, nil } -func (lf *lineFormatter) Process(_ []byte, lbs Labels) ([]byte, bool) { +func (lf *LineFormatter) Process(_ []byte, lbs Labels) ([]byte, bool) { lf.buf.Reset() - // todo(cyriltovena) handle error + // todo(cyriltovena): handle error _ = lf.Template.Execute(lf.buf, lbs) - // todo we might want to reuse the input line. + // todo(cyriltovena): we might want to reuse the input line or a bytes buffer. res := make([]byte, len(lf.buf.Bytes())) copy(res, lf.buf.Bytes()) return res, true } +// LabelFmt is a configuration struct for formatting a label. type LabelFmt struct { Name string Value string @@ -66,6 +69,7 @@ type LabelFmt struct { Rename bool } +// NewRenameLabelFmt creates a configuration to rename a label. func NewRenameLabelFmt(dst, target string) LabelFmt { return LabelFmt{ Name: dst, @@ -73,6 +77,8 @@ func NewRenameLabelFmt(dst, target string) LabelFmt { Value: target, } } + +// NewTemplateLabelFmt creates a configuration to format a label using a text template. 
func NewTemplateLabelFmt(dst, template string) LabelFmt { return LabelFmt{ Name: dst, @@ -86,12 +92,15 @@ type labelFormatter struct { LabelFmt } -type labelsFormatter struct { +type LabelsFormatter struct { formats []labelFormatter buf *bytes.Buffer } -func NewLabelsFormatter(fmts []LabelFmt) (*labelsFormatter, error) { +// NewLabelsFormatter creates a new formatter that can format multiple labels at once, +// either by renaming or by using a text template. +// It is not allowed to reformat the same label twice within the same formatter. +func NewLabelsFormatter(fmts []LabelFmt) (*LabelsFormatter, error) { if err := validate(fmts); err != nil { return nil, err } @@ -107,7 +116,7 @@ func NewLabelsFormatter(fmts []LabelFmt) (*labelsFormatter, error) { } formats = append(formats, toAdd) } - return &labelsFormatter{ + return &LabelsFormatter{ formats: formats, buf: bytes.NewBuffer(make([]byte, 1024)), }, nil @@ -129,7 +138,7 @@ func validate(fmts []LabelFmt) error { return nil } -func (lf *labelsFormatter) Process(l []byte, lbs Labels) ([]byte, bool) { +func (lf *LabelsFormatter) Process(l []byte, lbs Labels) ([]byte, bool) { for _, f := range lf.formats { if f.Rename { lbs[f.Name] = lbs[f.Value] diff --git a/pkg/logql/log/fmt_test.go b/pkg/logql/log/fmt_test.go index ff04148a58ac4..eece181d84e02 100644 --- a/pkg/logql/log/fmt_test.go +++ b/pkg/logql/log/fmt_test.go @@ -9,7 +9,7 @@ import ( ) func Test_lineFormatter_Format(t *testing.T) { tests := []struct { name string - fmter *lineFormatter + fmter *LineFormatter lbs map[string]string want []byte @@ -46,7 +46,7 @@ func Test_lineFormatter_Format(t *testing.T) { } } -func newMustLineFormatter(tmpl string) *lineFormatter { +func newMustLineFormatter(tmpl string) *LineFormatter { l, err := NewFormatter(tmpl) if err != nil { panic(err) } @@ -57,7 +57,7 @@ func Test_labelsFormatter_Format(t *testing.T) { tests := []struct { name string - fmter *labelsFormatter + fmter *LabelsFormatter in Labels want Labels @@ -95,7 +95,7 @@ func Test_labelsFormatter_Format(t *testing.T) { } } -func mustNewLabelsFormatter(fmts []LabelFmt) *labelsFormatter { +func mustNewLabelsFormatter(fmts []LabelFmt) *LabelsFormatter { lf, err := NewLabelsFormatter(fmts) if err != nil { panic(err) diff --git a/pkg/logql/log/label_filter.go b/pkg/logql/log/label_filter.go index e67f27ecd7cc0..32cace018ec6c 100644 --- a/pkg/logql/log/label_filter.go +++ b/pkg/logql/log/label_filter.go @@ -17,6 +17,7 @@ var ( _ LabelFilterer = &NumericLabelFilter{} _ LabelFilterer = &StringLabelFilter{} + // NoopLabelFilter is a label filter that doesn't filter out any values. NoopLabelFilter = noopLabelFilter{} ) @@ -52,6 +53,7 @@ func (f LabelFilterType) String() string { } } +// LabelFilterer can filter extracted labels. type LabelFilterer interface { Stage fmt.Stringer @@ -63,6 +65,7 @@ type BinaryLabelFilter struct { and bool } +// NewAndLabelFilter creates a new LabelFilterer from an and binary operation of two LabelFilterers. func NewAndLabelFilter(left LabelFilterer, right LabelFilterer) *BinaryLabelFilter { return &BinaryLabelFilter{ Left: left, @@ -71,6 +74,7 @@ func NewAndLabelFilter(left LabelFilterer, right LabelFilterer) *BinaryLabelFilt } } +// NewOrLabelFilter creates a new LabelFilterer from an or binary operation of two LabelFilterers. 
func NewOrLabelFilter(left LabelFilterer, right LabelFilterer) *BinaryLabelFilter { return &BinaryLabelFilter{ Left: left, @@ -109,6 +113,7 @@ type noopLabelFilter struct{} func (noopLabelFilter) String() string { return "" } func (noopLabelFilter) Process(line []byte, lbs Labels) ([]byte, bool) { return line, true } +// ReduceAndLabelFilter reduces multiple label filterers into one using a binary and operation. func ReduceAndLabelFilter(filters []LabelFilterer) LabelFilterer { if len(filters) == 0 { return NoopLabelFilter @@ -129,6 +134,8 @@ type BytesLabelFilter struct { Type LabelFilterType } +// NewBytesLabelFilter creates a new label filterer which parses a bytes string representation (e.g. 1KB) from the value of the named label +// and compares it with the given b value. func NewBytesLabelFilter(t LabelFilterType, name string, b uint64) *BytesLabelFilter { return &BytesLabelFilter{ Name: name, @@ -181,6 +188,8 @@ type DurationLabelFilter struct { Type LabelFilterType } +// NewDurationLabelFilter creates a new label filterer which parses a duration string representation (e.g. 5s) +// from the value of the named label and compares it with the given d value. func NewDurationLabelFilter(t LabelFilterType, name string, d time.Duration) *DurationLabelFilter { return &DurationLabelFilter{ Name: name, @@ -233,6 +242,8 @@ type NumericLabelFilter struct { Type LabelFilterType } +// NewNumericLabelFilter creates a new label filterer which parses a float64 string representation (e.g. 5.2) +// from the value of the named label and compares it with the given f value. func NewNumericLabelFilter(t LabelFilterType, name string, v float64) *NumericLabelFilter { return &NumericLabelFilter{ Name: name, @@ -288,6 +299,9 @@ type StringLabelFilter struct { *labels.Matcher } +// NewStringLabelFilter creates a new label filterer which compares string labels. +// This is the only LabelFilterer that can filter out the __error__ label. +// Unlike other LabelFilterers, which apply conversions, if the label name doesn't exist it is compared with an empty value. func NewStringLabelFilter(m *labels.Matcher) *StringLabelFilter { return &StringLabelFilter{ Matcher: m, diff --git a/pkg/logql/log/label_filter_test.go b/pkg/logql/log/label_filter_test.go index a8b70c385f539..9926329810e24 100644 --- a/pkg/logql/log/label_filter_test.go +++ b/pkg/logql/log/label_filter_test.go @@ -162,7 +162,7 @@ func TestBinary_Filter(t *testing.T) { }, { - NewStringLabelFilter(labels.MustNewMatcher(labels.MatchNotRegexp, ErrorLabel, ".*")), + NewStringLabelFilter(labels.MustNewMatcher(labels.MatchNotRegexp, ErrorLabel, ".+")), Labels{ ErrorLabel: "foo", "status": "200", "method": "POST", }, @@ -177,7 +177,7 @@ { - NewStringLabelFilter(labels.MustNewMatcher(labels.MatchNotRegexp, ErrorLabel, ".*")), + NewStringLabelFilter(labels.MustNewMatcher(labels.MatchNotRegexp, ErrorLabel, ".+")), Labels{ "status": "200", "method": "POST", }, diff --git a/pkg/logql/log/labels.go b/pkg/logql/log/labels.go index b38dbf3efcecf..00a3edfb15ecb 100644 --- a/pkg/logql/log/labels.go +++ b/pkg/logql/log/labels.go @@ -1,5 +1,7 @@ package log +// Labels is the type that is passed across multiple stages. +// I expect this type to become more and more complex over time as we optimize it. 
type Labels map[string]string func (l Labels) Has(key string) bool { diff --git a/pkg/logql/log/logfmt/decode.go b/pkg/logql/log/logfmt/decode.go index 24bc1f4aafa2f..7228ce9dca8b2 100644 --- a/pkg/logql/log/logfmt/decode.go +++ b/pkg/logql/log/logfmt/decode.go @@ -1,4 +1,5 @@ // Adapted from https://github.com/go-logfmt/logfmt/ but []byte as parameter instead +// Original license is MIT. package logfmt import ( diff --git a/pkg/logql/log/metrics_extraction.go b/pkg/logql/log/metrics_extraction.go index 76a12e3fd4f07..556d0e8feccbf 100644 --- a/pkg/logql/log/metrics_extraction.go +++ b/pkg/logql/log/metrics_extraction.go @@ -8,6 +8,12 @@ import ( "github.com/prometheus/prometheus/pkg/labels" ) +const ( + ConvertDuration = "duration" + ConvertFloat = "float" +) + +// SampleExtractor extracts a sample for a log line. type SampleExtractor interface { Process(line []byte, lbs labels.Labels) (float64, labels.Labels, bool) } @@ -18,8 +24,11 @@ func (fn SampleExtractorFunc) Process(line []byte, lbs labels.Labels) (float64, return fn(line, lbs) } +// LineExtractor extracts a float64 from a log line. type LineExtractor func([]byte) float64 +// ToSampleExtractor transforms a LineExtractor into a SampleExtractor. +// Useful for metric conversion without a log Pipeline. func (l LineExtractor) ToSampleExtractor() SampleExtractor { return SampleExtractorFunc(func(line []byte, lbs labels.Labels) (float64, labels.Labels, bool) { return l(line), lbs, true @@ -45,6 +54,8 @@ func (l lineSampleExtractor) Process(line []byte, lbs labels.Labels) (float64, l return l.LineExtractor(line), labels.FromMap(labelmap), true } +// WithLineExtractor creates a SampleExtractor from a LineExtractor. +// Multiple log stages are run before converting the log line. func (m MultiStage) WithLineExtractor(ex LineExtractor) (SampleExtractor, error) { if len(m) == 0 { return ex.ToSampleExtractor(), nil @@ -64,11 +75,9 @@ type labelSampleExtractor struct { without bool } -const ( - ConvertDuration = "duration" - ConvertFloat = "float" -) - +// WithLabelExtractor creates a SampleExtractor that will extract metrics from a label. +// A set of log stages is executed before the conversion. A filtering stage is executed after the conversion, allowing +// samples containing the __error__ label to be removed. func (m MultiStage) WithLabelExtractor( labelName, conversion string, groups []string, without bool, @@ -94,13 +103,13 @@ func (m MultiStage) WithLabelExtractor( } func (l *labelSampleExtractor) Process(line []byte, lbs labels.Labels) (float64, labels.Labels, bool) { - // apply pipeline + // Apply the pipeline first. labelmap := Labels(lbs.Map()) line, ok := l.preStage.Process(line, labelmap) if !ok { return 0, nil, false } - // convert + // convert the label value. var v float64 stringValue := labelmap[l.labelName] if stringValue == "" { diff --git a/pkg/logql/log/parser.go b/pkg/logql/log/parser.go index 64489efd422de..9743a9589e506 100644 --- a/pkg/logql/log/parser.go +++ b/pkg/logql/log/parser.go @@ -19,9 +19,9 @@ const ( ) var ( - _ Stage = &jsonParser{} - _ Stage = &regexpParser{} - _ Stage = &logfmtParser{} + _ Stage = &JSONParser{} + _ Stage = &RegexpParser{} + _ Stage = &LogfmtParser{} errMissingCapture = errors.New("at least one named capture must be supplied") ) @@ -58,13 +58,14 @@ func sanitizeKey(key string) string { }, key) } -type jsonParser struct{} +type JSONParser struct{} -func NewJSONParser() *jsonParser { - return &jsonParser{} +// NewJSONParser creates a log stage that can parse a json log line and add properties as labels. 
+func NewJSONParser() *JSONParser { + return &JSONParser{} } -func (j *jsonParser) Process(line []byte, lbs Labels) ([]byte, bool) { +func (j *JSONParser) Process(line []byte, lbs Labels) ([]byte, bool) { data := map[string]interface{}{} err := jsoniter.ConfigFastest.Unmarshal(line, &data) if err != nil { @@ -96,12 +97,14 @@ func jsonKey(prefix, key string) string { return fmt.Sprintf("%s%s%s", prefix, jsonSpacer, key) } -type regexpParser struct { +type RegexpParser struct { regex *regexp.Regexp nameIndex map[int]string } -func NewRegexpParser(re string) (*regexpParser, error) { +// NewRegexpParser creates a new log stage that can extract labels from a log line using a regex expression. +// The regex expression must contain at least one named capture. If the regex doesn't match, the line is not filtered out. +func NewRegexpParser(re string) (*RegexpParser, error) { regex, err := regexp.Compile(re) if err != nil { return nil, err @@ -126,13 +129,13 @@ func NewRegexpParser(re string) (*RegexpParser, error) { if len(nameIndex) == 0 { return nil, errMissingCapture } - return &regexpParser{ + return &RegexpParser{ regex: regex, nameIndex: nameIndex, }, nil } -func mustNewRegexParser(re string) *regexpParser { +func mustNewRegexParser(re string) *RegexpParser { r, err := NewRegexpParser(re) if err != nil { panic(err) } return r } -func (r *regexpParser) Process(line []byte, lbs Labels) ([]byte, bool) { +func (r *RegexpParser) Process(line []byte, lbs Labels) ([]byte, bool) { add := addLabel(lbs) for i, value := range r.regex.FindSubmatch(line) { if name, ok := r.nameIndex[i]; ok { @@ -150,13 +153,15 @@ func (r *regexpParser) Process(line []byte, lbs Labels) ([]byte, bool) { return line, true } -type logfmtParser struct{} +type LogfmtParser struct{} -func NewLogfmtParser() *logfmtParser { - return &logfmtParser{} +// NewLogfmtParser creates a parser that can extract labels from a logfmt log line. +// Each keyval is extracted into a respective label. +func NewLogfmtParser() *LogfmtParser { + return &LogfmtParser{} } -func (l *logfmtParser) Process(line []byte, lbs Labels) ([]byte, bool) { +func (l *LogfmtParser) Process(line []byte, lbs Labels) ([]byte, bool) { // todo(cyriltovena): we should be using the same decoder for the whole query. // However right now backward queries, because of the batch iterator that has a go loop, // can run this method in parallel. This causes a race, e.g. it will reset to a new line while scanning for keyvals. diff --git a/pkg/logql/log/parser_test.go b/pkg/logql/log/parser_test.go index 027dadfacc951..c1bef0e27fc7c 100644 --- a/pkg/logql/log/parser_test.go +++ b/pkg/logql/log/parser_test.go @@ -108,7 +108,7 @@ func TestNewRegexpParser(t *testing.T) { func Test_regexpParser_Parse(t *testing.T) { tests := []struct { name string - parser *regexpParser + parser *RegexpParser line []byte lbs Labels want Labels diff --git a/pkg/logql/log/pipeline.go b/pkg/logql/log/pipeline.go index 69e03f253636c..a39a877b3577c 100644 --- a/pkg/logql/log/pipeline.go +++ b/pkg/logql/log/pipeline.go @@ -4,10 +4,12 @@ import ( "github.com/prometheus/prometheus/pkg/labels" ) +// Pipeline transforms and filters log lines and labels. type Pipeline interface { Process(line []byte, lbs labels.Labels) ([]byte, labels.Labels, bool) } +// Stage is a single step of a Pipeline. 
type Stage interface { Process(line []byte, lbs Labels) ([]byte, bool) } @@ -35,6 +37,8 @@ func (fn StageFunc) Process(line []byte, lbs Labels) ([]byte, bool) { return fn(line, lbs) } +// MultiStage is a combination of multiple stages which implements Pipeline +// or can be reduced into a single stage for convenience. type MultiStage []Stage func (m MultiStage) Process(line []byte, lbs labels.Labels) ([]byte, labels.Labels, bool) { diff --git a/pkg/logql/parser_test.go b/pkg/logql/parser_test.go index 5749331faebeb..2883a0648a1ef 100644 --- a/pkg/logql/parser_test.go +++ b/pkg/logql/parser_test.go @@ -1386,7 +1386,7 @@ func TestParse(t *testing.T) { }, { in: `quantile_over_time(0.99998,{app="foo"} |= "bar" | json | latency >= 250ms or ( status_code < 500 and status_code > 200) - | line_format "blip{{ .foo }}blop {{.status_code}}" | label_format foo=bar,status_code="buzz{{.bar}}" | unwrap foo | __error__ !~".*"[5m]) by (namespace,instance)`, + | line_format "blip{{ .foo }}blop {{.status_code}}" | label_format foo=bar,status_code="buzz{{.bar}}" | unwrap foo | __error__ !~".+"[5m]) by (namespace,instance)`, exp: newRangeAggregationExpr( newLogRange(&pipelineExpr{ left: newMatcherExpr([]*labels.Matcher{{Type: labels.MatchEqual, Name: "app", Value: "foo"}}), @@ -1410,7 +1410,7 @@ }, }, 5*time.Minute, - newUnwrapExpr("foo", "").addPostFilter(log.NewStringLabelFilter(mustNewMatcher(labels.MatchNotRegexp, log.ErrorLabel, ".*")))), + newUnwrapExpr("foo", "").addPostFilter(log.NewStringLabelFilter(mustNewMatcher(labels.MatchNotRegexp, log.ErrorLabel, ".+")))), OpRangeTypeQuantile, &grouping{without: false, groups: []string{"namespace", "instance"}}, NewStringLabelFilter("0.99998"), ), }, From 54dd6c21a41f93929571bbf571c0500d7d3fcb68 Mon Sep 17 00:00:00 2001 From: Cyril Tovena Date: Thu, 15 Oct 2020 22:18:15 +0200 Subject: [PATCH 40/45] Lint. Signed-off-by: Cyril Tovena --- pkg/chunkenc/dumb_chunk.go | 3 +- pkg/chunkenc/interface.go | 3 +- pkg/chunkenc/memchunk.go | 27 +++--- pkg/logql/evaluator.go | 8 +- pkg/logql/lex_test.go | 2 +- pkg/logql/log/logfmt/jsonstring.go | 138 ----------------------------- 6 files changed, 21 insertions(+), 160 deletions(-) diff --git a/pkg/chunkenc/dumb_chunk.go b/pkg/chunkenc/dumb_chunk.go index 263d2c60ed8d2..ea2086802bc2e 100644 --- a/pkg/chunkenc/dumb_chunk.go +++ b/pkg/chunkenc/dumb_chunk.go @@ -5,10 +5,11 @@ import ( "sort" "time" + "github.com/prometheus/prometheus/pkg/labels" + "github.com/grafana/loki/pkg/iter" "github.com/grafana/loki/pkg/logproto" "github.com/grafana/loki/pkg/logql" - "github.com/prometheus/prometheus/pkg/labels" ) const ( diff --git a/pkg/chunkenc/interface.go b/pkg/chunkenc/interface.go index fb0f46d5fb8fe..74803adc50f34 100644 --- a/pkg/chunkenc/interface.go +++ b/pkg/chunkenc/interface.go @@ -7,10 +7,11 @@ import ( "strings" "time" + "github.com/prometheus/prometheus/pkg/labels" + "github.com/grafana/loki/pkg/iter" "github.com/grafana/loki/pkg/logproto" "github.com/grafana/loki/pkg/logql" - "github.com/prometheus/prometheus/pkg/labels" ) // Errors returned by the chunk interface. 
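For illustration, the Stage and MultiStage contract documented in the cleanup commit above can be exercised as in the following minimal sketch. It is not part of the patch series: it assumes the pkg/logql/log package layout introduced above, and the custom `upper` stage is hypothetical.

package main

import (
	"fmt"
	"strings"

	"github.com/grafana/loki/pkg/logql/log"
	"github.com/prometheus/prometheus/pkg/labels"
)

func main() {
	// A hypothetical custom stage: uppercases the line and keeps it (ok=true).
	upper := log.StageFunc(func(line []byte, lbs log.Labels) ([]byte, bool) {
		return []byte(strings.ToUpper(string(line))), true
	})
	// MultiStage runs each stage in order over the line and the shared label map;
	// a stage returning false drops the line.
	pipeline := log.MultiStage{log.NewLogfmtParser(), upper}
	line, lbs, ok := pipeline.Process([]byte(`level=debug msg=hello`), labels.Labels{})
	fmt.Println(string(line), lbs, ok) // logfmt keyvals become labels; the line is uppercased
}
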
diff --git a/pkg/chunkenc/memchunk.go b/pkg/chunkenc/memchunk.go index 617561ce74ef9..e69d052dd8783 100644 --- a/pkg/chunkenc/memchunk.go +++ b/pkg/chunkenc/memchunk.go @@ -670,8 +670,7 @@ type bufferedIterator struct { closed bool - baseLbs labels.Labels - pipeline logql.Pipeline + baseLbs labels.Labels } func newBufferedIterator(ctx context.Context, pool ReaderPool, b []byte, lbs labels.Labels) *bufferedIterator { @@ -695,20 +694,18 @@ func (si *bufferedIterator) Next() bool { si.bufReader = BufReaderPool.Get(si.reader) } - for { - ts, line, ok := si.moveNext() - if !ok { - si.Close() - return false - } - // we decode always the line length and ts as varint - si.stats.DecompressedBytes += int64(len(line)) + 2*binary.MaxVarintLen64 - si.stats.DecompressedLines++ - - si.currTs = ts - si.currLine = line - return true + ts, line, ok := si.moveNext() + if !ok { + si.Close() + return false } + // we decode always the line length and ts as varint + si.stats.DecompressedBytes += int64(len(line)) + 2*binary.MaxVarintLen64 + si.stats.DecompressedLines++ + + si.currTs = ts + si.currLine = line + return true } // moveNext moves the buffer to the next entry diff --git a/pkg/logql/evaluator.go b/pkg/logql/evaluator.go index 0f3d803638b19..7a31da02667d4 100644 --- a/pkg/logql/evaluator.go +++ b/pkg/logql/evaluator.go @@ -416,10 +416,10 @@ func newPipelineErr(metric labels.Labels) *pipelineError { func (e pipelineError) Error() string { return fmt.Sprintf( - `pipeline error: '%s' for series: '%s'. - Use a label filter to intentionally skip this error. (e.g | __error__!="%s"). - To skip all potential errors you can match empty errors.(e.g __error__="") - The label filter can also be specified after unwrap. (e.g | unwrap latency | __error__="" )`, + "pipeline error: '%s' for series: '%s'.\n"+ + "Use a label filter to intentionally skip this error. (e.g | __error__!=\"%s\").\n"+ + "To skip all potential errors you can match empty errors.(e.g __error__=\"\")\n"+ + "The label filter can also be specified after unwrap. 
(e.g | unwrap latency | __error__=\"\" )\n", e.errorType, e.metric, e.errorType) } diff --git a/pkg/logql/lex_test.go b/pkg/logql/lex_test.go index 7c004b90eef97..d5ef2537cae24 100644 --- a/pkg/logql/lex_test.go +++ b/pkg/logql/lex_test.go @@ -31,7 +31,7 @@ func TestLex(t *testing.T) { PIPE, IDENTIFIER, GT, DURATION, OR, IDENTIFIER, CMP_EQ, NUMBER, AND, IDENTIFIER, EQ, STRING, PIPE, UNWRAP, IDENTIFIER}}, {`{foo="bar"} |~ "\\w+" | size > 250kB`, []int{OPEN_BRACE, IDENTIFIER, EQ, STRING, CLOSE_BRACE, PIPE_MATCH, STRING, PIPE, IDENTIFIER, GT, BYTES}}, {`{foo="bar"} |~ "\\w+" | size > 250kB and latency <= 1h15m30s or bar=1`, - []int{OPEN_BRACE, IDENTIFIER, EQ, STRING, CLOSE_BRACE, PIPE_MATCH, STRING, PIPE, + []int{OPEN_BRACE, IDENTIFIER, EQ, STRING, CLOSE_BRACE, PIPE_MATCH, STRING, PIPE, IDENTIFIER, GT, BYTES, AND, IDENTIFIER, LTE, DURATION, OR, IDENTIFIER, EQ, NUMBER}}, {`{foo="bar"} |~ "\\w+" | size > 200MiB or foo == 4.00`, []int{OPEN_BRACE, IDENTIFIER, EQ, STRING, CLOSE_BRACE, PIPE_MATCH, STRING, PIPE, IDENTIFIER, GT, BYTES, OR, IDENTIFIER, CMP_EQ, NUMBER}}, diff --git a/pkg/logql/log/logfmt/jsonstring.go b/pkg/logql/log/logfmt/jsonstring.go index 030ac85fcc2e9..cd2d76bbe95b8 100644 --- a/pkg/logql/log/logfmt/jsonstring.go +++ b/pkg/logql/log/logfmt/jsonstring.go @@ -1,10 +1,7 @@ package logfmt import ( - "bytes" - "io" "strconv" - "sync" "unicode" "unicode/utf16" "unicode/utf8" @@ -16,141 +13,6 @@ import ( // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -var hex = "0123456789abcdef" - -var bufferPool = sync.Pool{ - New: func() interface{} { - return &bytes.Buffer{} - }, -} - -func getBuffer() *bytes.Buffer { - return bufferPool.Get().(*bytes.Buffer) -} - -func poolBuffer(buf *bytes.Buffer) { - buf.Reset() - bufferPool.Put(buf) -} - -// NOTE: keep in sync with writeQuotedBytes below. -func writeQuotedString(w io.Writer, s string) (int, error) { - buf := getBuffer() - buf.WriteByte('"') - start := 0 - for i := 0; i < len(s); { - if b := s[i]; b < utf8.RuneSelf { - if 0x20 <= b && b != '\\' && b != '"' { - i++ - continue - } - if start < i { - buf.WriteString(s[start:i]) - } - switch b { - case '\\', '"': - buf.WriteByte('\\') - buf.WriteByte(b) - case '\n': - buf.WriteByte('\\') - buf.WriteByte('n') - case '\r': - buf.WriteByte('\\') - buf.WriteByte('r') - case '\t': - buf.WriteByte('\\') - buf.WriteByte('t') - default: - // This encodes bytes < 0x20 except for \n, \r, and \t. - buf.WriteString(`\u00`) - buf.WriteByte(hex[b>>4]) - buf.WriteByte(hex[b&0xF]) - } - i++ - start = i - continue - } - c, size := utf8.DecodeRuneInString(s[i:]) - if c == utf8.RuneError { - if start < i { - buf.WriteString(s[start:i]) - } - buf.WriteString(`\ufffd`) - i += size - start = i - continue - } - i += size - } - if start < len(s) { - buf.WriteString(s[start:]) - } - buf.WriteByte('"') - n, err := w.Write(buf.Bytes()) - poolBuffer(buf) - return n, err -} - -// NOTE: keep in sync with writeQuoteString above. 
-func writeQuotedBytes(w io.Writer, s []byte) (int, error) { - buf := getBuffer() - buf.WriteByte('"') - start := 0 - for i := 0; i < len(s); { - if b := s[i]; b < utf8.RuneSelf { - if 0x20 <= b && b != '\\' && b != '"' { - i++ - continue - } - if start < i { - buf.Write(s[start:i]) - } - switch b { - case '\\', '"': - buf.WriteByte('\\') - buf.WriteByte(b) - case '\n': - buf.WriteByte('\\') - buf.WriteByte('n') - case '\r': - buf.WriteByte('\\') - buf.WriteByte('r') - case '\t': - buf.WriteByte('\\') - buf.WriteByte('t') - default: - // This encodes bytes < 0x20 except for \n, \r, and \t. - buf.WriteString(`\u00`) - buf.WriteByte(hex[b>>4]) - buf.WriteByte(hex[b&0xF]) - } - i++ - start = i - continue - } - c, size := utf8.DecodeRune(s[i:]) - if c == utf8.RuneError { - if start < i { - buf.Write(s[start:i]) - } - buf.WriteString(`\ufffd`) - i += size - start = i - continue - } - i += size - } - if start < len(s) { - buf.Write(s[start:]) - } - buf.WriteByte('"') - n, err := w.Write(buf.Bytes()) - poolBuffer(buf) - return n, err -} - -// getu4 decodes \uXXXX from the beginning of s, returning the hex value, -// or it returns -1. func getu4(s []byte) rune { if len(s) < 6 || s[0] != '\\' || s[1] != 'u' { return -1 From a50490fdc1bc6451f3482413a35d928b5b5fa82f Mon Sep 17 00:00:00 2001 From: Cyril Tovena Date: Thu, 15 Oct 2020 22:45:55 +0200 Subject: [PATCH 41/45] Lint. Signed-off-by: Cyril Tovena --- pkg/storage/batch.go | 12 ------------ 1 file changed, 12 deletions(-) diff --git a/pkg/storage/batch.go b/pkg/storage/batch.go index 83462839cf764..8cd31fd43dfe1 100644 --- a/pkg/storage/batch.go +++ b/pkg/storage/batch.go @@ -332,18 +332,6 @@ func (it *batchChunkIterator) Close() error { return nil } -type labelCache map[model.Fingerprint]string - -// computeLabels compute the labels string representation, uses a map to cache result per fingerprint. -func (l labelCache) computeLabels(c *LazyChunk) string { - if lbs, ok := l[c.Chunk.Fingerprint]; ok { - return lbs - } - lbs := dropLabels(c.Chunk.Metric, labels.MetricName).String() - l[c.Chunk.Fingerprint] = lbs - return lbs -} - type logBatchIterator struct { *batchChunkIterator From 801b72136faad0f74fd79acaca51a6c6d66f0616 Mon Sep 17 00:00:00 2001 From: Cyril Tovena Date: Thu, 15 Oct 2020 23:23:26 +0200 Subject: [PATCH 42/45] Fixes frontend handler. Signed-off-by: Cyril Tovena --- pkg/querier/queryrange/roundtrip.go | 1 + pkg/querier/queryrange/roundtrip_test.go | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/pkg/querier/queryrange/roundtrip.go b/pkg/querier/queryrange/roundtrip.go index 97054dafce120..ce6c5a511045a 100644 --- a/pkg/querier/queryrange/roundtrip.go +++ b/pkg/querier/queryrange/roundtrip.go @@ -168,6 +168,7 @@ func transformRegexQuery(req *http.Request, expr logql.LogSelectorExpr) (logql.L // force the form and query to be parsed again. req.Form = nil req.PostForm = nil + return filterExpr, nil } return expr, nil } diff --git a/pkg/querier/queryrange/roundtrip_test.go b/pkg/querier/queryrange/roundtrip_test.go index 21685acdb6113..e05e524dde8e7 100644 --- a/pkg/querier/queryrange/roundtrip_test.go +++ b/pkg/querier/queryrange/roundtrip_test.go @@ -384,7 +384,7 @@ func TestRegexpParamsSupport(t *testing.T) { count, h := promqlResult(streams) rt.setHandler(http.HandlerFunc(func(rw http.ResponseWriter, r *http.Request) { // the query params should contain the filter. 
- require.Contains(t, r.URL.Query().Get("query"), `|~"foo"`) + require.Contains(t, r.URL.Query().Get("query"), `|~ "foo"`) h.ServeHTTP(rw, r) })) _, err = tpw(rt).RoundTrip(req) From 1aee4152acadee6200c5895a25f167b54112584a Mon Sep 17 00:00:00 2001 From: Cyril Tovena Date: Thu, 15 Oct 2020 23:39:09 +0200 Subject: [PATCH 43/45] Fixes old test. Signed-off-by: Cyril Tovena --- pkg/logentry/stages/metrics_test.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pkg/logentry/stages/metrics_test.go b/pkg/logentry/stages/metrics_test.go index 33319e37272c1..bf98b917616f8 100644 --- a/pkg/logentry/stages/metrics_test.go +++ b/pkg/logentry/stages/metrics_test.go @@ -222,7 +222,7 @@ func Test(t *testing.T) { IdleDuration: &metricTestInvalidIdle, }, }, - errors.Errorf(ErrInvalidIdleDur, "time: unknown unit f in duration 10f"), + errors.Errorf(ErrInvalidIdleDur, "time: unknown unit \"f\" in duration \"10f\""), }, "valid": { MetricsConfig{ From 1ea917fc6058b2d8a34c0422f75defe64c89672e Mon Sep 17 00:00:00 2001 From: Cyril Tovena Date: Fri, 16 Oct 2020 00:28:58 +0200 Subject: [PATCH 44/45] Fix go1.15 local failing test. Signed-off-by: Cyril Tovena --- pkg/logentry/stages/metrics_test.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pkg/logentry/stages/metrics_test.go b/pkg/logentry/stages/metrics_test.go index bf98b917616f8..33319e37272c1 100644 --- a/pkg/logentry/stages/metrics_test.go +++ b/pkg/logentry/stages/metrics_test.go @@ -222,7 +222,7 @@ func Test(t *testing.T) { IdleDuration: &metricTestInvalidIdle, }, }, - errors.Errorf(ErrInvalidIdleDur, "time: unknown unit \"f\" in duration \"10f\""), + errors.Errorf(ErrInvalidIdleDur, "time: unknown unit f in duration 10f"), }, "valid": { MetricsConfig{ From faecc804a06592c9216b66d96afc410e1db1f921 Mon Sep 17 00:00:00 2001 From: Cyril Tovena Date: Fri, 16 Oct 2020 22:47:22 +0200 Subject: [PATCH 45/45] Fixes race conditions in the batch iterator. We should never advance an iterator in parallel. Unfortunately, the code previously built iterators while advancing the previous one; building an iterator can itself advance an iterator and thus create a race condition. This changeset makes sure we only fetch chunks in advance, then build iterators and iterate over them in sequence. It also adds support for labels in the cacheIterator, which is required for logqlv2. Signed-off-by: Cyril Tovena --- pkg/{storage => iter}/cache.go | 135 ++++++-------- pkg/{storage => iter}/cache_test.go | 25 ++- pkg/storage/batch.go | 279 +++++++++++++++------------- pkg/storage/batch_test.go | 15 +- pkg/storage/lazy_chunk.go | 22 +-- 5 files changed, 236 insertions(+), 240 deletions(-) rename pkg/{storage => iter}/cache.go (53%) rename pkg/{storage => iter}/cache_test.go (83%) diff --git a/pkg/storage/cache.go b/pkg/iter/cache.go similarity index 53% rename from pkg/storage/cache.go rename to pkg/iter/cache.go index 41070b4206d03..9f9df5624c3da 100644 --- a/pkg/storage/cache.go +++ b/pkg/iter/cache.go @@ -1,66 +1,56 @@ -package storage +package iter import ( - "github.com/grafana/loki/pkg/iter" "github.com/grafana/loki/pkg/logproto" ) +type CacheEntryIterator interface { + EntryIterator + Reset() +} + // cachedIterator is an iterator that caches iteration to be replayed later on. type cachedIterator struct { - cache []*logproto.Entry - base iter.EntryIterator + cache []entryWithLabels + base EntryIterator // once set to nil it means we have to use the cache. 
- labels string - curr int + curr int closeErr error iterErr error } -// newCachedIterator creates an iterator that cache iteration result and can be iterated again +// NewCachedIterator creates an iterator that caches iteration results and can be iterated again // after closing it without re-using the underlying iterator `it`. -// The cache iterator should be used for entries that belongs to the same stream only. -func newCachedIterator(it iter.EntryIterator, cap int) *cachedIterator { +func NewCachedIterator(it EntryIterator, cap int) CacheEntryIterator { c := &cachedIterator{ base: it, - cache: make([]*logproto.Entry, 0, cap), + cache: make([]entryWithLabels, 0, cap), curr: -1, } - c.load() return c } -func (it *cachedIterator) reset() { +func (it *cachedIterator) Reset() { it.curr = -1 } -func (it *cachedIterator) load() { +func (it *cachedIterator) Next() bool { if it.base != nil { - defer func() { + ok := it.base.Next() + // we're done with the base iterator. + if !ok { it.closeErr = it.base.Close() it.iterErr = it.base.Error() it.base = nil - it.reset() - }() - // set labels using the first entry - if !it.base.Next() { - return + return false } - it.labels = it.base.Labels() - - // add all entries until the base iterator is exhausted - for { - e := it.base.Entry() - it.cache = append(it.cache, &e) - if !it.base.Next() { - break - } - } - + // we're caching entries + it.cache = append(it.cache, entryWithLabels{entry: it.base.Entry(), labels: it.base.Labels()}) + it.curr++ + return true } -} - -func (it *cachedIterator) Next() bool { + // second pass if len(it.cache) == 0 { it.cache = nil return false @@ -73,33 +63,38 @@ func (it *cachedIterator) Next() bool { } func (it *cachedIterator) Entry() logproto.Entry { - if len(it.cache) == 0 { + if len(it.cache) == 0 || it.curr < 0 { return logproto.Entry{} } - if it.curr < 0 { - return *it.cache[0] - } - return *it.cache[it.curr] + + return it.cache[it.curr].entry } func (it *cachedIterator) Labels() string { - return it.labels + if len(it.cache) == 0 || it.curr < 0 { + return "" + } + return it.cache[it.curr].labels } func (it *cachedIterator) Error() error { return it.iterErr } func (it *cachedIterator) Close() error { - it.reset() + it.Reset() return it.closeErr } +type CacheSampleIterator interface { + SampleIterator + Reset() +} + // cachedIterator is an iterator that caches iteration to be replayed later on. type cachedSampleIterator struct { - cache []logproto.Sample - base iter.SampleIterator + cache []sampleWithLabels + base SampleIterator - labels string - curr int + curr int closeErr error iterErr error } // NewCachedSampleIterator creates an iterator that caches iteration results and can be iterated again // after closing it without re-using the underlying iterator `it`. -// The cache iterator should be used for entries that belongs to the same stream only. -func newCachedSampleIterator(it iter.SampleIterator, cap int) *cachedSampleIterator { +func NewCachedSampleIterator(it SampleIterator, cap int) CacheSampleIterator { c := &cachedSampleIterator{ base: it, - cache: make([]logproto.Sample, 0, cap), + cache: make([]sampleWithLabels, 0, cap), curr: -1, } - c.load() return c } -func (it *cachedSampleIterator) reset() { +func (it *cachedSampleIterator) Reset() { it.curr = -1 } -func (it *cachedSampleIterator) load() { +func (it *cachedSampleIterator) Next() bool { if it.base != nil { - defer func() { + ok := it.base.Next() + // we're done with the base iterator. 
+ if !ok { it.closeErr = it.base.Close() it.iterErr = it.base.Error() it.base = nil - it.reset() - }() - // set labels using the first entry - if !it.base.Next() { - return + return false } - it.labels = it.base.Labels() - - // add all entries until the base iterator is exhausted - for { - it.cache = append(it.cache, it.base.Sample()) - if !it.base.Next() { - break - } - } - + // we're caching entries + it.cache = append(it.cache, sampleWithLabels{Sample: it.base.Sample(), labels: it.base.Labels()}) + it.curr++ + return true } -} - -func (it *cachedSampleIterator) Next() bool { + // second pass if len(it.cache) == 0 { it.cache = nil return false @@ -160,22 +143,22 @@ func (it *cachedSampleIterator) Next() bool { } func (it *cachedSampleIterator) Sample() logproto.Sample { - if len(it.cache) == 0 { + if len(it.cache) == 0 || it.curr < 0 { return logproto.Sample{} } - if it.curr < 0 { - return it.cache[0] - } - return it.cache[it.curr] + return it.cache[it.curr].Sample } func (it *cachedSampleIterator) Labels() string { - return it.labels + if len(it.cache) == 0 || it.curr < 0 { + return "" + } + return it.cache[it.curr].labels } func (it *cachedSampleIterator) Error() error { return it.iterErr } func (it *cachedSampleIterator) Close() error { - it.reset() + it.Reset() return it.closeErr } diff --git a/pkg/storage/cache_test.go b/pkg/iter/cache_test.go similarity index 83% rename from pkg/storage/cache_test.go rename to pkg/iter/cache_test.go index 40cb45220ef06..b04811caedd76 100644 --- a/pkg/storage/cache_test.go +++ b/pkg/iter/cache_test.go @@ -1,4 +1,4 @@ -package storage +package iter import ( "errors" @@ -7,7 +7,6 @@ import ( "github.com/stretchr/testify/require" - "github.com/grafana/loki/pkg/iter" "github.com/grafana/loki/pkg/logproto" ) @@ -20,12 +19,11 @@ func Test_CachedIterator(t *testing.T) { {Timestamp: time.Unix(0, 3), Line: "3"}, }, } - c := newCachedIterator(iter.NewStreamIterator(stream), 3) + c := NewCachedIterator(NewStreamIterator(stream), 3) assert := func() { - // we should crash for call of entry without next although that's not expected. - require.Equal(t, stream.Labels, c.Labels()) - require.Equal(t, stream.Entries[0], c.Entry()) + require.Equal(t, "", c.Labels()) + require.Equal(t, logproto.Entry{}, c.Entry()) require.Equal(t, true, c.Next()) require.Equal(t, stream.Entries[0], c.Entry()) require.Equal(t, true, c.Next()) @@ -48,7 +46,7 @@ func Test_CachedIterator(t *testing.T) { func Test_EmptyCachedIterator(t *testing.T) { - c := newCachedIterator(iter.NoopIterator, 0) + c := NewCachedIterator(NoopIterator, 0) require.Equal(t, "", c.Labels()) require.Equal(t, logproto.Entry{}, c.Entry()) @@ -68,7 +66,7 @@ func Test_EmptyCachedIterator(t *testing.T) { func Test_ErrorCachedIterator(t *testing.T) { - c := newCachedIterator(&errorIter{}, 0) + c := NewCachedIterator(&errorIter{}, 0) require.Equal(t, false, c.Next()) require.Equal(t, "", c.Labels()) @@ -86,12 +84,11 @@ func Test_CachedSampleIterator(t *testing.T) { {Timestamp: time.Unix(0, 3).UnixNano(), Hash: 3, Value: 3.}, }, } - c := newCachedSampleIterator(iter.NewSeriesIterator(series), 3) + c := NewCachedSampleIterator(NewSeriesIterator(series), 3) assert := func() { - // we should crash for call of entry without next although that's not expected. 
- require.Equal(t, series.Labels, c.Labels()) - require.Equal(t, series.Samples[0], c.Sample()) + require.Equal(t, "", c.Labels()) + require.Equal(t, logproto.Sample{}, c.Sample()) require.Equal(t, true, c.Next()) require.Equal(t, series.Samples[0], c.Sample()) require.Equal(t, true, c.Next()) @@ -114,7 +111,7 @@ func Test_CachedSampleIterator(t *testing.T) { func Test_EmptyCachedSampleIterator(t *testing.T) { - c := newCachedSampleIterator(iter.NoopIterator, 0) + c := NewCachedSampleIterator(NoopIterator, 0) require.Equal(t, "", c.Labels()) require.Equal(t, logproto.Sample{}, c.Sample()) @@ -134,7 +131,7 @@ func Test_EmptyCachedSampleIterator(t *testing.T) { func Test_ErrorCachedSampleIterator(t *testing.T) { - c := newCachedSampleIterator(&errorIter{}, 0) + c := NewCachedSampleIterator(&errorIter{}, 0) require.Equal(t, false, c.Next()) require.Equal(t, "", c.Labels()) diff --git a/pkg/storage/batch.go b/pkg/storage/batch.go index 8cd31fd43dfe1..df73b94895b9b 100644 --- a/pkg/storage/batch.go +++ b/pkg/storage/batch.go @@ -73,15 +73,6 @@ func NewChunkMetrics(r prometheus.Registerer, maxBatchSize int) *ChunkMetrics { } } -type genericIterator interface { - Next() bool - Labels() string - Error() error - Close() error -} - -type chunksIteratorFactory func(chunks []*LazyChunk, from, through time.Time, nextChunk *LazyChunk) (genericIterator, error) - // batchChunkIterator is an EntryIterator that iterates through chunks by batch of `batchSize`. // Since chunks can overlap across batches for each iteration the iterator will keep all overlapping // chunks with the next chunk from the next batch and added it to the next iteration. In this case the boundaries of the batch @@ -89,20 +80,15 @@ type chunksIteratorFactory func(chunks []*LazyChunk, from, through time.Time, ne type batchChunkIterator struct { chunks lazyChunks batchSize int - err error - curr genericIterator lastOverlapping []*LazyChunk - iterFactory chunksIteratorFactory + metrics *ChunkMetrics + matchers []*labels.Matcher begun bool ctx context.Context - cancel context.CancelFunc start, end time.Time direction logproto.Direction - next chan *struct { - iter genericIterator - err error - } + next chan *chunkBatch } // newBatchChunkIterator creates a new batch iterator with the given batchSize. 
@@ -112,24 +98,20 @@ func newBatchChunkIterator( batchSize int, direction logproto.Direction, start, end time.Time, - iterFactory chunksIteratorFactory, + metrics *ChunkMetrics, + matchers []*labels.Matcher, ) *batchChunkIterator { - ctx, cancel := context.WithCancel(ctx) res := &batchChunkIterator{ batchSize: batchSize, - - start: start, - end: end, - direction: direction, - ctx: ctx, - cancel: cancel, - iterFactory: iterFactory, - chunks: lazyChunks{direction: direction, chunks: chunks}, - next: make(chan *struct { - iter genericIterator - err error - }), + metrics: metrics, + matchers: matchers, + start: start, + end: end, + direction: direction, + ctx: ctx, + chunks: lazyChunks{direction: direction, chunks: chunks}, + next: make(chan *chunkBatch), } sort.Sort(res.chunks) return res @@ -149,54 +131,21 @@ func (it *batchChunkIterator) loop(ctx context.Context) { close(it.next) return } - next, err := it.nextBatch() select { case <-ctx.Done(): close(it.next) - // next can be nil if we are waiting to return that the nextBatch was empty and the context is closed - // or if another error occurred reading nextBatch - if next == nil { - return - } - err = next.Close() - if err != nil { - level.Error(util.WithContext(ctx, util.Logger)).Log("msg", "Failed to close the pre-fetched iterator when pre-fetching was canceled", "err", err) - } return - case it.next <- &struct { - iter genericIterator - err error - }{next, err}: + case it.next <- it.nextBatch(): } } } -func (it *batchChunkIterator) Next() bool { +func (it *batchChunkIterator) Next() *chunkBatch { it.Start() // Ensure the iterator has started. - - var err error - // for loop to avoid recursion - for { - if it.curr != nil && it.curr.Next() { - return true - } - // close previous iterator - if it.curr != nil { - it.err = it.curr.Close() - } - next := <-it.next - if next == nil { - return false - } - it.curr = next.iter - if next.err != nil { - it.err = err - return false - } - } + return <-it.next } -func (it *batchChunkIterator) nextBatch() (genericIterator, error) { +func (it *batchChunkIterator) nextBatch() *chunkBatch { // the first chunk of the batch headChunk := it.chunks.Peek() from, through := it.start, it.end @@ -306,38 +255,35 @@ func (it *batchChunkIterator) nextBatch() (genericIterator, error) { } } } - // create the new chunks iterator from the current batch. - return it.iterFactory(batch, from, through, nextChunk) -} - -func (it *batchChunkIterator) Labels() string { - return it.curr.Labels() -} - -func (it *batchChunkIterator) Error() error { - if it.err != nil { - return it.err + // download chunk for this batch. 
@@ -306,38 +255,35 @@ func (it *batchChunkIterator) nextBatch() (genericIterator, error) {
 			}
 		}
 	}
-	// create the new chunks iterator from the current batch.
-	return it.iterFactory(batch, from, through, nextChunk)
-}
-
-func (it *batchChunkIterator) Labels() string {
-	return it.curr.Labels()
-}
-
-func (it *batchChunkIterator) Error() error {
-	if it.err != nil {
-		return it.err
+	// download chunks for this batch.
+	chksBySeries, err := fetchChunkBySeries(it.ctx, it.metrics, batch, it.matchers)
+	if err != nil {
+		return &chunkBatch{err: err}
 	}
-	if it.curr != nil {
-		return it.curr.Error()
+	return &chunkBatch{
+		chunksBySeries: chksBySeries,
+		err:            err,
+		from:           from,
+		through:        through,
+		nextChunk:      nextChunk,
 	}
-	return nil
 }
 
-func (it *batchChunkIterator) Close() error {
-	it.cancel()
-	if it.curr != nil {
-		return it.curr.Close()
-	}
-	return nil
+type chunkBatch struct {
+	chunksBySeries map[model.Fingerprint][][]*LazyChunk
+	err            error
+
+	from, through time.Time
+	nextChunk     *LazyChunk
 }
 
 type logBatchIterator struct {
 	*batchChunkIterator
+	curr iter.EntryIterator
+	err  error
 
 	ctx      context.Context
-	metrics  *ChunkMetrics
-	matchers []*labels.Matcher
+	cancel context.CancelFunc
 
 	pipeline logql.Pipeline
 }
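`chunkBatch` is a plain result object: it carries either the chunks fetched for the batch or the error that prevented fetching them, so a single channel can convey both outcomes. A minimal standalone sketch of this data-or-error handoff, with illustrative names:

    package main

    import (
        "errors"
        "fmt"
    )

    // result carries either a payload or an error through a channel, the same
    // shape as chunkBatch above (toy names, not Loki's).
    type result struct {
        value string
        err   error
    }

    func produce(out chan<- *result) {
        defer close(out)
        out <- &result{value: "ok-batch"}
        out <- &result{err: errors.New("fetch failed")}
    }

    func main() {
        ch := make(chan *result)
        go produce(ch)
        for r := range ch {
            // Consumers check err first, the way logBatchIterator.Next checks
            // next.err, and stop on the first failure.
            if r.err != nil {
                fmt.Println("stopping:", r.err)
                return
            }
            fmt.Println("got:", r.value)
        }
    }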
@@ -351,37 +297,76 @@ func newLogBatchIterator(
 	direction logproto.Direction,
 	start, end time.Time,
 ) (iter.EntryIterator, error) {
+	ctx, cancel := context.WithCancel(ctx)
 	// __name__ is not something we filter by because it's a constant in loki
 	// and only used for upstream compatibility; therefore remove it.
 	// The same applies to the sharding label which is injected by the cortex storage code.
 	matchers = removeMatchersByName(matchers, labels.MetricName, astmapper.ShardLabel)
 	logbatch := &logBatchIterator{
-		matchers: matchers,
-		pipeline: pipeline,
-		metrics:  metrics,
-		ctx:      ctx,
+		pipeline:           pipeline,
+		ctx:                ctx,
+		cancel:             cancel,
+		batchChunkIterator: newBatchChunkIterator(ctx, chunks, batchSize, direction, start, end, metrics, matchers),
 	}
-
-	batch := newBatchChunkIterator(ctx, chunks, batchSize, direction, start, end, logbatch.newChunksIterator)
-	// Important: since the batchChunkIterator is bound to the LogBatchIterator,
-	// ensure embedded fields are present before it's started.
-	logbatch.batchChunkIterator = batch
-	batch.Start()
 	return logbatch, nil
 }
 
+func (it *logBatchIterator) Labels() string {
+	return it.curr.Labels()
+}
+
+func (it *logBatchIterator) Error() error {
+	if it.err != nil {
+		return it.err
+	}
+	if it.curr != nil {
+		return it.curr.Error()
+	}
+	return nil
+}
+
+func (it *logBatchIterator) Close() error {
+	it.cancel()
+	if it.curr != nil {
+		return it.curr.Close()
+	}
+	return nil
+}
+
 func (it *logBatchIterator) Entry() logproto.Entry {
-	return it.curr.(iter.EntryIterator).Entry()
+	return it.curr.Entry()
 }
 
-// newChunksIterator creates an iterator over a set of lazychunks.
-func (it *logBatchIterator) newChunksIterator(chunks []*LazyChunk, from, through time.Time, nextChunk *LazyChunk) (genericIterator, error) {
-	chksBySeries, err := fetchChunkBySeries(it.ctx, it.metrics, chunks, it.matchers)
-	if err != nil {
-		return nil, err
+func (it *logBatchIterator) Next() bool {
+	// for loop to avoid recursion
+	for {
+		if it.curr != nil && it.curr.Next() {
+			return true
+		}
+		// close previous iterator
+		if it.curr != nil {
+			it.err = it.curr.Close()
+		}
+		next := it.batchChunkIterator.Next()
+		if next == nil {
+			return false
+		}
+		if next.err != nil {
+			it.err = next.err
+			return false
+		}
+		var err error
+		it.curr, err = it.newChunksIterator(next)
+		if err != nil {
+			it.err = err
+			return false
+		}
 	}
+}
 
-	iters, err := it.buildIterators(chksBySeries, from, through, nextChunk)
+// newChunksIterator creates an iterator over a set of lazychunks.
+func (it *logBatchIterator) newChunksIterator(b *chunkBatch) (iter.EntryIterator, error) {
+	iters, err := it.buildIterators(b.chunksBySeries, b.from, b.through, b.nextChunk)
 	if err != nil {
 		return nil, err
 	}
@@ -432,10 +417,11 @@
 type sampleBatchIterator struct {
 	*batchChunkIterator
+	curr iter.SampleIterator
+	err  error
 
 	ctx      context.Context
-	metrics  *ChunkMetrics
-	matchers []*labels.Matcher
+	cancel context.CancelFunc
 
 	extractor logql.SampleExtractor
 }
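Both batch iterators advance with the same `for` loop: drain the current sub-iterator, close it, pull the next prefetched batch, and build a new sub-iterator from it. As the comment notes, the loop avoids recursion, which would otherwise deepen the stack by one frame per exhausted batch. A standalone sketch of the pattern (toy types, not Loki's interfaces):

    package main

    import "fmt"

    // subIter is a stand-in for iter.EntryIterator: a finite cursor.
    type subIter struct {
        vals []int
        pos  int
    }

    func (s *subIter) Next() bool { s.pos++; return s.pos <= len(s.vals) }
    func (s *subIter) Value() int { return s.vals[s.pos-1] }

    // multiIter advances through a sequence of sub-iterators with a plain
    // loop, the same shape as logBatchIterator.Next: recursing into Next()
    // for each exhausted sub-iterator would grow the stack with the number
    // of batches.
    type multiIter struct {
        batches []*subIter
        curr    *subIter
    }

    func (m *multiIter) Next() bool {
        for {
            if m.curr != nil && m.curr.Next() {
                return true
            }
            if len(m.batches) == 0 {
                return false
            }
            m.curr, m.batches = m.batches[0], m.batches[1:]
        }
    }

    func main() {
        m := &multiIter{batches: []*subIter{{vals: []int{1, 2}}, {vals: []int{3}}}}
        for m.Next() {
            fmt.Println(m.curr.Value())
        }
    }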
@@ -448,37 +434,80 @@ func newSampleBatchIterator(
 	extractor logql.SampleExtractor,
 	start, end time.Time,
 ) (iter.SampleIterator, error) {
+	ctx, cancel := context.WithCancel(ctx)
+
 	// __name__ is not something we filter by because it's a constant in loki
 	// and only used for upstream compatibility; therefore remove it.
 	// The same applies to the sharding label which is injected by the cortex storage code.
 	matchers = removeMatchersByName(matchers, labels.MetricName, astmapper.ShardLabel)
 
 	samplebatch := &sampleBatchIterator{
-		matchers:  matchers,
-		extractor: extractor,
-		metrics:   metrics,
-		ctx:       ctx,
+		extractor:          extractor,
+		ctx:                ctx,
+		cancel:             cancel,
+		batchChunkIterator: newBatchChunkIterator(ctx, chunks, batchSize, logproto.FORWARD, start, end, metrics, matchers),
 	}
-
-	batch := newBatchChunkIterator(ctx, chunks, batchSize, logproto.FORWARD, start, end, samplebatch.newChunksIterator)
-	// Important: since the batchChunkIterator is bound to the SampleBatchIterator,
-	// ensure embedded fields are present before it's started.
-	samplebatch.batchChunkIterator = batch
-	batch.Start()
 	return samplebatch, nil
 }
 
+func (it *sampleBatchIterator) Labels() string {
+	return it.curr.Labels()
+}
+
+func (it *sampleBatchIterator) Error() error {
+	if it.err != nil {
+		return it.err
+	}
+	if it.curr != nil {
+		return it.curr.Error()
+	}
+	return nil
+}
+
+func (it *sampleBatchIterator) Close() error {
+	it.cancel()
+	if it.curr != nil {
+		return it.curr.Close()
+	}
+	return nil
+}
+
 func (it *sampleBatchIterator) Sample() logproto.Sample {
-	return it.curr.(iter.SampleIterator).Sample()
+	return it.curr.Sample()
 }
 
-// newChunksIterator creates an iterator over a set of lazychunks.
-func (it *sampleBatchIterator) newChunksIterator(chunks []*LazyChunk, from, through time.Time, nextChunk *LazyChunk) (genericIterator, error) {
-	chksBySeries, err := fetchChunkBySeries(it.ctx, it.metrics, chunks, it.matchers)
-	if err != nil {
-		return nil, err
+func (it *sampleBatchIterator) Next() bool {
+	// for loop to avoid recursion
+	for {
+		if it.curr != nil && it.curr.Next() {
+			return true
+		}
+		// close previous iterator
+		if it.curr != nil {
+			it.err = it.curr.Close()
+		}
+		next := it.batchChunkIterator.Next()
+		if next == nil {
+			return false
+		}
+		if next.err != nil {
+			it.err = next.err
+			return false
+		}
+		var err error
+		it.curr, err = it.newChunksIterator(next)
+		if err != nil {
+			it.err = err
+			return false
+		}
 	}
-	iters, err := it.buildIterators(chksBySeries, from, through, nextChunk)
+}
+
+// newChunksIterator creates an iterator over a set of lazychunks.
+func (it *sampleBatchIterator) newChunksIterator(b *chunkBatch) (iter.SampleIterator, error) {
+
+	iters, err := it.buildIterators(b.chunksBySeries, b.from, b.through, b.nextChunk)
 	if err != nil {
 		return nil, err
 	}
diff --git a/pkg/storage/batch_test.go b/pkg/storage/batch_test.go
index 8b3c39b2db4b4..b99cb58568fe5 100644
--- a/pkg/storage/batch_test.go
+++ b/pkg/storage/batch_test.go
@@ -41,27 +41,16 @@ func Test_batchIterSafeStart(t *testing.T) {
 		newLazyChunk(stream),
 	}
 
-	var ok bool
-
-	batch := newBatchChunkIterator(context.Background(), chks, 1, logproto.FORWARD, from, from.Add(4*time.Millisecond), func(chunks []*LazyChunk, from, through time.Time, nextChunk *LazyChunk) (genericIterator, error) {
-		if !ok {
-			panic("unexpected")
-		}
-
-		// we don't care about the actual data for this test, just give it an iterator.
-		return iter.NewStreamIterator(stream), nil
-	})
+	batch := newBatchChunkIterator(context.Background(), chks, 1, logproto.FORWARD, from, from.Add(4*time.Millisecond), NilMetrics, []*labels.Matcher{})
 
 	// if it was started already, we should see a panic before this
 	time.Sleep(time.Millisecond)
-	ok = true
 
 	// ensure idempotency
 	batch.Start()
 	batch.Start()
 
-	ok = batch.Next()
-	require.Equal(t, true, ok)
+	require.NotNil(t, batch.Next())
 }
diff --git a/pkg/storage/lazy_chunk.go b/pkg/storage/lazy_chunk.go
index 67cc91b02c4b5..006574302c4c9 100644
--- a/pkg/storage/lazy_chunk.go
+++ b/pkg/storage/lazy_chunk.go
@@ -22,8 +22,8 @@ type LazyChunk struct {
 
 	// cache of overlapping block.
 	// We use the offset of the block as key since it's unique per chunk.
-	overlappingBlocks       map[int]*cachedIterator
-	overlappingSampleBlocks map[int]*cachedSampleIterator
+	overlappingBlocks       map[int]iter.CacheEntryIterator
+	overlappingSampleBlocks map[int]iter.CacheSampleIterator
 }
 
 // Iterator returns an entry iterator.
@@ -52,18 +52,17 @@ func (c *LazyChunk) Iterator(
 	for _, b := range blocks {
 		// if we have already processed and cache block let's use it.
 		if cache, ok := c.overlappingBlocks[b.Offset()]; ok {
-			clone := *cache
-			clone.reset()
-			its = append(its, &clone)
+			cache.Reset()
+			its = append(its, cache)
 			continue
 		}
 		// if the block is overlapping cache it with the next chunk boundaries.
 		if nextChunk != nil && IsBlockOverlapping(b, nextChunk, direction) {
 			// todo(cyriltovena) we can avoid to drop the metric name for each chunks since many chunks have the same metric/labelset.
-			it := newCachedIterator(b.Iterator(ctx, dropLabels(c.Chunk.Metric, labels.MetricName), pipeline), b.Entries())
+			it := iter.NewCachedIterator(b.Iterator(ctx, dropLabels(c.Chunk.Metric, labels.MetricName), pipeline), b.Entries())
 			its = append(its, it)
 			if c.overlappingBlocks == nil {
-				c.overlappingBlocks = make(map[int]*cachedIterator)
+				c.overlappingBlocks = make(map[int]iter.CacheEntryIterator)
 			}
 			c.overlappingBlocks[b.Offset()] = it
 			continue
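Overlapping blocks are now cached behind the exported `iter.CacheEntryIterator` interface and reused via `Reset()` rather than by copying the iterator struct as before. A rough standalone sketch of what such a cache-and-replay iterator can look like; this is an assumption about the shape, not Loki's implementation:

    package main

    import "fmt"

    // cached wraps a one-shot source, records what it yields on the first
    // pass, and can be Reset to replay from the recorded values.
    type cached struct {
        src  func() (int, bool) // pulls from the underlying one-shot source
        buf  []int
        pos  int
        live bool // still reading from src
    }

    func newCached(vals []int) *cached {
        i := 0
        return &cached{live: true, src: func() (int, bool) {
            if i >= len(vals) {
                return 0, false
            }
            v := vals[i]
            i++
            return v, true
        }}
    }

    func (c *cached) Next() (int, bool) {
        if c.live {
            v, ok := c.src()
            if ok {
                c.buf = append(c.buf, v)
                return v, true
            }
            c.live = false
            c.pos = len(c.buf) // stay exhausted until Reset
            return 0, false
        }
        if c.pos < len(c.buf) {
            v := c.buf[c.pos]
            c.pos++
            return v, true
        }
        return 0, false
    }

    // Reset rewinds to the start of the cached values for the next overlap.
    func (c *cached) Reset() { c.pos = 0 }

    func main() {
        c := newCached([]int{1, 2, 3})
        for v, ok := c.Next(); ok; v, ok = c.Next() {
            fmt.Print(v, " ")
        }
        c.Reset()
        for v, ok := c.Next(); ok; v, ok = c.Next() {
            fmt.Print(v, " ") // replayed from the cache: 1 2 3 again
        }
        fmt.Println()
    }

Reusing the cached iterator in place avoids the shallow copy of the old code, which duplicated internal state and was easy to get subtly wrong.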
@@ -114,18 +113,17 @@ func (c *LazyChunk) SampleIterator(
 	for _, b := range blocks {
 		// if we have already processed and cache block let's use it.
 		if cache, ok := c.overlappingSampleBlocks[b.Offset()]; ok {
-			clone := *cache
-			clone.reset()
-			its = append(its, &clone)
+			cache.Reset()
+			its = append(its, cache)
 			continue
 		}
 		// if the block is overlapping cache it with the next chunk boundaries.
 		if nextChunk != nil && IsBlockOverlapping(b, nextChunk, logproto.FORWARD) {
 			// todo(cyriltovena) we can avoid to drop the metric name for each chunks since many chunks have the same metric/labelset.
-			it := newCachedSampleIterator(b.SampleIterator(ctx, dropLabels(c.Chunk.Metric, labels.MetricName), extractor), b.Entries())
+			it := iter.NewCachedSampleIterator(b.SampleIterator(ctx, dropLabels(c.Chunk.Metric, labels.MetricName), extractor), b.Entries())
 			its = append(its, it)
 			if c.overlappingSampleBlocks == nil {
-				c.overlappingSampleBlocks = make(map[int]*cachedSampleIterator)
+				c.overlappingSampleBlocks = make(map[int]iter.CacheSampleIterator)
 			}
 			c.overlappingSampleBlocks[b.Offset()] = it
 			continue