Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Make max search limit configurable #1044

Merged
merged 3 commits into from
Oct 15, 2021
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -62,7 +62,7 @@
* [ENHANCEMENT] **BREAKING CHANGE** Support partial results from failed block queries [#1007](https://github.com/grafana/tempo/pull/1007) (@mapno)
Querier [`GET /querier/api/traces/<traceid>`](https://grafana.com/docs/tempo/latest/api_docs/#query) response's body has been modified
to return `tempopb.TraceByIDResponse` instead of simply `tempopb.Trace`. This will cause a disruption of the read path during rollout of the change.
* [ENHANCEMENT] Add `search_default_limit` to querier config. [#1022](https://github.com/grafana/tempo/pull/1022) (@kvrhdn)
* [ENHANCEMENT] Add `search_default_limit` and `search_max_result_limit` to querier config. [#1022](https://github.com/grafana/tempo/pull/1022) [#1044](https://github.com/grafana/tempo/pull/1044) (@kvrhdn)
* [ENHANCEMENT] Add new metric `tempo_distributor_push_duration_seconds` [#1027](https://github.com/grafana/tempo/pull/1027) (@zalegrala)
* [BUGFIX] Update port spec for GCS docker-compose example [#869](https://github.com/grafana/tempo/pull/869) (@zalegrala)
* [BUGFIX] Fix "magic number" errors and other block mishandling when an ingester forcefully shuts down [#937](https://github.com/grafana/tempo/issues/937) (@mdisibio)
Expand Down
4 changes: 4 additions & 0 deletions docs/tempo/website/configuration/_index.md
Original file line number Diff line number Diff line change
Expand Up @@ -216,6 +216,10 @@ querier:
# Limit used for search requests if none is set by the caller
[search_default_result_limit: <int> | default = 20]

# The maximum limit allowed for search requests, higher limits will be truncated
# Default is 0, which does not apply a limit
[search_max_result_limit: <int> | default = 0]

# config of the worker that connects to the query frontend
frontend_worker:

Expand Down
2 changes: 2 additions & 0 deletions modules/querier/config.go
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,7 @@ type Config struct {
TraceLookupQueryTimeout time.Duration `yaml:"query_timeout"`
SearchQueryTimeout time.Duration `yaml:"search_query_timeout"`
SearchDefaultResultLimit uint32 `yaml:"search_default_result_limit"`
SearchMaxResultLimit uint32 `yaml:"search_max_result_limit"`
ExtraQueryDelay time.Duration `yaml:"extra_query_delay,omitempty"`
MaxConcurrentQueries int `yaml:"max_concurrent_queries"`
Worker cortex_worker.Config `yaml:"frontend_worker"`
Expand All @@ -24,6 +25,7 @@ func (cfg *Config) RegisterFlagsAndApplyDefaults(prefix string, f *flag.FlagSet)
cfg.TraceLookupQueryTimeout = 10 * time.Second
cfg.SearchQueryTimeout = 30 * time.Second
cfg.SearchDefaultResultLimit = 20
cfg.SearchMaxResultLimit = 0
cfg.ExtraQueryDelay = 0
cfg.MaxConcurrentQueries = 5
cfg.Worker = cortex_worker.Config{
Expand Down
50 changes: 4 additions & 46 deletions modules/querier/http.go
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,6 @@ import (
"encoding/hex"
"fmt"
"net/http"
"strconv"
"time"

"github.com/golang/protobuf/jsonpb"
Expand All @@ -28,10 +27,6 @@ const (
QueryModeIngesters = "ingesters"
QueryModeBlocks = "blocks"
QueryModeAll = "all"

urlParamMinDuration = "minDuration"
urlParamMaxDuration = "maxDuration"
urlParamLimit = "limit"
)

// TraceByIDHandler is a http.HandlerFunc to retrieve traces
Expand Down Expand Up @@ -156,47 +151,10 @@ func (q *Querier) SearchHandler(w http.ResponseWriter, r *http.Request) {
span, ctx := opentracing.StartSpanFromContext(ctx, "Querier.SearchHandler")
defer span.Finish()

req := &tempopb.SearchRequest{
Tags: map[string]string{},
Limit: q.cfg.SearchDefaultResultLimit,
}

for k, v := range r.URL.Query() {
// Skip known values
if k == urlParamMinDuration || k == urlParamMaxDuration || k == urlParamLimit {
continue
}

if len(v) > 0 && v[0] != "" {
req.Tags[k] = v[0]
}
}

if s := r.URL.Query().Get(urlParamMinDuration); s != "" {
dur, err := time.ParseDuration(s)
if err != nil {
http.Error(w, err.Error(), http.StatusBadRequest)
}
req.MinDurationMs = uint32(dur.Milliseconds())
}

if s := r.URL.Query().Get(urlParamMaxDuration); s != "" {
dur, err := time.ParseDuration(s)
if err != nil {
http.Error(w, err.Error(), http.StatusBadRequest)
}
req.MaxDurationMs = uint32(dur.Milliseconds())
}

if s := r.URL.Query().Get(urlParamLimit); s != "" {
limit, err := strconv.Atoi(s)
if err != nil {
http.Error(w, err.Error(), http.StatusBadRequest)
return
}
if limit > 0 {
req.Limit = uint32(limit)
}
req, err := q.parseSearchRequest(r)
if err != nil {
http.Error(w, err.Error(), http.StatusBadRequest)
return
}

resp, err := q.Search(ctx, req)
Expand Down
67 changes: 67 additions & 0 deletions modules/querier/querier_search.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,67 @@
package querier

import (
"errors"
"net/http"
"strconv"
"time"

"github.com/grafana/tempo/pkg/tempopb"
)

const (
urlParamMinDuration = "minDuration"
urlParamMaxDuration = "maxDuration"
urlParamLimit = "limit"
)

// parseSearchRequest builds a tempopb.SearchRequest from the URL query
// parameters of r.
//
// Recognized parameters:
//   - minDuration / maxDuration: Go duration strings (e.g. "10s"),
//     stored as milliseconds on the request
//   - limit: a positive integer; defaults to cfg.SearchDefaultResultLimit
//     when absent and is truncated to cfg.SearchMaxResultLimit when that
//     limit is non-zero
//
// Every other query parameter is interpreted as a tag filter; only the
// first value of a repeated parameter is used, and empty values are
// dropped. Returned errors are suitable for a 400 Bad Request response.
func (q *Querier) parseSearchRequest(r *http.Request) (*tempopb.SearchRequest, error) {
	req := &tempopb.SearchRequest{
		Tags:  map[string]string{},
		Limit: q.cfg.SearchDefaultResultLimit,
	}

	// Parse the query string once: r.URL.Query() re-parses RawQuery and
	// allocates a new url.Values on every call.
	query := r.URL.Query()

	for k, v := range query {
		// Skip reserved keywords; they are handled explicitly below.
		if k == urlParamMinDuration || k == urlParamMaxDuration || k == urlParamLimit {
			continue
		}

		if len(v) > 0 && v[0] != "" {
			req.Tags[k] = v[0]
		}
	}

	if s := query.Get(urlParamMinDuration); s != "" {
		dur, err := time.ParseDuration(s)
		if err != nil {
			return nil, err
		}
		req.MinDurationMs = uint32(dur.Milliseconds())
	}

	if s := query.Get(urlParamMaxDuration); s != "" {
		dur, err := time.ParseDuration(s)
		if err != nil {
			return nil, err
		}
		req.MaxDurationMs = uint32(dur.Milliseconds())
	}

	if s := query.Get(urlParamLimit); s != "" {
		limit, err := strconv.Atoi(s)
		if err != nil {
			return nil, err
		}
		if limit <= 0 {
			return nil, errors.New("limit must be a positive number")
		}
		req.Limit = uint32(limit)
	}

	// Enforce the configured cap; a value of 0 means no maximum is applied.
	if q.cfg.SearchMaxResultLimit != 0 && req.Limit > q.cfg.SearchMaxResultLimit {
		req.Limit = q.cfg.SearchMaxResultLimit
	}

	return req, nil
}
121 changes: 121 additions & 0 deletions modules/querier/querier_search_test.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,121 @@
package querier

import (
"net/http/httptest"
"testing"

"github.com/grafana/tempo/pkg/tempopb"
"github.com/stretchr/testify/assert"
)

func TestQuerierParseSearchRequest(t *testing.T) {
q := Querier{
cfg: Config{
SearchDefaultResultLimit: 20,
SearchMaxResultLimit: 100,
},
}

tests := []struct {
name string
urlQuery string
err string
expected *tempopb.SearchRequest
}{
{
name: "Empty url query",
expected: &tempopb.SearchRequest{
Tags: map[string]string{},
Limit: q.cfg.SearchDefaultResultLimit,
},
},
{
name: "With limit set",
urlQuery: "limit=10",
expected: &tempopb.SearchRequest{
Tags: map[string]string{},
Limit: 10,
},
},
{
name: "With limit exceeding max",
urlQuery: "limit=120",
expected: &tempopb.SearchRequest{
Tags: map[string]string{},
Limit: q.cfg.SearchMaxResultLimit,
},
},
{
name: "With zero limit",
urlQuery: "limit=0",
err: "limit must be a positive number",
},
{
name: "With negative limit",
urlQuery: "limit=-5",
yvrhdn marked this conversation as resolved.
Show resolved Hide resolved
err: "limit must be a positive number",
},
{
name: "With non-numeric limit",
urlQuery: "limit=five",
err: "strconv.Atoi: parsing \"five\": invalid syntax",
},
{
name: "With minDuration and maxDuration",
urlQuery: "minDuration=10s&maxDuration=20s",
expected: &tempopb.SearchRequest{
Tags: map[string]string{},
MinDurationMs: 10000,
MaxDurationMs: 20000,
Limit: q.cfg.SearchDefaultResultLimit,
},
},
// TODO should we fail this query?
{
name: "With minDuration greater than maxDuration",
urlQuery: "minDuration=20s&maxDuration=5s",
expected: &tempopb.SearchRequest{
Tags: map[string]string{},
MinDurationMs: 20000,
MaxDurationMs: 5000,
Limit: q.cfg.SearchDefaultResultLimit,
},
},
{
name: "With invalid minDuration",
urlQuery: "minDuration=10seconds",
err: "time: unknown unit \"seconds\" in duration \"10seconds\"",
},
{
name: "With invalid maxDuration",
urlQuery: "maxDuration=1msec",
err: "time: unknown unit \"msec\" in duration \"1msec\"",
},
{
name: "With tags and limit",
urlQuery: "service.name=foo.bar&limit=5&query=1%2B1%3D2",
expected: &tempopb.SearchRequest{
Tags: map[string]string{
"service.name": "foo.bar",
"query": "1+1=2",
},
Limit: 5,
},
},
}

for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
r := httptest.NewRequest("GET", "http://tempo/api/search?"+tt.urlQuery, nil)

searchRequest, err := q.parseSearchRequest(r)

if tt.err != "" {
assert.EqualError(t, err, tt.err)
} else {
assert.NoError(t, err)
assert.Equal(t, tt.expected, searchRequest)
}
})
}
}