Replace validation error with error in query limiter #4240

Merged 2 commits on Jun 2, 2021
3 changes: 1 addition & 2 deletions integration/querier_test.go
@@ -962,10 +962,9 @@ func TestQueryLimitsWithBlocksStorageRunningInMicroServices(t *testing.T) {
 	res, err = c.Push(series4)
 	require.NoError(t, err)
 	require.Equal(t, 200, res.StatusCode)
-
 	_, err = c.QueryRange("{__name__=~\"series_.+\"}", series1Timestamp, series4Timestamp.Add(1*time.Hour), blockRangePeriod)
 	require.Error(t, err)
-	assert.Contains(t, err.Error(), "max number of series limit")
+	assert.Contains(t, err.Error(), "500")
@treid314 (Contributor, Author) commented on Jun 1, 2021:

For some reason, after this change err.Error() returns encoded strings rather than plain strings, so the assert.Contains check on the error message no longer works. Instead of checking the response string, I checked for 500 like we do in some other tests. Is there a better way to do this?

Contributor:

This is not correct. We do not expect a 500 error if the limit is reached, but a 4xx error.

Contributor:

Oh :( You're right. My bad for overlooking that.

Contributor:

I'm investigating it.

Contributor:

This PR fixes it: #4251

 }
 
 func TestHashCollisionHandling(t *testing.T) {
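As context for the review thread above: a reached query limit should surface as a client error, not a server fault. A minimal, purely illustrative sketch (not Cortex code; the LimitError type and statusFor helper are hypothetical, with LimitError mirroring Cortex's validation.LimitError) of how a boundary layer can map a limit error to a 4xx status:

```go
package main

import (
	"errors"
	"fmt"
	"net/http"
)

// LimitError mirrors Cortex's validation.LimitError: a string-backed error type
// that marks an error as a per-query limit violation.
type LimitError string

func (e LimitError) Error() string { return string(e) }

// statusFor is a hypothetical helper: it classifies limit errors as a 4xx
// (the client asked for too much) and everything else as a 500.
func statusFor(err error) int {
	var limitErr LimitError
	if errors.As(err, &limitErr) {
		return http.StatusUnprocessableEntity
	}
	return http.StatusInternalServerError
}

func main() {
	err := LimitError("the query hit the max number of series limit (limit: 1 series)")
	fmt.Println(statusFor(err)) // 422
}
```

This is why the integration test's assertion on "500" was flagged in the thread: once the error is classified correctly, the expected status is a 4xx, which #4251 addresses.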
4 changes: 2 additions & 2 deletions pkg/distributor/query.go
@@ -235,12 +235,12 @@ func (d *Distributor) queryIngesterStream(ctx context.Context, userID string, re

 	for _, series := range resp.Chunkseries {
 		if limitErr := queryLimiter.AddSeries(series.Labels); limitErr != nil {
-			return nil, limitErr
+			return nil, validation.LimitError(limitErr.Error())
 		}
 	}
 
 	if chunkBytesLimitErr := queryLimiter.AddChunkBytes(resp.ChunksSize()); chunkBytesLimitErr != nil {
-		return nil, chunkBytesLimitErr
+		return nil, validation.LimitError(chunkBytesLimitErr.Error())
 	}
 
 	for _, series := range resp.Timeseries {
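The hunk above shows the new division of responsibility: the limiter returns a plain error, and the call site re-wraps it as a validation.LimitError. A self-contained sketch of the same pattern, with toy stand-ins for the Cortex types:

```go
package main

import "fmt"

// LimitError mirrors Cortex's validation.LimitError.
type LimitError string

func (e LimitError) Error() string { return string(e) }

// errMaxSeriesHit matches the format string the limiter package exports in this PR.
const errMaxSeriesHit = "the query hit the max number of series limit (limit: %d series)"

// addSeries stands in for QueryLimiter.AddSeries: after this PR it returns a
// plain fmt.Errorf error, with no dependency on the validation package.
func addSeries(seen map[string]struct{}, fingerprint string, max int) error {
	seen[fingerprint] = struct{}{}
	if len(seen) > max {
		return fmt.Errorf(errMaxSeriesHit, max)
	}
	return nil
}

func main() {
	seen := map[string]struct{}{}
	for _, fp := range []string{"a", "b"} {
		if err := addSeries(seen, fp, 1); err != nil {
			// As in queryIngesterStream above, the caller converts the plain
			// error into a LimitError at the boundary.
			fmt.Println(LimitError(err.Error()))
			return
		}
	}
}
```

A side effect of this inversion is that pkg/util/limiter no longer needs to import pkg/util/validation, which is visible in the import change further down.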
4 changes: 2 additions & 2 deletions pkg/querier/blocks_store_queryable.go
@@ -616,7 +616,7 @@ func (q *blocksStoreQuerier) fetchSeriesFromStores(
 			// Add series fingerprint to query limiter; will return error if we are over the limit
 			limitErr := queryLimiter.AddSeries(cortexpb.FromLabelsToLabelAdapters(s.PromLabels()))
 			if limitErr != nil {
-				return limitErr
+				return validation.LimitError(limitErr.Error())
 			}
 
 			// Ensure the max number of chunks limit hasn't been reached (max == 0 means disabled).
@@ -631,7 +631,7 @@
 					chunksSize += c.Size()
 				}
 				if chunkBytesLimitErr := queryLimiter.AddChunkBytes(chunksSize); chunkBytesLimitErr != nil {
-					return validation.LimitError(chunkBytesLimitErr.Error())
+					return validation.LimitError(chunkBytesLimitErr.Error())
 				}
 			}
 
2 changes: 1 addition & 1 deletion pkg/querier/blocks_store_queryable_test.go
@@ -508,7 +508,7 @@ func TestBlocksStoreQuerier_Select(t *testing.T) {
 		},
 			limits:       &blocksStoreLimitsMock{},
 			queryLimiter: limiter.NewQueryLimiter(1, 0),
-			expectedErr:  validation.LimitError(fmt.Sprintf("The query hit the max number of series limit (limit: %d)", 1)),
+			expectedErr:  validation.LimitError(fmt.Sprintf(limiter.ErrMaxSeriesHit, 1)),
 		},
 		"max chunk bytes per query limit hit while fetching chunks": {
 			finderResult: bucketindex.Blocks{
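Exporting ErrMaxSeriesHit (previously the unexported errMaxSeriesHit) is what lets the test above rebuild the expected message with fmt.Sprintf instead of hard-coding it. A sketch of the same assertion pattern as a standalone unit test; this is illustrative rather than a test from the PR, and it assumes the NewQueryLimiter(maxSeries, maxChunkBytes) constructor and AddSeries signature shown in this diff, with import paths matching the 2021 Cortex tree:

```go
package limiter_test

import (
	"fmt"
	"testing"

	"github.com/prometheus/prometheus/pkg/labels"
	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"

	"github.com/cortexproject/cortex/pkg/cortexpb"
	"github.com/cortexproject/cortex/pkg/util/limiter"
)

func TestAddSeriesReturnsFormattedLimitError(t *testing.T) {
	ql := limiter.NewQueryLimiter(1, 0) // max 1 series; chunk-bytes limit disabled

	// The first series fits within the limit.
	s1 := cortexpb.FromLabelsToLabelAdapters(labels.FromStrings("__name__", "series_1"))
	require.NoError(t, ql.AddSeries(s1))

	// A second unique series exceeds the limit of 1.
	s2 := cortexpb.FromLabelsToLabelAdapters(labels.FromStrings("__name__", "series_2"))
	err := ql.AddSeries(s2)
	require.Error(t, err)

	// The exported format string reconstructs the expected message exactly.
	assert.Equal(t, fmt.Sprintf(limiter.ErrMaxSeriesHit, 1), err.Error())
}
```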
9 changes: 4 additions & 5 deletions pkg/util/limiter/query_limiter.go
@@ -10,15 +10,14 @@ import (

"github.com/cortexproject/cortex/pkg/cortexpb"
"github.com/cortexproject/cortex/pkg/ingester/client"
"github.com/cortexproject/cortex/pkg/util/validation"
)

type queryLimiterCtxKey struct{}

var (
ctxKey = &queryLimiterCtxKey{}
errMaxSeriesHit = "The query hit the max number of series limit (limit: %d)"
ErrMaxChunkBytesHit = "The query hit the aggregated chunks size limit (limit: %d bytes)"
ErrMaxSeriesHit = "the query hit the max number of series limit (limit: %d series)"
ErrMaxChunkBytesHit = "the query hit the aggregated chunks size limit (limit: %d bytes)"
)

type QueryLimiter struct {
@@ -72,7 +71,7 @@ func (ql *QueryLimiter) AddSeries(seriesLabels []cortexpb.LabelAdapter) error {
 	ql.uniqueSeries[fingerprint] = struct{}{}
 	if len(ql.uniqueSeries) > ql.maxSeriesPerQuery {
 		// Format error with max limit
-		return validation.LimitError(fmt.Sprintf(errMaxSeriesHit, ql.maxSeriesPerQuery))
+		return fmt.Errorf(ErrMaxSeriesHit, ql.maxSeriesPerQuery)
 	}
 	return nil
 }
@@ -90,7 +89,7 @@ func (ql *QueryLimiter) AddChunkBytes(chunkSizeInBytes int) error {
 		return nil
 	}
 	if ql.chunkBytesCount.Add(int64(chunkSizeInBytes)) > int64(ql.maxChunkBytesPerQuery) {
-		return validation.LimitError(fmt.Sprintf(ErrMaxChunkBytesHit, ql.maxChunkBytesPerQuery))
+		return fmt.Errorf(ErrMaxChunkBytesHit, ql.maxChunkBytesPerQuery)
 	}
 	return nil
 }