server+util: Limit max request sizes, prealloc request buffers (#6868)
This commit introduces a few major changes:

- (Breaking change) Limits now exist for maximum request body sizes.
- Buffers are preallocated for reading request bodies.
- Buffers are preallocated for decompressing request bodies.
- Gzip decoder instances are reused in a `sync.Pool` across requests.

The effect on garbage collection is dramatically fewer GC pauses, giving a roughly 9% RPS improvement in load tests with gzipped request bodies. For larger request sizes, the number of GC pauses is dramatically reduced, although the peak pause time may increase by a few percent.

Implementation notes:

- The DecodingLimits handler enforces the max request body size both through a Content-Length check and a MaxBytesReader wrapper around the payload.
- The DecodingLimits handler passes the gzip payload size limit down using a context key.

Signed-off-by: Philip Conrad <philipaconrad@gmail.com>
1 parent 0ca35e2 · commit c5706ee · Showing 13 changed files with 566 additions and 39 deletions.
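The buffer preallocation and decoder pooling described in the commit message are not visible in the files shown below (per the handler comment further down, they presumably land in OPA's util package, around util.ReadMaybeCompressedBody). As a rough, hedged illustration of the technique the commit message names, here is a minimal sketch of reusing gzip readers from a sync.Pool and capping the decompressed size with io.CopyN; the names gzipReaderPool and readGzipLimited are illustrative and not taken from the OPA codebase.

package gzipsketch

import (
	"bytes"
	"compress/gzip"
	"fmt"
	"io"
	"sync"
)

// gzipReaderPool reuses gzip.Reader instances across requests, so a busy server
// does not allocate a fresh decompressor (and its internal buffers) per request.
var gzipReaderPool = sync.Pool{
	New: func() interface{} { return new(gzip.Reader) },
}

// readGzipLimited decompresses at most maxLength bytes from r into a
// preallocated buffer, and errors out if the payload would exceed the limit.
func readGzipLimited(r io.Reader, maxLength int64) ([]byte, error) {
	gzr := gzipReaderPool.Get().(*gzip.Reader)
	defer gzipReaderPool.Put(gzr)

	if err := gzr.Reset(r); err != nil {
		return nil, err
	}
	defer gzr.Close()

	// Copy at most maxLength+1 bytes so that "exactly at the limit" and
	// "over the limit" can be told apart.
	buf := bytes.NewBuffer(make([]byte, 0, int(maxLength)))
	n, err := io.CopyN(buf, gzr, maxLength+1)
	if err != nil && err != io.EOF {
		return nil, err
	}
	if n > maxLength {
		return nil, fmt.Errorf("gzip payload exceeds %d bytes", maxLength)
	}
	return buf.Bytes(), nil
}

A caller would invoke readGzipLimited(req.Body, gzipMaxLength) only after checking the Content-Encoding header, which is the role the DecodingLimitsHandler shown below plays in the request pipeline.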
@@ -0,0 +1,102 @@
// Package decoding implements the configuration side of the upgraded gzip
// decompression framework. The original work only enabled gzip decoding for
// a few endpoints -- here we enable it for all of OPA. Additionally, we provide
// some new defensive configuration options: max_length, and gzip.max_length.
// These allow rejecting requests that indicate their contents are larger than
// the size limits.
//
// The request handling pipeline now looks roughly like this:
//
// Request -> MaxBytesReader(Config.MaxLength) -> io.CopyN(dest, req, Gzip.MaxLength)
//
// The intent behind this design is to improve how OPA handles large and/or
// malicious requests, compressed or otherwise. The benefit of being a little
// more strict in what we allow is that we can now use "riskier", but
// dramatically more performant techniques, like preallocating content buffers
// for gzipped data. This should also help OPAs in limited-memory situations.
package decoding

import (
	"fmt"

	"github.com/open-policy-agent/opa/util"
)

var (
	defaultMaxRequestLength     = int64(268435456) // 256 MB
	defaultGzipMaxContentLength = int64(536870912) // 512 MB
)

// Config represents the configuration for the Server.Decoding settings
type Config struct {
	MaxLength *int64 `json:"max_length,omitempty"` // maximum request size that will be read, regardless of compression.
	Gzip      *Gzip  `json:"gzip,omitempty"`
}

// Gzip represents the configuration for the Server.Decoding.Gzip settings
type Gzip struct {
	MaxLength *int64 `json:"max_length,omitempty"` // Max number of bytes allowed to be read from the decompressor.
}

// ConfigBuilder assists in the construction of the plugin configuration.
type ConfigBuilder struct {
	raw []byte
}

// NewConfigBuilder returns a new ConfigBuilder to build and parse the server config
func NewConfigBuilder() *ConfigBuilder {
	return &ConfigBuilder{}
}

// WithBytes sets the raw server config
func (b *ConfigBuilder) WithBytes(config []byte) *ConfigBuilder {
	b.raw = config
	return b
}

// Parse returns a valid Config object with defaults injected.
func (b *ConfigBuilder) Parse() (*Config, error) {
	if b.raw == nil {
		defaultConfig := &Config{
			MaxLength: &defaultMaxRequestLength,
			Gzip: &Gzip{
				MaxLength: &defaultGzipMaxContentLength,
			},
		}
		return defaultConfig, nil
	}

	var result Config

	if err := util.Unmarshal(b.raw, &result); err != nil {
		return nil, err
	}

	return &result, result.validateAndInjectDefaults()
}

// validateAndInjectDefaults populates defaults if the fields are nil, then
// validates the config values.
func (c *Config) validateAndInjectDefaults() error {
	if c.MaxLength == nil {
		c.MaxLength = &defaultMaxRequestLength
	}

	if c.Gzip == nil {
		c.Gzip = &Gzip{
			MaxLength: &defaultGzipMaxContentLength,
		}
	}
	if c.Gzip.MaxLength == nil {
		c.Gzip.MaxLength = &defaultGzipMaxContentLength
	}

	if *c.MaxLength <= 0 {
		return fmt.Errorf("invalid value for server.decoding.max_length field, should be a positive number")
	}
	if *c.Gzip.MaxLength <= 0 {
		return fmt.Errorf("invalid value for server.decoding.gzip.max_length field, should be a positive number")
	}

	return nil
}
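As a quick illustration of the default-injection behavior above (mirrored by the tests in the next file), an Example-style snippet in the same package might look like the sketch below; the function itself is illustrative and not part of the commit.

package decoding

import "fmt"

// ExampleConfigBuilder_Parse shows that fields left out of the raw config are
// filled in with the package defaults (256 MB raw, 512 MB decompressed).
func ExampleConfigBuilder_Parse() {
	raw := []byte(`{"max_length": 1048576}`)

	cfg, err := NewConfigBuilder().WithBytes(raw).Parse()
	if err != nil {
		panic(err)
	}

	fmt.Println(*cfg.MaxLength)      // explicitly configured value
	fmt.Println(*cfg.Gzip.MaxLength) // falls back to defaultGzipMaxContentLength

	// Output:
	// 1048576
	// 536870912
}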
@@ -0,0 +1,108 @@
package decoding

import (
	"fmt"
	"testing"
)

func TestConfigValidation(t *testing.T) {
	tests := []struct {
		input   string
		wantErr bool
	}{
		{
			input:   `{}`,
			wantErr: false,
		},
		{
			input:   `{"gzip": {"max_length": "not-a-number"}}`,
			wantErr: true,
		},
		{
			input:   `{"gzip": {"max_length": 42}}`,
			wantErr: false,
		},
		{
			input:   `{"gzip":{"max_length": "42"}}`,
			wantErr: true,
		},
		{
			input:   `{"gzip":{"max_length": 0}}`,
			wantErr: true,
		},
		{
			input:   `{"gzip":{"max_length": -10}}`,
			wantErr: true,
		},
		{
			input:   `{"gzip":{"random_key": 0}}`,
			wantErr: false,
		},
		{
			input:   `{"gzip": {"max_length": -10}}`,
			wantErr: true,
		},
		{
			input:   `{"max_length": "not-a-number"}`,
			wantErr: true,
		},
		{
			input:   `{"gzip":{}}`,
			wantErr: false,
		},
		{
			input:   `{"max_length": "not-a-number", "gzip":{}}`,
			wantErr: true,
		},
		{
			input:   `{"max_length": 42, "gzip":{"max_length": 42}}`,
			wantErr: false,
		},
	}

	for i, test := range tests {
		t.Run(fmt.Sprintf("TestConfigValidation_case_%d", i), func(t *testing.T) {
			_, err := NewConfigBuilder().WithBytes([]byte(test.input)).Parse()
			if err != nil && !test.wantErr {
				t.Fatalf("Unexpected error: %s", err.Error())
			}
			if err == nil && test.wantErr {
				t.Fail()
			}
		})
	}
}

func TestConfigValue(t *testing.T) {
	tests := []struct {
		input                      string
		maxLengthExpectedValue     int64
		gzipMaxLengthExpectedValue int64
	}{
		{
			input:                      `{}`,
			maxLengthExpectedValue:     268435456,
			gzipMaxLengthExpectedValue: 536870912,
		},
		{
			input:                      `{"max_length": 5, "gzip":{"max_length": 42}}`,
			maxLengthExpectedValue:     5,
			gzipMaxLengthExpectedValue: 42,
		},
	}

	for i, test := range tests {
		t.Run(fmt.Sprintf("TestConfigValue_case_%d", i), func(t *testing.T) {
			config, err := NewConfigBuilder().WithBytes([]byte(test.input)).Parse()
			if err != nil {
				t.Fatalf("Error building configuration: %s", err.Error())
			}
			if *config.MaxLength != test.maxLengthExpectedValue {
				t.Fatalf("Unexpected config value for max_length (exp/actual): %d, %d", test.maxLengthExpectedValue, *config.MaxLength)
			}
			if *config.Gzip.MaxLength != test.gzipMaxLengthExpectedValue {
				t.Fatalf("Unexpected config value for gzip.max_length (exp/actual): %d, %d", test.gzipMaxLengthExpectedValue, *config.Gzip.MaxLength)
			}
		})
	}
}
@@ -0,0 +1,40 @@
package handlers

import (
	"net/http"
	"strings"

	"github.com/open-policy-agent/opa/server/types"
	"github.com/open-policy-agent/opa/server/writer"
	util_decoding "github.com/open-policy-agent/opa/util/decoding"
)

// This handler provides hard limits on the size of the request body, for both
// the raw body content, and also for the decompressed size when gzip
// compression is used.
//
// The Content-Length restriction happens here in the handler, but the
// decompressed size limit is enforced later, in `util.ReadMaybeCompressedBody`.
// The handler passes the gzip size limits down to that function through the
// request context whenever gzip encoding is present.
func DecodingLimitsHandler(handler http.Handler, maxLength, gzipMaxLength int64) http.Handler {
	return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		// Reject too-large requests before doing any further processing.
		// Note(philipc): This likely does nothing in the case of "chunked"
		// requests, since those should report a ContentLength of -1.
		if r.ContentLength > maxLength {
			writer.Error(w, http.StatusBadRequest, types.NewErrorV1(types.CodeInvalidParameter, types.MsgDecodingLimitError))
			return
		}
		// Pass server.decoding.gzip.max_length down, using the request context.
		if strings.Contains(r.Header.Get("Content-Encoding"), "gzip") {
			ctx := util_decoding.AddServerDecodingGzipMaxLen(r.Context(), gzipMaxLength)
			r = r.WithContext(ctx)
		}

		// Copied over from the net/http package; enforces max body read limits.
		r2 := *r
		r2.Body = http.MaxBytesReader(w, r.Body, maxLength)
		handler.ServeHTTP(w, &r2)
	})
}
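Finally, a hypothetical wiring sketch for the handler above. The handlers import path and the hard-coded limits are assumptions for illustration (the diff does not show how the OPA server itself wires this in), and the inner echo handler is likewise made up.

package main

import (
	"io"
	"net/http"

	"github.com/open-policy-agent/opa/server/handlers"
)

func main() {
	// Inner handler: echo the (size-limited) request body back to the caller.
	echo := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		body, err := io.ReadAll(r.Body)
		if err != nil {
			// An error here can mean the raw body exceeded maxLength, since
			// reads go through the MaxBytesReader wrapper that
			// DecodingLimitsHandler installs.
			http.Error(w, err.Error(), http.StatusRequestEntityTooLarge)
			return
		}
		_, _ = w.Write(body)
	})

	// 1 MB cap on the raw body, 4 MB cap on decompressed gzip payloads.
	limited := handlers.DecodingLimitsHandler(echo, 1<<20, 4<<20)

	_ = http.ListenAndServe(":8181", limited)
}

Note that the gzip limit only takes effect when the inner handler reads the body through OPA's util.ReadMaybeCompressedBody, which consults the context value set by DecodingLimitsHandler; a plain io.ReadAll as in this sketch only exercises the raw-body cap.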