Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Add ability to compress request body #989

Merged
merged 23 commits into from
Apr 12, 2019
Merged
Show file tree
Hide file tree
Changes from 13 commits
Commits
Show all changes
23 commits
Select commit Hold shift + click to select a range
9efecb2
Add ability to gzip compress request body fix #988
mstoykov Apr 8, 2019
9e43517
Add Release note for #988
mstoykov Apr 8, 2019
d85f528
Add test for unsupported body compression
mstoykov Apr 8, 2019
3993938
Move closing of gzip in defer
mstoykov Apr 8, 2019
679b66d
Refactor body compression per PR comments
mstoykov Apr 10, 2019
8479b0c
Update the release notes for #988
mstoykov Apr 10, 2019
816ace7
WIP on not overwriting user set Content-(Length|Encoding) headers
mstoykov Apr 10, 2019
af39174
Don't generate json/text marshalling for CompressionType
mstoykov Apr 11, 2019
afc72be
WIP refactoring
mstoykov Apr 11, 2019
df03bd3
WIP on printing warning when resetting already set headers
mstoykov Apr 11, 2019
a13c01e
Change flate to zlib for deflate support
mstoykov Apr 11, 2019
8561e39
Update release notes
mstoykov Apr 11, 2019
eae1eae
refactor the compression of body logic to a function
mstoykov Apr 11, 2019
ab32def
Add more tests for handling rare/impossible body read problems
mstoykov Apr 11, 2019
a8388a0
Better error message when not overwriting headers and compressing the…
mstoykov Apr 11, 2019
cca9c55
Add test for bad/unknown compression algorithm
mstoykov Apr 11, 2019
5257fa0
Refactor the whole setting of body and content length to MakeRequest …
mstoykov Apr 11, 2019
a1195f6
Test that badCompressionType is bad
mstoykov Apr 11, 2019
4fd808c
switch to switch
mstoykov Apr 11, 2019
e4bca50
small typo
mstoykov Apr 12, 2019
727a31c
Remove warning when Content-Length is set for now
mstoykov Apr 12, 2019
1f2604b
Use the new CompressionType for uncompression matching as well
mstoykov Apr 12, 2019
5ae8d5c
error strings should not be capitalized
mstoykov Apr 12, 2019
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
39 changes: 34 additions & 5 deletions js/modules/k6/http/request.go
Original file line number Diff line number Diff line change
Expand Up @@ -30,6 +30,7 @@ import (
"net/textproto"
"net/url"
"reflect"
"strconv"
"strings"
"sync"
"time"
Expand Down Expand Up @@ -235,11 +236,6 @@ func (h *HTTP) parseRequest(
}
}

if result.Body != nil {
result.Req.Body = ioutil.NopCloser(result.Body)
result.Req.ContentLength = int64(result.Body.Len())
}

if userAgent := state.Options.UserAgent; userAgent.String != "" {
result.Req.Header.Set("User-Agent", userAgent.String)
}
Expand Down Expand Up @@ -310,6 +306,22 @@ func (h *HTTP) parseRequest(
case *HTTPCookieJar:
result.ActiveJar = v.jar
}
case "compression":
var algosString = strings.TrimSpace(params.Get(k).ToString().String())
na-- marked this conversation as resolved.
Show resolved Hide resolved
if algosString == "" {
continue
}
var algos = strings.Split(algosString, ",")
var err error
result.Compressions = make([]httpext.CompressionType, len(algos))
for index, algo := range algos {
algo = strings.TrimSpace(algo)
result.Compressions[index], err = httpext.CompressionTypeString(algo)
if err != nil {
return nil, fmt.Errorf("unknown compression algorithm %s, supported algorithms are %s",
algo, httpext.CompressionTypeValues())
}
}
case "redirects":
result.Redirects = null.IntFrom(params.Get(k).ToInteger())
case "tags":
Expand Down Expand Up @@ -340,6 +352,23 @@ func (h *HTTP) parseRequest(
}
}

if contentLength := result.Req.Header.Get("Content-Length"); contentLength != "" {
na-- marked this conversation as resolved.
Show resolved Hide resolved
length, err := strconv.Atoi(contentLength)
if err == nil {
result.Req.ContentLength = int64(length)
}
// TODO: maybe do something in the other case ... but no error
}
if result.Body != nil {
result.Req.Body = ioutil.NopCloser(result.Body)
if result.Req.Header.Get("Content-Length") == "" {
result.Req.ContentLength = int64(result.Body.Len())
} else {
// TODO: print line number, maybe don't print this at all ?
state.Logger.Warningf("Content-Length is specifically set won't reset it based on body length")
}
}

if result.ActiveJar != nil {
httpext.SetRequestCookies(result.Req, result.ActiveJar, result.Cookies)
}
Expand Down
142 changes: 142 additions & 0 deletions js/modules/k6/http/request_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -22,8 +22,11 @@ package http

import (
"bytes"
"compress/gzip"
"compress/zlib"
"context"
"fmt"
"io"
"io/ioutil"
"net/http"
"net/http/cookiejar"
Expand Down Expand Up @@ -1212,6 +1215,145 @@ func TestSystemTags(t *testing.T) {
}
}

// TestRequestCompression verifies the `compression` request option end to end:
// the request body must arrive compressed with each listed algorithm (applied
// in order), Content-Encoding and Content-Length must be set to match, unknown
// algorithms must produce an error, and user-supplied Content-Encoding /
// Content-Length headers must not be overwritten by k6.
func TestRequestCompression(t *testing.T) {
	t.Parallel()
	tb, state, _, rt, _ := newRuntime(t)
	defer tb.Cleanup()

	// We don't expect any failed requests
	state.Options.Throw = null.BoolFrom(true)

	// Payload sent in every subtest; its exact byte length matters for the
	// Content-Length assertion in the "encoding and length" subtest below.
	var text = `
Lorem ipsum dolor sit amet, consectetur adipiscing elit.
Maecenas sed pharetra sapien. Nunc laoreet molestie ante ac gravida.
Etiam interdum dui viverra posuere egestas. Pellentesque at dolor tristique,
mattis turpis eget, commodo purus. Nunc orci aliquam.`

	// decompress wraps input in a reader that undoes a single compression
	// layer identified by algo ("gzip" or "deflate").
	// NOTE(review): t.Fatal here executes inside the HTTP handler goroutine,
	// which the testing package does not support calling from non-test
	// goroutines — TODO confirm and clean up.
	var decompress = func(algo string, input io.Reader) io.Reader {
		switch algo {
		case "gzip":
			w, err := gzip.NewReader(input)
			if err != nil {
				t.Fatal(err)
			}
			return w
		case "deflate":
			w, err := zlib.NewReader(input)
			if err != nil {
				t.Fatal(err)
			}
			return w
		default:
			t.Fatal("unknown algorithm " + algo)
		}
		return nil // unreachable
	}

	// Shared between the handler below and the sequential subtests; the
	// subtests are not parallel, so plain variables are safe here.
	var (
		expectedEncoding string
		actualEncoding   string
	)
	tb.Mux.HandleFunc("/compressed-text", http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		require.Equal(t, expectedEncoding, r.Header.Get("Content-Encoding"))

		expectedLength, err := strconv.Atoi(r.Header.Get("Content-Length"))
		require.NoError(t, err)
		var algos = strings.Split(actualEncoding, ", ")
		var compressedBuf = new(bytes.Buffer)
		n, err := io.Copy(compressedBuf, r.Body)
		// NOTE(review): testify's convention is (expected, actual) — the
		// arguments here are swapped, though equality itself is symmetric.
		require.Equal(t, int(n), expectedLength)
		require.NoError(t, err)
		var prev io.Reader = compressedBuf

		// Undo the compression layers in reverse order of application so the
		// innermost (first-applied) algorithm is decompressed last.
		if expectedEncoding != "" {
			for i := len(algos) - 1; i >= 0; i-- {
				prev = decompress(algos[i], prev)
			}
		}

		var buf bytes.Buffer
		_, err = io.Copy(&buf, prev)
		require.NoError(t, err)
		require.Equal(t, text, buf.String())
	}))

	// Table of compression option strings; whitespace and repetition are
	// deliberately varied to exercise the parser's trimming/splitting.
	var testCases = []struct {
		name          string
		compression   string
		expectedError string
	}{
		{compression: ""},
		{compression: "  "},
		{compression: "gzip"},
		{compression: "gzip, gzip"},
		{compression: "gzip,   gzip "},
		{compression: "gzip,gzip"},
		{compression: "gzip, gzip, gzip, gzip, gzip, gzip, gzip"},
		{compression: "deflate"},
		{compression: "deflate, gzip"},
		{compression: "gzip,deflate, gzip"},
		{
			compression:   "George",
			expectedError: `unknown compression algorithm George`,
		},
		{
			compression:   "gzip, George",
			expectedError: `unknown compression algorithm George`,
		},
	}
	for _, testCase := range testCases {
		testCase := testCase // capture range variable for the subtest closure
		t.Run(testCase.compression, func(t *testing.T) {
			// Normalize the raw option string to the canonical
			// "algo, algo" form the handler expects in Content-Encoding.
			var algos = strings.Split(testCase.compression, ",")
			for i, algo := range algos {
				algos[i] = strings.TrimSpace(algo)
			}
			expectedEncoding = strings.Join(algos, ", ")
			actualEncoding = expectedEncoding
			_, err := common.RunString(rt, tb.Replacer.Replace(`
		http.post("HTTPBIN_URL/compressed-text", `+"`"+text+"`"+`,  {"compression": "`+testCase.compression+`"});
	`))
			if testCase.expectedError == "" {
				require.NoError(t, err)
			} else {
				require.Error(t, err)
				require.Contains(t, err.Error(), testCase.expectedError)
			}

		})
	}

	// User-supplied headers must win over the ones k6 would derive from the
	// compression option.
	t.Run("custom set header", func(t *testing.T) {
		expectedEncoding = "not, valid"
		actualEncoding = "gzip, deflate"
		t.Run("encoding", func(t *testing.T) {
			_, err := common.RunString(rt, tb.Replacer.Replace(`
				http.post("HTTPBIN_URL/compressed-text", `+"`"+text+"`"+`,
					{"compression": "`+actualEncoding+`",
					 "headers": {"Content-Encoding": "`+expectedEncoding+`"}
					}
				);
			`))
			require.NoError(t, err)

		})

		t.Run("encoding and length", func(t *testing.T) {
			// A user-set Content-Length that disagrees with the actual body
			// size makes net/http fail the request.
			_, err := common.RunString(rt, tb.Replacer.Replace(`
				http.post("HTTPBIN_URL/compressed-text", `+"`"+text+"`"+`,
					{"compression": "`+actualEncoding+`",
					 "headers": {"Content-Encoding": "`+expectedEncoding+`",
								 "Content-Length": "12"}
					}
				);
			`))
			require.Error(t, err)
			// TODO: This probably shouldn't be like this
			require.Contains(t, err.Error(), "http: ContentLength=12 with Body length 211")
		})
	})
}

func TestResponseTypes(t *testing.T) {
t.Parallel()
tb, state, _, rt, _ := newRuntime(t)
Expand Down
49 changes: 49 additions & 0 deletions lib/netext/httpext/compression_type_gen.go

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

74 changes: 74 additions & 0 deletions lib/netext/httpext/request.go
Original file line number Diff line number Diff line change
Expand Up @@ -70,6 +70,22 @@ func (u URL) GetURL() *url.URL {
return u.u
}

// CompressionType is used to specify what compression is to be used to compress the body of a
// request.
// The conversion and validation methods are auto-generated with https://github.com/alvaroloes/enumer:
//nolint: lll
//go:generate enumer -type=CompressionType -transform=snake -trimprefix CompressionType -output compression_type_gen.go
type CompressionType uint

const (
	// CompressionTypeGzip compresses through gzip
	CompressionTypeGzip CompressionType = iota
	// CompressionTypeDeflate compresses through zlib (the HTTP "deflate"
	// content-coding, i.e. RFC 1950 zlib framing, not raw flate)
	CompressionTypeDeflate
	// TODO: add compress(lzw), brotli maybe bzip2 and others listed at
	// https://en.wikipedia.org/wiki/HTTP_compression#Content-Encoding_tokens
)

// Request represent an http request
type Request struct {
Method string `json:"method"`
Expand All @@ -88,6 +104,7 @@ type ParsedHTTPRequest struct {
Auth string
Throw bool
ResponseType ResponseType
Compressions []CompressionType
Redirects null.Int
ActiveJar *cookiejar.Jar
Cookies map[string]*HTTPRequestCookie
Expand All @@ -103,6 +120,43 @@ func stdCookiesToHTTPRequestCookies(cookies []*http.Cookie) map[string][]*HTTPRe
return result
}

func compressBody(algos []CompressionType, body io.Reader) (io.Reader, int64, string, error) {
var contentEncoding string
var prevBuf = body
var buf *bytes.Buffer
for _, compressionType := range algos {
if buf != nil {
prevBuf = buf
}
buf = new(bytes.Buffer)

if contentEncoding != "" {
contentEncoding += ", "
}
contentEncoding += compressionType.String()
var w io.WriteCloser
switch compressionType {
case CompressionTypeGzip:
w = gzip.NewWriter(buf)
case CompressionTypeDeflate:
w = zlib.NewWriter(buf)
default:
return nil, 0, "", fmt.Errorf("unknown compressionType %s", compressionType)
}
// we don;t close in defer because zlib will write it's checksum again if it closes twice :(
na-- marked this conversation as resolved.
Show resolved Hide resolved
var _, err = io.Copy(w, prevBuf)
if err != nil {
_ = w.Close()
return nil, 0, "", err
}

if err = w.Close(); err != nil {
return nil, 0, "", err
}
}
return buf, int64(buf.Len()), contentEncoding, nil
}

// MakeRequest makes http request for tor the provided ParsedHTTPRequest
//TODO break this function up
//nolint: gocyclo
Expand All @@ -116,6 +170,26 @@ func MakeRequest(ctx context.Context, preq *ParsedHTTPRequest) (*Response, error
Headers: preq.Req.Header,
}
if preq.Body != nil {
if len(preq.Compressions) > 0 {
mstoykov marked this conversation as resolved.
Show resolved Hide resolved
compressedBody, length, contentEncoding, err := compressBody(preq.Compressions, preq.Req.Body)
if err != nil {
return nil, err
}
preq.Req.Body = ioutil.NopCloser(compressedBody)
if preq.Req.Header.Get("Content-Length") == "" {
preq.Req.ContentLength = length
} else {
na-- marked this conversation as resolved.
Show resolved Hide resolved
// TODO: print line
na-- marked this conversation as resolved.
Show resolved Hide resolved
state.Logger.Warning("Content-Length is specifically set - won't be reset due to compression being specified")
}
if preq.Req.Header.Get("Content-Encoding") == "" {
preq.Req.Header.Set("Content-Encoding", contentEncoding)
} else {
mstoykov marked this conversation as resolved.
Show resolved Hide resolved
// TODO: print line
state.Logger.Warning("Content-Encoding is specifically set - won't be reset due to compression being specified")
}
}
// TODO: maybe hide this behind of flag in order for this to not happen for big post/puts?
respReq.Body = preq.Body.String()
}

Expand Down
6 changes: 2 additions & 4 deletions release notes/upcoming.md
Original file line number Diff line number Diff line change
Expand Up @@ -2,11 +2,9 @@ TODO: Intro

## New Features!

### Category: Title (#533)
### HTTP: request body compression (#988)

Description of feature.

**Docs**: [Title](http://k6.readme.io/docs/TODO)
Now all HTTP methods have an additional param called `compression` that will make k6 compress the body before sending it. It will also correctly set both `Content-Encoding` and `Content-Length`, unless they were manually set in the request `headers` by the user. The currently supported algorithms are `deflate` and `gzip`, and any combination of the two separated by a comma (`,`).

## Bugs fixed!

Expand Down