-
Notifications
You must be signed in to change notification settings - Fork 107
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
[ISSUE-520] Pool gzip compressor and buffer used in gzip writer (#521)
* add gzip.go and replace with pooled gzip compressor & buffer Signed-off-by: Kazuma (Pakio) Arimura <k.arimura96@gmail.com> * add test case for gzip.go Signed-off-by: Kazuma (Pakio) Arimura <k.arimura96@gmail.com> * update CHANGELOG.md Signed-off-by: Kazuma (Pakio) Arimura <k.arimura96@gmail.com> * fix lint Signed-off-by: Kazuma (Pakio) Arimura <k.arimura96@gmail.com> * update test package name Signed-off-by: Kazuma (Pakio) Arimura <k.arimura96@gmail.com> * rename test file Signed-off-by: Kazuma (Pakio) Arimura <k.arimura96@gmail.com> --------- Signed-off-by: Kazuma (Pakio) Arimura <k.arimura96@gmail.com>
- Loading branch information
Showing
4 changed files
with
196 additions
and
10 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,61 @@ | ||
// SPDX-License-Identifier: Apache-2.0 | ||
// | ||
// The OpenSearch Contributors require contributions made to | ||
// this file be licensed under the Apache-2.0 license or a | ||
// compatible open source license. | ||
|
||
package opensearchtransport | ||
|
||
import ( | ||
"bytes" | ||
"compress/gzip" | ||
"fmt" | ||
"io" | ||
"sync" | ||
) | ||
|
||
// gzipCompressor pools gzip writers and byte buffers so that request-body
// compression can reuse allocations across calls instead of allocating a
// fresh gzip.Writer and buffer per request.
type gzipCompressor struct {
	gzipWriterPool *sync.Pool // pool of *gzip.Writer; each is Reset onto a buffer before use
	bufferPool     *sync.Pool // pool of *bytes.Buffer holding compressed output; returned via collectBuffer
}
|
||
// newGzipCompressor returns a new gzipCompressor that uses a sync.Pool to reuse gzip.Writers. | ||
func newGzipCompressor() *gzipCompressor { | ||
gzipWriterPool := sync.Pool{ | ||
New: func() any { | ||
return gzip.NewWriter(io.Discard) | ||
}, | ||
} | ||
|
||
bufferPool := sync.Pool{ | ||
New: func() any { | ||
return new(bytes.Buffer) | ||
}, | ||
} | ||
|
||
return &gzipCompressor{ | ||
gzipWriterPool: &gzipWriterPool, | ||
bufferPool: &bufferPool, | ||
} | ||
} | ||
|
||
func (pg *gzipCompressor) compress(rc io.ReadCloser) (*bytes.Buffer, error) { | ||
writer := pg.gzipWriterPool.Get().(*gzip.Writer) | ||
defer pg.gzipWriterPool.Put(writer) | ||
|
||
buf := pg.bufferPool.Get().(*bytes.Buffer) | ||
buf.Reset() | ||
writer.Reset(buf) | ||
|
||
if _, err := io.Copy(writer, rc); err != nil { | ||
return nil, fmt.Errorf("failed to compress request body: %w", err) | ||
} | ||
if err := writer.Close(); err != nil { | ||
return nil, fmt.Errorf("failed to compress request body (during close): %w", err) | ||
} | ||
return buf, nil | ||
} | ||
|
||
// collectBuffer returns buf to the buffer pool so a subsequent compress call
// can reuse its backing storage. The caller must not use buf afterwards.
func (pg *gzipCompressor) collectBuffer(buf *bytes.Buffer) {
	pg.bufferPool.Put(buf)
}
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,124 @@ | ||
// SPDX-License-Identifier: Apache-2.0 | ||
// | ||
// The OpenSearch Contributors require contributions made to | ||
// this file be licensed under the Apache-2.0 license or a | ||
// compatible open source license. | ||
|
||
//go:build !integration | ||
|
||
package opensearchtransport | ||
|
||
import ( | ||
"compress/gzip" | ||
"io" | ||
"math/rand" | ||
"strings" | ||
"testing" | ||
) | ||
|
||
func TestCompress(t *testing.T) { | ||
t.Run("initialize & compress", func(t *testing.T) { | ||
gzipCompressor := newGzipCompressor() | ||
body := generateRandomString() | ||
rc := io.NopCloser(strings.NewReader(body)) | ||
|
||
buf, err := gzipCompressor.compress(rc) | ||
defer gzipCompressor.collectBuffer(buf) | ||
if err != nil { | ||
t.Fatalf("unexpected error: %v", err) | ||
} | ||
|
||
// unzip | ||
r, _ := gzip.NewReader(buf) | ||
s, _ := io.ReadAll(r) | ||
if string(s) != body { | ||
t.Fatalf("expected body to be the same after compressing and decompressing: expected %s, got %s", body, string(s)) | ||
} | ||
}) | ||
|
||
t.Run("gzip multiple times", func(t *testing.T) { | ||
gzipCompressor := newGzipCompressor() | ||
for i := 0; i < 5; i++ { | ||
body := generateRandomString() | ||
rc := io.NopCloser(strings.NewReader(body)) | ||
|
||
buf, err := gzipCompressor.compress(rc) | ||
defer gzipCompressor.collectBuffer(buf) | ||
if err != nil { | ||
t.Fatalf("unexpected error: %v", err) | ||
} | ||
|
||
// unzip | ||
r, _ := gzip.NewReader(buf) | ||
s, _ := io.ReadAll(r) | ||
if string(s) != body { | ||
t.Fatal("expected body to be the same after compressing and decompressing") | ||
} | ||
} | ||
}) | ||
|
||
t.Run("ensure gzipped data is smaller and different from original", func(t *testing.T) { | ||
gzipCompressor := newGzipCompressor() | ||
body := generateRandomString() | ||
rc := io.NopCloser(strings.NewReader(body)) | ||
|
||
buf, err := gzipCompressor.compress(rc) | ||
defer gzipCompressor.collectBuffer(buf) | ||
if err != nil { | ||
t.Fatalf("unexpected error: %v", err) | ||
} | ||
|
||
if len(buf.Bytes()) <= len(body) { | ||
t.Fatalf("expected compressed data to be smaller than original: expected %d, got %d", len(body), len(buf.Bytes())) | ||
} | ||
|
||
if body == buf.String() { | ||
t.Fatalf("expected compressed data to be different from original") | ||
} | ||
}) | ||
|
||
t.Run("compressing data twice", func(t *testing.T) { | ||
gzipCompressor := newGzipCompressor() | ||
body := generateRandomString() | ||
rc := io.NopCloser(strings.NewReader(body)) | ||
|
||
buf, err := gzipCompressor.compress(rc) | ||
defer gzipCompressor.collectBuffer(buf) | ||
if err != nil { | ||
t.Fatalf("unexpected error: %v", err) | ||
} | ||
|
||
rc = io.NopCloser(buf) | ||
buf2, err := gzipCompressor.compress(rc) | ||
defer gzipCompressor.collectBuffer(buf2) | ||
if err != nil { | ||
t.Fatalf("unexpected error: %v", err) | ||
} | ||
|
||
// unzip | ||
r, _ := gzip.NewReader(buf2) | ||
r, _ = gzip.NewReader(r) | ||
s, _ := io.ReadAll(r) | ||
if string(s) != body { | ||
t.Fatal("expected body to be the same after compressing and decompressing twice") | ||
} | ||
}) | ||
} | ||
|
||
func generateRandomString() string { | ||
length := rand.Intn(100) + 1 | ||
|
||
// Define the characters that can be used in the random string | ||
charset := "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789" | ||
|
||
// Create a byte slice with the specified length | ||
randomBytes := make([]byte, length) | ||
|
||
// Generate a random character from the charset for each byte in the slice | ||
for i := 0; i < length; i++ { | ||
randomBytes[i] = charset[rand.Intn(len(charset))] | ||
} | ||
|
||
// Convert the byte slice to a string and return it | ||
return string(randomBytes) | ||
} |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters