Change benchmark default chunk size to use shareSize (512 bytes) #261

Merged 2 commits on Aug 7, 2023
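The substance of the change: each benchmark previously generated its test data with a locally hard-coded chunk size (128-byte chunks in codec_test.go, 256-byte chunks elsewhere) and now reuses the package's shareSize, which the PR title puts at 512 bytes. The constant itself is defined outside this diff; a minimal sketch of the assumption the benchmarks now share:

```go
// Sketch only: shareSize is defined elsewhere in the repository, not in this
// diff. The PR title states that it is 512 bytes.
const shareSize = 512
```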
codec_test.go (10 additions, 10 deletions)

@@ -14,13 +14,13 @@ var (

 func BenchmarkEncoding(b *testing.B) {
 	// generate some fake data
-	data := generateRandData(128)
+	data := generateRandData(128, shareSize)
 	for codecName, codec := range codecs {
 		// For some implementations we want to ensure the encoder for this data length
 		// is already cached and initialized. For this run with same sized arbitrary data.
-		_, _ = codec.Encode(generateRandData(128))
+		_, _ = codec.Encode(generateRandData(128, shareSize))
 		b.Run(
-			fmt.Sprintf("%s 128 shares", codecName),
+			fmt.Sprintf("%s 128 shares %d", codecName, shareSize),
 			func(b *testing.B) {
 				for n := 0; n < b.N; n++ {
 					encodedData, err := codec.Encode(data)
@@ -34,10 +34,10 @@ func BenchmarkEncoding(b *testing.B) {
 	}
 }

-func generateRandData(count int) [][]byte {
+func generateRandData(count int, chunkSize int) [][]byte {
 	out := make([][]byte, count)
 	for i := 0; i < count; i++ {
-		randData := make([]byte, count)
+		randData := make([]byte, chunkSize)
 		_, err := cryptorand.Read(randData)
 		if err != nil {
 			panic(err)
@@ -52,11 +52,11 @@ func BenchmarkDecoding(b *testing.B) {
 	for codecName, codec := range codecs {
 		// For some implementations we want to ensure the encoder for this data length
 		// is already cached and initialized. For this run with same sized arbitrary data.
-		_, _ = codec.Decode(generateMissingData(128, codec))
+		_, _ = codec.Decode(generateMissingData(128, shareSize, codec))

-		data := generateMissingData(128, codec)
+		data := generateMissingData(128, shareSize, codec)
 		b.Run(
-			fmt.Sprintf("%s 128 shares", codecName),
+			fmt.Sprintf("%s 128 shares %d", codecName, shareSize),
 			func(b *testing.B) {
 				for n := 0; n < b.N; n++ {
 					decodedData, err := codec.Decode(data)
@@ -70,8 +70,8 @@ func BenchmarkDecoding(b *testing.B) {
 	}
 }

-func generateMissingData(count int, codec Codec) [][]byte {
-	randData := generateRandData(count)
+func generateMissingData(count int, chunkSize int, codec Codec) [][]byte {
+	randData := generateRandData(count, chunkSize)
 	encoded, err := codec.Encode(randData)
 	if err != nil {
 		panic(err)
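For readability, here is how the generateRandData helper reads after this change. The tail of the function (storing each chunk and returning the slice) is folded out of the diff above, so that part is a reconstruction rather than a quote, and cryptorand is assumed to be the file's existing alias for crypto/rand:

```go
// Post-diff shape of the helper: count random chunks, each chunkSize bytes
// long. The lines after the visible panic(err) are reconstructed, not quoted.
func generateRandData(count int, chunkSize int) [][]byte {
	out := make([][]byte, count)
	for i := 0; i < count; i++ {
		randData := make([]byte, chunkSize) // chunk size is now caller-controlled
		_, err := cryptorand.Read(randData)
		if err != nil {
			panic(err)
		}
		out[i] = randData
	}
	return out
}
```

Note that before this change the chunk size silently tracked the chunk count (make([]byte, count)), so the encoding and decoding benchmarks were measuring 128-byte chunks rather than realistic share sizes.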
datasquare_test.go (1 addition, 2 deletions)

@@ -403,8 +403,7 @@ func Test_setColSlice(t *testing.T) {

 func BenchmarkEDSRoots(b *testing.B) {
 	for i := 32; i < 513; i *= 2 {
-		chunkSize := uint(256)
-		square, err := newDataSquare(genRandDS(i*2, int(chunkSize)), NewDefaultTree, chunkSize)
+		square, err := newDataSquare(genRandDS(i*2, int(shareSize)), NewDefaultTree, shareSize)
 		if err != nil {
 			b.Errorf("Failure to create square of size %d: %s", i, err)
 		}
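This benchmark and the two below all build their input with genRandDS(width, chunkSize), which is not part of this diff. Judging from its call sites (newDataSquare accepts the result directly, and len(square[0]) is reported as the chunk length), a plausible but hypothetical sketch is:

```go
// Hypothetical sketch of genRandDS; its real implementation lives outside this
// diff and may differ. It produces a flattened width*width square of random
// chunkSize-byte chunks.
func genRandDS(width int, chunkSize int) [][]byte {
	ds := make([][]byte, 0, width*width)
	for i := 0; i < width*width; i++ {
		chunk := make([]byte, chunkSize)
		if _, err := cryptorand.Read(chunk); err != nil {
			panic(err)
		}
		ds = append(ds, chunk)
	}
	return ds
}
```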
extendeddatasquare_test.go (2 additions, 4 deletions)

@@ -278,15 +278,14 @@ var dump *ExtendedDataSquare
 // BenchmarkExtension benchmarks extending datasquares sizes 4-128 using all
 // supported codecs (encoding only)
 func BenchmarkExtensionEncoding(b *testing.B) {
-	chunkSize := 256
 	for i := 4; i < 513; i *= 2 {
 		for codecName, codec := range codecs {
 			if codec.MaxChunks() < i*i {
 				// Only test codecs that support this many chunks
 				continue
 			}

-			square := genRandDS(i, chunkSize)
+			square := genRandDS(i, shareSize)
 			b.Run(
 				fmt.Sprintf("%s %dx%dx%d ODS", codecName, i, i, len(square[0])),
 				func(b *testing.B) {
@@ -306,15 +305,14 @@ func BenchmarkExtensionEncoding(b *testing.B) {
 // BenchmarkExtension benchmarks extending datasquares sizes 4-128 using all
 // supported codecs (both encoding and root computation)
 func BenchmarkExtensionWithRoots(b *testing.B) {
-	chunkSize := 256
 	for i := 4; i < 513; i *= 2 {
 		for codecName, codec := range codecs {
 			if codec.MaxChunks() < i*i {
 				// Only test codecs that support this many chunks
 				continue
 			}

-			square := genRandDS(i, chunkSize)
+			square := genRandDS(i, shareSize)
 			b.Run(
 				fmt.Sprintf("%s %dx%dx%d ODS", codecName, i, i, len(square[0])),
 				func(b *testing.B) {
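One detail visible only in the first hunk header above is var dump *ExtendedDataSquare, the usual package-level sink that keeps the compiler from eliminating the work a benchmark measures. The b.Run bodies themselves are folded out of this diff; they presumably follow the standard pattern sketched below, where the use of ComputeExtendedDataSquare and the exact error handling are assumptions rather than quotes from the PR:

```go
// Assumed shape of the folded benchmark closure: extend the ODS b.N times and
// park each result in the package-level dump sink so the call is not
// optimized away.
func(b *testing.B) {
	for n := 0; n < b.N; n++ {
		eds, err := ComputeExtendedDataSquare(square, codec, NewDefaultTree)
		if err != nil {
			b.Error(err)
		}
		dump = eds
	}
}
```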