Resolve circular dependency
Moved common/encoding/encoding.go from the encoding package to the ledger
package.

Split common/utils/testutils.go into two files.  Common utility functions
now live in common/utils/utils.go; common test utilities, such as fixture
constructors, live in common/testutils/testutils.go.

This is needed for an upcoming commit that needs to use an encoded key
in Payload (which requires encoding functions).
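
The cycle presumably arose because ledger/common/encoding operates on ledger types and therefore imports ledger, while the Payload change above would have required ledger to import encoding in turn; moving the encoding functions into the ledger package breaks that loop. As an illustration (not part of the commit), a minimal sketch of a caller using the relocated decoder, mirroring the chunk_data_pack_test.go change below; verifyChunkProof and its parameters are hypothetical, and the signatures of ledger.DecodeTrieBatchProof and proof.VerifyTrieBatchProof are inferred from the call sites in this diff:

package example

import (
	"fmt"

	"github.com/onflow/flow-go/ledger"
	"github.com/onflow/flow-go/ledger/common/proof"
)

// verifyChunkProof decodes and verifies a trie batch proof. The decoder now
// lives in the ledger package itself (previously ledger/common/encoding), so
// ledger code can call it without importing a package that depends on ledger.
func verifyChunkProof(encodedProof []byte, startState ledger.State) error {
	batchProof, err := ledger.DecodeTrieBatchProof(encodedProof)
	if err != nil {
		return fmt.Errorf("cannot decode trie batch proof: %w", err)
	}
	if !proof.VerifyTrieBatchProof(batchProof, startState) {
		return fmt.Errorf("trie batch proof is invalid")
	}
	return nil
}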
fxamacker committed Aug 2, 2022
1 parent 03634c1 commit 74ee21d
Showing 28 changed files with 582 additions and 588 deletions.
integration/tests/execution/chunk_data_pack_test.go (3 changes: 1 addition, 2 deletions)
@@ -8,7 +8,6 @@ import (
sdk "github.com/onflow/flow-go-sdk"
"github.com/onflow/flow-go/integration/tests/lib"
"github.com/onflow/flow-go/ledger"
"github.com/onflow/flow-go/ledger/common/encoding"
"github.com/onflow/flow-go/ledger/common/proof"
"github.com/onflow/flow-go/ledger/partial"
"github.com/onflow/flow-go/model/flow"
@@ -80,7 +79,7 @@ func (gs *ChunkDataPacksSuite) TestVerificationNodesRequestChunkDataPacks() {
require.Equal(gs.T(), erExe1BlockB.ExecutionResult.Chunks[0].StartState, pack2.ChunkDataPack.StartState)

// verify state proofs
- batchProof, err := encoding.DecodeTrieBatchProof(pack2.ChunkDataPack.Proof)
+ batchProof, err := ledger.DecodeTrieBatchProof(pack2.ChunkDataPack.Proof)
require.NoError(gs.T(), err)

isValid := proof.VerifyTrieBatchProof(batchProof, ledger.State(erExe1BlockB.ExecutionResult.Chunks[0].StartState))
ledger/common/pathfinder/pathfinder_test.go (10 changes: 5 additions, 5 deletions)
@@ -9,14 +9,14 @@ import (
"github.com/onflow/flow-go/crypto/hash"
"github.com/onflow/flow-go/ledger"
"github.com/onflow/flow-go/ledger/common/pathfinder"
"github.com/onflow/flow-go/ledger/common/utils"
"github.com/onflow/flow-go/ledger/common/testutils"
)

// Test_KeyToPathV0 tests key to path for V0
func Test_KeyToPathV0(t *testing.T) {

- kp1 := utils.KeyPartFixture(1, "key part 1")
- kp2 := utils.KeyPartFixture(22, "key part 2")
+ kp1 := testutils.KeyPartFixture(1, "key part 1")
+ kp2 := testutils.KeyPartFixture(22, "key part 2")
k := ledger.NewKey([]ledger.KeyPart{kp1, kp2})

path, err := pathfinder.KeyToPath(k, 0)
@@ -35,8 +35,8 @@ func Test_KeyToPathV0(t *testing.T) {

func Test_KeyToPathV1(t *testing.T) {

- kp1 := utils.KeyPartFixture(1, "key part 1")
- kp2 := utils.KeyPartFixture(22, "key part 2")
+ kp1 := testutils.KeyPartFixture(1, "key part 1")
+ kp2 := testutils.KeyPartFixture(22, "key part 2")
k := ledger.NewKey([]ledger.KeyPart{kp1, kp2})

path, err := pathfinder.KeyToPath(k, 1)
ledger/common/proof/proof_test.go (6 changes: 3 additions, 3 deletions)
@@ -6,17 +6,17 @@ import (
"github.com/stretchr/testify/require"

"github.com/onflow/flow-go/ledger/common/proof"
"github.com/onflow/flow-go/ledger/common/utils"
"github.com/onflow/flow-go/ledger/common/testutils"
)

// Test_ProofVerify tests proof verification
func Test_TrieProofVerify(t *testing.T) {
- p, sc := utils.TrieProofFixture()
+ p, sc := testutils.TrieProofFixture()
require.True(t, proof.VerifyTrieProof(p, sc))
}

// Test_BatchProofVerify tests batch proof verification
func Test_TrieBatchProofVerify(t *testing.T) {
- bp, sc := utils.TrieBatchProofFixture()
+ bp, sc := testutils.TrieBatchProofFixture()
require.True(t, proof.VerifyTrieBatchProof(bp, sc))
}
ledger/common/utils/testutils.go → ledger/common/testutils/testutils.go (renamed)
@@ -1,188 +1,16 @@
- package utils
+ package testutils

import (
"encoding/binary"
"encoding/hex"
"fmt"
"io"
"math"
"math/rand"

l "github.com/onflow/flow-go/ledger"
"github.com/onflow/flow-go/ledger/common/hash"
"github.com/onflow/flow-go/ledger/common/utils"
)

// MaxUint16 returns the max value of two uint16
func MaxUint16(a, b uint16) uint16 {
if a > b {
return a
}
return b
}

// Uint16ToBinary converts a uint16 to a byte slice (big endian)
func Uint16ToBinary(integer uint16) []byte {
b := make([]byte, 2)
binary.BigEndian.PutUint16(b, integer)
return b
}

// Uint64ToBinary converts a uint64 to a byte slice (big endian)
func Uint64ToBinary(integer uint64) []byte {
b := make([]byte, 8)
binary.BigEndian.PutUint64(b, integer)
return b
}

// AppendUint8 appends the value byte to the input slice
func AppendUint8(input []byte, value uint8) []byte {
return append(input, value)
}

// AppendUint16 appends the value bytes to the input slice (big endian)
func AppendUint16(input []byte, value uint16) []byte {
buffer := make([]byte, 2)
binary.BigEndian.PutUint16(buffer, value)
return append(input, buffer...)
}

// AppendUint32 appends the value bytes to the input slice (big endian)
func AppendUint32(input []byte, value uint32) []byte {
buffer := make([]byte, 4)
binary.BigEndian.PutUint32(buffer, value)
return append(input, buffer...)
}

// AppendUint64 appends the value bytes to the input slice (big endian)
func AppendUint64(input []byte, value uint64) []byte {
buffer := make([]byte, 8)
binary.BigEndian.PutUint64(buffer, value)
return append(input, buffer...)
}

// AppendShortData appends data shorter than 16kB
func AppendShortData(input []byte, data []byte) []byte {
if len(data) > math.MaxUint16 {
panic(fmt.Sprintf("short data too long! %d", len(data)))
}
input = AppendUint16(input, uint16(len(data)))
input = append(input, data...)
return input
}

// AppendLongData appends data shorter than 32MB
func AppendLongData(input []byte, data []byte) []byte {
if len(data) > math.MaxUint32 {
panic(fmt.Sprintf("long data too long! %d", len(data)))
}
input = AppendUint32(input, uint32(len(data)))
input = append(input, data...)
return input
}

// ReadSlice reads `size` bytes from the input
func ReadSlice(input []byte, size int) (value []byte, rest []byte, err error) {
if len(input) < size {
return nil, input, fmt.Errorf("input size is too small to be splited %d < %d ", len(input), size)
}
return input[:size], input[size:], nil
}

// ReadUint8 reads a uint8 from the input and returns the rest
func ReadUint8(input []byte) (value uint8, rest []byte, err error) {
if len(input) < 1 {
return 0, input, fmt.Errorf("input size (%d) is too small to read a uint8", len(input))
}
return input[0], input[1:], nil
}

// ReadUint16 reads a uint16 from the input and returns the rest
func ReadUint16(input []byte) (value uint16, rest []byte, err error) {
if len(input) < 2 {
return 0, input, fmt.Errorf("input size (%d) is too small to read a uint16", len(input))
}
return binary.BigEndian.Uint16(input[:2]), input[2:], nil
}

// ReadUint32 reads a uint32 from the input and returns the rest
func ReadUint32(input []byte) (value uint32, rest []byte, err error) {
if len(input) < 4 {
return 0, input, fmt.Errorf("input size (%d) is too small to read a uint32", len(input))
}
return binary.BigEndian.Uint32(input[:4]), input[4:], nil
}

// ReadUint64 reads a uint64 from the input and returns the rest
func ReadUint64(input []byte) (value uint64, rest []byte, err error) {
if len(input) < 8 {
return 0, input, fmt.Errorf("input size (%d) is too small to read a uint64", len(input))
}
return binary.BigEndian.Uint64(input[:8]), input[8:], nil
}

// ReadShortData read data shorter than 16kB and return the rest of bytes
func ReadShortData(input []byte) (data []byte, rest []byte, err error) {
var size uint16
size, rest, err = ReadUint16(input)
if err != nil {
return nil, rest, err
}
data = rest[:size]
rest = rest[size:]
return
}

// ReadShortDataFromReader reads data shorter than 16kB from reader
func ReadShortDataFromReader(reader io.Reader) ([]byte, error) {
buf, err := ReadFromBuffer(reader, 2)
if err != nil {
return nil, fmt.Errorf("cannot read short data length: %w", err)
}

size, _, err := ReadUint16(buf)
if err != nil {
return nil, fmt.Errorf("cannot read short data length: %w", err)
}

buf, err = ReadFromBuffer(reader, int(size))
if err != nil {
return nil, fmt.Errorf("cannot read short data: %w", err)
}

return buf, nil
}

// ReadLongDataFromReader reads data shorter than 32MB from reader
func ReadLongDataFromReader(reader io.Reader) ([]byte, error) {
buf, err := ReadFromBuffer(reader, 4)
if err != nil {
return nil, fmt.Errorf("cannot read long data length: %w", err)
}
size, _, err := ReadUint32(buf)
if err != nil {
return nil, fmt.Errorf("cannot read long data length: %w", err)
}
buf, err = ReadFromBuffer(reader, int(size))
if err != nil {
return nil, fmt.Errorf("cannot read long data: %w", err)
}

return buf, nil
}

// ReadFromBuffer reads 'length' bytes from the input
func ReadFromBuffer(reader io.Reader, length int) ([]byte, error) {
if length == 0 {
return nil, nil
}
buf := make([]byte, length)
_, err := io.ReadFull(reader, buf)
if err != nil {
return nil, fmt.Errorf("cannot read data: %w", err)
}
return buf, nil
}

// TrieUpdateFixture returns a trie update fixture
func TrieUpdateFixture(n int, minPayloadByteSize int, maxPayloadByteSize int) *l.TrieUpdate {
return &l.TrieUpdate{
@@ -211,8 +39,8 @@ func QueryFixture() *l.Query {

// LightPayload returns a payload with 2 byte key and 2 byte value
func LightPayload(key uint16, value uint16) *l.Payload {
- k := l.Key{KeyParts: []l.KeyPart{{Type: 0, Value: Uint16ToBinary(key)}}}
- v := l.Value(Uint16ToBinary(value))
+ k := l.Key{KeyParts: []l.KeyPart{{Type: 0, Value: utils.Uint16ToBinary(key)}}}
+ v := l.Value(utils.Uint16ToBinary(value))
return &l.Payload{Key: k, Value: v}
}
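
As an illustration (not part of the commit), a minimal sketch of how callers use the two packages after the split, assuming the byte helpers removed above now live in ledger/common/utils as the commit message states; exerciseNewPackages is a hypothetical function, and the fixture and helper signatures are taken from the code shown in this diff:

package example

import (
	"fmt"

	"github.com/onflow/flow-go/ledger"
	"github.com/onflow/flow-go/ledger/common/testutils"
	"github.com/onflow/flow-go/ledger/common/utils"
)

// exerciseNewPackages touches both halves of the split: fixture constructors
// from testutils and byte-level helpers from utils.
func exerciseNewPackages() error {
	// Fixture constructors now come from ledger/common/testutils.
	kp1 := testutils.KeyPartFixture(1, "key part 1")
	kp2 := testutils.KeyPartFixture(22, "key part 2")
	k := ledger.NewKey([]ledger.KeyPart{kp1, kp2})
	fmt.Printf("fixture key has %d parts\n", len(k.KeyParts))

	// The byte helpers removed from testutils.go above now live in
	// ledger/common/utils; do a quick big-endian round trip as a sanity check.
	buf := utils.AppendUint16(nil, 42)
	v, _, err := utils.ReadUint16(buf)
	if err != nil {
		return err
	}
	if v != 42 {
		return fmt.Errorf("round trip mismatch: got %d", v)
	}
	return nil
}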

