diff --git a/.github/workflows/downstream.yml b/.github/workflows/downstream.yml index 23c02a463a..eeeed81ab4 100644 --- a/.github/workflows/downstream.yml +++ b/.github/workflows/downstream.yml @@ -40,7 +40,7 @@ jobs: run: sh crypto_setup.sh - name: Update Cadence - run: go get github.com/onflow/cadence@${{ github.event.pull_request.head.sha }} + run: go mod edit -replace github.com/onflow/cadence=github.com/${{ github.event.pull_request.head.repo.full_name }}@${{ github.event.pull_request.head.sha }} - name: Tidy up run: go mod tidy @@ -65,7 +65,7 @@ jobs: cache: true - name: Update Cadence - run: go get github.com/onflow/cadence@${{ github.event.pull_request.head.sha }} + run: go mod edit -replace github.com/onflow/cadence=github.com/${{ github.event.pull_request.head.repo.full_name }}@${{ github.event.pull_request.head.sha }} - name: Tidy up run: go mod tidy diff --git a/.golangci.yml b/.golangci.yml index 1ddc6a338a..d36d67bdb9 100644 --- a/.golangci.yml +++ b/.golangci.yml @@ -17,6 +17,9 @@ linters: - goimports - unconvert - nilerr + - forbidigo + - bidichk + - asciicheck issues: exclude-rules: @@ -38,6 +41,10 @@ linters-settings: - exitAfterDefer goimports: local-prefixes: github.com/onflow/cadence + forbidigo: + forbid: + - '^maps.Keys.*(# has indeterminate order\.)?$' + - '^maps.Values.*(# has indeterminate order\.)?$' custom: maprange: path: tools/maprange/maprange.so diff --git a/Makefile b/Makefile index 5e588c85a1..327d437533 100644 --- a/Makefile +++ b/Makefile @@ -96,6 +96,13 @@ tools/constructorcheck/constructorcheck.so: tools/golangci-lint/golangci-lint: (cd tools/golangci-lint && $(MAKE)) +.PHONY: clean-linter +clean-linter: + rm -f tools/golangci-lint/golangci-lint \ + tools/maprange/maprange.so \ + tools/unkeyed/unkeyed.so \ + tools/constructorcheck/constructorcheck.so + .PHONY: check-headers check-headers: @./check-headers.sh diff --git a/docs/flow-docs.json b/docs/flow-docs.json deleted file mode 100644 index 15ddc530cf..0000000000 --- a/docs/flow-docs.json +++ /dev/null @@ -1,264 +0,0 @@ -{ - "$schema": "https://developers.flow.com/schemas/flow-docs.json", - "displayName": "Cadence", - "headers": { - "": { - "icon": "cadence", - "title": "Cadence", - "description": "Cadence is a resource-oriented programming language that introduces new features to smart contract programming that help developers ensure that their code is safe, secure, clear, and approachable. 
Some of these features are:", - "headerCards": [ - { - "title": "Why Use Cadence?", - "tags": ["reference", "patterns"], - "description":"Learn the main benefits of using Cadence for smart contract development.", - "href": "/cadence/why" - }, - { - "title": "Hello World Tutorial", - "tags": ["tutorial", "playground"], - "description": "Write and deploy your first smart contract within minutes on our Playground.", - "href": "/cadence/tutorial/02-hello-world" - }, - { - "title": "Language Reference", - "tags": ["reference", "syntax"], - "description": "Learn the functionality, terminology and syntax of the Cadence language.", - "href": "/cadence/language" - }, - { - "title": "Guide for Solidity developers", - "tags": ["guide", "patterns"], - "description": "Understand the key differences between Solidity and Cadence with guidance to help transition.", - "href": "/cadence/solidity-to-cadence" - }, - { - "title": "Flow Playground", - "tags": ["beginner", "advanced", "smart-contracts"], - "description": "Learn Cadence Smart Contract programming using our web-based Cadence IDE", - "href": "https://play.flow.com" - } - ] - } - }, - "redirects": { - "tutorial": "tutorial/01-first-steps" - }, - "sidebars": { - "": [ - { - "title": "Cadence", - "items": [ - { - "title": "Introduction to Cadence", - "href": "" - }, - { - "title": "Tutorial", - "href": "tutorial/02-hello-world" - }, - { - "title": "Language Reference", - "href": "language" - } - ] - }, - { - "title": "Developer Guides", - "items": [ - { - "title": "Cadence Design Patterns", - "href": "design-patterns" - }, - { - "title": "Contract Upgrades with Incompatible Changes", - "href": "contract-upgrades" - }, - { - "title": "Cadence Anti-Patterns", - "href": "anti-patterns" - }, - { - "title": "Guide for Solidity developers", - "href": "solidity-to-cadence" - }, - { - "title": "Measuring Time in Cadence", - "href": "measuring-time" - }, - { - "title": "JSON-Cadence Data Interchange Format", - "href": "json-cadence-spec" - }, - { - "title": "Security Best Practices", - "href": "security-best-practices" - }, - { - "title": "Cadence Testing Framework", - "href": "testing-framework" - } - ] - }, - { - "title": "Project and Style Guides", - "items": [ - { - "title": "Project Management Guide", - "href": "style-guide" - } - ] - } - - ], - "language": [ - { - "title": "Cadence Language", - "items": [ - { - "href": "index", - "title": "Cadence" - }, - { - "href": "syntax", - "title": "Syntax" - }, - { - "href": "constants-and-variables" - }, - { - "href": "type-annotations" - }, - { - "href": "values-and-types" - }, - { - "href": "operators" - }, - { - "href": "functions" - }, - { - "href": "control-flow" - }, - { - "href": "scope" - }, - { - "href": "type-safety" - }, - { - "href": "type-inference" - }, - { - "href": "composite-types" - }, - { - "href": "resources" - }, - { - "href": "access-control" - }, - { - "href": "interfaces" - }, - { - "href": "enumerations" - }, - { - "href": "restricted-types" - }, - { - "href": "references" - }, - { - "href": "imports" - }, - { - "href": "accounts" - }, - { - "href": "capability-based-access-control" - }, - { - "href": "attachments" - }, - { - "href": "contracts" - }, - { - "href": "contract-updatability" - }, - { - "href": "events" - }, - { - "href": "core-events" - }, - { - "href": "transactions" - }, - { - "href": "run-time-types", - "title": "Run-time Types" - }, - { - "href": "built-in-functions", - "title": "Built-in Functions" - }, - { - "href": "environment-information" - }, - { - "href": "crypto" 
- }, - { - "href": "type-hierarchy" - }, - { - "href": "glossary" - } - ] - } - ], - "tutorial": [ - { - "title": "Cadence Language", - "items": [ - { - "href": "01-first-steps" - }, - { - "href": "02-hello-world" - }, - { - "href": "03-resources" - }, - { - "href": "04-capabilities" - }, - { - "href": "05-non-fungible-tokens-1" - }, - { - "href": "05-non-fungible-tokens-2" - }, - { - "href": "06-fungible-tokens" - }, - { - "href": "07-marketplace-setup" - }, - { - "href": "08-marketplace-compose" - }, - { - "href": "09-voting" - }, - { - "href": "10-resources-compose" - } - ] - } - ] - } -} diff --git a/encoding/ccf/ccf_test.go b/encoding/ccf/ccf_test.go index da67d71ba0..a3b4b3cbf6 100644 --- a/encoding/ccf/ccf_test.go +++ b/encoding/ccf/ccf_test.go @@ -26,6 +26,7 @@ import ( "math/big" "testing" + "github.com/fxamacker/cbor/v2" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" @@ -39,6 +40,16 @@ import ( "github.com/onflow/cadence/runtime/tests/utils" ) +var deterministicEncMode, _ = ccf.EncOptions{ + SortCompositeFields: ccf.SortBytewiseLexical, + SortRestrictedTypes: ccf.SortBytewiseLexical, +}.EncMode() + +var deterministicDecMode, _ = ccf.DecOptions{ + EnforceSortCompositeFields: ccf.EnforceSortBytewiseLexical, + EnforceSortRestrictedTypes: ccf.EnforceSortBytewiseLexical, +}.DecMode() + type encodeTest struct { name string val cadence.Value @@ -2533,32 +2544,136 @@ func TestEncodeWord128(t *testing.T) { func TestDecodeWord128Invalid(t *testing.T) { t.Parallel() - _, err := ccf.Decode(nil, []byte{ - // language=json, format=json-cdc - // {"type":"Word128","value":"0"} - // - // language=edn, format=ccf - // 130([137(52), 0]) - // - // language=cbor, format=ccf - // tag - 0xd8, ccf.CBORTagTypeAndValue, - // array, 2 items follow - 0x82, - // tag - 0xd8, ccf.CBORTagSimpleType, - // Word128 type ID (52) - 0x18, 0x34, - // Invalid type - 0xd7, - // bytes, 16 bytes follow - 0x50, - // 340282366920938463463374607431768211455 - 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, - 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, - }) - require.Error(t, err) - assert.Equal(t, "ccf: failed to decode: failed to decode Word128: cbor: cannot decode CBOR tag type to big.Int", err.Error()) + decModes := []ccf.DecMode{ccf.EventsDecMode, deterministicDecMode} + + for _, dm := range decModes { + _, err := dm.Decode(nil, []byte{ + // language=json, format=json-cdc + // {"type":"Word128","value":"0"} + // + // language=edn, format=ccf + // 130([137(52), 0]) + // + // language=cbor, format=ccf + // tag + 0xd8, ccf.CBORTagTypeAndValue, + // array, 2 items follow + 0x82, + // tag + 0xd8, ccf.CBORTagSimpleType, + // Word128 type ID (52) + 0x18, 0x34, + // Invalid type + 0xd7, + // bytes, 16 bytes follow + 0x50, + // 340282366920938463463374607431768211455 + 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, + 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, + }) + require.Error(t, err) + assert.Equal(t, "ccf: failed to decode: failed to decode Word128: cbor: cannot decode CBOR tag type to big.Int", err.Error()) + } +} + +func TestEncodeWord256(t *testing.T) { + + t.Parallel() + + testAllEncodeAndDecode(t, []encodeTest{ + { + name: "Zero", + val: cadence.NewWord256(0), + expected: []byte{ + // language=json, format=json-cdc + // {"type":"Word256","value":"0"} + // + // language=edn, format=ccf + // 130([137(53), 0]) + // + // language=cbor, format=ccf + // tag + 0xd8, ccf.CBORTagTypeAndValue, + // array, 2 items follow + 0x82, + // tag + 0xd8, ccf.CBORTagSimpleType, + // Word256 type ID (53) 
+ 0x18, 0x35, + // tag (big num) + 0xc2, + // bytes, 0 bytes follow + 0x40, + }, + }, + { + name: "Max", + val: cadence.Word256{Value: sema.Word256TypeMaxIntBig}, + expected: []byte{ + // language=json, format=json-cdc + // {"type":"Word256","value":"115792089237316195423570985008687907853269984665640564039457584007913129639935"} + // + // language=edn, format=ccf + // 130([137(53), 115792089237316195423570985008687907853269984665640564039457584007913129639935]) + // + // language=cbor, format=ccf + // tag + 0xd8, ccf.CBORTagTypeAndValue, + // array, 2 items follow + 0x82, + // tag + 0xd8, ccf.CBORTagSimpleType, + // Word256 type ID (53) + 0x18, 0x35, + // tag (big num) + 0xc2, + // bytes, 32 bytes follow + 0x58, 0x20, + // 115792089237316195423570985008687907853269984665640564039457584007913129639935 + 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, + 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, + 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, + 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, + }, + }, + }...) +} + +func TestDecodeWord256Invalid(t *testing.T) { + t.Parallel() + + decModes := []ccf.DecMode{ccf.EventsDecMode, deterministicDecMode} + + for _, dm := range decModes { + _, err := dm.Decode(nil, []byte{ + // language=json, format=json-cdc + // {"type":"Word256","value":"115792089237316195423570985008687907853269984665640564039457584007913129639935"} + // + // language=edn, format=ccf + // 130([137(53), 0]) + // + // language=cbor, format=ccf + // tag + 0xd8, ccf.CBORTagTypeAndValue, + // array, 2 items follow + 0x82, + // tag + 0xd8, ccf.CBORTagSimpleType, + // Word256 type ID (53) + 0x18, 0x35, + // Invalid type + 0xd7, + // bytes, 32 bytes follow + 0x58, 0x20, + // 115792089237316195423570985008687907853269984665640564039457584007913129639935 + 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, + 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, + 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, + 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, + }) + require.Error(t, err) + assert.Equal(t, "ccf: failed to decode: failed to decode Word256: cbor: cannot decode CBOR tag type to big.Int", err.Error()) + } } func TestEncodeFix64(t *testing.T) { @@ -5997,7 +6112,23 @@ func TestEncodeEvent(t *testing.T) { }, } - testAllEncodeAndDecode(t, simpleEvent, resourceEvent, abstractEvent) + testCases := []encodeTest{simpleEvent, resourceEvent, abstractEvent} + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + actualCBOR, err := ccf.EventsEncMode.Encode(tc.val) + require.NoError(t, err) + utils.AssertEqualWithDiff(t, tc.expected, actualCBOR) + + decodedVal, err := ccf.EventsDecMode.Decode(nil, actualCBOR) + require.NoError(t, err) + assert.Equal( + t, + cadence.ValueWithCachedTypeID(tc.val), + cadence.ValueWithCachedTypeID(decodedVal), + ) + }) + } } func TestEncodeContract(t *testing.T) { @@ -7650,6 +7781,8 @@ func TestEncodeSimpleTypes(t *testing.T) { for _, ty := range []simpleTypes{ {cadence.AnyType{}, ccf.TypeAny}, {cadence.AnyResourceType{}, ccf.TypeAnyResource}, + {cadence.AnyStructAttachmentType{}, ccf.TypeAnyStructAttachmentType}, + {cadence.AnyResourceAttachmentType{}, ccf.TypeAnyResourceAttachmentType}, {cadence.MetaType{}, ccf.TypeMetaType}, {cadence.VoidType{}, ccf.TypeVoid}, {cadence.NeverType{}, ccf.TypeNever}, @@ -7682,6 +7815,7 @@ func TestEncodeSimpleTypes(t *testing.T) { {cadence.Word32Type{}, ccf.TypeWord32}, {cadence.Word64Type{}, ccf.TypeWord64}, {cadence.Word128Type{}, ccf.TypeWord128}, + {cadence.Word256Type{}, ccf.TypeWord256}, 
{cadence.Fix64Type{}, ccf.TypeFix64}, {cadence.UFix64Type{}, ccf.TypeUFix64}, {cadence.BlockType{}, ccf.TypeBlock}, @@ -7701,7 +7835,17 @@ func TestEncodeSimpleTypes(t *testing.T) { } { var w bytes.Buffer - encoder := ccf.CBOREncMode.NewStreamEncoder(&w) + cborEncMode := func() cbor.EncMode { + options := cbor.CoreDetEncOptions() + options.BigIntConvert = cbor.BigIntConvertNone + encMode, err := options.EncMode() + if err != nil { + panic(err) + } + return encMode + }() + + encoder := cborEncMode.NewStreamEncoder(&w) err := encoder.EncodeRawBytes([]byte{ // tag @@ -7992,10 +8136,131 @@ func TestEncodeType(t *testing.T) { ) }) + t.Run("with static struct no sort", func(t *testing.T) { + t.Parallel() + + val := cadence.TypeValue{ + StaticType: &cadence.StructType{ + Location: utils.TestLocation, + QualifiedIdentifier: "S", + Fields: []cadence.Field{ + {Identifier: "foo", Type: cadence.IntType{}}, + {Identifier: "bar", Type: cadence.IntType{}}, + }, + Initializers: [][]cadence.Parameter{ + { + {Label: "foo", Identifier: "bar", Type: cadence.IntType{}}, + {Label: "qux", Identifier: "baz", Type: cadence.StringType{}}, + }, + }, + }, + } + + expectedCBOR := []byte{ + // language=json, format=json-cdc + // {"value":{"staticType":{"type":"","kind":"Struct","typeID":"S.test.S","fields":[{"type":{"kind":"Int"},"id":"foo"},{"type":{"kind":"Int"},"id":"bar"}],"initializers":[[{"type":{"kind":"Int"},"label":"foo","id":"bar"},{"type":{"kind":"String"},"label":"qux","id":"baz"}]]}},"type":"Type"} + // + // language=edn, format=ccf + // 130([137(41), 208([h'', "S.test.S", null, [["foo", 185(4)], ["bar", 185(4)]], [[["foo", "bar", 185(4)], ["qux", "baz", 185(1)]]]])]) + // + // language=cbor, format=ccf + // tag + 0xd8, ccf.CBORTagTypeAndValue, + // array, 2 elements follow + 0x82, + // tag + 0xd8, ccf.CBORTagSimpleType, + // Meta type ID (41) + 0x18, 0x29, + // tag + 0xd8, ccf.CBORTagStructTypeValue, + // array, 5 elements follow + 0x85, + // bytes, 0 bytes follow + 0x40, + // string, 8 bytes follow + 0x68, + // S.test.So + 0x53, 0x2e, 0x74, 0x65, 0x73, 0x74, 0x2e, 0x53, + // type (nil for struct) + 0xf6, + // fields + // array, 2 element follows + 0x82, + // array, 2 elements follow + 0x82, + // string, 3 bytes follow + 0x63, + // foo + 0x66, 0x6f, 0x6f, + // tag + 0xd8, ccf.CBORTagSimpleTypeValue, + // Int type (4) + 0x04, + // array, 2 elements follow + 0x82, + // string, 3 bytes follow + 0x63, + // bar + 0x62, 0x61, 0x72, + // tag + 0xd8, ccf.CBORTagSimpleTypeValue, + // Int type (4) + 0x04, + // initializers + // array, 1 elements follow + 0x81, + // array, 2 element follows + 0x82, + // array, 3 elements follow + 0x83, + // string, 3 bytes follow + 0x63, + // foo + 0x66, 0x6f, 0x6f, + // string, 3 bytes follow + 0x63, + // bar + 0x62, 0x61, 0x72, + // tag + 0xd8, ccf.CBORTagSimpleTypeValue, + // Int type (4) + 0x04, + // array, 3 elements follow + 0x83, + // string, 3 bytes follow + 0x63, + // qux + 0x71, 0x75, 0x78, + // string, 3 bytes follow + 0x63, + // bax + 0x62, 0x61, 0x7a, + // tag + 0xd8, ccf.CBORTagSimpleTypeValue, + // String type (1) + 0x01, + } + + // Encode value without sorting of composite fields. + actualCBOR, err := ccf.Encode(val) + require.NoError(t, err) + utils.AssertEqualWithDiff(t, expectedCBOR, actualCBOR) + + // Decode value without enforcing sorting of composite fields. 
+ decodedVal, err := ccf.Decode(nil, actualCBOR) + require.NoError(t, err) + assert.Equal( + t, + cadence.ValueWithCachedTypeID(val), + cadence.ValueWithCachedTypeID(decodedVal), + ) + }) + t.Run("with static struct", func(t *testing.T) { t.Parallel() - testEncodeAndDecode( + testEncodeAndDecodeEx( t, cadence.TypeValue{ StaticType: &cadence.StructType{ @@ -8003,6 +8268,7 @@ func TestEncodeType(t *testing.T) { QualifiedIdentifier: "S", Fields: []cadence.Field{ {Identifier: "foo", Type: cadence.IntType{}}, + {Identifier: "bar", Type: cadence.IntType{}}, }, Initializers: [][]cadence.Parameter{ { @@ -8014,10 +8280,10 @@ func TestEncodeType(t *testing.T) { }, []byte{ // language=json, format=json-cdc - // {"value":{"staticType":{"type":"","kind":"Struct","typeID":"S.test.S","fields":[{"type":{"kind":"Int"},"id":"foo"}],"initializers":[[{"type":{"kind":"Int"},"label":"foo","id":"bar"},{"type":{"kind":"String"},"label":"qux","id":"baz"}]]}},"type":"Type"} + // {"value":{"staticType":{"type":"","kind":"Struct","typeID":"S.test.S","fields":[{"type":{"kind":"Int"},"id":"foo"},{"type":{"kind":"Int"},"id":"bar"}],"initializers":[[{"type":{"kind":"Int"},"label":"foo","id":"bar"},{"type":{"kind":"String"},"label":"qux","id":"baz"}]]}},"type":"Type"} // // language=edn, format=ccf - // 130([137(41), 208([h'', "S.test.S", null, [["foo", 185(4)]], [[["foo", "bar", 185(4)], ["qux", "baz", 185(1)]]]])]) + // 130([137(41), 208([h'', "S.test.S", null, [["bar", 185(4)], ["foo", 185(4)]], [[["foo", "bar", 185(4)], ["qux", "baz", 185(1)]]]])]) // // language=cbor, format=ccf // tag @@ -8041,8 +8307,18 @@ func TestEncodeType(t *testing.T) { // type (nil for struct) 0xf6, // fields - // array, 1 element follows - 0x81, + // array, 2 element follows + 0x82, + // array, 2 elements follow + 0x82, + // string, 3 bytes follow + 0x63, + // bar + 0x62, 0x61, 0x72, + // tag + 0xd8, ccf.CBORTagSimpleTypeValue, + // Int type (4) + 0x04, // array, 2 elements follow 0x82, // string, 3 bytes follow @@ -8087,6 +8363,22 @@ func TestEncodeType(t *testing.T) { // String type (1) 0x01, }, + cadence.TypeValue{ + StaticType: &cadence.StructType{ + Location: utils.TestLocation, + QualifiedIdentifier: "S", + Fields: []cadence.Field{ + {Identifier: "bar", Type: cadence.IntType{}}, + {Identifier: "foo", Type: cadence.IntType{}}, + }, + Initializers: [][]cadence.Parameter{ + { + {Label: "foo", Identifier: "bar", Type: cadence.IntType{}}, + {Label: "qux", Identifier: "baz", Type: cadence.StringType{}}, + }, + }, + }, + }, ) }) @@ -8945,7 +9237,6 @@ func TestEncodeType(t *testing.T) { 0xd8, ccf.CBORTagReferenceTypeValue, // array, 2 elements follow 0x82, - // authorized // nil 0xf6, // tag @@ -10586,12 +10877,16 @@ func TestDecodeFix64(t *testing.T) { t.Run(tc.name, func(t *testing.T) { t.Parallel() - actual, err := ccf.Decode(nil, tc.encodedData) - if tc.check != nil { - tc.check(t, actual, err) - } else { - require.NoError(t, err) - assert.Equal(t, tc.expected, actual) + decModes := []ccf.DecMode{ccf.EventsDecMode, deterministicDecMode} + + for _, dm := range decModes { + actual, err := dm.Decode(nil, tc.encodedData) + if tc.check != nil { + tc.check(t, actual, err) + } else { + require.NoError(t, err) + assert.Equal(t, tc.expected, actual) + } } }) } @@ -11473,9 +11768,14 @@ func TestDecodeInvalidType(t *testing.T) { // array, 0 items follow 0x80, } - _, err := ccf.Decode(nil, encodedData) - require.Error(t, err) - assert.Equal(t, "ccf: failed to decode: invalid type ID for built-in: ``", err.Error()) + + decModes := 
[]ccf.DecMode{ccf.EventsDecMode, deterministicDecMode} + + for _, dm := range decModes { + _, err := dm.Decode(nil, encodedData) + require.Error(t, err) + assert.Equal(t, "ccf: failed to decode: invalid type ID for built-in: ``", err.Error()) + } }) t.Run("invalid type ID", func(t *testing.T) { @@ -11526,9 +11826,14 @@ func TestDecodeInvalidType(t *testing.T) { // array, 0 items follow 0x80, } - _, err := ccf.Decode(nil, encodedData) - require.Error(t, err) - assert.Equal(t, "ccf: failed to decode: invalid type ID `I`: invalid identifier location type ID: missing location", err.Error()) + + decModes := []ccf.DecMode{ccf.EventsDecMode, deterministicDecMode} + + for _, dm := range decModes { + _, err := dm.Decode(nil, encodedData) + require.Error(t, err) + assert.Equal(t, "ccf: failed to decode: invalid type ID `I`: invalid identifier location type ID: missing location", err.Error()) + } }) t.Run("unknown location prefix", func(t *testing.T) { @@ -11579,9 +11884,14 @@ func TestDecodeInvalidType(t *testing.T) { // array, 0 items follow 0x80, } - _, err := ccf.Decode(nil, encodedData) - require.Error(t, err) - assert.Equal(t, "ccf: failed to decode: invalid type ID for built-in: `N.PublicKey`", err.Error()) + + decModes := []ccf.DecMode{ccf.EventsDecMode, deterministicDecMode} + + for _, dm := range decModes { + _, err := dm.Decode(nil, encodedData) + require.Error(t, err) + assert.Equal(t, "ccf: failed to decode: invalid type ID for built-in: `N.PublicKey`", err.Error()) + } }) } @@ -11597,7 +11907,7 @@ func testEncodeAndDecodeEx(t *testing.T, val cadence.Value, expectedCBOR []byte, } func testEncode(t *testing.T, val cadence.Value, expectedCBOR []byte) (actualCBOR []byte) { - actualCBOR, err := ccf.Encode(val) + actualCBOR, err := deterministicEncMode.Encode(val) require.NoError(t, err) utils.AssertEqualWithDiff(t, expectedCBOR, actualCBOR) @@ -11605,7 +11915,7 @@ func testEncode(t *testing.T, val cadence.Value, expectedCBOR []byte) (actualCBO } func testDecode(t *testing.T, actualCBOR []byte, expectedVal cadence.Value) { - decodedVal, err := ccf.Decode(nil, actualCBOR) + decodedVal, err := deterministicDecMode.Decode(nil, actualCBOR) require.NoError(t, err) assert.Equal( t, @@ -12103,7 +12413,7 @@ func TestDeployedEvents(t *testing.T) { // {"value":{"id":"A.f919ee77447b7497.FlowFees.FeesDeducted","fields":[{"value":{"value":"0.01797293","type":"UFix64"},"name":"amount"},{"value":{"value":"1.00000000","type":"UFix64"},"name":"inclusionEffort"},{"value":{"value":"0.00360123","type":"UFix64"},"name":"executionEffort"}]},"type":"Event"} // // language=edn, format=ccf - // 129([[162([h'', "A.f919ee77447b7497.FlowFees.FeesDeducted", [["amount", 137(23)], ["executionEffort", 137(23)], ["inclusionEffort", 137(23)]]])], [136(h''), [1797293, 360123, 100000000]]]) + // 129([[162([h'', "A.f919ee77447b7497.FlowFees.FeesDeducted", [["amount", 137(23)], ["inclusionEffort", 137(23)], ["executionEffort", 137(23)]]])], [136(h''), [1797293, 100000000, 360123]]]) // // language=cbor, format=ccf // tag @@ -12148,8 +12458,8 @@ func TestDeployedEvents(t *testing.T) { 0x82, // text, 15 bytes follow 0x6f, - // executionEffort - 0x65, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x45, 0x66, 0x66, 0x6f, 0x72, 0x74, + // inclusionEffort + 0x69, 0x6e, 0x63, 0x6c, 0x75, 0x73, 0x69, 0x6f, 0x6e, 0x45, 0x66, 0x66, 0x6f, 0x72, 0x74, // tag 0xd8, ccf.CBORTagSimpleType, // UFix64 type ID (23) @@ -12159,8 +12469,8 @@ func TestDeployedEvents(t *testing.T) { 0x82, // text, 15 bytes follow 0x6f, - // inclusionEffort - 0x69, 
0x6e, 0x63, 0x6c, 0x75, 0x73, 0x69, 0x6f, 0x6e, 0x45, 0x66, 0x66, 0x6f, 0x72, 0x74, + // executionEffort + 0x65, 0x78, 0x65, 0x63, 0x75, 0x74, 0x69, 0x6f, 0x6e, 0x45, 0x66, 0x66, 0x6f, 0x72, 0x74, // tag 0xd8, ccf.CBORTagSimpleType, // UFix64 type ID (23) @@ -12177,10 +12487,10 @@ func TestDeployedEvents(t *testing.T) { 0x83, // 1797293 0x1a, 0x00, 0x1b, 0x6c, 0xad, - // 360123 - 0x1a, 0x00, 0x05, 0x7e, 0xbb, // 100000000 0x1a, 0x05, 0xf5, 0xe1, 0x00, + // 360123 + 0x1a, 0x00, 0x05, 0x7e, 0xbb, }, }, { @@ -12253,7 +12563,7 @@ func TestDeployedEvents(t *testing.T) { // {"value":{"id":"A.8624b52f9ddcd04a.FlowIDTableStaking.DelegatorRewardsPaid","fields":[{"value":{"value":"e52cbcd825e328acac8db6bcbdcbb6e7724862c8b89b09d85edccf41ff9981eb","type":"String"},"name":"nodeID"},{"value":{"value":"92","type":"UInt32"},"name":"delegatorID"},{"value":{"value":"4.38760261","type":"UFix64"},"name":"amount"}]},"type":"Event"} // // language=edn, format=ccf - // 129([[162([h'', "A.8624b52f9ddcd04a.FlowIDTableStaking.DelegatorRewardsPaid", [["amount", 137(23)], ["nodeID", 137(1)], ["delegatorID", 137(14)]]])], [136(h''), [438760261, "e52cbcd825e328acac8db6bcbdcbb6e7724862c8b89b09d85edccf41ff9981eb", 92]]]) + // 129([[162([h'', "A.8624b52f9ddcd04a.FlowIDTableStaking.DelegatorRewardsPaid", [["nodeID", 137(1)], ["delegatorID", 137(14)], ["amount", 137(23)]]])], [136(h''), ["e52cbcd825e328acac8db6bcbdcbb6e7724862c8b89b09d85edccf41ff9981eb", 92, 438760261]]]) // // language=cbor, format=ccf // tag @@ -12267,7 +12577,7 @@ func TestDeployedEvents(t *testing.T) { // event type: // id: []byte{} // cadence-type-id: "A.8624b52f9ddcd04a.FlowIDTableStaking.DelegatorRewardsPaid" - // 3 field: [["amount", type(ufix64)], ["nodeID", type(string)], ["delegatorID", type(uint32)]] + // 3 field: [["nodeID", type(string)], ["delegatorID", type(uint32)], ["amount", type(ufix64)]] // tag 0xd8, ccf.CBORTagEventType, // array, 3 element follows @@ -12288,24 +12598,13 @@ func TestDeployedEvents(t *testing.T) { 0x82, // text, 6 bytes follow 0x66, - // "amount" - 0x61, 0x6d, 0x6f, 0x75, 0x6e, 0x74, - // tag - 0xd8, ccf.CBORTagSimpleType, - // UFix64 type ID (23) - 0x17, - // field 1 - // array, 2 element follows - 0x82, - // text, 6 bytes follow - 0x66, // "nodeID" 0x6e, 0x6f, 0x64, 0x65, 0x49, 0x44, // tag 0xd8, ccf.CBORTagSimpleType, // String type ID (1) 0x01, - // field 2 + // field 1 // array, 2 element follows 0x82, // text, 11 bytes follow @@ -12316,8 +12615,19 @@ func TestDeployedEvents(t *testing.T) { 0xd8, ccf.CBORTagSimpleType, // UInt32 type ID (14) 0x0e, - - // element 1: type and value + // field 2 + // array, 2 element follows + 0x82, + // text, 6 bytes follow + 0x66, + // "amount" + 0x61, 0x6d, 0x6f, 0x75, 0x6e, 0x74, + // tag + 0xd8, ccf.CBORTagSimpleType, + // UFix64 type ID (23) + 0x17, + + // element 1: type and value // array, 2 element follows 0x82, // tag @@ -12326,14 +12636,14 @@ func TestDeployedEvents(t *testing.T) { 0x40, // array, 3 items follow 0x83, - // 438760261 - 0x1a, 0x1a, 0x26, 0xf3, 0x45, // text, 64 bytes follow 0x78, 0x40, // "e52cbcd825e328acac8db6bcbdcbb6e7724862c8b89b09d85edccf41ff9981eb" 0x65, 0x35, 0x32, 0x63, 0x62, 0x63, 0x64, 0x38, 0x32, 0x35, 0x65, 0x33, 0x32, 0x38, 0x61, 0x63, 0x61, 0x63, 0x38, 0x64, 0x62, 0x36, 0x62, 0x63, 0x62, 0x64, 0x63, 0x62, 0x62, 0x36, 0x65, 0x37, 0x37, 0x32, 0x34, 0x38, 0x36, 0x32, 0x63, 0x38, 0x62, 0x38, 0x39, 0x62, 0x30, 0x39, 0x64, 0x38, 0x35, 0x65, 0x64, 0x63, 0x63, 0x66, 0x34, 0x31, 0x66, 0x66, 0x39, 0x39, 0x38, 0x31, 0x65, 0x62, // 92 0x18, 0x5c, + // 
438760261 + 0x1a, 0x1a, 0x26, 0xf3, 0x45, }, }, { @@ -12344,7 +12654,7 @@ func TestDeployedEvents(t *testing.T) { // {"value":{"id":"A.8624b52f9ddcd04a.FlowIDTableStaking.EpochTotalRewardsPaid","fields":[{"value":{"value":"1316543.00000000","type":"UFix64"},"name":"total"},{"value":{"value":"53.04112895","type":"UFix64"},"name":"fromFees"},{"value":{"value":"1316489.95887105","type":"UFix64"},"name":"minted"},{"value":{"value":"6.04080767","type":"UFix64"},"name":"feesBurned"}]},"type":"Event"} // // language=edn, format=ccf - // 129([[162([h'', "A.8624b52f9ddcd04a.FlowIDTableStaking.EpochTotalRewardsPaid", [["total", 137(23)], ["minted", 137(23)], ["fromFees", 137(23)], ["feesBurned", 137(23)]]])], [136(h''), [131654300000000, 131648995887105, 5304112895, 604080767]]]) + // 129([[162([h'', "A.8624b52f9ddcd04a.FlowIDTableStaking.EpochTotalRewardsPaid", [["total", 137(23)], ["fromFees", 137(23)], ["minted", 137(23)], ["feesBurned", 137(23)]]])], [136(h''), [131654300000000, 5304112895, 131648995887105, 604080767]]]) // // language=cbor, format=ccf // tag @@ -12387,10 +12697,10 @@ func TestDeployedEvents(t *testing.T) { // field 1 // array, 2 element follows 0x82, - // text, 6 bytes follow - 0x66, - // "minted" - 0x6d, 0x69, 0x6e, 0x74, 0x65, 0x64, + // text, 8 bytes follow + 0x68, + // "fromFees" + 0x66, 0x72, 0x6f, 0x6d, 0x46, 0x65, 0x65, 0x73, // tag 0xd8, ccf.CBORTagSimpleType, // UFix64 type ID (23) @@ -12398,10 +12708,10 @@ func TestDeployedEvents(t *testing.T) { // field 2 // array, 2 element follows 0x82, - // text, 8 bytes follow - 0x68, - // "fromFees" - 0x66, 0x72, 0x6f, 0x6d, 0x46, 0x65, 0x65, 0x73, + // text, 6 bytes follow + 0x66, + // "minted" + 0x6d, 0x69, 0x6e, 0x74, 0x65, 0x64, // tag 0xd8, ccf.CBORTagSimpleType, // UFix64 type ID (23) @@ -12429,10 +12739,10 @@ func TestDeployedEvents(t *testing.T) { 0x84, // 131654300000000 0x1b, 0x00, 0x00, 0x77, 0xbd, 0x27, 0xc8, 0xdf, 0x00, - // 131648995887105 - 0x1b, 0x00, 0x00, 0x77, 0xbb, 0xeb, 0xa2, 0x88, 0x01, // 5304112895 0x1b, 0x00, 0x00, 0x00, 0x01, 0x3c, 0x26, 0x56, 0xff, + // 131648995887105 + 0x1b, 0x00, 0x00, 0x77, 0xbb, 0xeb, 0xa2, 0x88, 0x01, // 604080767 0x1a, 0x24, 0x01, 0x8a, 0x7f, }, @@ -12507,7 +12817,7 @@ func TestDeployedEvents(t *testing.T) { // {"value":{"id":"A.8624b52f9ddcd04a.FlowIDTableStaking.RewardsPaid","fields":[{"value":{"value":"e52cbcd825e328acac8db6bcbdcbb6e7724862c8b89b09d85edccf41ff9981eb","type":"String"},"name":"nodeID"},{"value":{"value":"1745.49955740","type":"UFix64"},"name":"amount"}]},"type":"Event"} // // language=edn, format=ccf - // 129([[162([h'', "A.8624b52f9ddcd04a.FlowIDTableStaking.RewardsPaid", [["amount", 137(23)], ["nodeID", 137(1)]]])], [136(h''), [174549955740, "e52cbcd825e328acac8db6bcbdcbb6e7724862c8b89b09d85edccf41ff9981eb"]]]) + // 129([[162([h'', "A.8624b52f9ddcd04a.FlowIDTableStaking.RewardsPaid", [["nodeID", 137(1)], ["amount", 137(23)]]])], [136(h''), ["e52cbcd825e328acac8db6bcbdcbb6e7724862c8b89b09d85edccf41ff9981eb", 174549955740]]]) // // language=cbor, format=ccf // tag @@ -12520,7 +12830,7 @@ func TestDeployedEvents(t *testing.T) { // event type: // id: []byte{} // cadence-type-id: "A.8624b52f9ddcd04a.FlowIDTableStaking.RewardsPaid" - // 2 field: [["amount", type(ufix64)], ["nodeID", type(string)]] + // 2 field: [["nodeID", type(string)], ["amount", type(ufix64)]] // tag 0xd8, ccf.CBORTagEventType, // array, 3 element follows @@ -12541,23 +12851,23 @@ func TestDeployedEvents(t *testing.T) { 0x82, // text, 6 bytes follow 0x66, - // "amount" - 0x61, 0x6d, 0x6f, 0x75, 
0x6e, 0x74, + // "nodeID" + 0x6e, 0x6f, 0x64, 0x65, 0x49, 0x44, // tag 0xd8, ccf.CBORTagSimpleType, - // UFix64 type ID (23) - 0x17, + // String type ID (1) + 0x01, // field 1 // array, 2 element follows 0x82, // text, 6 bytes follow 0x66, - // "nodeID" - 0x6e, 0x6f, 0x64, 0x65, 0x49, 0x44, + // "amount" + 0x61, 0x6d, 0x6f, 0x75, 0x6e, 0x74, // tag 0xd8, ccf.CBORTagSimpleType, - // String type ID (1) - 0x01, + // UFix64 type ID (23) + 0x17, // element 1: type and value // array, 2 element follows @@ -12568,12 +12878,12 @@ func TestDeployedEvents(t *testing.T) { 0x40, // array, 2 items follow 0x82, - // 174549955740 - 0x1b, 0x00, 0x00, 0x00, 0x28, 0xa3, 0xfc, 0xf4, 0x9c, // string, 64 bytes follow 0x78, 0x40, // "e52cbcd825e328acac8db6bcbdcbb6e7724862c8b89b09d85edccf41ff9981eb" 0x65, 0x35, 0x32, 0x63, 0x62, 0x63, 0x64, 0x38, 0x32, 0x35, 0x65, 0x33, 0x32, 0x38, 0x61, 0x63, 0x61, 0x63, 0x38, 0x64, 0x62, 0x36, 0x62, 0x63, 0x62, 0x64, 0x63, 0x62, 0x62, 0x36, 0x65, 0x37, 0x37, 0x32, 0x34, 0x38, 0x36, 0x32, 0x63, 0x38, 0x62, 0x38, 0x39, 0x62, 0x30, 0x39, 0x64, 0x38, 0x35, 0x65, 0x64, 0x63, 0x63, 0x66, 0x34, 0x31, 0x66, 0x66, 0x39, 0x39, 0x38, 0x31, 0x65, 0x62, + // 174549955740 + 0x1b, 0x00, 0x00, 0x00, 0x28, 0xa3, 0xfc, 0xf4, 0x9c, }, }, { @@ -12584,7 +12894,7 @@ func TestDeployedEvents(t *testing.T) { // {"value":{"id":"A.1654653399040a61.FlowToken.TokensDeposited","fields":[{"value":{"value":"1316489.95887105","type":"UFix64"},"name":"amount"},{"value":{"value":null,"type":"Optional"},"name":"to"}]},"type":"Event"} // // language=edn, format=ccf - // 129([[162([h'', "A.1654653399040a61.FlowToken.TokensDeposited", [["to", 138(137(3))], ["amount", 137(23)]]])], [136(h''), [null, 131648995887105]]]) + // 129([[162([h'', "A.1654653399040a61.FlowToken.TokensDeposited", [["amount", 137(23)], ["to", 138(137(3))]]])], [136(h''), [131648995887105, null]]]) // // language=cbor, format=ccf // tag @@ -12616,6 +12926,17 @@ func TestDeployedEvents(t *testing.T) { // field 0 // array, 2 element follows 0x82, + // text, 6 bytes follow + 0x66, + // "amount" + 0x61, 0x6d, 0x6f, 0x75, 0x6e, 0x74, + // tag + 0xd8, ccf.CBORTagSimpleType, + // UFix64 type ID (23) + 0x17, + // field 1 + // array, 2 element follows + 0x82, // text, 2 bytes follow 0x62, // "to" @@ -12626,17 +12947,6 @@ func TestDeployedEvents(t *testing.T) { 0xd8, ccf.CBORTagSimpleType, // Address type ID (3) 0x03, - // field 1 - // array, 2 element follows - 0x82, - // text, 6 bytes follow - 0x66, - // "amount" - 0x61, 0x6d, 0x6f, 0x75, 0x6e, 0x74, - // tag - 0xd8, ccf.CBORTagSimpleType, - // UFix64 type ID (23) - 0x17, // element 1: type and value // array, 2 element follows @@ -12647,10 +12957,10 @@ func TestDeployedEvents(t *testing.T) { 0x40, // array, 2 items follow 0x82, - // null - 0xf6, // 131648995887105 0x1b, 0x00, 0x00, 0x77, 0xbb, 0xeb, 0xa2, 0x88, 0x01, + // null + 0xf6, }, }, { @@ -12661,7 +12971,7 @@ func TestDeployedEvents(t *testing.T) { // {"value":{"id":"A.1654653399040a61.FlowToken.TokensDeposited","fields":[{"value":{"value":"1745.49955740","type":"UFix64"},"name":"amount"},{"value":{"value":{"value":"0x8624b52f9ddcd04a","type":"Address"},"type":"Optional"},"name":"to"}]},"type":"Event"} // // language=edn, format=ccf - // 129([[162([h'', "A.1654653399040a61.FlowToken.TokensDeposited", [["to", 138(137(3))], ["amount", 137(23)]]])], [136(h''), [h'8624B52F9DDCD04A', 174549955740]]]) + // 129([[162([h'', "A.1654653399040a61.FlowToken.TokensDeposited", [["amount", 137(23)], ["to", 138(137(3))]]])], [136(h''), [174549955740, 
h'8624B52F9DDCD04A']]]) // // language=cbor, format=ccf // tag @@ -12693,6 +13003,17 @@ func TestDeployedEvents(t *testing.T) { // field 0 // array, 2 element follows 0x82, + // text, 6 bytes follow + 0x66, + // "amount" + 0x61, 0x6d, 0x6f, 0x75, 0x6e, 0x74, + // tag + 0xd8, ccf.CBORTagSimpleType, + // UFix64 type ID (23) + 0x17, + // field 1 + // array, 2 element follows + 0x82, // text, 2 bytes follow 0x62, // "to" @@ -12703,17 +13024,6 @@ func TestDeployedEvents(t *testing.T) { 0xd8, ccf.CBORTagSimpleType, // Address type ID (3) 0x03, - // field 1 - // array, 2 element follows - 0x82, - // text, 6 bytes follow - 0x66, - // "amount" - 0x61, 0x6d, 0x6f, 0x75, 0x6e, 0x74, - // tag - 0xd8, ccf.CBORTagSimpleType, - // UFix64 type ID (23) - 0x17, // element 1: type and value // array, 2 element follows @@ -12724,12 +13034,12 @@ func TestDeployedEvents(t *testing.T) { 0x40, // array, 2 items follow 0x82, + // 174549955740 + 0x1b, 0x00, 0x00, 0x00, 0x28, 0xa3, 0xfc, 0xf4, 0x9c, // bytes, 8 bytes follow 0x48, // 0x8624b52f9ddcd04a 0x86, 0x24, 0xb5, 0x2f, 0x9d, 0xdc, 0xd0, 0x4a, - // 174549955740 - 0x1b, 0x00, 0x00, 0x00, 0x28, 0xa3, 0xfc, 0xf4, 0x9c, }, }, { @@ -12802,7 +13112,7 @@ func TestDeployedEvents(t *testing.T) { // {"value":{"id":"A.1654653399040a61.FlowToken.TokensWithdrawn","fields":[{"value":{"value":"53.04112895","type":"UFix64"},"name":"amount"},{"value":{"value":{"value":"0xf919ee77447b7497","type":"Address"},"type":"Optional"},"name":"from"}]},"type":"Event"} // // language=edn, format=ccf - // 129([[162([h'', "A.1654653399040a61.FlowToken.TokensWithdrawn", [["from", 138(137(3))], ["amount", 137(23)]]])], [136(h''), [h'F919EE77447B7497', 5304112895]]]) + // 129([[162([h'', "A.1654653399040a61.FlowToken.TokensWithdrawn", [["amount", 137(23)], ["from", 138(137(3))]]])], [136(h''), [5304112895, h'F919EE77447B7497']]]) // // language=cbor, format=ccf // tag @@ -12834,6 +13144,17 @@ func TestDeployedEvents(t *testing.T) { // field 0 // array, 2 element follows 0x82, + // text, 6 bytes follow + 0x66, + // "amount" + 0x61, 0x6d, 0x6f, 0x75, 0x6e, 0x74, + // tag + 0xd8, ccf.CBORTagSimpleType, + // UFix64 type ID (23) + 0x17, + // field 1 + // array, 2 element follows + 0x82, // text, 4 bytes follow 0x64, // "from" @@ -12844,17 +13165,6 @@ func TestDeployedEvents(t *testing.T) { 0xd8, ccf.CBORTagSimpleType, // Address type ID (3) 0x03, - // field 1 - // array, 2 element follows - 0x82, - // text, 6 bytes follow - 0x66, - // "amount" - 0x61, 0x6d, 0x6f, 0x75, 0x6e, 0x74, - // tag - 0xd8, ccf.CBORTagSimpleType, - // UFix64 type ID (23) - 0x17, // element 1: type and value // array, 2 element follows @@ -12865,12 +13175,12 @@ func TestDeployedEvents(t *testing.T) { 0x40, // array, 2 items follow 0x82, + // 5304112895 + 0x1b, 0x00, 0x00, 0x00, 0x01, 0x3c, 0x26, 0x56, 0xff, // bytes, 8 bytes follow 0x48, // 0xf919ee77447b7497 0xf9, 0x19, 0xee, 0x77, 0x44, 0x7b, 0x74, 0x97, - // 5304112895 - 0x1b, 0x00, 0x00, 0x00, 0x01, 0x3c, 0x26, 0x56, 0xff, }, }, } @@ -12880,17 +13190,20 @@ func TestDeployedEvents(t *testing.T) { t.Parallel() // Encode Cadence value to CCF - actualCBOR, err := ccf.Encode(tc.event) + actualCBOR, err := ccf.EventsEncMode.Encode(tc.event) require.NoError(t, err) utils.AssertEqualWithDiff(t, tc.expectedCBOR, actualCBOR) // Decode CCF to Cadence value - decodedEvent, err := ccf.Decode(nil, actualCBOR) + decodedEvent, err := ccf.EventsDecMode.Decode(nil, actualCBOR) require.NoError(t, err) - // Test original event and decoded events are equal even if - // fields are ordered 
differently due to deterministic encoding. - testEventEquality(t, tc.event, decodedEvent.(cadence.Event)) + // Since event encoding doesn't sort fields, make sure that input event is identical to decoded event. + require.Equal( + t, + cadence.ValueWithCachedTypeID(tc.event), + cadence.ValueWithCachedTypeID(decodedEvent), + ) }) } @@ -13100,10 +13413,10 @@ func createFlowTokenTokensWithdrawnEvent() cadence.Event { addressBytes, _ := hex.DecodeString("f919ee77447b7497") amount, _ := cadence.NewUFix64("53.04112895") - to := cadence.NewOptional(cadence.BytesToAddress(addressBytes)) + from := cadence.NewOptional(cadence.BytesToAddress(addressBytes)) return cadence.NewEvent( - []cadence.Value{amount, to}, + []cadence.Value{amount, from}, ).WithType(newFlowTokenTokensWithdrawnEventType()) } @@ -13264,38 +13577,17 @@ func createFlowIDTableStakingRewardsPaidEvent() cadence.Event { ).WithType(newFlowIDTableStakingRewardsPaidEventType()) } -func testEventEquality(t *testing.T, event1 cadence.Event, event2 cadence.Event) { - require.True(t, event1.Type().Equal(event2.Type())) - require.Equal(t, len(event1.Fields), len(event2.Fields)) - require.Equal(t, len(event1.EventType.Fields), len(event2.EventType.Fields)) - - for i, event1FieldType := range event1.EventType.Fields { - - foundField := false - - for j, event2FieldType := range event2.EventType.Fields { - if event1FieldType.Identifier == event2FieldType.Identifier { - require.Equal(t, event1.Fields[i], event2.Fields[j]) - foundField = true - break - } - } - - require.True(t, foundField) - } -} - func TestDecodeTruncatedData(t *testing.T) { t.Parallel() - data, err := ccf.Encode(createFlowTokenTokensWithdrawnEvent()) + data, err := deterministicEncMode.Encode(createFlowTokenTokensWithdrawnEvent()) require.NoError(t, err) - _, err = ccf.Decode(nil, data) + _, err = deterministicDecMode.Decode(nil, data) require.NoError(t, err) for i := len(data) - 1; i >= 0; i-- { - decodedVal, err := ccf.Decode(nil, data[:i]) + decodedVal, err := deterministicDecMode.Decode(nil, data[:i]) require.Nil(t, decodedVal) require.Error(t, err) } @@ -14122,7 +14414,7 @@ func TestDecodeInvalidData(t *testing.T) { test := func(tc testCase) { t.Run(tc.name, func(t *testing.T) { t.Parallel() - decodedVal, err := ccf.Decode(nil, tc.data) + decodedVal, err := deterministicDecMode.Decode(nil, tc.data) require.Nil(t, decodedVal) require.Error(t, err) }) @@ -14344,3 +14636,974 @@ func TestEncodeValueOfRestrictedInterface(t *testing.T) { }, ) } + +func TestCyclicReferenceValue(t *testing.T) { + + // Test data is from TestRuntimeScriptReturnSpecial in runtime_test.go + t.Run("recursive reference", func(t *testing.T) { + + t.Parallel() + + script := ` + access(all) fun main(): AnyStruct { + let refs: [&AnyStruct] = [] + refs.append(&refs as &AnyStruct) + return refs + } + ` + + actual := exportFromScript(t, script) + + expected := cadence.NewArray([]cadence.Value{ + cadence.NewArray([]cadence.Value{ + nil, + }).WithType(&cadence.VariableSizedArrayType{ + ElementType: &cadence.ReferenceType{ + Authorization: cadence.Unauthorized{}, + Type: cadence.AnyStructType{}, + }, + }), + }).WithType(&cadence.VariableSizedArrayType{ + ElementType: &cadence.ReferenceType{ + Authorization: cadence.Unauthorized{}, + Type: cadence.AnyStructType{}, + }, + }) + + assert.Equal(t, expected, actual) + + testEncodeAndDecode( + t, + expected, + []byte{ + // language=json, format=json-cdc + // {"value":[{"value":[null],"type":"Array"}],"type":"Array"} + // + // language=edn, format=ccf + // 
130([139(142([false, 137(39)])), [130([139(142([false, 137(39)])), [null]])]]) + // + // language=cbor, format=ccf + // tag + 0xd8, ccf.CBORTagTypeAndValue, + // array, 2 items follow + 0x82, + // static type + // tag + 0xd8, ccf.CBORTagVarsizedArrayType, + // tag + 0xd8, ccf.CBORTagReferenceType, + // array, 2 items follow + 0x82, + // nil + 0xf6, + // tag + 0xd8, ccf.CBORTagSimpleType, + // AnyStruct type ID (39) + 0x18, 0x27, + + // data + // array, 1 items follow + 0x81, + // tag + 0xd8, ccf.CBORTagTypeAndValue, + // array, 2 items follow + 0x82, + // tag + 0xd8, ccf.CBORTagVarsizedArrayType, + // tag + 0xd8, ccf.CBORTagReferenceType, + // array, 2 items follow + 0x82, + // nil + 0xf6, + // tag + 0xd8, ccf.CBORTagSimpleType, + // AnyStruct type ID (39) + 0x18, 0x27, + // array, 1 items follow + 0x81, + // nil + 0xf6, + }, + ) + }) +} + +func TestSortOptions(t *testing.T) { + // Test sorting of: + // - composite fields ("count", "sum") + // - restricted types ("HasCount", "HasSum") + + sortFieldsEncMode, err := ccf.EncOptions{ + SortCompositeFields: ccf.SortBytewiseLexical, + }.EncMode() + require.NoError(t, err) + + sortRestrictedTypesEncMode, err := ccf.EncOptions{ + SortRestrictedTypes: ccf.SortBytewiseLexical, + }.EncMode() + require.NoError(t, err) + + enforceSortedFieldsDecMode, err := ccf.DecOptions{ + EnforceSortCompositeFields: ccf.EnforceSortBytewiseLexical, + }.DecMode() + require.NoError(t, err) + + enforceSortedRestrictedTypesDecMode, err := ccf.DecOptions{ + EnforceSortRestrictedTypes: ccf.EnforceSortBytewiseLexical, + }.DecMode() + require.NoError(t, err) + + hasCountInterfaceType := cadence.NewResourceInterfaceType( + common.NewStringLocation(nil, "test"), + "HasCount", + nil, + nil, + ) + + hasSumInterfaceType := cadence.NewResourceInterfaceType( + common.NewStringLocation(nil, "test"), + "HasSum", + nil, + nil, + ) + + statsType := cadence.NewResourceType( + common.NewStringLocation(nil, "test"), + "Stats", + []cadence.Field{ + cadence.NewField("count", cadence.NewIntType()), + cadence.NewField("sum", cadence.NewIntType()), + }, + nil, + ) + + countSumRestrictedType := cadence.NewRestrictedType( + nil, + []cadence.Type{ + hasCountInterfaceType, + hasSumInterfaceType, + }, + ) + + val := cadence.NewArray([]cadence.Value{ + cadence.NewResource( + []cadence.Value{ + cadence.NewInt(1), + cadence.NewInt(2), + }, + ).WithType(statsType), + }).WithType(cadence.NewVariableSizedArrayType(countSumRestrictedType)) + + t.Run("no sort", func(t *testing.T) { + expectedStatsType := cadence.NewResourceType( + common.NewStringLocation(nil, "test"), + "Stats", + []cadence.Field{ + cadence.NewField("count", cadence.NewIntType()), + cadence.NewField("sum", cadence.NewIntType()), + }, + nil, + ) + + expectedCountSumRestrictedType := cadence.NewRestrictedType( + nil, + []cadence.Type{ + hasCountInterfaceType, + hasSumInterfaceType, + }, + ) + + expectedVal := cadence.NewArray([]cadence.Value{ + cadence.NewResource( + []cadence.Value{ + cadence.NewInt(1), + cadence.NewInt(2), + }, + ).WithType(expectedStatsType), + }).WithType(cadence.NewVariableSizedArrayType(expectedCountSumRestrictedType)) + + expectedCBOR := []byte{ + // language=json, format=json-cdc + // {"value":[{"value":{"id":"S.test.Stats","fields":[{"value":{"value":"1","type":"Int"},"name":"count"},{"value":{"value":"2","type":"Int"},"name":"sum"}]},"type":"Resource"}],"type":"Array"} + // + // language=edn, format=ccf + // 129([[161([h'', "S.test.Stats", [["count", 137(4)], ["sum", 137(4)]]]), 177([h'01', "S.test.HasSum"]), 
177([h'02', "S.test.HasCount"]), ], [139(143([null, [136(h'02'), 136(h'01')]])), [130([136(h''), [2, 1]])]]]) + // + // language=cbor, format=ccf + // tag + 0xd8, ccf.CBORTagTypeDefAndValue, + // array, 2 items follow + 0x82, + // element 0: type definitions + // array, 3 items follow + 0x83, + // resource type: + // id: []byte{} + // cadence-type-id: "S.test.Stats" + // 2 fields: [["count", type(int)], ["sum", type(int)]] + // tag + 0xd8, ccf.CBORTagResourceType, + // array, 3 items follow + 0x83, + // id + // bytes, 0 bytes follow + 0x40, + // cadence-type-id + // string, 12 bytes follow + 0x6c, + // S.test.Stats + 0x53, 0x2e, 0x74, 0x65, 0x73, 0x74, 0x2e, 0x53, 0x74, 0x61, 0x74, 0x73, + // fields + // array, 2 items follow + 0x82, + // field 0 + // array, 2 items follow + 0x82, + // text, 5 bytes follow + 0x65, + // count + 0x63, 0x6f, 0x75, 0x6e, 0x74, + // tag + 0xd8, ccf.CBORTagSimpleType, + // Int type ID (4) + 0x04, + // field 1 + // array, 2 items follow + 0x82, + // text, 3 bytes follow + 0x63, + // sum + 0x73, 0x75, 0x6d, + // tag + 0xd8, ccf.CBORTagSimpleType, + // Int type ID (4) + 0x04, + // resource interface type: + // id: []byte{1} + // cadence-type-id: "S.test.HasSum" + // tag + 0xd8, ccf.CBORTagResourceInterfaceType, + // array, 2 items follow + 0x82, + // id + // bytes, 1 bytes follow + 0x41, + // 1 + 0x01, + // cadence-type-id + // string, 13 bytes follow + 0x6d, + // S.test.HasSum + 0x53, 0x2e, 0x74, 0x65, 0x73, 0x74, 0x2e, 0x48, 0x61, 0x73, 0x53, 0x75, 0x6d, + // resource interface type: + // id: []byte{2} + // cadence-type-id: "S.test.HasCount" + // tag + 0xd8, ccf.CBORTagResourceInterfaceType, + // array, 2 items follow + 0x82, + // id + // bytes, 1 bytes follow + 0x41, + // 2 + 0x02, + // cadence-type-id + // string, 15 bytes follow + 0x6f, + // S.test.HasCount + 0x53, 0x2e, 0x74, 0x65, 0x73, 0x74, 0x2e, 0x48, 0x61, 0x73, 0x43, 0x6f, 0x75, 0x6e, 0x74, + + // element 1: type and value + // array, 2 items follow + 0x82, + // tag + 0xd8, ccf.CBORTagVarsizedArrayType, + // tag + 0xd8, ccf.CBORTagRestrictedType, + // array, 2 items follow + 0x82, + // type + // null + 0xf6, + // array, 2 items follow + 0x82, + // tag + 0xd8, ccf.CBORTagTypeRef, + // bytes, 1 byte follows + 0x41, + // 2 + 0x02, + // tag + 0xd8, ccf.CBORTagTypeRef, + // bytes, 1 byte follows + 0x41, + // 1 + 0x01, + + // array, 1 item follows + 0x81, + // tag + 0xd8, ccf.CBORTagTypeAndValue, + // array, 2 items follow + 0x82, + // tag + 0xd8, ccf.CBORTagTypeRef, + // bytes, 0 byte follows + 0x40, + // array, 2 items follow + 0x82, + // tag (big num) + 0xc2, + // bytes, 1 byte follows + 0x41, + // 1 + 0x01, + // tag (big num) + 0xc2, + // bytes, 1 byte follows + 0x41, + // 2 + 0x02, + } + + // Encode value without sorting. + actualCBOR, err := ccf.Encode(val) + require.NoError(t, err) + utils.AssertEqualWithDiff(t, expectedCBOR, actualCBOR) + + // Decode value without enforcing sorting. + decodedVal, err := ccf.Decode(nil, actualCBOR) + require.NoError(t, err) + assert.Equal( + t, + cadence.ValueWithCachedTypeID(expectedVal), + cadence.ValueWithCachedTypeID(decodedVal), + ) + + // Decode value enforcing sorting of composite fields should return error. + _, err = enforceSortedFieldsDecMode.Decode(nil, actualCBOR) + require.Error(t, err) + + // Decode value enforcing sorting of restricted types should return error. 
+ _, err = enforceSortedRestrictedTypesDecMode.Decode(nil, actualCBOR) + require.Error(t, err) + }) + + t.Run("sort composite fields only", func(t *testing.T) { + expectedStatsType := cadence.NewResourceType( + common.NewStringLocation(nil, "test"), + "Stats", + []cadence.Field{ + cadence.NewField("sum", cadence.NewIntType()), + cadence.NewField("count", cadence.NewIntType()), + }, + nil, + ) + + expectedCountSumRestrictedType := cadence.NewRestrictedType( + nil, + []cadence.Type{ + hasCountInterfaceType, + hasSumInterfaceType, + }, + ) + + expectedVal := cadence.NewArray([]cadence.Value{ + cadence.NewResource( + []cadence.Value{ + cadence.NewInt(2), + cadence.NewInt(1), + }, + ).WithType(expectedStatsType), + }).WithType(cadence.NewVariableSizedArrayType(expectedCountSumRestrictedType)) + + expectedCBOR := []byte{ + // language=json, format=json-cdc + // {"value":[{"value":{"id":"S.test.Stats","fields":[{"value":{"value":"1","type":"Int"},"name":"count"},{"value":{"value":"2","type":"Int"},"name":"sum"}]},"type":"Resource"}],"type":"Array"} + // + // language=edn, format=ccf + // 129([[161([h'', "S.test.Stats", [["sum", 137(4)], ["count", 137(4)]]]), 177([h'01', "S.test.HasSum"]), 177([h'02', "S.test.HasCount"]), ], [139(143([null, [136(h'02'), 136(h'01')]])), [130([136(h''), [2, 1]])]]]) + // + // language=cbor, format=ccf + // tag + 0xd8, ccf.CBORTagTypeDefAndValue, + // array, 2 items follow + 0x82, + // element 0: type definitions + // array, 3 items follow + 0x83, + // resource type: + // id: []byte{} + // cadence-type-id: "S.test.Stats" + // 2 fields: [["sum", type(int)], ["count", type(int)]] + // tag + 0xd8, ccf.CBORTagResourceType, + // array, 3 items follow + 0x83, + // id + // bytes, 0 bytes follow + 0x40, + // cadence-type-id + // string, 12 bytes follow + 0x6c, + // S.test.Stats + 0x53, 0x2e, 0x74, 0x65, 0x73, 0x74, 0x2e, 0x53, 0x74, 0x61, 0x74, 0x73, + // fields + // array, 2 items follow + 0x82, + // field 0 + // array, 2 items follow + 0x82, + // text, 3 bytes follow + 0x63, + // sum + 0x73, 0x75, 0x6d, + // tag + 0xd8, ccf.CBORTagSimpleType, + // Int type ID (4) + 0x04, + // field 1 + // array, 2 items follow + 0x82, + // text, 5 bytes follow + 0x65, + // count + 0x63, 0x6f, 0x75, 0x6e, 0x74, + // tag + 0xd8, ccf.CBORTagSimpleType, + // Int type ID (4) + 0x04, + // resource interface type: + // id: []byte{1} + // cadence-type-id: "S.test.HasSum" + // tag + 0xd8, ccf.CBORTagResourceInterfaceType, + // array, 2 items follow + 0x82, + // id + // bytes, 1 bytes follow + 0x41, + // 1 + 0x01, + // cadence-type-id + // string, 13 bytes follow + 0x6d, + // S.test.HasSum + 0x53, 0x2e, 0x74, 0x65, 0x73, 0x74, 0x2e, 0x48, 0x61, 0x73, 0x53, 0x75, 0x6d, + // resource interface type: + // id: []byte{2} + // cadence-type-id: "S.test.HasCount" + // tag + 0xd8, ccf.CBORTagResourceInterfaceType, + // array, 2 items follow + 0x82, + // id + // bytes, 1 bytes follow + 0x41, + // 2 + 0x02, + // cadence-type-id + // string, 15 bytes follow + 0x6f, + // S.test.HasCount + 0x53, 0x2e, 0x74, 0x65, 0x73, 0x74, 0x2e, 0x48, 0x61, 0x73, 0x43, 0x6f, 0x75, 0x6e, 0x74, + + // element 1: type and value + // array, 2 items follow + 0x82, + // tag + 0xd8, ccf.CBORTagVarsizedArrayType, + // tag + 0xd8, ccf.CBORTagRestrictedType, + // array, 2 items follow + 0x82, + // type + // null + 0xf6, + // array, 2 items follow + 0x82, + // tag + 0xd8, ccf.CBORTagTypeRef, + // bytes, 1 byte follows + 0x41, + // 2 + 0x02, + // tag + 0xd8, ccf.CBORTagTypeRef, + // bytes, 1 byte follows + 0x41, + // 1 + 0x01, + + // 
array, 1 item follows + 0x81, + // tag + 0xd8, ccf.CBORTagTypeAndValue, + // array, 2 items follow + 0x82, + // tag + 0xd8, ccf.CBORTagTypeRef, + // bytes, 0 byte follows + 0x40, + // array, 2 items follow + 0x82, + // tag (big num) + 0xc2, + // bytes, 1 byte follows + 0x41, + // 2 + 0x02, + // tag (big num) + 0xc2, + // bytes, 1 byte follows + 0x41, + // 1 + 0x01, + } + + // Encode value with sorted composite fields. + actualCBOR, err := sortFieldsEncMode.Encode(val) + require.NoError(t, err) + utils.AssertEqualWithDiff(t, expectedCBOR, actualCBOR) + + // Decode value enforcing sorting of composite fields. + decodedVal, err := enforceSortedFieldsDecMode.Decode(nil, actualCBOR) + require.NoError(t, err) + assert.Equal( + t, + cadence.ValueWithCachedTypeID(expectedVal), + cadence.ValueWithCachedTypeID(decodedVal), + ) + + // Decode value without enforcing sorting should return no error. + _, err = ccf.Decode(nil, actualCBOR) + require.NoError(t, err) + + // Decode value enforcing sorting of restricted types should return error. + _, err = enforceSortedRestrictedTypesDecMode.Decode(nil, actualCBOR) + require.Error(t, err) + }) + + t.Run("sort restricted types only", func(t *testing.T) { + expectedStatsType := cadence.NewResourceType( + common.NewStringLocation(nil, "test"), + "Stats", + []cadence.Field{ + cadence.NewField("count", cadence.NewIntType()), + cadence.NewField("sum", cadence.NewIntType()), + }, + nil, + ) + + expectedCountSumRestrictedType := cadence.NewRestrictedType( + nil, + []cadence.Type{ + hasSumInterfaceType, + hasCountInterfaceType, + }, + ) + + expectedVal := cadence.NewArray([]cadence.Value{ + cadence.NewResource( + []cadence.Value{ + cadence.NewInt(1), + cadence.NewInt(2), + }, + ).WithType(expectedStatsType), + }).WithType(cadence.NewVariableSizedArrayType(expectedCountSumRestrictedType)) + + expectedCBOR := []byte{ + // language=json, format=json-cdc + // {"value":[{"value":{"id":"S.test.Stats","fields":[{"value":{"value":"1","type":"Int"},"name":"count"},{"value":{"value":"2","type":"Int"},"name":"sum"}]},"type":"Resource"}],"type":"Array"} + // + // language=edn, format=ccf + // 129([[161([h'', "S.test.Stats", [["count", 137(4)], ["sum", 137(4)]]]), 177([h'01', "S.test.HasSum"]), 177([h'02', "S.test.HasCount"]), ], [139(143([null, [136(h'01'), 136(h'02')]])), [130([136(h''), [2, 1]])]]]) + // + // language=cbor, format=ccf + // tag + 0xd8, ccf.CBORTagTypeDefAndValue, + // array, 2 items follow + 0x82, + // element 0: type definitions + // array, 3 items follow + 0x83, + // resource type: + // id: []byte{} + // cadence-type-id: "S.test.Stats" + // 2 fields: [["count", type(int)], ["sum", type(int)]] + // tag + 0xd8, ccf.CBORTagResourceType, + // array, 3 items follow + 0x83, + // id + // bytes, 0 bytes follow + 0x40, + // cadence-type-id + // string, 12 bytes follow + 0x6c, + // S.test.Stats + 0x53, 0x2e, 0x74, 0x65, 0x73, 0x74, 0x2e, 0x53, 0x74, 0x61, 0x74, 0x73, + // fields + // array, 2 items follow + 0x82, + // field 0 + // array, 2 items follow + 0x82, + // text, 5 bytes follow + 0x65, + // count + 0x63, 0x6f, 0x75, 0x6e, 0x74, + // tag + 0xd8, ccf.CBORTagSimpleType, + // Int type ID (4) + 0x04, + // field 1 + // array, 2 items follow + 0x82, + // text, 3 bytes follow + 0x63, + // sum + 0x73, 0x75, 0x6d, + // tag + 0xd8, ccf.CBORTagSimpleType, + // Int type ID (4) + 0x04, + // resource interface type: + // id: []byte{1} + // cadence-type-id: "S.test.HasSum" + // tag + 0xd8, ccf.CBORTagResourceInterfaceType, + // array, 2 items follow + 0x82, + // id + // bytes, 
1 bytes follow + 0x41, + // 1 + 0x01, + // cadence-type-id + // string, 13 bytes follow + 0x6d, + // S.test.HasSum + 0x53, 0x2e, 0x74, 0x65, 0x73, 0x74, 0x2e, 0x48, 0x61, 0x73, 0x53, 0x75, 0x6d, + // resource interface type: + // id: []byte{2} + // cadence-type-id: "S.test.HasCount" + // tag + 0xd8, ccf.CBORTagResourceInterfaceType, + // array, 2 items follow + 0x82, + // id + // bytes, 1 bytes follow + 0x41, + // 2 + 0x02, + // cadence-type-id + // string, 15 bytes follow + 0x6f, + // S.test.HasCount + 0x53, 0x2e, 0x74, 0x65, 0x73, 0x74, 0x2e, 0x48, 0x61, 0x73, 0x43, 0x6f, 0x75, 0x6e, 0x74, + + // element 1: type and value + // array, 2 items follow + 0x82, + // tag + 0xd8, ccf.CBORTagVarsizedArrayType, + // tag + 0xd8, ccf.CBORTagRestrictedType, + // array, 2 items follow + 0x82, + // type + // null + 0xf6, + // array, 2 items follow + 0x82, + // tag + 0xd8, ccf.CBORTagTypeRef, + // bytes, 1 byte follows + 0x41, + // 1 + 0x01, + // tag + 0xd8, ccf.CBORTagTypeRef, + // bytes, 1 byte follows + 0x41, + // 2 + 0x02, + + // array, 1 item follows + 0x81, + // tag + 0xd8, ccf.CBORTagTypeAndValue, + // array, 2 items follow + 0x82, + // tag + 0xd8, ccf.CBORTagTypeRef, + // bytes, 0 byte follows + 0x40, + // array, 2 items follow + 0x82, + // tag (big num) + 0xc2, + // bytes, 1 byte follows + 0x41, + // 1 + 0x01, + // tag (big num) + 0xc2, + // bytes, 1 byte follows + 0x41, + // 2 + 0x02, + } + + // Encode value with sorted restricted types. + actualCBOR, err := sortRestrictedTypesEncMode.Encode(val) + require.NoError(t, err) + utils.AssertEqualWithDiff(t, expectedCBOR, actualCBOR) + + // Decode value enforcing sorting of restricted types. + decodedVal, err := enforceSortedRestrictedTypesDecMode.Decode(nil, actualCBOR) + require.NoError(t, err) + assert.Equal( + t, + cadence.ValueWithCachedTypeID(expectedVal), + cadence.ValueWithCachedTypeID(decodedVal), + ) + + // Decode value without enforcing sorting should return no error. + _, err = ccf.Decode(nil, actualCBOR) + require.NoError(t, err) + + // Decode value enforcing sorting of composite fields should return error. 
+ _, err = enforceSortedFieldsDecMode.Decode(nil, actualCBOR) + require.Error(t, err) + }) + + t.Run("sort", func(t *testing.T) { + expectedStatsType := cadence.NewResourceType( + common.NewStringLocation(nil, "test"), + "Stats", + []cadence.Field{ + cadence.NewField("sum", cadence.NewIntType()), + cadence.NewField("count", cadence.NewIntType()), + }, + nil, + ) + + expectedCountSumRestrictedType := cadence.NewRestrictedType( + nil, + []cadence.Type{ + hasSumInterfaceType, + hasCountInterfaceType, + }, + ) + + expectedVal := cadence.NewArray([]cadence.Value{ + cadence.NewResource( + []cadence.Value{ + cadence.NewInt(2), + cadence.NewInt(1), + }, + ).WithType(expectedStatsType), + }).WithType(cadence.NewVariableSizedArrayType(expectedCountSumRestrictedType)) + + expectedCBOR := []byte{ + // language=json, format=json-cdc + // {"value":[{"value":{"id":"S.test.Stats","fields":[{"value":{"value":"1","type":"Int"},"name":"count"},{"value":{"value":"2","type":"Int"},"name":"sum"}]},"type":"Resource"}],"type":"Array"} + // + // language=edn, format=ccf + // 129([[161([h'', "S.test.Stats", [["sum", 137(4)], ["count", 137(4)]]]), 177([h'01', "S.test.HasSum"]), 177([h'02', "S.test.HasCount"])], [139(143([null, [136(h'01'), 136(h'02')]])), [130([136(h''), [2, 1]])]]]) + // + // language=cbor, format=ccf + // tag + 0xd8, ccf.CBORTagTypeDefAndValue, + // array, 2 items follow + 0x82, + // element 0: type definitions + // array, 3 items follow + 0x83, + // resource type: + // id: []byte{} + // cadence-type-id: "S.test.Stats" + // 2 fields: [["sum", type(int)], ["count", type(int)]] + // tag + 0xd8, ccf.CBORTagResourceType, + // array, 3 items follow + 0x83, + // id + // bytes, 0 bytes follow + 0x40, + // cadence-type-id + // string, 12 bytes follow + 0x6c, + // S.test.Stats + 0x53, 0x2e, 0x74, 0x65, 0x73, 0x74, 0x2e, 0x53, 0x74, 0x61, 0x74, 0x73, + // fields + // array, 2 items follow + 0x82, + // field 0 + // array, 2 items follow + 0x82, + // text, 3 bytes follow + 0x63, + // sum + 0x73, 0x75, 0x6d, + // tag + 0xd8, ccf.CBORTagSimpleType, + // Int type ID (4) + 0x04, + // field 1 + // array, 2 items follow + 0x82, + // text, 5 bytes follow + 0x65, + // count + 0x63, 0x6f, 0x75, 0x6e, 0x74, + // tag + 0xd8, ccf.CBORTagSimpleType, + // Int type ID (4) + 0x04, + // resource interface type: + // id: []byte{1} + // cadence-type-id: "S.test.HasSum" + // tag + 0xd8, ccf.CBORTagResourceInterfaceType, + // array, 2 items follow + 0x82, + // id + // bytes, 1 bytes follow + 0x41, + // 1 + 0x01, + // cadence-type-id + // string, 13 bytes follow + 0x6d, + // S.test.HasSum + 0x53, 0x2e, 0x74, 0x65, 0x73, 0x74, 0x2e, 0x48, 0x61, 0x73, 0x53, 0x75, 0x6d, + // resource interface type: + // id: []byte{2} + // cadence-type-id: "S.test.HasCount" + // tag + 0xd8, ccf.CBORTagResourceInterfaceType, + // array, 2 items follow + 0x82, + // id + // bytes, 1 bytes follow + 0x41, + // 2 + 0x02, + // cadence-type-id + // string, 15 bytes follow + 0x6f, + // S.test.HasCount + 0x53, 0x2e, 0x74, 0x65, 0x73, 0x74, 0x2e, 0x48, 0x61, 0x73, 0x43, 0x6f, 0x75, 0x6e, 0x74, + + // element 1: type and value + // array, 2 items follow + 0x82, + // tag + 0xd8, ccf.CBORTagVarsizedArrayType, + // tag + 0xd8, ccf.CBORTagRestrictedType, + // array, 2 items follow + 0x82, + // type + // null + 0xf6, + // array, 2 items follow + 0x82, + // tag + 0xd8, ccf.CBORTagTypeRef, + // bytes, 1 byte follows + 0x41, + // 1 + 0x01, + // tag + 0xd8, ccf.CBORTagTypeRef, + // bytes, 1 byte follows + 0x41, + // 2 + 0x02, + + // array, 1 item follows + 0x81, + // tag 
+ 0xd8, ccf.CBORTagTypeAndValue, + // array, 2 items follow + 0x82, + // tag + 0xd8, ccf.CBORTagTypeRef, + // bytes, 0 byte follows + 0x40, + // array, 2 items follow + 0x82, + // tag (big num) + 0xc2, + // bytes, 1 byte follows + 0x41, + // 2 + 0x02, + // tag (big num) + 0xc2, + // bytes, 1 byte follows + 0x41, + // 1 + 0x01, + } + + // Encode value with sorted composite fields and restricted types. + actualCBOR, err := deterministicEncMode.Encode(val) + require.NoError(t, err) + utils.AssertEqualWithDiff(t, expectedCBOR, actualCBOR) + + // Decode value enforcing sorting of composite fields and restricted types. + decodedVal, err := deterministicDecMode.Decode(nil, actualCBOR) + require.NoError(t, err) + assert.Equal( + t, + cadence.ValueWithCachedTypeID(expectedVal), + cadence.ValueWithCachedTypeID(decodedVal), + ) + + // Decode value without enforcing sorting should return no error. + _, err = ccf.Decode(nil, actualCBOR) + require.NoError(t, err) + + // Decode value enforcing sorting of composite fields should return no error. + _, err = enforceSortedFieldsDecMode.Decode(nil, actualCBOR) + require.NoError(t, err) + + // Decode value enforcing sorting of restricted types should return no error. + _, err = enforceSortedRestrictedTypesDecMode.Decode(nil, actualCBOR) + require.NoError(t, err) + }) +} + +func TestInvalidEncodingOptions(t *testing.T) { + opts := ccf.EncOptions{ + SortCompositeFields: 100, + } + _, err := opts.EncMode() + require.Error(t, err) + + opts = ccf.EncOptions{ + SortRestrictedTypes: 100, + } + _, err = opts.EncMode() + require.Error(t, err) +} + +func TestInvalidDecodingOptions(t *testing.T) { + opts := ccf.DecOptions{ + EnforceSortCompositeFields: 100, + } + _, err := opts.DecMode() + require.Error(t, err) + + opts = ccf.DecOptions{ + EnforceSortRestrictedTypes: 100, + } + _, err = opts.DecMode() + require.Error(t, err) +} diff --git a/encoding/ccf/decode.go b/encoding/ccf/decode.go index 8a7d93bdd6..8cd98a837e 100644 --- a/encoding/ccf/decode.go +++ b/encoding/ccf/decode.go @@ -32,7 +32,7 @@ import ( cadenceErrors "github.com/onflow/cadence/runtime/errors" ) -// CBORDecMode +// defaultCBORDecMode // // See https://github.com/fxamacker/cbor: // "For best performance, reuse EncMode and DecMode after creating them." @@ -45,7 +45,7 @@ import ( // decoder into allocating very big data items (strings, arrays, maps, or even arbitrary precision numbers) // or exhaust the stack depth by setting up deeply nested items. Decoders need to have appropriate resource // management to mitigate these attacks." -var CBORDecMode = func() cbor.DecMode { +var defaultCBORDecMode = func() cbor.DecMode { decMode, err := cbor.DecOptions{ IndefLength: cbor.IndefLengthForbidden, IntDec: cbor.IntDecConvertNone, @@ -67,14 +67,92 @@ type Decoder struct { // CCF codec uses CBOR codec under the hood. dec *cbor.StreamDecoder gauge common.MemoryGauge + + // CCF decoding mode contains immutable CCF decoding options. + dm *decMode +} + +type DecMode interface { + // Decode returns a Cadence value decoded from its CCF-encoded representation. + // + // This function returns an error if the bytes represent CCF that is malformed, + // invalid, or does not comply with requirements in the CCF specification. + Decode(gauge common.MemoryGauge, b []byte) (cadence.Value, error) + + // NewDecoder initializes a Decoder that will decode CCF-encoded bytes from the + // given bytes. + NewDecoder(gauge common.MemoryGauge, b []byte) *Decoder +} + +// EnforceSortMode specifies how the decoder should enforce sort order. 
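+//
+// The mode is consumed through DecOptions below. A minimal decoding sketch from a
+// caller's perspective, using only names defined in this file (encodedBytes is a
+// caller-supplied placeholder, and the memory gauge may be nil, as in the tests):
+//
+//	dm, err := ccf.DecOptions{
+//		EnforceSortCompositeFields: ccf.EnforceSortBytewiseLexical,
+//		EnforceSortRestrictedTypes: ccf.EnforceSortBytewiseLexical,
+//	}.DecMode()
+//	if err != nil {
+//		// handle invalid option values
+//	}
+//	value, err := dm.Decode(nil, encodedBytes)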
+type EnforceSortMode int + + const ( + // EnforceSortNone means sort order is not enforced by the decoder. + EnforceSortNone EnforceSortMode = iota + + // EnforceSortBytewiseLexical requires sort order to be bytewise lexicographic. + EnforceSortBytewiseLexical + + maxEnforceSortMode + ) + + func (esm EnforceSortMode) valid() bool { + return esm < maxEnforceSortMode + } + + // DecOptions specifies CCF decoding options which can be used to create immutable DecMode. + type DecOptions struct { + // EnforceSortCompositeFields specifies how the decoder should enforce sort order of composite fields. + EnforceSortCompositeFields EnforceSortMode + + // EnforceSortRestrictedTypes specifies how the decoder should enforce sort order of restricted types. + EnforceSortRestrictedTypes EnforceSortMode + + // CBORDecMode will default to defaultCBORDecMode if nil. The decoding mode contains + // immutable decoding options (cbor.DecOptions) and is safe for concurrent use. + CBORDecMode cbor.DecMode + } + + // EventsDecMode is CCF decoding mode for events which contains + // immutable CCF decoding options. It is safe for concurrent use. + var EventsDecMode = &decMode{ + enforceSortCompositeFields: EnforceSortNone, + enforceSortRestrictedTypes: EnforceSortNone, + cborDecMode: defaultCBORDecMode, + } + + type decMode struct { + enforceSortCompositeFields EnforceSortMode + enforceSortRestrictedTypes EnforceSortMode + cborDecMode cbor.DecMode + } + + // DecMode returns CCF decoding mode which contains immutable decoding options. + // The returned DecMode is safe for concurrent use. + func (opts DecOptions) DecMode() (DecMode, error) { + if !opts.EnforceSortCompositeFields.valid() { + return nil, fmt.Errorf("ccf: invalid EnforceSortCompositeFields %d", opts.EnforceSortCompositeFields) + } + if !opts.EnforceSortRestrictedTypes.valid() { + return nil, fmt.Errorf("ccf: invalid EnforceSortRestrictedTypes %d", opts.EnforceSortRestrictedTypes) + } + if opts.CBORDecMode == nil { + opts.CBORDecMode = defaultCBORDecMode + } + return &decMode{ + enforceSortCompositeFields: opts.EnforceSortCompositeFields, + enforceSortRestrictedTypes: opts.EnforceSortRestrictedTypes, + cborDecMode: opts.CBORDecMode, + }, nil } // Decode returns a Cadence value decoded from its CCF-encoded representation. // // This function returns an error if the bytes represent CCF that is malformed, // invalid, or does not comply with requirements in the CCF specification. -func Decode(gauge common.MemoryGauge, b []byte) (cadence.Value, error) { - dec := NewDecoder(gauge, b) +func (dm *decMode) Decode(gauge common.MemoryGauge, b []byte) (cadence.Value, error) { + dec := dm.NewDecoder(gauge, b) v, err := dec.Decode() if err != nil { @@ -90,15 +168,32 @@ func Decode(gauge common.MemoryGauge, b []byte) (cadence.Value, error) { // NewDecoder initializes a Decoder that will decode CCF-encoded bytes from the // given bytes. -func NewDecoder(gauge common.MemoryGauge, b []byte) *Decoder { +func (dm *decMode) NewDecoder(gauge common.MemoryGauge, b []byte) *Decoder { // NOTE: encoded data is not copied by decoder. // CCF codec uses CBOR codec under the hood. return &Decoder{ - dec: CBORDecMode.NewByteStreamDecoder(b), + dec: dm.cborDecMode.NewByteStreamDecoder(b), gauge: gauge, + dm: dm, } } +var defaultDecMode = &decMode{cborDecMode: defaultCBORDecMode} + +// Decode returns a Cadence value decoded from its CCF-encoded representation.
+// +// This function returns an error if the bytes represent CCF that is malformed, +// invalid, or does not comply with requirements in the CCF specification. +func Decode(gauge common.MemoryGauge, b []byte) (cadence.Value, error) { + return defaultDecMode.Decode(gauge, b) +} + +// NewDecoder initializes a Decoder that will decode CCF-encoded bytes from the +// given bytes. +func NewDecoder(gauge common.MemoryGauge, b []byte) *Decoder { + return defaultDecMode.NewDecoder(gauge, b) +} + // Decode reads CCF-encoded bytes and decodes them to a Cadence value. // // This function returns an error if the bytes represent CCF that is malformed, @@ -266,6 +361,7 @@ func (d *Decoder) decodeTypeAndValue(types *cadenceTypeByCCFTypeID) (cadence.Val // / word32-value // / word64-value // / word128-value +// / word256-value // / fix64-value // / ufix64-value func (d *Decoder) decodeValue(t cadence.Type, types *cadenceTypeByCCFTypeID) (cadence.Value, error) { @@ -356,6 +452,9 @@ func (d *Decoder) decodeValue(t cadence.Type, types *cadenceTypeByCCFTypeID) (ca case cadence.Word128Type: return d.decodeWord128() + case cadence.Word256Type: + return d.decodeWord256() + case cadence.Fix64Type: return d.decodeFix64() @@ -415,13 +514,26 @@ func (d *Decoder) decodeValue(t cadence.Type, types *cadenceTypeByCCFTypeID) (ca return d.decodeValue(t.Type, types) default: - err := decodeCBORTagWithKnownNumber(d.dec, CBORTagTypeAndValue) + nt, err := d.dec.NextType() if err != nil { - return nil, fmt.Errorf("unexpected encoded value of Cadence type %s (%T): %s", t.ID(), t, err.Error()) + return nil, err } - // Decode ccf-type-and-value-message. - return d.decodeTypeAndValue(types) + switch nt { + case cbor.NilType: + // Decode nil value (such as cyclic reference value). + err := d.dec.DecodeNil() + return nil, err + + default: + err := decodeCBORTagWithKnownNumber(d.dec, CBORTagTypeAndValue) + if err != nil { + return nil, fmt.Errorf("unexpected encoded value of Cadence type %s (%T): %s", t.ID(), t, err.Error()) + } + + // Decode ccf-type-and-value-message. + return d.decodeTypeAndValue(types) + } } } @@ -819,6 +931,23 @@ func (d *Decoder) decodeWord128() (cadence.Value, error) { ) } +// decodeWord256 decodes word256-value as +// language=CDDL +// word256-value = bigint .ge 0 +func (d *Decoder) decodeWord256() (cadence.Value, error) { + // NewMeteredWord256FromBig checks if decoded big.Int is positive. + return cadence.NewMeteredWord256FromBig( + d.gauge, + func() *big.Int { + bigInt, err := d.dec.DecodeBigInt() + if err != nil { + panic(fmt.Errorf("failed to decode Word256: %s", err)) + } + return bigInt + }, + ) +} + // decodeFix64 decodes fix64-value as // language=CDDL // fix64-value = (int .ge -9223372036854775808) .le 9223372036854775807 @@ -962,7 +1091,7 @@ func (d *Decoder) decodeDictionary(typ *cadence.DictionaryType, types *cadenceTy previousKeyRawBytes = keyRawBytes // decode key from raw bytes - keyDecoder := NewDecoder(d.gauge, keyRawBytes) + keyDecoder := d.dm.NewDecoder(d.gauge, keyRawBytes) key, err := keyDecoder.decodeValue(typ.KeyType, types) if err != nil { return nil, err @@ -1669,14 +1798,14 @@ func (d *Decoder) decodeCompositeTypeValue( } // Decode fields after type is resolved to handle recursive types. - dec := NewDecoder(d.gauge, compTypeValue.rawFields) + dec := d.dm.NewDecoder(d.gauge, compTypeValue.rawFields) fields, err := dec.decodeCompositeFields(visited, dec.decodeTypeValue) if err != nil { return nil, err } // Decode initializers after type is resolved to handle recursive types. 
- dec = NewDecoder(d.gauge, compTypeValue.rawInitializers) + dec = d.dm.NewDecoder(d.gauge, compTypeValue.rawInitializers) initializers, err := dec.decodeInitializerTypeValues(visited) if err != nil { return nil, err diff --git a/encoding/ccf/decode_type.go b/encoding/ccf/decode_type.go index 61aa395bd8..36c7570d09 100644 --- a/encoding/ccf/decode_type.go +++ b/encoding/ccf/decode_type.go @@ -179,6 +179,9 @@ func (d *Decoder) decodeSimpleTypeID() (cadence.Type, error) { case TypeWord128: return cadence.TheWord128Type, nil + case TypeWord256: + return cadence.TheWord256Type, nil + case TypeFix64: return cadence.TheFix64Type, nil @@ -266,6 +269,12 @@ func (d *Decoder) decodeSimpleTypeID() (cadence.Type, error) { case TypeVoid: return cadence.TheVoidType, nil + case TypeAnyStructAttachmentType: + return cadence.TheAnyStructAttachmentType, nil + + case TypeAnyResourceAttachmentType: + return cadence.TheAnyResourceAttachmentType, nil + default: return nil, fmt.Errorf("unsupported encoded simple type ID %d", simpleTypeID) } @@ -586,12 +595,14 @@ func (d *Decoder) decodeRestrictedType( return nil, fmt.Errorf("found duplicate restricted type %s", restrictedTypeID) } - // "Deterministic CCF Encoding Requirements" in CCF specs: - // - // "restricted-type.restrictions MUST be sorted by restriction's cadence-type-id" - // "restricted-type-value.restrictions MUST be sorted by restriction's cadence-type-id." - if !stringsAreSortedBytewise(previousRestrictedTypeID, restrictedTypeID) { - return nil, fmt.Errorf("restricted types are not sorted (%s, %s)", previousRestrictedTypeID, restrictedTypeID) + if d.dm.enforceSortRestrictedTypes == EnforceSortBytewiseLexical { + // "Deterministic CCF Encoding Requirements" in CCF specs: + // + // "restricted-type.restrictions MUST be sorted by restriction's cadence-type-id" + // "restricted-type-value.restrictions MUST be sorted by restriction's cadence-type-id." + if !stringsAreSortedBytewise(previousRestrictedTypeID, restrictedTypeID) { + return nil, fmt.Errorf("restricted types are not sorted (%s, %s)", previousRestrictedTypeID, restrictedTypeID) + } } restrictionTypeIDs[restrictedTypeID] = struct{}{} diff --git a/encoding/ccf/decode_typedef.go b/encoding/ccf/decode_typedef.go index d7eacfc52a..c03ea9b9dc 100644 --- a/encoding/ccf/decode_typedef.go +++ b/encoding/ccf/decode_typedef.go @@ -124,7 +124,7 @@ func (d *Decoder) decodeTypeDefs() (*cadenceTypeByCCFTypeID, error) { panic(cadenceErrors.NewUnexpectedErrorFromCause(err)) } - dec := NewDecoder(d.gauge, rawFields.rawFields) + dec := d.dm.NewDecoder(d.gauge, rawFields.rawFields) fields, err := dec.decodeCompositeFields(types, dec.decodeInlineType) if err != nil { return nil, err @@ -402,12 +402,14 @@ func (d *Decoder) decodeCompositeFields(types *cadenceTypeByCCFTypeID, decodeTyp return nil, fmt.Errorf("found duplicate field name %s in composite-type", field.Identifier) } - // "Deterministic CCF Encoding Requirements" in CCF specs: - // - // "composite-type.fields MUST be sorted by name" - // "composite-type-value.fields MUST be sorted by name." - if !stringsAreSortedBytewise(previousFieldName, field.Identifier) { - return nil, fmt.Errorf("field names are not sorted in composite-type (%s, %s)", previousFieldName, field.Identifier) + if d.dm.enforceSortCompositeFields == EnforceSortBytewiseLexical { + // "Deterministic CCF Encoding Requirements" in CCF specs: + // + // "composite-type.fields MUST be sorted by name" + // "composite-type-value.fields MUST be sorted by name." 
+ if !stringsAreSortedBytewise(previousFieldName, field.Identifier) { + return nil, fmt.Errorf("field names are not sorted in composite-type (%s, %s)", previousFieldName, field.Identifier) + } } fieldNames[field.Identifier] = struct{}{} diff --git a/encoding/ccf/encode.go b/encoding/ccf/encode.go index 67767f29e6..ab237ec2e8 100644 --- a/encoding/ccf/encode.go +++ b/encoding/ccf/encode.go @@ -32,11 +32,11 @@ import ( cadenceErrors "github.com/onflow/cadence/runtime/errors" ) -// CBOREncMode +// defaultCBOREncMode // // See https://github.com/fxamacker/cbor: // "For best performance, reuse EncMode and DecMode after creating them." -var CBOREncMode = func() cbor.EncMode { +var defaultCBOREncMode = func() cbor.EncMode { options := cbor.CoreDetEncOptions() options.BigIntConvert = cbor.BigIntConvertNone encMode, err := options.EncMode() @@ -50,17 +50,88 @@ var CBOREncMode = func() cbor.EncMode { type Encoder struct { // CCF codec uses CBOR codec under the hood. enc *cbor.StreamEncoder + // cachedSortedFieldIndex contains sorted field index of Cadence composite types. cachedSortedFieldIndex map[string][]int // key: composite type ID, value: sorted field indexes + + // CCF encoding mode containing CCF encoding options + em *encMode +} + +type EncMode interface { + // Encode returns the CCF-encoded representation of the given value. + // + // This function returns an error if the Cadence value cannot be represented in CCF. + Encode(value cadence.Value) ([]byte, error) + + // MustEncode returns the CCF-encoded representation of the given value, or panics + // if the value cannot be represented in CCF. + MustEncode(value cadence.Value) []byte + + // NewEncoder initializes an Encoder that will write CCF-encoded bytes to the + // given io.Writer. + NewEncoder(w io.Writer) *Encoder +} + +type SortMode int + +const ( + // SortNone means no sorting. + SortNone SortMode = iota + + // SortBytewiseLexical means bytewise lexicographic order. + SortBytewiseLexical + + maxSortMode +) + +func (sm SortMode) valid() bool { + return sm < maxSortMode +} + +// EncOptions specifies CCF encoding options. +type EncOptions struct { + // SortCompositeFields specifies sort order of Cadence composite fields. + SortCompositeFields SortMode + + // SortRestrictedTypes specifies sort order of Cadence restricted types. + SortRestrictedTypes SortMode +} + +// EventsEncMode is CCF encoding mode for events which contains +// immutable CCF encoding options. It is safe for concurrent use. +var EventsEncMode = &encMode{ + sortCompositeFields: SortNone, + sortRestrictedTypes: SortNone, +} + +type encMode struct { + sortCompositeFields SortMode + sortRestrictedTypes SortMode +} + +// EncMode returns CCF encoding mode, which contains immutable encoding options +// and is safe for concurrent use. +func (opts EncOptions) EncMode() (EncMode, error) { + if !opts.SortCompositeFields.valid() { + return nil, fmt.Errorf("ccf: invalid SortCompositeFields %d", opts.SortCompositeFields) + } + if !opts.SortRestrictedTypes.valid() { + return nil, fmt.Errorf("ccf: invalid SortRestrictedTypes %d", opts.SortRestrictedTypes) + } + return &encMode{ + sortCompositeFields: opts.SortCompositeFields, + sortRestrictedTypes: opts.SortRestrictedTypes, + }, nil } // Encode returns the CCF-encoded representation of the given value. // // This function returns an error if the Cadence value cannot be represented in CCF. 
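+//
+// A sketch of how callers typically obtain and use an EncMode with the options
+// defined in this file (value stands for any cadence.Value to be encoded):
+//
+//	em, err := ccf.EncOptions{
+//		SortCompositeFields: ccf.SortBytewiseLexical,
+//		SortRestrictedTypes: ccf.SortBytewiseLexical,
+//	}.EncMode()
+//	if err != nil {
+//		// handle invalid option values
+//	}
+//	b, err := em.Encode(value)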
-func Encode(value cadence.Value) ([]byte, error) { +func (em *encMode) Encode(value cadence.Value) ([]byte, error) { var w bytes.Buffer - enc := NewEncoder(&w) + enc := em.NewEncoder(&w) defer enc.enc.Close() err := enc.Encode(value) @@ -73,8 +144,8 @@ func Encode(value cadence.Value) ([]byte, error) { // MustEncode returns the CCF-encoded representation of the given value, or panics // if the value cannot be represented in CCF. -func MustEncode(value cadence.Value) []byte { - b, err := Encode(value) +func (em *encMode) MustEncode(value cadence.Value) []byte { + b, err := em.Encode(value) if err != nil { panic(err) } @@ -83,14 +154,35 @@ func MustEncode(value cadence.Value) []byte { // NewEncoder initializes an Encoder that will write CCF-encoded bytes to the // given io.Writer. -func NewEncoder(w io.Writer) *Encoder { - // CCF codec uses CBOR codec under the hood. +func (em *encMode) NewEncoder(w io.Writer) *Encoder { return &Encoder{ - enc: CBOREncMode.NewStreamEncoder(w), + enc: defaultCBOREncMode.NewStreamEncoder(w), cachedSortedFieldIndex: make(map[string][]int), + em: em, } } +var defaultEncMode = &encMode{} + +// Encode returns the CCF-encoded representation of the given value +// by using default CCF encoding options. This function returns an +// error if the Cadence value cannot be represented in CCF. +func Encode(value cadence.Value) ([]byte, error) { + return defaultEncMode.Encode(value) +} + +// MustEncode returns the CCF-encoded representation of the given value, or panics +// if the value cannot be represented in CCF. Default CCF encoding options are used. +func MustEncode(value cadence.Value) []byte { + return defaultEncMode.MustEncode(value) +} + +// NewEncoder initializes an Encoder that will write CCF-encoded bytes to the +// given io.Writer. Default CCF encoding options are used. +func NewEncoder(w io.Writer) *Encoder { + return defaultEncMode.NewEncoder(w) +} + // Encode writes the CCF-encoded representation of the given value to this // encoder's io.Writer. // @@ -323,6 +415,7 @@ func (e *Encoder) encodeTypeDefs(types []cadence.Type, tids ccfTypeIDByCadenceTy // / word32-value // / word64-value // / word128-value +// / word256-value // / fix64-value // / ufix64-value // @@ -342,6 +435,10 @@ func (e *Encoder) encodeValue( tids ccfTypeIDByCadenceType, ) error { + if v == nil { + return e.enc.EncodeNil() + } + runtimeType := v.Type() // CCF requires value to have non-nil type. @@ -451,6 +548,9 @@ func (e *Encoder) encodeValue( case cadence.Word128: return e.encodeWord128(v) + case cadence.Word256: + return e.encodeWord256(v) + case cadence.Fix64: return e.encodeFix64(v) @@ -701,6 +801,13 @@ func (e *Encoder) encodeWord128(v cadence.Word128) error { return e.enc.EncodeBigInt(v.Big()) } +// encodeWord256 encodes cadence.Word256 as +// language=CDDL +// word256-value = bigint .ge 0 +func (e *Encoder) encodeWord256(v cadence.Word256) error { + return e.enc.EncodeBigInt(v.Big()) +} + // encodeFix64 encodes cadence.Fix64 as // language=CDDL // fix64-value = (int .ge -9223372036854775808) .le 9223372036854775807 @@ -786,7 +893,7 @@ func (e *Encoder) encodeSortedDictionary(v cadence.Dictionary, tids ccfTypeIDByC defer putBuffer(buf) // Encode and sort key value pairs.
- sortedPairs, err := encodeAndSortKeyValuePairs(buf, v, tids) + sortedPairs, err := encodeAndSortKeyValuePairs(buf, v, tids, e.em) if err != nil { return err } @@ -812,6 +919,8 @@ func encodeAndSortKeyValuePairs( buf *bytes.Buffer, v cadence.Dictionary, tids ccfTypeIDByCadenceType, + em *encMode, + ) ( []encodedKeyValuePair, error, @@ -821,7 +930,7 @@ func encodeAndSortKeyValuePairs( encodedPairs := make([]encodedKeyValuePair, len(v.Pairs)) - e := NewEncoder(buf) + e := em.NewEncoder(buf) for i, pair := range v.Pairs { @@ -928,31 +1037,47 @@ func (e *Encoder) encodeComposite( return err } - switch len(fields) { - case 0: - // Short-circuit if there is no field. + switch e.em.sortCompositeFields { + case SortNone: + // Encode fields without sorting. + for i, field := range fields { + err = e.encodeValue(field, staticFieldTypes[i].Type, tids) + if err != nil { + return err + } + } return nil - case 1: - // Avoid overhead of sorting if there is only one field. - return e.encodeValue(fields[0], staticFieldTypes[0].Type, tids) + case SortBytewiseLexical: + switch len(fields) { + case 0: + // Short-circuit if there is no field. + return nil - default: - sortedIndexes := e.getSortedFieldIndex(typ) + case 1: + // Avoid overhead of sorting if there is only one field. + return e.encodeValue(fields[0], staticFieldTypes[0].Type, tids) - if len(sortedIndexes) != len(staticFieldTypes) { - panic(cadenceErrors.NewUnexpectedError("number of sorted indexes doesn't match number of field types")) - } + default: + sortedIndexes := e.getSortedFieldIndex(typ) - for _, index := range sortedIndexes { - // Encode sorted field as value. - err = e.encodeValue(fields[index], staticFieldTypes[index].Type, tids) - if err != nil { - return err + if len(sortedIndexes) != len(staticFieldTypes) { + panic(cadenceErrors.NewUnexpectedError("number of sorted indexes doesn't match number of field types")) } + + for _, index := range sortedIndexes { + // Encode sorted field as value. + err = e.encodeValue(fields[index], staticFieldTypes[index].Type, tids) + if err != nil { + return err + } + } + + return nil } - return nil + default: + panic(cadenceErrors.NewUnexpectedError("unsupported sort option for composite fields: %d", e.em.sortCompositeFields)) } } @@ -1585,36 +1710,52 @@ func (e *Encoder) encodeFieldTypeValues(fieldTypes []cadence.Field, visited ccfT return err } - switch len(fieldTypes) { - case 0: - // Short-circuit if there is no field type. + switch e.em.sortCompositeFields { + case SortNone: + // Encode fields without sorting. + for _, fieldType := range fieldTypes { + err = e.encodeFieldTypeValue(fieldType, visited) + if err != nil { + return err + } + } return nil - case 1: - // Avoid overhead of sorting if there is only one field type. - return e.encodeFieldTypeValue(fieldTypes[0], visited) + case SortBytewiseLexical: + switch len(fieldTypes) { + case 0: + // Short-circuit if there is no field type. + return nil - default: - // "Deterministic CCF Encoding Requirements" in CCF specs: - // - // "composite-type-value.fields MUST be sorted by name." - - // NOTE: bytewiseFieldIdentifierSorter doesn't sort fieldTypes in place. - // bytewiseFieldIdentifierSorter.indexes is used as sorted fieldTypes - // index. - sorter := newBytewiseFieldSorter(fieldTypes) - - sort.Sort(sorter) + case 1: + // Avoid overhead of sorting if there is only one field type. + return e.encodeFieldTypeValue(fieldTypes[0], visited) - // Encode sorted field types. 
- for _, index := range sorter.indexes { - err = e.encodeFieldTypeValue(fieldTypes[index], visited) - if err != nil { - return err + default: + // "Deterministic CCF Encoding Requirements" in CCF specs: + // + // "composite-type-value.fields MUST be sorted by name." + + // NOTE: bytewiseFieldIdentifierSorter doesn't sort fieldTypes in place. + // bytewiseFieldIdentifierSorter.indexes is used as sorted fieldTypes + // index. + sorter := newBytewiseFieldSorter(fieldTypes) + + sort.Sort(sorter) + + // Encode sorted field types. + for _, index := range sorter.indexes { + err = e.encodeFieldTypeValue(fieldTypes[index], visited) + if err != nil { + return err + } } + + return nil } - return nil + default: + panic(cadenceErrors.NewUnexpectedError("unsupported sort option for composite field type values: %d", e.em.sortCompositeFields)) } } diff --git a/encoding/ccf/encode_type.go b/encoding/ccf/encode_type.go index 23970d236a..dc982895cd 100644 --- a/encoding/ccf/encode_type.go +++ b/encoding/ccf/encode_type.go @@ -411,34 +411,50 @@ func (e *Encoder) encodeRestrictedTypeWithRawTag( return err } - switch len(restrictions) { - case 0: - // Short-circuit if there is no restriction. + switch e.em.sortRestrictedTypes { + case SortNone: + for _, res := range restrictions { + // Encode restriction type with given encodeTypeFn. + err = encodeRestrictionTypeFn(res, tids) + if err != nil { + return err + } + } return nil - case 1: - // Avoid overhead of sorting if there is only one restriction. - // Encode restriction type with given encodeTypeFn. - return encodeTypeFn(restrictions[0], tids) - - default: - // "Deterministic CCF Encoding Requirements" in CCF specs: - // - // "restricted-type.restrictions MUST be sorted by restriction's cadence-type-id" - // "restricted-type-value.restrictions MUST be sorted by restriction's cadence-type-id." - sorter := newBytewiseCadenceTypeSorter(restrictions) + case SortBytewiseLexical: + switch len(restrictions) { + case 0: + // Short-circuit if there is no restriction. + return nil - sort.Sort(sorter) - - for _, index := range sorter.indexes { + case 1: + // Avoid overhead of sorting if there is only one restriction. // Encode restriction type with given encodeTypeFn. - err = encodeRestrictionTypeFn(restrictions[index], tids) - if err != nil { - return err + return encodeTypeFn(restrictions[0], tids) + + default: + // "Deterministic CCF Encoding Requirements" in CCF specs: + // + // "restricted-type.restrictions MUST be sorted by restriction's cadence-type-id" + // "restricted-type-value.restrictions MUST be sorted by restriction's cadence-type-id." + sorter := newBytewiseCadenceTypeSorter(restrictions) + + sort.Sort(sorter) + + for _, index := range sorter.indexes { + // Encode restriction type with given encodeTypeFn. + err = encodeRestrictionTypeFn(restrictions[index], tids) + if err != nil { + return err + } } + + return nil } - return nil + default: + panic(cadenceErrors.NewUnexpectedError("unsupported sort option for restricted types: %d", e.em.sortRestrictedTypes)) } } diff --git a/encoding/ccf/encode_typedef.go b/encoding/ccf/encode_typedef.go index a0fc21f206..bd05fe0bad 100644 --- a/encoding/ccf/encode_typedef.go +++ b/encoding/ccf/encode_typedef.go @@ -136,30 +136,46 @@ func (e *Encoder) encodeCompositeTypeFields(typ cadence.CompositeType, tids ccfT return err } - switch len(fieldTypes) { - case 0: - // Short-circuit if there is no field type. - return nil - - case 1: - // Avoid overhead of sorting if there is only one field. 
- return e.encodeCompositeTypeField(fieldTypes[0], tids) - - default: - // "Deterministic CCF Encoding Requirements" in CCF specs: - // - // "composite-type.fields MUST be sorted by name" - sortedIndexes := e.getSortedFieldIndex(typ) - - for _, index := range sortedIndexes { - // Encode field - err = e.encodeCompositeTypeField(fieldTypes[index], tids) + switch e.em.sortCompositeFields { + case SortNone: + // Encode fields without sorting. + for _, fieldType := range fieldTypes { + err = e.encodeCompositeTypeField(fieldType, tids) if err != nil { return err } } - return nil + + case SortBytewiseLexical: + switch len(fieldTypes) { + case 0: + // Short-circuit if there is no field type. + return nil + + case 1: + // Avoid overhead of sorting if there is only one field. + return e.encodeCompositeTypeField(fieldTypes[0], tids) + + default: + // "Deterministic CCF Encoding Requirements" in CCF specs: + // + // "composite-type.fields MUST be sorted by name" + sortedIndexes := e.getSortedFieldIndex(typ) + + for _, index := range sortedIndexes { + // Encode field + err = e.encodeCompositeTypeField(fieldTypes[index], tids) + if err != nil { + return err + } + } + + return nil + } + + default: + panic(cadenceErrors.NewUnexpectedError("unsupported sort option for composite field types: %d", e.em.sortCompositeFields)) } } diff --git a/encoding/ccf/service_events_test.go b/encoding/ccf/service_events_test.go new file mode 100644 index 0000000000..bbabd721d8 --- /dev/null +++ b/encoding/ccf/service_events_test.go @@ -0,0 +1,1371 @@ +/* + * Cadence - The resource-oriented smart contract programming language + * + * Copyright Dapper Labs, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package ccf_test + +import ( + "testing" + + "github.com/stretchr/testify/require" + + "github.com/onflow/cadence" + "github.com/onflow/cadence/encoding/ccf" + "github.com/onflow/cadence/runtime/common" +) + +func TestEpochSetupEvent(t *testing.T) { + event := createEpochSetupEvent() + + b, err := ccf.Encode(event) + require.NoError(t, err) + + // Test that encoded value isn't sorted. + _, err = deterministicDecMode.Decode(nil, b) + require.Error(t, err) + + decodedValue, err := ccf.Decode(nil, b) + require.NoError(t, err) + + // Test decoded event has unsorted fields. + // If field is struct (such as NodeInfo), struct fields should be unsorted as well. 
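+ // (ccf.Encode above uses the default encoding mode, whose sort options are the
+ // zero value SortNone, so fields are written in the order createEpochSetupEvent
+ // declares them; that is what the assertions below rely on.)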
+ + evt, ok := decodedValue.(cadence.Event) + require.True(t, ok) + require.Equal(t, 9, len(evt.Fields)) + + evtType, ok := decodedValue.Type().(*cadence.EventType) + require.True(t, ok) + require.Equal(t, 9, len(evtType.Fields)) + + // field 0: counter + require.Equal(t, "counter", evtType.Fields[0].Identifier) + require.Equal(t, cadence.UInt64(1), evt.Fields[0]) + + // field 1: nodeInfo + require.Equal(t, "nodeInfo", evtType.Fields[1].Identifier) + nodeInfos, ok := evt.Fields[1].(cadence.Array) + require.True(t, ok) + testNodeInfos(t, nodeInfos) + + // field 2: firstView + require.Equal(t, "firstView", evtType.Fields[2].Identifier) + require.Equal(t, cadence.UInt64(100), evt.Fields[2]) + + // field 3: finalView + require.Equal(t, "finalView", evtType.Fields[3].Identifier) + require.Equal(t, cadence.UInt64(200), evt.Fields[3]) + + // field 4: collectorClusters + require.Equal(t, "collectorClusters", evtType.Fields[4].Identifier) + epochCollectors, ok := evt.Fields[4].(cadence.Array) + require.True(t, ok) + testEpochCollectors(t, epochCollectors) + + // field 5: randomSource + require.Equal(t, "randomSource", evtType.Fields[5].Identifier) + require.Equal(t, cadence.String("01020304"), evt.Fields[5]) + + // field 6: DKGPhase1FinalView + require.Equal(t, "DKGPhase1FinalView", evtType.Fields[6].Identifier) + require.Equal(t, cadence.UInt64(150), evt.Fields[6]) + + // field 7: DKGPhase2FinalView + require.Equal(t, "DKGPhase2FinalView", evtType.Fields[7].Identifier) + require.Equal(t, cadence.UInt64(160), evt.Fields[7]) + + // field 8: DKGPhase3FinalView + require.Equal(t, "DKGPhase3FinalView", evtType.Fields[8].Identifier) + require.Equal(t, cadence.UInt64(170), evt.Fields[8]) +} + +func testNodeInfos(t *testing.T, nodeInfos cadence.Array) { + require.Equal(t, 7, len(nodeInfos.Values)) + + // Test nodeInfo 0 + + node0, ok := nodeInfos.Values[0].(cadence.Struct) + require.True(t, ok) + require.Equal(t, 14, len(node0.Fields)) + + nodeInfoType, ok := node0.Type().(*cadence.StructType) + require.True(t, ok) + require.Equal(t, 14, len(nodeInfoType.Fields)) + + // field 0: id + require.Equal(t, "id", nodeInfoType.Fields[0].Identifier) + require.Equal(t, cadence.String("0000000000000000000000000000000000000000000000000000000000000001"), node0.Fields[0]) + + // field 1: role + require.Equal(t, "role", nodeInfoType.Fields[1].Identifier) + require.Equal(t, cadence.UInt8(1), node0.Fields[1]) + + // field 2: networkingAddress + require.Equal(t, "networkingAddress", nodeInfoType.Fields[2].Identifier) + require.Equal(t, cadence.String("1.flow.com"), node0.Fields[2]) + + // field 3: networkingKey + require.Equal(t, "networkingKey", nodeInfoType.Fields[3].Identifier) + require.Equal(t, cadence.String("378dbf45d85c614feb10d8bd4f78f4b6ef8eec7d987b937e123255444657fb3da031f232a507e323df3a6f6b8f50339c51d188e80c0e7a92420945cc6ca893fc"), node0.Fields[3]) + + // field 4: stakingKey + require.Equal(t, "stakingKey", nodeInfoType.Fields[4].Identifier) + require.Equal(t, cadence.String("af4aade26d76bb2ab15dcc89adcef82a51f6f04b3cb5f4555214b40ec89813c7a5f95776ea4fe449de48166d0bbc59b919b7eabebaac9614cf6f9461fac257765415f4d8ef1376a2365ec9960121888ea5383d88a140c24c29962b0a14e4e4e7"), node0.Fields[4]) + + // field 5: tokensStaked + require.Equal(t, "tokensStaked", nodeInfoType.Fields[5].Identifier) + require.Equal(t, ufix64FromString("0.00000000"), node0.Fields[5]) + + // field 6: tokensCommitted + require.Equal(t, "tokensCommitted", nodeInfoType.Fields[6].Identifier) + require.Equal(t, ufix64FromString("1350000.00000000"), 
node0.Fields[6]) + + // field 7: tokensUnstaking + require.Equal(t, "tokensUnstaking", nodeInfoType.Fields[7].Identifier) + require.Equal(t, ufix64FromString("0.00000000"), node0.Fields[7]) + + // field 8: tokensUnstaked + require.Equal(t, "tokensUnstaked", nodeInfoType.Fields[8].Identifier) + require.Equal(t, ufix64FromString("0.00000000"), node0.Fields[8]) + + // field 9: tokensRewarded + require.Equal(t, "tokensRewarded", nodeInfoType.Fields[9].Identifier) + require.Equal(t, ufix64FromString("0.00000000"), node0.Fields[9]) + + // field 10: delegators + require.Equal(t, "delegators", nodeInfoType.Fields[10].Identifier) + delegators, ok := node0.Fields[10].(cadence.Array) + require.True(t, ok) + require.Equal(t, 0, len(delegators.Values)) + + // field 11: delegatorIDCounter + require.Equal(t, "delegatorIDCounter", nodeInfoType.Fields[11].Identifier) + require.Equal(t, cadence.UInt32(0), node0.Fields[11]) + + // field 12: tokensRequestedToUnstake + require.Equal(t, "tokensRequestedToUnstake", nodeInfoType.Fields[12].Identifier) + require.Equal(t, ufix64FromString("0.00000000"), node0.Fields[12]) + + // field 13: initialWeight + require.Equal(t, "initialWeight", nodeInfoType.Fields[13].Identifier) + require.Equal(t, cadence.UInt64(100), node0.Fields[13]) + + // Test nodeInfo 6 (last nodeInfo struct) + + node6, ok := nodeInfos.Values[6].(cadence.Struct) + require.True(t, ok) + require.Equal(t, 14, len(node6.Fields)) + + nodeInfoType, ok = node6.Type().(*cadence.StructType) + require.True(t, ok) + require.Equal(t, 14, len(nodeInfoType.Fields)) + + // field 0: id + require.Equal(t, "id", nodeInfoType.Fields[0].Identifier) + require.Equal(t, cadence.String("0000000000000000000000000000000000000000000000000000000000000031"), node6.Fields[0]) + + // field 1: role + require.Equal(t, "role", nodeInfoType.Fields[1].Identifier) + require.Equal(t, cadence.UInt8(4), node6.Fields[1]) + + // field 2: networkingAddress + require.Equal(t, "networkingAddress", nodeInfoType.Fields[2].Identifier) + require.Equal(t, cadence.String("31.flow.com"), node6.Fields[2]) + + // field 3: networkingKey + require.Equal(t, "networkingKey", nodeInfoType.Fields[3].Identifier) + require.Equal(t, cadence.String("697241208dcc9142b6f53064adc8ff1c95760c68beb2ba083c1d005d40181fd7a1b113274e0163c053a3addd47cd528ec6a1f190cf465aac87c415feaae011ae"), node6.Fields[3]) + + // field 4: stakingKey + require.Equal(t, "stakingKey", nodeInfoType.Fields[4].Identifier) + require.Equal(t, cadence.String("b1f97d0a06020eca97352e1adde72270ee713c7daf58da7e74bf72235321048b4841bdfc28227964bf18e371e266e32107d238358848bcc5d0977a0db4bda0b4c33d3874ff991e595e0f537c7b87b4ddce92038ebc7b295c9ea20a1492302aa7"), node6.Fields[4]) + + // field 5: tokensStaked + require.Equal(t, "tokensStaked", nodeInfoType.Fields[5].Identifier) + require.Equal(t, ufix64FromString("0.00000000"), node6.Fields[5]) + + // field 6: tokensCommitted + require.Equal(t, "tokensCommitted", nodeInfoType.Fields[6].Identifier) + require.Equal(t, ufix64FromString("1350000.00000000"), node6.Fields[6]) + + // field 7: tokensUnstaking + require.Equal(t, "tokensUnstaking", nodeInfoType.Fields[7].Identifier) + require.Equal(t, ufix64FromString("0.00000000"), node6.Fields[7]) + + // field 8: tokensUnstaked + require.Equal(t, "tokensUnstaked", nodeInfoType.Fields[8].Identifier) + require.Equal(t, ufix64FromString("0.00000000"), node6.Fields[8]) + + // field 9: tokensRewarded + require.Equal(t, "tokensRewarded", nodeInfoType.Fields[9].Identifier) + require.Equal(t, ufix64FromString("0.00000000"), 
node6.Fields[9]) + + // field 10: delegators + require.Equal(t, "delegators", nodeInfoType.Fields[10].Identifier) + delegators, ok = node6.Fields[10].(cadence.Array) + require.True(t, ok) + require.Equal(t, 0, len(delegators.Values)) + + // field 11: delegatorIDCounter + require.Equal(t, "delegatorIDCounter", nodeInfoType.Fields[11].Identifier) + require.Equal(t, cadence.UInt32(0), node6.Fields[11]) + + // field 12: tokensRequestedToUnstake + require.Equal(t, "tokensRequestedToUnstake", nodeInfoType.Fields[12].Identifier) + require.Equal(t, ufix64FromString("0.00000000"), node6.Fields[12]) + + // field 13: initialWeight + require.Equal(t, "initialWeight", nodeInfoType.Fields[13].Identifier) + require.Equal(t, cadence.UInt64(100), node6.Fields[13]) +} + +func testEpochCollectors(t *testing.T, collectors cadence.Array) { + require.Equal(t, 2, len(collectors.Values)) + + // collector 0 + collector0, ok := collectors.Values[0].(cadence.Struct) + require.True(t, ok) + + collectorType, ok := collector0.Type().(*cadence.StructType) + require.True(t, ok) + + // field 0: index + require.Equal(t, "index", collectorType.Fields[0].Identifier) + require.Equal(t, cadence.UInt16(0), collector0.Fields[0]) + + // field 1: nodeWeights + require.Equal(t, "nodeWeights", collectorType.Fields[1].Identifier) + weights, ok := collector0.Fields[1].(cadence.Dictionary) + require.True(t, ok) + require.Equal(t, 2, len(weights.Pairs)) + require.Equal(t, + cadence.KeyValuePair{ + Key: cadence.String("0000000000000000000000000000000000000000000000000000000000000001"), + Value: cadence.UInt64(100), + }, + weights.Pairs[0]) + require.Equal(t, + cadence.KeyValuePair{ + Key: cadence.String("0000000000000000000000000000000000000000000000000000000000000002"), + Value: cadence.UInt64(100), + }, weights.Pairs[1]) + + // field 2: totalWeight + require.Equal(t, "totalWeight", collectorType.Fields[2].Identifier) + require.Equal(t, cadence.NewUInt64(100), collector0.Fields[2]) + + // field 3: generatedVotes + require.Equal(t, "generatedVotes", collectorType.Fields[3].Identifier) + generatedVotes, ok := collector0.Fields[3].(cadence.Dictionary) + require.True(t, ok) + require.Equal(t, 0, len(generatedVotes.Pairs)) + + // field 4: uniqueVoteMessageTotalWeights + require.Equal(t, "uniqueVoteMessageTotalWeights", collectorType.Fields[4].Identifier) + uniqueVoteMessageTotalWeights, ok := collector0.Fields[4].(cadence.Dictionary) + require.True(t, ok) + require.Equal(t, 0, len(uniqueVoteMessageTotalWeights.Pairs)) + + // collector 1 + collector1, ok := collectors.Values[1].(cadence.Struct) + require.True(t, ok) + + collectorType, ok = collector1.Type().(*cadence.StructType) + require.True(t, ok) + + // field 0: index + require.Equal(t, "index", collectorType.Fields[0].Identifier) + require.Equal(t, cadence.UInt16(1), collector1.Fields[0]) + + // field 1: nodeWeights + require.Equal(t, "nodeWeights", collectorType.Fields[1].Identifier) + weights, ok = collector1.Fields[1].(cadence.Dictionary) + require.True(t, ok) + require.Equal(t, 2, len(weights.Pairs)) + require.Equal(t, + cadence.KeyValuePair{ + Key: cadence.String("0000000000000000000000000000000000000000000000000000000000000003"), + Value: cadence.UInt64(100), + }, + weights.Pairs[0]) + require.Equal(t, + cadence.KeyValuePair{ + Key: cadence.String("0000000000000000000000000000000000000000000000000000000000000004"), + Value: cadence.UInt64(100), + }, weights.Pairs[1]) + + // field 2: totalWeight + require.Equal(t, "totalWeight", collectorType.Fields[2].Identifier) + require.Equal(t, 
cadence.NewUInt64(0), collector1.Fields[2]) + + // field 3: generatedVotes + require.Equal(t, "generatedVotes", collectorType.Fields[3].Identifier) + generatedVotes, ok = collector1.Fields[3].(cadence.Dictionary) + require.True(t, ok) + require.Equal(t, 0, len(generatedVotes.Pairs)) + + // field 4: uniqueVoteMessageTotalWeights + require.Equal(t, "uniqueVoteMessageTotalWeights", collectorType.Fields[4].Identifier) + uniqueVoteMessageTotalWeights, ok = collector1.Fields[4].(cadence.Dictionary) + require.True(t, ok) + require.Equal(t, 0, len(uniqueVoteMessageTotalWeights.Pairs)) +} + +func TestEpochCommitEvent(t *testing.T) { + event := createEpochCommittedEvent() + + b, err := ccf.Encode(event) + require.NoError(t, err) + + // Test that encoded value isn't sorted. + _, err = deterministicDecMode.Decode(nil, b) + require.Error(t, err) + + decodedValue, err := ccf.Decode(nil, b) + require.NoError(t, err) + + // Test decoded event has unsorted fields. + // If field is struct (such as ClusterQC), struct fields should be unsorted as well. + + evt, ok := decodedValue.(cadence.Event) + require.True(t, ok) + require.Equal(t, 3, len(evt.Fields)) + + evtType, ok := decodedValue.Type().(*cadence.EventType) + require.True(t, ok) + require.Equal(t, 3, len(evtType.Fields)) + + // field 0: counter + require.Equal(t, "counter", evtType.Fields[0].Identifier) + require.Equal(t, cadence.UInt64(1), evt.Fields[0]) + + // field 1: clusterQCs + require.Equal(t, "clusterQCs", evtType.Fields[1].Identifier) + clusterQCs, ok := evt.Fields[1].(cadence.Array) + require.True(t, ok) + testClusterQCs(t, clusterQCs) + + // field 2: dkgPubKeys + require.Equal(t, "dkgPubKeys", evtType.Fields[2].Identifier) + dkgPubKeys, ok := evt.Fields[2].(cadence.Array) + require.True(t, ok) + require.Equal(t, 2, len(dkgPubKeys.Values)) + require.Equal(t, cadence.String("8c588266db5f5cda629e83f8aa04ae9413593fac19e4865d06d291c9d14fbdd9bdb86a7a12f9ef8590c79cb635e3163315d193087e9336092987150d0cd2b14ac6365f7dc93eec573752108b8c12368abb65f0652d9f644e5aed611c37926950"), dkgPubKeys.Values[0]) + require.Equal(t, cadence.String("87a339e4e5c74f089da20a33f515d8c8f4464ab53ede5a74aa2432cd1ae66d522da0c122249ee176cd747ddc83ca81090498389384201614caf51eac392c1c0a916dfdcfbbdf7363f9552b6468434add3d3f6dc91a92bbe3ee368b59b7828488"), dkgPubKeys.Values[1]) +} + +func testClusterQCs(t *testing.T, clusterQCs cadence.Array) { + require.Equal(t, 2, len(clusterQCs.Values)) + + // Test clusterQC0 + + clusterQC0, ok := clusterQCs.Values[0].(cadence.Struct) + require.True(t, ok) + + clusterQCType, ok := clusterQC0.Type().(*cadence.StructType) + require.True(t, ok) + + // field 0: index + require.Equal(t, "index", clusterQCType.Fields[0].Identifier) + require.Equal(t, cadence.UInt16(0), clusterQC0.Fields[0]) + + // field 1: voteSignatures + require.Equal(t, "voteSignatures", clusterQCType.Fields[1].Identifier) + sigs, ok := clusterQC0.Fields[1].(cadence.Array) + require.True(t, ok) + require.Equal(t, 2, len(sigs.Values)) + require.Equal(t, cadence.String("a39cd1e1bf7e2fb0609b7388ce5215a6a4c01eef2aee86e1a007faa28a6b2a3dc876e11bb97cdb26c3846231d2d01e4d"), sigs.Values[0]) + require.Equal(t, cadence.String("91673ad9c717d396c9a0953617733c128049ac1a639653d4002ab245b121df1939430e313bcbfd06948f6a281f6bf853"), sigs.Values[1]) + + // field 2: voteMessage + require.Equal(t, "voteMessage", clusterQCType.Fields[2].Identifier) + require.Equal(t, cadence.String("irrelevant_for_these_purposes"), clusterQC0.Fields[2]) + + // field 3: voterIDs + require.Equal(t, "voterIDs", 
clusterQCType.Fields[3].Identifier) + ids, ok := clusterQC0.Fields[3].(cadence.Array) + require.True(t, ok) + require.Equal(t, 2, len(ids.Values)) + require.Equal(t, cadence.String("0000000000000000000000000000000000000000000000000000000000000001"), ids.Values[0]) + require.Equal(t, cadence.String("0000000000000000000000000000000000000000000000000000000000000002"), ids.Values[1]) + + // Test clusterQC1 + + clusterQC1, ok := clusterQCs.Values[1].(cadence.Struct) + require.True(t, ok) + + clusterQCType, ok = clusterQC1.Type().(*cadence.StructType) + require.True(t, ok) + + // field 0: index + require.Equal(t, "index", clusterQCType.Fields[0].Identifier) + require.Equal(t, cadence.UInt16(1), clusterQC1.Fields[0]) + + // field 1: voteSignatures + require.Equal(t, "voteSignatures", clusterQCType.Fields[1].Identifier) + sigs, ok = clusterQC1.Fields[1].(cadence.Array) + require.True(t, ok) + require.Equal(t, 2, len(sigs.Values)) + require.Equal(t, cadence.String("b2bff159971852ed63e72c37991e62c94822e52d4fdcd7bf29aaf9fb178b1c5b4ce20dd9594e029f3574cb29533b857a"), sigs.Values[0]) + require.Equal(t, cadence.String("9931562f0248c9195758da3de4fb92f24fa734cbc20c0cb80280163560e0e0348f843ac89ecbd3732e335940c1e8dccb"), sigs.Values[1]) + + // field 2: voteMessage + require.Equal(t, "voteMessage", clusterQCType.Fields[2].Identifier) + require.Equal(t, cadence.String("irrelevant_for_these_purposes"), clusterQC1.Fields[2]) + + // field 3: voterIDs + require.Equal(t, "voterIDs", clusterQCType.Fields[3].Identifier) + ids, ok = clusterQC1.Fields[3].(cadence.Array) + require.True(t, ok) + require.Equal(t, 2, len(ids.Values)) + require.Equal(t, cadence.String("0000000000000000000000000000000000000000000000000000000000000003"), ids.Values[0]) + require.Equal(t, cadence.String("0000000000000000000000000000000000000000000000000000000000000004"), ids.Values[1]) +} + +func TestVersionBeaconEvent(t *testing.T) { + event := createVersionBeaconEvent() + + b, err := ccf.Encode(event) + require.NoError(t, err) + + // Test that encoded value isn't sorted. + _, err = deterministicDecMode.Decode(nil, b) + require.Error(t, err) + + decodedValue, err := ccf.Decode(nil, b) + require.NoError(t, err) + + // Test decoded event has unsorted fields. + // If field is struct (such as semver), struct fields should be unsorted as well. 
+ + evt, ok := decodedValue.(cadence.Event) + require.True(t, ok) + require.Equal(t, 2, len(evt.Fields)) + + evtType, ok := decodedValue.Type().(*cadence.EventType) + require.True(t, ok) + require.Equal(t, 2, len(evtType.Fields)) + + // field 0: versionBoundaries + require.Equal(t, "versionBoundaries", evtType.Fields[0].Identifier) + versionBoundaries, ok := evt.Fields[0].(cadence.Array) + require.True(t, ok) + testVersionBoundaries(t, versionBoundaries) + + // field 1: sequence + require.Equal(t, "sequence", evtType.Fields[1].Identifier) + require.Equal(t, cadence.UInt64(5), evt.Fields[1]) +} + +func testVersionBoundaries(t *testing.T, versionBoundaries cadence.Array) { + require.Equal(t, 1, len(versionBoundaries.Values)) + + boundary, ok := versionBoundaries.Values[0].(cadence.Struct) + require.True(t, ok) + require.Equal(t, 2, len(boundary.Fields)) + + boundaryType, ok := boundary.Type().(*cadence.StructType) + require.True(t, ok) + require.Equal(t, 2, len(boundaryType.Fields)) + + // field 0: blockHeight + require.Equal(t, "blockHeight", boundaryType.Fields[0].Identifier) + require.Equal(t, cadence.UInt64(44), boundary.Fields[0]) + + // field 1: version + require.Equal(t, "version", boundaryType.Fields[1].Identifier) + version, ok := boundary.Fields[1].(cadence.Struct) + require.True(t, ok) + testSemver(t, version) +} + +func testSemver(t *testing.T, version cadence.Struct) { + require.Equal(t, 4, len(version.Fields)) + + semverType, ok := version.Type().(*cadence.StructType) + require.True(t, ok) + require.Equal(t, 4, len(semverType.Fields)) + + // field 0: preRelease + require.Equal(t, "preRelease", semverType.Fields[0].Identifier) + require.Equal(t, cadence.NewOptional(cadence.String("")), version.Fields[0]) + + // field 1: major + require.Equal(t, "major", semverType.Fields[1].Identifier) + require.Equal(t, cadence.UInt8(2), version.Fields[1]) + + // field 2: minor + require.Equal(t, "minor", semverType.Fields[2].Identifier) + require.Equal(t, cadence.UInt8(13), version.Fields[2]) + + // field 3: patch + require.Equal(t, "patch", semverType.Fields[3].Identifier) + require.Equal(t, cadence.UInt8(7), version.Fields[3]) +} + +func createEpochSetupEvent() cadence.Event { + return cadence.NewEvent([]cadence.Value{ + // counter + cadence.NewUInt64(1), + + // nodeInfo + createEpochNodes(), + + // firstView + cadence.NewUInt64(100), + + // finalView + cadence.NewUInt64(200), + + // collectorClusters + createEpochCollectors(), + + // randomSource + cadence.String("01020304"), + + // DKGPhase1FinalView + cadence.UInt64(150), + + // DKGPhase2FinalView + cadence.UInt64(160), + + // DKGPhase3FinalView + cadence.UInt64(170), + }).WithType(newFlowEpochEpochSetupEventType()) +} + +func createEpochNodes() cadence.Array { + + nodeInfoType := newFlowIDTableStakingNodeInfoStructType() + + nodeInfo1 := cadence.NewStruct([]cadence.Value{ + // id + cadence.String("0000000000000000000000000000000000000000000000000000000000000001"), + + // role + cadence.UInt8(1), + + // networkingAddress + cadence.String("1.flow.com"), + + // networkingKey + cadence.String("378dbf45d85c614feb10d8bd4f78f4b6ef8eec7d987b937e123255444657fb3da031f232a507e323df3a6f6b8f50339c51d188e80c0e7a92420945cc6ca893fc"), + + // stakingKey + cadence.String("af4aade26d76bb2ab15dcc89adcef82a51f6f04b3cb5f4555214b40ec89813c7a5f95776ea4fe449de48166d0bbc59b919b7eabebaac9614cf6f9461fac257765415f4d8ef1376a2365ec9960121888ea5383d88a140c24c29962b0a14e4e4e7"), + + // tokensStaked + ufix64FromString("0.00000000"), + + // tokensCommitted + 
ufix64FromString("1350000.00000000"), + + // tokensUnstaking + ufix64FromString("0.00000000"), + + // tokensUnstaked + ufix64FromString("0.00000000"), + + // tokensRewarded + ufix64FromString("0.00000000"), + + // delegators + cadence.NewArray([]cadence.Value{}).WithType(cadence.NewVariableSizedArrayType(cadence.NewUInt32Type())), + + // delegatorIDCounter + cadence.UInt32(0), + + // tokensRequestedToUnstake + ufix64FromString("0.00000000"), + + // initialWeight + cadence.UInt64(100), + }).WithType(nodeInfoType) + + nodeInfo2 := cadence.NewStruct([]cadence.Value{ + // id + cadence.String("0000000000000000000000000000000000000000000000000000000000000002"), + + // role + cadence.UInt8(1), + + // networkingAddress + cadence.String("2.flow.com"), + + // networkingKey + cadence.String("378dbf45d85c614feb10d8bd4f78f4b6ef8eec7d987b937e123255444657fb3da031f232a507e323df3a6f6b8f50339c51d188e80c0e7a92420945cc6ca893fc"), + + // stakingKey + cadence.String("af4aade26d76bb2ab15dcc89adcef82a51f6f04b3cb5f4555214b40ec89813c7a5f95776ea4fe449de48166d0bbc59b919b7eabebaac9614cf6f9461fac257765415f4d8ef1376a2365ec9960121888ea5383d88a140c24c29962b0a14e4e4e7"), + + // tokensStaked + ufix64FromString("0.00000000"), + + // tokensCommitted + ufix64FromString("1350000.00000000"), + + // tokensUnstaking + ufix64FromString("0.00000000"), + + // tokensUnstaked + ufix64FromString("0.00000000"), + + // tokensRewarded + ufix64FromString("0.00000000"), + + // delegators + cadence.NewArray([]cadence.Value{}).WithType(cadence.NewVariableSizedArrayType(cadence.NewUInt32Type())), + + // delegatorIDCounter + cadence.UInt32(0), + + // tokensRequestedToUnstake + ufix64FromString("0.00000000"), + + // initialWeight + cadence.UInt64(100), + }).WithType(nodeInfoType) + + nodeInfo3 := cadence.NewStruct([]cadence.Value{ + // id + cadence.String("0000000000000000000000000000000000000000000000000000000000000003"), + + // role + cadence.UInt8(1), + + // networkingAddress + cadence.String("3.flow.com"), + + // networkingKey + cadence.String("378dbf45d85c614feb10d8bd4f78f4b6ef8eec7d987b937e123255444657fb3da031f232a507e323df3a6f6b8f50339c51d188e80c0e7a92420945cc6ca893fc"), + + // stakingKey + cadence.String("af4aade26d76bb2ab15dcc89adcef82a51f6f04b3cb5f4555214b40ec89813c7a5f95776ea4fe449de48166d0bbc59b919b7eabebaac9614cf6f9461fac257765415f4d8ef1376a2365ec9960121888ea5383d88a140c24c29962b0a14e4e4e7"), + + // tokensStaked + ufix64FromString("0.00000000"), + + // tokensCommitted + ufix64FromString("1350000.00000000"), + + // tokensUnstaking + ufix64FromString("0.00000000"), + + // tokensUnstaked + ufix64FromString("0.00000000"), + + // tokensRewarded + ufix64FromString("0.00000000"), + + // delegators + cadence.NewArray([]cadence.Value{}).WithType(cadence.NewVariableSizedArrayType(cadence.NewUInt32Type())), + + // delegatorIDCounter + cadence.UInt32(0), + + // tokensRequestedToUnstake + ufix64FromString("0.00000000"), + + // initialWeight + cadence.UInt64(100), + }).WithType(nodeInfoType) + + nodeInfo4 := cadence.NewStruct([]cadence.Value{ + // id + cadence.String("0000000000000000000000000000000000000000000000000000000000000004"), + + // role + cadence.UInt8(1), + + // networkingAddress + cadence.String("4.flow.com"), + + // networkingKey + cadence.String("378dbf45d85c614feb10d8bd4f78f4b6ef8eec7d987b937e123255444657fb3da031f232a507e323df3a6f6b8f50339c51d188e80c0e7a92420945cc6ca893fc"), + + // stakingKey + 
cadence.String("af4aade26d76bb2ab15dcc89adcef82a51f6f04b3cb5f4555214b40ec89813c7a5f95776ea4fe449de48166d0bbc59b919b7eabebaac9614cf6f9461fac257765415f4d8ef1376a2365ec9960121888ea5383d88a140c24c29962b0a14e4e4e7"), + + // tokensStaked + ufix64FromString("0.00000000"), + + // tokensCommitted + ufix64FromString("1350000.00000000"), + + // tokensUnstaking + ufix64FromString("0.00000000"), + + // tokensUnstaked + ufix64FromString("0.00000000"), + + // tokensRewarded + ufix64FromString("0.00000000"), + + // delegators + cadence.NewArray([]cadence.Value{}).WithType(cadence.NewVariableSizedArrayType(cadence.NewUInt32Type())), + + // delegatorIDCounter + cadence.UInt32(0), + + // tokensRequestedToUnstake + ufix64FromString("0.00000000"), + + // initialWeight + cadence.UInt64(100), + }).WithType(nodeInfoType) + + nodeInfo5 := cadence.NewStruct([]cadence.Value{ + // id + cadence.String("0000000000000000000000000000000000000000000000000000000000000011"), + + // role + cadence.UInt8(2), + + // networkingAddress + cadence.String("11.flow.com"), + + // networkingKey + cadence.String("cfdfe8e4362c8f79d11772cb7277ab16e5033a63e8dd5d34caf1b041b77e5b2d63c2072260949ccf8907486e4cfc733c8c42ca0e4e208f30470b0d950856cd47"), + + // stakingKey + cadence.String("8207559cd7136af378bba53a8f0196dee3849a3ab02897c1995c3e3f6ca0c4a776c3ae869d1ddbb473090054be2400ad06d7910aa2c5d1780220fdf3765a3c1764bce10c6fe66a5a2be51a422e878518bd750424bb56b8a0ecf0f8ad2057e83f"), + + // tokensStaked + ufix64FromString("0.00000000"), + + // tokensCommitted + ufix64FromString("1350000.00000000"), + + // tokensUnstaking + ufix64FromString("0.00000000"), + + // tokensUnstaked + ufix64FromString("0.00000000"), + + // tokensRewarded + ufix64FromString("0.00000000"), + + // delegators + cadence.NewArray([]cadence.Value{}).WithType(cadence.NewVariableSizedArrayType(cadence.NewUInt32Type())), + + // delegatorIDCounter + cadence.UInt32(0), + + // tokensRequestedToUnstake + ufix64FromString("0.00000000"), + + // initialWeight + cadence.UInt64(100), + }).WithType(nodeInfoType) + + nodeInfo6 := cadence.NewStruct([]cadence.Value{ + // id + cadence.String("0000000000000000000000000000000000000000000000000000000000000021"), + + // role + cadence.UInt8(3), + + // networkingAddress + cadence.String("21.flow.com"), + + // networkingKey + cadence.String("d64318ba0dbf68f3788fc81c41d507c5822bf53154530673127c66f50fe4469ccf1a054a868a9f88506a8999f2386d86fcd2b901779718cba4fb53c2da258f9e"), + + // stakingKey + cadence.String("880b162b7ec138b36af401d07868cb08d25746d905395edbb4625bdf105d4bb2b2f4b0f4ae273a296a6efefa7ce9ccb914e39947ce0e83745125cab05d62516076ff0173ed472d3791ccef937597c9ea12381d76f547a092a4981d77ff3fba83"), + + // tokensStaked + ufix64FromString("0.00000000"), + + // tokensCommitted + ufix64FromString("1350000.00000000"), + + // tokensUnstaking + ufix64FromString("0.00000000"), + + // tokensUnstaked + ufix64FromString("0.00000000"), + + // tokensRewarded + ufix64FromString("0.00000000"), + + // delegators + cadence.NewArray([]cadence.Value{}).WithType(cadence.NewVariableSizedArrayType(cadence.NewUInt32Type())), + + // delegatorIDCounter + cadence.UInt32(0), + + // tokensRequestedToUnstake + ufix64FromString("0.00000000"), + + // initialWeight + cadence.UInt64(100), + }).WithType(nodeInfoType) + + nodeInfo7 := cadence.NewStruct([]cadence.Value{ + // id + cadence.String("0000000000000000000000000000000000000000000000000000000000000031"), + + // role + cadence.UInt8(4), + + // networkingAddress + cadence.String("31.flow.com"), + + // networkingKey + 
cadence.String("697241208dcc9142b6f53064adc8ff1c95760c68beb2ba083c1d005d40181fd7a1b113274e0163c053a3addd47cd528ec6a1f190cf465aac87c415feaae011ae"), + + // stakingKey + cadence.String("b1f97d0a06020eca97352e1adde72270ee713c7daf58da7e74bf72235321048b4841bdfc28227964bf18e371e266e32107d238358848bcc5d0977a0db4bda0b4c33d3874ff991e595e0f537c7b87b4ddce92038ebc7b295c9ea20a1492302aa7"), + + // tokensStaked + ufix64FromString("0.00000000"), + + // tokensCommitted + ufix64FromString("1350000.00000000"), + + // tokensUnstaking + ufix64FromString("0.00000000"), + + // tokensUnstaked + ufix64FromString("0.00000000"), + + // tokensRewarded + ufix64FromString("0.00000000"), + + // delegators + cadence.NewArray([]cadence.Value{}).WithType(cadence.NewVariableSizedArrayType(cadence.NewUInt32Type())), + + // delegatorIDCounter + cadence.UInt32(0), + + // tokensRequestedToUnstake + ufix64FromString("0.00000000"), + + // initialWeight + cadence.UInt64(100), + }).WithType(nodeInfoType) + + return cadence.NewArray([]cadence.Value{ + nodeInfo1, + nodeInfo2, + nodeInfo3, + nodeInfo4, + nodeInfo5, + nodeInfo6, + nodeInfo7, + }).WithType(cadence.NewVariableSizedArrayType(nodeInfoType)) +} + +func createEpochCollectors() cadence.Array { + + clusterType := newFlowClusterQCClusterStructType() + + voteType := newFlowClusterQCVoteStructType() + + cluster1 := cadence.NewStruct([]cadence.Value{ + // index + cadence.NewUInt16(0), + + // nodeWeights + cadence.NewDictionary([]cadence.KeyValuePair{ + { + Key: cadence.String("0000000000000000000000000000000000000000000000000000000000000001"), + Value: cadence.UInt64(100), + }, + { + Key: cadence.String("0000000000000000000000000000000000000000000000000000000000000002"), + Value: cadence.UInt64(100), + }, + }).WithType(cadence.NewMeteredDictionaryType(nil, cadence.StringType{}, cadence.UInt64Type{})), + + // totalWeight + cadence.NewUInt64(100), + + // generatedVotes + cadence.NewDictionary(nil).WithType(cadence.NewDictionaryType(cadence.StringType{}, voteType)), + + // uniqueVoteMessageTotalWeights + cadence.NewDictionary(nil).WithType(cadence.NewDictionaryType(cadence.StringType{}, cadence.UInt64Type{})), + }).WithType(clusterType) + + cluster2 := cadence.NewStruct([]cadence.Value{ + // index + cadence.NewUInt16(1), + + // nodeWeights + cadence.NewDictionary([]cadence.KeyValuePair{ + { + Key: cadence.String("0000000000000000000000000000000000000000000000000000000000000003"), + Value: cadence.UInt64(100), + }, + { + Key: cadence.String("0000000000000000000000000000000000000000000000000000000000000004"), + Value: cadence.UInt64(100), + }, + }).WithType(cadence.NewMeteredDictionaryType(nil, cadence.StringType{}, cadence.UInt64Type{})), + + // totalWeight + cadence.NewUInt64(0), + + // generatedVotes + cadence.NewDictionary(nil).WithType(cadence.NewDictionaryType(cadence.StringType{}, voteType)), + + // uniqueVoteMessageTotalWeights + cadence.NewDictionary(nil).WithType(cadence.NewDictionaryType(cadence.StringType{}, cadence.UInt64Type{})), + }).WithType(clusterType) + + return cadence.NewArray([]cadence.Value{ + cluster1, + cluster2, + }).WithType(cadence.NewVariableSizedArrayType(clusterType)) +} + +func createEpochCommittedEvent() cadence.Event { + + clusterQCType := newFlowClusterQCClusterQCStructType() + + cluster1 := cadence.NewStruct([]cadence.Value{ + // index + cadence.UInt16(0), + + // voteSignatures + cadence.NewArray([]cadence.Value{ + cadence.String("a39cd1e1bf7e2fb0609b7388ce5215a6a4c01eef2aee86e1a007faa28a6b2a3dc876e11bb97cdb26c3846231d2d01e4d"), + 
cadence.String("91673ad9c717d396c9a0953617733c128049ac1a639653d4002ab245b121df1939430e313bcbfd06948f6a281f6bf853"), + }).WithType(cadence.NewVariableSizedArrayType(cadence.StringType{})), + + // voteMessage + cadence.String("irrelevant_for_these_purposes"), + + // voterIDs + cadence.NewArray([]cadence.Value{ + cadence.String("0000000000000000000000000000000000000000000000000000000000000001"), + cadence.String("0000000000000000000000000000000000000000000000000000000000000002"), + }).WithType(cadence.NewVariableSizedArrayType(cadence.StringType{})), + }).WithType(clusterQCType) + + cluster2 := cadence.NewStruct([]cadence.Value{ + // index + cadence.UInt16(1), + + // voteSignatures + cadence.NewArray([]cadence.Value{ + cadence.String("b2bff159971852ed63e72c37991e62c94822e52d4fdcd7bf29aaf9fb178b1c5b4ce20dd9594e029f3574cb29533b857a"), + cadence.String("9931562f0248c9195758da3de4fb92f24fa734cbc20c0cb80280163560e0e0348f843ac89ecbd3732e335940c1e8dccb"), + }).WithType(cadence.NewVariableSizedArrayType(cadence.StringType{})), + + // voteMessage + cadence.String("irrelevant_for_these_purposes"), + + // voterIDs + cadence.NewArray([]cadence.Value{ + cadence.String("0000000000000000000000000000000000000000000000000000000000000003"), + cadence.String("0000000000000000000000000000000000000000000000000000000000000004"), + }).WithType(cadence.NewVariableSizedArrayType(cadence.StringType{})), + }).WithType(clusterQCType) + + return cadence.NewEvent([]cadence.Value{ + // counter + cadence.NewUInt64(1), + + // clusterQCs + cadence.NewArray([]cadence.Value{ + cluster1, + cluster2, + }).WithType(cadence.NewVariableSizedArrayType(clusterQCType)), + + // dkgPubKeys + cadence.NewArray([]cadence.Value{ + cadence.String("8c588266db5f5cda629e83f8aa04ae9413593fac19e4865d06d291c9d14fbdd9bdb86a7a12f9ef8590c79cb635e3163315d193087e9336092987150d0cd2b14ac6365f7dc93eec573752108b8c12368abb65f0652d9f644e5aed611c37926950"), + cadence.String("87a339e4e5c74f089da20a33f515d8c8f4464ab53ede5a74aa2432cd1ae66d522da0c122249ee176cd747ddc83ca81090498389384201614caf51eac392c1c0a916dfdcfbbdf7363f9552b6468434add3d3f6dc91a92bbe3ee368b59b7828488"), + }).WithType(cadence.NewVariableSizedArrayType(cadence.StringType{})), + }).WithType(newFlowEpochEpochCommittedEventType()) +} + +func createVersionBeaconEvent() cadence.Event { + versionBoundaryType := newNodeVersionBeaconVersionBoundaryStructType() + + semverType := newNodeVersionBeaconSemverStructType() + + semver := cadence.NewStruct([]cadence.Value{ + // preRelease + cadence.NewOptional(cadence.String("")), + + // major + cadence.UInt8(2), + + // minor + cadence.UInt8(13), + + // patch + cadence.UInt8(7), + }).WithType(semverType) + + versionBoundary := cadence.NewStruct([]cadence.Value{ + // blockHeight + cadence.UInt64(44), + + // version + semver, + }).WithType(versionBoundaryType) + + return cadence.NewEvent([]cadence.Value{ + // versionBoundaries + cadence.NewArray([]cadence.Value{ + versionBoundary, + }).WithType(cadence.NewVariableSizedArrayType(versionBoundaryType)), + + // sequence + cadence.UInt64(5), + }).WithType(newNodeVersionBeaconVersionBeaconEventType()) +} + +func newFlowClusterQCVoteStructType() cadence.Type { + + // A.01cf0e2f2f715450.FlowClusterQC.Vote + + address, _ := common.HexToAddress("01cf0e2f2f715450") + location := common.NewAddressLocation(nil, address, "FlowClusterQC") + + return &cadence.StructType{ + Location: location, + QualifiedIdentifier: "FlowClusterQC.Vote", + Fields: []cadence.Field{ + { + Identifier: "nodeID", + Type: cadence.StringType{}, + }, + { + 
Identifier: "signature", + Type: cadence.NewOptionalType(cadence.StringType{}), + }, + { + Identifier: "message", + Type: cadence.NewOptionalType(cadence.StringType{}), + }, + { + Identifier: "clusterIndex", + Type: cadence.UInt16Type{}, + }, + { + Identifier: "weight", + Type: cadence.UInt64Type{}, + }, + }, + } +} + +func newFlowClusterQCClusterStructType() *cadence.StructType { + + // A.01cf0e2f2f715450.FlowClusterQC.Cluster + + address, _ := common.HexToAddress("01cf0e2f2f715450") + location := common.NewAddressLocation(nil, address, "FlowClusterQC") + + return &cadence.StructType{ + Location: location, + QualifiedIdentifier: "FlowClusterQC.Cluster", + Fields: []cadence.Field{ + { + Identifier: "index", + Type: cadence.UInt16Type{}, + }, + { + Identifier: "nodeWeights", + Type: cadence.NewDictionaryType(cadence.StringType{}, cadence.UInt64Type{}), + }, + { + Identifier: "totalWeight", + Type: cadence.UInt64Type{}, + }, + { + Identifier: "generatedVotes", + Type: cadence.NewDictionaryType(cadence.StringType{}, newFlowClusterQCVoteStructType()), + }, + { + Identifier: "uniqueVoteMessageTotalWeights", + Type: cadence.NewDictionaryType(cadence.StringType{}, cadence.UInt64Type{}), + }, + }, + } +} + +func newFlowIDTableStakingNodeInfoStructType() *cadence.StructType { + + // A.01cf0e2f2f715450.FlowIDTableStaking.NodeInfo + + address, _ := common.HexToAddress("01cf0e2f2f715450") + location := common.NewAddressLocation(nil, address, "FlowIDTableStaking") + + return &cadence.StructType{ + Location: location, + QualifiedIdentifier: "FlowIDTableStaking.NodeInfo", + Fields: []cadence.Field{ + { + Identifier: "id", + Type: cadence.StringType{}, + }, + { + Identifier: "role", + Type: cadence.UInt8Type{}, + }, + { + Identifier: "networkingAddress", + Type: cadence.StringType{}, + }, + { + Identifier: "networkingKey", + Type: cadence.StringType{}, + }, + { + Identifier: "stakingKey", + Type: cadence.StringType{}, + }, + { + Identifier: "tokensStaked", + Type: cadence.UFix64Type{}, + }, + { + Identifier: "tokensCommitted", + Type: cadence.UFix64Type{}, + }, + { + Identifier: "tokensUnstaking", + Type: cadence.UFix64Type{}, + }, + { + Identifier: "tokensUnstaked", + Type: cadence.UFix64Type{}, + }, + { + Identifier: "tokensRewarded", + Type: cadence.UFix64Type{}, + }, + { + Identifier: "delegators", + Type: cadence.NewVariableSizedArrayType(cadence.NewUInt32Type()), + }, + { + Identifier: "delegatorIDCounter", + Type: cadence.UInt32Type{}, + }, + { + Identifier: "tokensRequestedToUnstake", + Type: cadence.UFix64Type{}, + }, + { + Identifier: "initialWeight", + Type: cadence.UInt64Type{}, + }, + }, + } +} + +func newFlowEpochEpochSetupEventType() *cadence.EventType { + + // A.01cf0e2f2f715450.FlowEpoch.EpochSetup + + address, _ := common.HexToAddress("01cf0e2f2f715450") + location := common.NewAddressLocation(nil, address, "FlowEpoch") + + return &cadence.EventType{ + Location: location, + QualifiedIdentifier: "FlowEpoch.EpochSetup", + Fields: []cadence.Field{ + { + Identifier: "counter", + Type: cadence.UInt64Type{}, + }, + { + Identifier: "nodeInfo", + Type: cadence.NewVariableSizedArrayType(newFlowIDTableStakingNodeInfoStructType()), + }, + { + Identifier: "firstView", + Type: cadence.UInt64Type{}, + }, + { + Identifier: "finalView", + Type: cadence.UInt64Type{}, + }, + { + Identifier: "collectorClusters", + Type: cadence.NewVariableSizedArrayType(newFlowClusterQCClusterStructType()), + }, + { + Identifier: "randomSource", + Type: cadence.StringType{}, + }, + { + Identifier: "DKGPhase1FinalView", + 
Type: cadence.UInt64Type{}, + }, + { + Identifier: "DKGPhase2FinalView", + Type: cadence.UInt64Type{}, + }, + { + Identifier: "DKGPhase3FinalView", + Type: cadence.UInt64Type{}, + }, + }, + } +} + +func newFlowEpochEpochCommittedEventType() *cadence.EventType { + + // A.01cf0e2f2f715450.FlowEpoch.EpochCommitted + + address, _ := common.HexToAddress("01cf0e2f2f715450") + location := common.NewAddressLocation(nil, address, "FlowEpoch") + + return &cadence.EventType{ + Location: location, + QualifiedIdentifier: "FlowEpoch.EpochCommitted", + Fields: []cadence.Field{ + { + Identifier: "counter", + Type: cadence.UInt64Type{}, + }, + { + Identifier: "clusterQCs", + Type: cadence.NewVariableSizedArrayType(newFlowClusterQCClusterQCStructType()), + }, + { + Identifier: "dkgPubKeys", + Type: cadence.NewVariableSizedArrayType(cadence.StringType{}), + }, + }, + } +} + +func newFlowClusterQCClusterQCStructType() *cadence.StructType { + + // A.01cf0e2f2f715450.FlowClusterQC.ClusterQC" + + address, _ := common.HexToAddress("01cf0e2f2f715450") + location := common.NewAddressLocation(nil, address, "FlowClusterQC") + + return &cadence.StructType{ + Location: location, + QualifiedIdentifier: "FlowClusterQC.ClusterQC", + Fields: []cadence.Field{ + { + Identifier: "index", + Type: cadence.UInt16Type{}, + }, + { + Identifier: "voteSignatures", + Type: cadence.NewVariableSizedArrayType(cadence.StringType{}), + }, + { + Identifier: "voteMessage", + Type: cadence.StringType{}, + }, + { + Identifier: "voterIDs", + Type: cadence.NewVariableSizedArrayType(cadence.StringType{}), + }, + }, + } +} + +func newNodeVersionBeaconVersionBeaconEventType() *cadence.EventType { + + // A.01cf0e2f2f715450.NodeVersionBeacon.VersionBeacon + + address, _ := common.HexToAddress("01cf0e2f2f715450") + location := common.NewAddressLocation(nil, address, "NodeVersionBeacon") + + return &cadence.EventType{ + Location: location, + QualifiedIdentifier: "NodeVersionBeacon.VersionBeacon", + Fields: []cadence.Field{ + { + Identifier: "versionBoundaries", + Type: cadence.NewVariableSizedArrayType(newNodeVersionBeaconVersionBoundaryStructType()), + }, + { + Identifier: "sequence", + Type: cadence.UInt64Type{}, + }, + }, + } +} + +func newNodeVersionBeaconVersionBoundaryStructType() *cadence.StructType { + + // A.01cf0e2f2f715450.NodeVersionBeacon.VersionBoundary + + address, _ := common.HexToAddress("01cf0e2f2f715450") + location := common.NewAddressLocation(nil, address, "NodeVersionBeacon") + + return &cadence.StructType{ + Location: location, + QualifiedIdentifier: "NodeVersionBeacon.VersionBoundary", + Fields: []cadence.Field{ + { + Identifier: "blockHeight", + Type: cadence.UInt64Type{}, + }, + { + Identifier: "version", + Type: newNodeVersionBeaconSemverStructType(), + }, + }, + } +} + +func newNodeVersionBeaconSemverStructType() *cadence.StructType { + + // A.01cf0e2f2f715450.NodeVersionBeacon.Semver + + address, _ := common.HexToAddress("01cf0e2f2f715450") + location := common.NewAddressLocation(nil, address, "NodeVersionBeacon") + + return &cadence.StructType{ + Location: location, + QualifiedIdentifier: "NodeVersionBeacon.Semver", + Fields: []cadence.Field{ + { + Identifier: "preRelease", + Type: cadence.NewOptionalType(cadence.StringType{}), + }, + { + Identifier: "major", + Type: cadence.UInt8Type{}, + }, + { + Identifier: "minor", + Type: cadence.UInt8Type{}, + }, + { + Identifier: "patch", + Type: cadence.UInt8Type{}, + }, + }, + } +} + +func ufix64FromString(s string) cadence.UFix64 { + f, err := cadence.NewUFix64(s) + if err != 
nil { + panic(err) + } + return f +} diff --git a/encoding/ccf/simple_type_utils.go b/encoding/ccf/simple_type_utils.go index 0e0f498c19..1f28866fd5 100644 --- a/encoding/ccf/simple_type_utils.go +++ b/encoding/ccf/simple_type_utils.go @@ -82,6 +82,9 @@ const ( // Cadence simple type IDs TypeVoid TypeFunction TypeWord128 + TypeWord256 + TypeAnyStructAttachmentType + TypeAnyResourceAttachmentType ) // NOTE: cadence.FunctionType isn't included in simpleTypeIDByType @@ -198,6 +201,9 @@ func simpleTypeIDByType(typ cadence.Type) (uint64, bool) { case cadence.Word128Type: return TypeWord128, true + case cadence.Word256Type: + return TypeWord256, true + case cadence.Fix64Type: return TypeFix64, true @@ -245,6 +251,12 @@ func simpleTypeIDByType(typ cadence.Type) (uint64, bool) { case cadence.DeployedContractType: return TypeDeployedContract, true + + case cadence.AnyStructAttachmentType: + return TypeAnyStructAttachmentType, true + + case cadence.AnyResourceAttachmentType: + return TypeAnyResourceAttachmentType, true } return 0, false diff --git a/encoding/ccf/traverse_value.go b/encoding/ccf/traverse_value.go index c75bc40561..d82c83891c 100644 --- a/encoding/ccf/traverse_value.go +++ b/encoding/ccf/traverse_value.go @@ -65,6 +65,10 @@ func compositeTypesFromValue(v cadence.Value) ([]cadence.Type, ccfTypeIDByCadenc func (ct *compositeTypes) traverseValue(v cadence.Value) { + if v == nil { + return + } + // Traverse type for composite/interface types. checkRuntimeType := ct.traverseType(v.Type()) @@ -202,6 +206,7 @@ func (ct *compositeTypes) traverseType(typ cadence.Type) (checkRuntimeType bool) cadence.Word32Type, cadence.Word64Type, cadence.Word128Type, + cadence.Word256Type, cadence.Fix64Type, cadence.UFix64Type, cadence.PathType, diff --git a/encoding/json/decode.go b/encoding/json/decode.go index 604f0fe359..bc5cbbc318 100644 --- a/encoding/json/decode.go +++ b/encoding/json/decode.go @@ -137,6 +137,7 @@ const ( returnKey = "return" typeBoundKey = "typeBound" purityKey = "purity" + functionTypeKey = "functionType" ) func (d *Decoder) decodeJSON(v any) cadence.Value { @@ -205,6 +206,8 @@ func (d *Decoder) decodeJSON(v any) cadence.Value { return d.decodeWord64(valueJSON) case word128TypeStr: return d.decodeWord128(valueJSON) + case word256TypeStr: + return d.decodeWord256(valueJSON) case fix64TypeStr: return d.decodeFix64(valueJSON) case ufix64TypeStr: @@ -229,6 +232,8 @@ func (d *Decoder) decodeJSON(v any) cadence.Value { return d.decodeCapability(valueJSON) case enumTypeStr: return d.decodeEnum(valueJSON) + case functionTypeStr: + return d.decodeFunction(valueJSON) } panic(errors.NewDefaultUserError("invalid type: %s", typeStr)) @@ -587,6 +592,24 @@ func (d *Decoder) decodeWord128(valueJSON any) cadence.Word128 { return value } +func (d *Decoder) decodeWord256(valueJSON any) cadence.Word256 { + value, err := cadence.NewMeteredWord256FromBig( + d.gauge, + func() *big.Int { + bigInt := d.decodeBigInt(valueJSON) + if bigInt == nil { + // TODO: propagate toString error from decodeBigInt + panic(errors.NewDefaultUserError("invalid Word256: %s", valueJSON)) + } + return bigInt + }, + ) + if err != nil { + panic(errors.NewDefaultUserError("invalid Word256: %w", err)) + } + return value +} + func (d *Decoder) decodeFix64(valueJSON any) cadence.Fix64 { v, err := cadence.NewMeteredFix64(d.gauge, func() (string, error) { return toString(valueJSON), nil @@ -864,6 +887,20 @@ func (d *Decoder) decodePath(valueJSON any) cadence.Path { return path } +func (d *Decoder) decodeFunction(valueJSON any) 
cadence.Function { + obj := toObject(valueJSON) + + functionType, ok := d.decodeType(obj.Get(functionTypeKey), typeDecodingResults{}).(*cadence.FunctionType) + if !ok { + panic(errors.NewDefaultUserError("invalid function: invalid function type")) + } + + return cadence.NewMeteredFunction( + d.gauge, + functionType, + ) +} + func (d *Decoder) decodeTypeParameter(valueJSON any, results typeDecodingResults) cadence.TypeParameter { obj := toObject(valueJSON) // Unmetered because decodeTypeParameter is metered in decodeTypeParameters and called nowhere else @@ -950,7 +987,10 @@ func (d *Decoder) decodePurity(purity any) cadence.FunctionPurity { } func (d *Decoder) decodeFunctionType(typeParametersValue, parametersValue, returnValue any, purity any, results typeDecodingResults) cadence.Type { - typeParameters := d.decodeTypeParameters(toSlice(typeParametersValue), results) + var typeParameters []cadence.TypeParameter + if typeParametersValue != nil { + typeParameters = d.decodeTypeParameters(toSlice(typeParametersValue), results) + } parameters := d.decodeParameters(toSlice(parametersValue), results) returnType := d.decodeType(returnValue, results) functionPurity := d.decodePurity(purity) @@ -1156,7 +1196,7 @@ func (d *Decoder) decodeType(valueJSON any, results typeDecodingResults) cadence switch kindValue { case "Function": - typeParametersValue := obj.Get(typeParametersKey) + typeParametersValue := obj[typeParametersKey] parametersValue := obj.Get(parametersKey) returnValue := obj.Get(returnKey) purity, hasPurity := obj[purityKey] @@ -1282,6 +1322,8 @@ func (d *Decoder) decodeType(valueJSON any, results typeDecodingResults) cadence return cadence.TheWord64Type case "Word128": return cadence.TheWord128Type + case "Word256": + return cadence.TheWord256Type case "Fix64": return cadence.TheFix64Type case "UFix64": diff --git a/encoding/json/encode.go b/encoding/json/encode.go index e37b962a7b..83f607fb04 100644 --- a/encoding/json/encode.go +++ b/encoding/json/encode.go @@ -183,6 +183,7 @@ type jsonReferenceType struct { type jsonRestrictedType struct { Kind string `json:"kind"` + TypeID string `json:"typeID"` Type jsonValue `json:"type"` Restrictions []jsonValue `json:"restrictions"` } @@ -200,6 +201,7 @@ type jsonParameterType struct { type jsonFunctionType struct { Kind string `json:"kind"` + TypeID string `json:"typeID"` TypeParameters []jsonTypeParameter `json:"typeParameters"` Parameters []jsonParameterType `json:"parameters"` Return jsonValue `json:"return"` @@ -252,12 +254,14 @@ const ( word32TypeStr = "Word32" word64TypeStr = "Word64" word128TypeStr = "Word128" + word256TypeStr = "Word256" fix64TypeStr = "Fix64" ufix64TypeStr = "UFix64" arrayTypeStr = "Array" dictionaryTypeStr = "Dictionary" structTypeStr = "Struct" resourceTypeStr = "Resource" + attachmentTypeStr = "Attachment" eventTypeStr = "Event" contractTypeStr = "Contract" linkTypeStr = "Link" @@ -323,6 +327,8 @@ func Prepare(v cadence.Value) jsonValue { return prepareWord64(v) case cadence.Word128: return prepareWord128(v) + case cadence.Word256: + return prepareWord256(v) case cadence.Fix64: return prepareFix64(v) case cadence.UFix64: @@ -353,8 +359,12 @@ func Prepare(v cadence.Value) jsonValue { return prepareIDCapability(v) case cadence.Enum: return prepareEnum(v) + case cadence.Attachment: + return prepareAttachment(v) case cadence.Function: return prepareFunction(v) + case nil: + return nil default: panic(fmt.Errorf("unsupported value: %T, %v", v, v)) } @@ -538,6 +548,13 @@ func prepareWord128(v cadence.Word128) jsonValue { 
} } +func prepareWord256(v cadence.Word256) jsonValue { + return jsonValueObject{ + Type: word256TypeStr, + Value: encodeBig(v.Big()), + } +} + func prepareFix64(v cadence.Fix64) jsonValue { return jsonValueObject{ Type: fix64TypeStr, @@ -601,8 +618,14 @@ func prepareEnum(v cadence.Enum) jsonValue { return prepareComposite(enumTypeStr, v.EnumType.ID(), v.EnumType.Fields, v.Fields) } +func prepareAttachment(v cadence.Attachment) jsonValue { + return prepareComposite(attachmentTypeStr, v.AttachmentType.ID(), v.AttachmentType.Fields, v.Fields) +} + func prepareComposite(kind, id string, fieldTypes []cadence.Field, fields []cadence.Value) jsonValue { - if len(fieldTypes) != len(fields) { + // Ensure there are _at least _ as many field values as field types. + // There might be more field values in the case of attachments. + if len(fields) < len(fieldTypes) { panic(fmt.Errorf( "%s field count (%d) does not match declared type (%d)", kind, @@ -614,10 +637,17 @@ func prepareComposite(kind, id string, fieldTypes []cadence.Field, fields []cade compositeFields := make([]jsonCompositeField, len(fields)) for i, value := range fields { - fieldType := fieldTypes[i] + var name string + // Provide the field name, if the field type is available. + // In the case of attachments, they are provided as field values, + // but there is no corresponding field type. + if i < len(fieldTypes) { + fieldType := fieldTypes[i] + name = fieldType.Identifier + } compositeFields[i] = jsonCompositeField{ - Name: fieldType.Identifier, + Name: name, Value: Prepare(value), } } @@ -809,6 +839,7 @@ func prepareType(typ cadence.Type, results typePreparationResults) jsonValue { cadence.Word32Type, cadence.Word64Type, cadence.Word128Type, + cadence.Word256Type, cadence.Fix64Type, cadence.UFix64Type, cadence.BlockType, @@ -909,6 +940,7 @@ func prepareType(typ cadence.Type, results typePreparationResults) jsonValue { case *cadence.FunctionType: typeJson := jsonFunctionType{ Kind: "Function", + TypeID: typ.ID(), TypeParameters: prepareTypeParameters(typ.TypeParameters, results), Parameters: prepareParameters(typ.Parameters, results), Return: prepareType(typ.ReturnType, results), @@ -930,6 +962,7 @@ func prepareType(typ cadence.Type, results typePreparationResults) jsonValue { } return jsonRestrictedType{ Kind: "Restriction", + TypeID: typ.ID(), Type: prepareType(typ.Type, results), Restrictions: restrictions, } diff --git a/encoding/json/encoding_test.go b/encoding/json/encoding_test.go index f95fea9612..23f797a15e 100644 --- a/encoding/json/encoding_test.go +++ b/encoding/json/encoding_test.go @@ -637,6 +637,26 @@ func TestEncodeWord128(t *testing.T) { }...) } +func TestEncodeWord256(t *testing.T) { + + t.Parallel() + + testAllEncodeAndDecode(t, []encodeTest{ + { + "Zero", + cadence.NewWord256(0), + // language=json + `{"type":"Word256","value":"0"}`, + }, + { + "Max", + cadence.Word256{Value: sema.Word256TypeMaxIntBig}, + // language=json + `{"type":"Word256","value":"115792089237316195423570985008687907853269984665640564039457584007913129639935"}`, + }, + }...) 
+} + func TestEncodeFix64(t *testing.T) { t.Parallel() @@ -1757,6 +1777,7 @@ func TestEncodeSimpleTypes(t *testing.T) { cadence.Word32Type{}, cadence.Word64Type{}, cadence.Word128Type{}, + cadence.Word256Type{}, cadence.Fix64Type{}, cadence.UFix64Type{}, cadence.BlockType{}, @@ -2560,7 +2581,7 @@ func TestEncodeType(t *testing.T) { ) }) - t.Run("with static function", func(t *testing.T) { + t.Run("with static function, with type parameters", func(t *testing.T) { testEncodeAndDecode( t, @@ -2583,6 +2604,7 @@ func TestEncodeType(t *testing.T) { "staticType": { "kind": "Function", "purity": "", + "typeID": "fun(String):Int", "return": { "kind": "Int" }, @@ -2629,6 +2651,7 @@ func TestEncodeType(t *testing.T) { { "kind" : "Function", "purity": "view", + "typeID": "view fun(String):Int", "return" : {"kind" : "Int"}, "typeParameters": [], "parameters" : [ @@ -2640,6 +2663,45 @@ func TestEncodeType(t *testing.T) { }) + t.Run("with static function, without type parameters (decode only)", func(t *testing.T) { + + testDecode( + t, + // language=json + ` + { + "type": "Type", + "value": { + "staticType": { + "kind": "Function", + "typeID": "((String):Int)", + "return": { + "kind": "Int" + }, + "parameters": [ + { + "label": "qux", + "id": "baz", + "type": { + "kind": "String" + } + } + ] + } + } + } + `, + cadence.TypeValue{ + StaticType: &cadence.FunctionType{ + Parameters: []cadence.Parameter{ + {Label: "qux", Identifier: "baz", Type: cadence.StringType{}}, + }, + ReturnType: cadence.IntType{}, + }, + }, + ) + }) + t.Run("with implicit purity", func(t *testing.T) { encodedValue := `{"type":"Type","value":{"staticType": @@ -2714,6 +2776,7 @@ func TestEncodeType(t *testing.T) { "value": { "staticType": { "kind": "Restriction", + "typeID": "Int{String}", "type": { "kind": "Int" }, @@ -3551,20 +3614,6 @@ func TestExportFunctionValue(t *testing.T) { t.Parallel() - ty := &cadence.ResourceType{ - Location: utils.TestLocation, - QualifiedIdentifier: "Foo", - Fields: []cadence.Field{ - { - Identifier: "foo", - }, - }, - } - - ty.Fields[0].Type = &cadence.OptionalType{ - Type: ty, - } - testEncode( t, cadence.Function{ @@ -3580,6 +3629,7 @@ func TestExportFunctionValue(t *testing.T) { "value": { "functionType": { "kind": "Function", + "typeID": "fun():Void", "parameters": [], "typeParameters": [], "purity":"", @@ -3592,3 +3642,77 @@ func TestExportFunctionValue(t *testing.T) { `, ) } + +func TestImportFunctionValue(t *testing.T) { + + t.Parallel() + + t.Run("without type parameters", func(t *testing.T) { + + t.Parallel() + + testDecode( + t, + // language=json + ` + { + "type": "Function", + "value": { + "functionType": { + "kind": "Function", + "typeID": "(():Void)", + "parameters": [], + "return": { + "kind": "Void" + } + } + } + } + `, + cadence.Function{ + FunctionType: &cadence.FunctionType{ + Parameters: []cadence.Parameter{}, + ReturnType: cadence.VoidType{}, + }, + }, + ) + }) + + t.Run("with type parameters", func(t *testing.T) { + + t.Parallel() + + testDecode( + t, + // language=json + ` + { + "type": "Function", + "value": { + "functionType": { + "kind": "Function", + "typeID": "(():Void)", + "typeParameters": [ + {"name": "T"} + ], + "parameters": [], + "return": { + "kind": "Void" + } + } + } + } + `, + cadence.Function{ + FunctionType: &cadence.FunctionType{ + TypeParameters: []cadence.TypeParameter{ + {Name: "T"}, + }, + Parameters: []cadence.Parameter{}, + ReturnType: cadence.VoidType{}, + }, + }, + ) + }) + +} diff --git a/meetings/2023-05-04.md b/meetings/2023-05-04.md new file mode 100644 
index 0000000000..df84098a6a --- /dev/null +++ b/meetings/2023-05-04.md @@ -0,0 +1,167 @@ +# May 4th, 2023 + +## FLIPs + +### Capability Controllers + +* FLIP: [https://github.com/onflow/flow/pull/798](https://github.com/onflow/flow/pull/798) + +* Status: + * Final API adjustments + * Implementation nearing completion + * No remaining concerns + +* Open problems: + * None + +* Next steps: + * Update FLIP, no capcons for links + * Migration, rollout documentation + * Documentation around pattern (e.g. getCapability to construct, link later) + * Clarify migration: find storage path, storage path does not need to store anything + * **Accept, unless objections** + +### Entitlements and Safe Downcasting + +* FLIP: [https://github.com/onflow/flips/pull/54](https://github.com/onflow/flips/pull/54) + +* Status: + * Implementation done + * No remaining concerns + * Additional features requested → follow-up FLIPs + +* Open problems/concerns: + * Complexity + * Parts: entitlements, downcasting, mappings + * Syntax (comma, and, set) + * Migration? + * Current access control pattern: public declaration, *but* restricted type to gate access + * Maybe get rid of pub and priv? + * Type in existing capability + * Breakage is OK, security problem not. Maybe have contract be broken until fixed + +* Next steps: + * Details for migration, maybe breakout session + * Documentation and tutorials, for existing and new developers + * Demonstrate with e.g. AuthAccount/PublicAccount, standard library (arrays, dictionaries, etc.), FT, NFT, etc. + * Preview release for Stable Cadence: Emulator, Playground, etc. + * **Accept, unless objections** + * Maybe propose removal of restricted types, not "necessary" or very useful anymore + +### Attachments + +* FLIP: [https://github.com/onflow/flips/pull/11](https://github.com/onflow/flips/pull/11) + +* Forum discussion: [https://forum.onflow.org/t/flip-cadence-extensions-attachments/3645/2](https://forum.onflow.org/t/flip-cadence-extensions-attachments/3645/2) + +* Status: + * Approved and merged 🎉 + * Deployed to Testnet + * Receiving feedback + * Discovering use-cases + * Need mitigation for trolling attack + * Mainnet blocked on solution for trolling attack + * Had breakout session for trolling attack + * Have idea for solution, but might depend on breaking change (Stable Cadence) + +* Open problem: + * Trolling attack, blocking. Working on solution + +* Next steps: + * Address trolling attack + * Breakout session for contexts ("try-catch") in public + +### Interface Inheritance + +* FLIP: [https://github.com/onflow/flips/pull/40](https://github.com/onflow/flips/pull/40) + +* Forum discussion: [https://forum.onflow.org/t/flip-interface-inheritance-in-cadence/3750](https://forum.onflow.org/t/flip-interface-inheritance-in-cadence/3750) + +* Status: + * Had more breakout sessions, ironed out almost all last questions/problems + * Implementation available + +* Open questions/concerns: + * Explicit declaration of inherited interfaces? + * Declaration (conformance list) + * Also in restricted types? 
Subtyping relationship → clarify in FLIP
+ * No tooling required when reading contract
+ * Also helps author
+ * Verbosity / explicitness vs conciseness / implicitness
+ * Example: FT
+
+* Next steps:
+ * Resolve last question/concern
+ * **Accept, unless objections**
+
+### Extend transaction format
+
+* FLIP: [https://github.com/onflow/flips/pull/41](https://github.com/onflow/flips/pull/41)
+
+* Status:
+ * No updates at the moment
+ * Last breakout session was a while back
+ * Planning to restart working group
+
+* Open problems:
+ * Unexplored corner:
+ * Useful for multi-sign use-cases
+ * Does model also make sense for single-sign use-cases?
+ * Signer assignment
+ * Mapping signatures to roles
+ * Currently Flow transaction signatures are sequential
+ * Action-based model?
+
+* Next steps:
+ * Have another breakout session
+ * Contact Jeff
+
+### External Mutability
+
+* [https://github.com/onflow/flips/pull/58](https://github.com/onflow/flips/pull/58)
+
+* [https://github.com/onflow/flips/pull/59](https://github.com/onflow/flips/pull/59)
+
+* Status
+ * Had more breakout sessions
+ * Working on comparison and examples
+
+* Open problems:
+ * Usability
+ * FLIPs have different compromises
+
+* Next steps:
+ * Comparison of different solutions and examples
+ * Another breakout session
+
+### Interface Conformance Improvements
+
+* FLIP: [https://github.com/onflow/flips/pull/83](https://github.com/onflow/flips/pull/83)
+
+* Overview:
+ * Relaxation of existing restriction for interface conformance
+ * Default function in one interface + Conditions in another interface is currently rejected
+ * Proposal is to relax restriction and allow this case, given there is no conflict
+
+* Open problem:
+ * Conditions may currently have side-effects
+ * Delay until Stable Cadence?
+
+* Status:
+ * Discuss and approve asynchronously
+
+## Questions
+
+* Can/should we maybe wipe Testnet?
+ * Many broken/stale accounts and contracts
+ * Hadn’t come up before
+ * Maybe suggest, e.g. in GitHub discussions and/or forum
+ * Big disruption for some users
+ * Iteration broken (but shouldn’t be)
+ * More realistic? There will always be broken code / accounts etc
+ * Want to be able to test if dapp works with broken
+ * Testnet is "staging" for Mainnet
+ * Cannot wipe Mainnet
+ * After Stable Cadence, should be less of an issue
+ * Maybe FLIP? "Governance"?
+ * Should be able to force delete broken values (--> force delete problem, see above: attachment trolling problem)
diff --git a/meetings/2023-06-15.md b/meetings/2023-06-15.md
new file mode 100644
index 0000000000..8f2ff0df6f
--- /dev/null
+++ b/meetings/2023-06-15.md
@@ -0,0 +1,274 @@
+# June 15th, 2023
+
+## FLIPs
+
+### Capability Controllers
+
+* FLIP: [https://github.com/onflow/flow/pull/798](https://github.com/onflow/flow/pull/798)
+
+* Status:
+ * FLIP approved and merged 🎉
+ * Implemented
+ * Deployed on TN
+ * Need documentation
+
+### Entitlements and Safe Downcasting
+
+* FLIP: [https://github.com/onflow/flips/pull/54](https://github.com/onflow/flips/pull/54)
+
+* Status:
+ * FLIP approved and merged 🎉
+ * Implementation almost complete (reference conversion)
+ * Plan for migration: [https://github.com/onflow/flips/pull/95](https://github.com/onflow/flips/pull/95)
+
+* Open problems/concerns:
+ * Migration
+ * Need to migrate values (run-time types) and need to e.g. re-hash in dictionary
+
+* Next steps:
+ * Merge last PR(s)
+ * Preview release for Stable Cadence: Emulator, Playground, etc.
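+
+* Editor's note: a rough, illustrative sketch of the feature for readers new to it (the contract, `Withdraw` entitlement, and `Vault` names are made up, not taken from the FLIP):
+
+  ```cadence
+  access(all) contract Example {
+
+      access(all) entitlement Withdraw
+
+      access(all) resource Vault {
+          // callable only through a reference authorized for Withdraw
+          access(Withdraw) fun withdraw(amount: UFix64) {}
+      }
+  }
+
+  // safe downcast elsewhere: yields nil unless the reference carries the entitlement
+  // let authRef = vaultRef as? auth(Example.Withdraw) &Example.Vault
+  ```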
+
+### Attachments
+
+* FLIP: [https://github.com/onflow/flips/pull/11](https://github.com/onflow/flips/pull/11)
+
+* Forum discussion: [https://forum.onflow.org/t/flip-cadence-extensions-attachments/3645/2](https://forum.onflow.org/t/flip-cadence-extensions-attachments/3645/2)
+
+* Status:
+ * FLIP approved and merged 🎉
+ * Deployed to Testnet
+ * Receiving feedback
+ * Discovering use-cases
+ * Need mitigation for forced resource deletion
+ * Mainnet blocked on solution for forced resource deletion
+ * Had breakout session for resource deletion solution
+ * Have idea for solution, but might depend on breaking change (Stable Cadence)
+
+* Open problem:
+ * Resource deletion, blocking. Working on solution
+ * Maybe deploy to MN without solution?
+
+* Next steps:
+ * Resource deletion solution
+
+### Interface Inheritance
+
+* FLIP: [https://github.com/onflow/flips/pull/40](https://github.com/onflow/flips/pull/40)
+
+* Forum discussion: [https://forum.onflow.org/t/flip-interface-inheritance-in-cadence/3750](https://forum.onflow.org/t/flip-interface-inheritance-in-cadence/3750)
+
+* Status:
+ * FLIP approved and merged 🎉
+ * Implemented
+
+### Interface Conformance Improvements
+
+* FLIP: [https://github.com/onflow/flips/pull/83](https://github.com/onflow/flips/pull/83)
+
+* Overview:
+ * For interface conformance
+ * Allow two interfaces, conditions + default function (currently prohibited)
+
+* Status:
+ * Positive sentiment
+
+* Open questions:
+ * None
+
+* Next steps:
+
+ * **Approve, unless opposition**
+
+### Remove pub, pub(set) and priv
+
+* FLIP: [https://github.com/onflow/flips/pull/84](https://github.com/onflow/flips/pull/84)
+
+* Overview:
+ * Remove access modifiers
+ * Replacements already exist (access(all), access(self))
+
+* Status:
+ * Positive sentiment
+ * Implementation drafted
+
+* Open questions:
+ * None
+
+* Next steps:
+ * **Approve, unless opposition**
+
+### Emit events from function conditions, allow concrete events in interfaces
+
+* FLIP: [https://github.com/onflow/flips/pull/111](https://github.com/onflow/flips/pull/111)
+
+* Overview:
+ * Originally proposed by Deniz
+ * Emit events from conditions
+ * Allow concrete events in interfaces, avoids need for redeclaration
+ * Depends on nested type requirements getting removed
+
+* Status:
+ * Positive sentiment
+
+* Open questions:
+ * None
+
+* Next steps:
+ * Discuss a bit more
+ * Approve, unless opposition
+
+### Remove restricted types
+
+* FLIP: [https://github.com/onflow/flips/pull/85](https://github.com/onflow/flips/pull/85)
+
+* Overview:
+ * Remove restricted types
+ * Used for access control (restricted references)
+ * Redundant with addition of entitlements
+ * Interface sets (intersection types)
+
+* Status:
+ * Positive sentiment
+
+* Open questions:
+ * None
+
+* Next steps:
+ * **Approve, unless opposition**
+
+### Account Type
+
+* FLIP: [https://github.com/onflow/flips/pull/92](https://github.com/onflow/flips/pull/92)
+
+* Overview:
+ * Replace AuthAccount and PublicAccount
+ * Originated in account linking FLIP
+ * Use entitlements
+
+* Status:
+ * Positive sentiment
+
+* Open problems:
+ * None
+ * Migration (effects on existing API, forced breakage)
+ * (Naming)
+
+* Next steps:
+ * Finish design for migration
+ * **Approve, unless opposition**
+
+### External Mutability
+
+* Vision: [https://github.com/onflow/flips/pull/97](https://github.com/onflow/flips/pull/97)
+ * Not a FLIP, but supporting document
+ * Bigger picture for FLIPs
+ * Update on previous proposals
+ * Looking at 
problem again: use entitlements
+
+* FLIPs:
+
+ * **Change member access semantic**
+ * FLIP: [https://github.com/onflow/flips/pull/89](https://github.com/onflow/flips/pull/89)
+
+ * Overview:
+ * Accessing a field on a reference returns a reference
+
+ * Status:
+ * Positive sentiment
+
+ * Open questions:
+ * None
+
+ * Next steps:
+ * **Approve, unless opposition**
+
+ * **Introduce built-in mutability entitlements**
+
+ * FLIP: [https://github.com/onflow/flips/pull/86](https://github.com/onflow/flips/pull/86)
+
+ * Overview:
+ * Built-in entitlements for mutating functions of containers (arrays, dictionaries)
+ * Can also be used by user-defined types
+
+ * Status:
+ * Positive sentiment
+
+ * Open questions:
+ * Naming
+
+ * Next steps:
+ * Maybe approve and refactor names later if needed (before release)
+ * **Approve, unless opposition**
+
+ * **Improve entitlement mappings**
+
+ * FLIP: [https://github.com/onflow/flips/pull/94](https://github.com/onflow/flips/pull/94)
+
+ * Overview:
+ * Allow entitlement mappings for non-reference fields
+ * Improves "Change member access semantics" (FLIP 89)
+
+ * Status:
+ * Positive sentiment
+
+ * Open questions:
+ * None
+
+ * Next steps:
+ * **Approve, unless opposition**
+
+### Add Range type
+
+* FLIP: [https://github.com/onflow/flips/pull/96](https://github.com/onflow/flips/pull/96)
+
+* Status:
+ * Design almost complete
+ * Implementation in progress
+ * Looking for feedback
+ * Positive sentiment, but details unclear
+
+* Open questions:
+ * Multiple types?
+
+* Next steps:
+ * Complete
+ * **Approve, unless opposition**
+
+### Extend transaction format
+
+* FLIP: [https://github.com/onflow/flips/pull/41](https://github.com/onflow/flips/pull/41)
+
+* Status:
+ * No updates at the moment
+ * Last breakout session was a while back
+ * Planning to restart working group
+
+* Open problems:
+
+ * Unexplored corner:
+ * Useful for multi-sign use-cases
+ * Does model also make sense for single-sign use-cases?
+
+ * Signer assignment
+ * Mapping signatures to roles
+ * Currently Flow transaction signatures are sequential
+
+ * Action-based model?
+
+* Next steps:
+ * Have another breakout session
+ * Contact Jeff
+
+## Questions/Feedback
+
+* Indicate feature support in CLI
+
+* Applications to demo changes and additions
+
+ * Candidates:
+ * Standards (FT, NFT)
+ * Core contracts
+ * Kitty Items (e.g. 
for marketplace) + * NFT Pawn Shop + * Asset Handover diff --git a/npm-packages/cadence-parser/package.json b/npm-packages/cadence-parser/package.json index 325ca99a4b..c33546050b 100644 --- a/npm-packages/cadence-parser/package.json +++ b/npm-packages/cadence-parser/package.json @@ -1,6 +1,6 @@ { "name": "@onflow/cadence-parser", - "version": "0.38.0", + "version": "0.39.12", "description": "The Cadence parser", "homepage": "https://github.com/onflow/cadence", "repository": { diff --git a/runtime/account_test.go b/runtime/account_test.go index 4f9a189a66..6089428707 100644 --- a/runtime/account_test.go +++ b/runtime/account_test.go @@ -517,7 +517,7 @@ func TestRuntimeAuthAccountKeysAdd(t *testing.T) { ) assert.EqualValues(t, - stdlib.AccountKeyAddedEventType.ID(), + stdlib.AccountKeyAddedFromPublicKeyEventType.ID(), storage.events[1].Type().ID(), ) } diff --git a/runtime/attachments_test.go b/runtime/attachments_test.go index 9bfb3e0d76..7a05f902df 100644 --- a/runtime/attachments_test.go +++ b/runtime/attachments_test.go @@ -227,6 +227,7 @@ func TestAccountAttachmentExportFailure(t *testing.T) { } func TestAccountAttachmentExport(t *testing.T) { + t.Parallel() storage := newTestLedger(nil, nil) @@ -311,6 +312,7 @@ func TestAccountAttachmentExport(t *testing.T) { } func TestAccountAttachedExport(t *testing.T) { + t.Parallel() storage := newTestLedger(nil, nil) diff --git a/runtime/cmd/check/main.go b/runtime/cmd/check/main.go index 9f6d2c7821..4566d7e118 100644 --- a/runtime/cmd/check/main.go +++ b/runtime/cmd/check/main.go @@ -91,7 +91,7 @@ func main() { } type benchResult struct { - // N is the the number of iterations + // N is the number of iterations Iterations int `json:"iterations"` // T is the total time taken Time time.Duration `json:"time"` diff --git a/runtime/cmd/parse/main.go b/runtime/cmd/parse/main.go index 29e5a01546..b1afb4f93d 100644 --- a/runtime/cmd/parse/main.go +++ b/runtime/cmd/parse/main.go @@ -50,7 +50,7 @@ func main() { } type benchResult struct { - // N is the the number of iterations + // N is the number of iterations Iterations int `json:"iterations"` // T is the total time taken Time time.Duration `json:"time"` diff --git a/runtime/convertTypes.go b/runtime/convertTypes.go index 0788155665..13b9810f2b 100644 --- a/runtime/convertTypes.go +++ b/runtime/convertTypes.go @@ -125,6 +125,8 @@ func ExportMeteredType( return cadence.TheWord64Type case sema.Word128Type: return cadence.TheWord128Type + case sema.Word256Type: + return cadence.TheWord256Type case sema.Fix64Type: return cadence.TheFix64Type case sema.UFix64Type: @@ -650,6 +652,8 @@ func ImportType(memoryGauge common.MemoryGauge, t cadence.Type) interpreter.Stat return interpreter.NewPrimitiveStaticType(memoryGauge, interpreter.PrimitiveStaticTypeWord64) case cadence.Word128Type: return interpreter.NewPrimitiveStaticType(memoryGauge, interpreter.PrimitiveStaticTypeWord128) + case cadence.Word256Type: + return interpreter.NewPrimitiveStaticType(memoryGauge, interpreter.PrimitiveStaticTypeWord256) case cadence.Fix64Type: return interpreter.NewPrimitiveStaticType(memoryGauge, interpreter.PrimitiveStaticTypeFix64) case cadence.UFix64Type: diff --git a/runtime/convertValues.go b/runtime/convertValues.go index 6d3fbd3ca4..82a2caa0c7 100644 --- a/runtime/convertValues.go +++ b/runtime/convertValues.go @@ -191,6 +191,13 @@ func exportValueWithInterpreter( return v.ToBigInt(inter) }, ) + case interpreter.Word256Value: + return cadence.NewMeteredWord256FromBig( + inter, + func() *big.Int { + return v.ToBigInt(inter) 
+ }, + ) case interpreter.Fix64Value: return cadence.Fix64(v), nil case interpreter.UFix64Value: @@ -802,6 +809,8 @@ func (i valueImporter) importValue(value cadence.Value, expectedType sema.Type) return i.importWord64(v), nil case cadence.Word128: return i.importWord128(v), nil + case cadence.Word256: + return i.importWord256(v), nil case cadence.Fix64: return i.importFix64(v), nil case cadence.UFix64: @@ -1051,6 +1060,15 @@ func (i valueImporter) importWord128(v cadence.Word128) interpreter.Word128Value ) } +func (i valueImporter) importWord256(v cadence.Word256) interpreter.Word256Value { + return interpreter.NewWord256ValueFromBigInt( + i.inter, + func() *big.Int { + return v.Value + }, + ) +} + func (i valueImporter) importFix64(v cadence.Fix64) interpreter.Fix64Value { return interpreter.NewFix64Value( i.inter, diff --git a/runtime/convertValues_test.go b/runtime/convertValues_test.go index 74ca3facf8..9b584041ee 100644 --- a/runtime/convertValues_test.go +++ b/runtime/convertValues_test.go @@ -374,6 +374,11 @@ func TestExportValue(t *testing.T) { value: interpreter.NewUnmeteredWord128ValueFromUint64(42), expected: cadence.NewWord128(42), }, + { + label: "Word256", + value: interpreter.NewUnmeteredWord256ValueFromUint64(42), + expected: cadence.NewWord256(42), + }, { label: "Fix64", value: interpreter.NewUnmeteredFix64Value(-123000000), @@ -813,6 +818,11 @@ func TestImportValue(t *testing.T) { value: cadence.NewWord128(42), expected: interpreter.NewUnmeteredWord128ValueFromUint64(42), }, + { + label: "Word256", + value: cadence.NewWord256(42), + expected: interpreter.NewUnmeteredWord256ValueFromUint64(42), + }, { label: "Fix64", value: cadence.Fix64(-123000000), @@ -1088,6 +1098,11 @@ func TestImportRuntimeType(t *testing.T) { actual: cadence.Word128Type{}, expected: interpreter.PrimitiveStaticTypeWord128, }, + { + label: "Word256", + actual: cadence.Word256Type{}, + expected: interpreter.PrimitiveStaticTypeWord256, + }, { label: "Fix64", actual: cadence.Fix64Type{}, @@ -1498,8 +1513,11 @@ func TestExportStructValue(t *testing.T) { } actual := exportValueFromScript(t, script) - expected := cadence.NewStruct([]cadence.Value{cadence.NewInt(42)}). 
- WithType(fooStructType) + expected := cadence.ValueWithCachedTypeID( + cadence.NewStruct([]cadence.Value{ + cadence.NewInt(42), + }).WithType(fooStructType), + ) assert.Equal(t, expected, actual) } @@ -1523,11 +1541,12 @@ func TestExportResourceValue(t *testing.T) { ` actual := exportValueFromScript(t, script) - expected := + expected := cadence.ValueWithCachedTypeID( cadence.NewResource([]cadence.Value{ cadence.NewUInt64(0), cadence.NewInt(42), - }).WithType(newFooResourceType()) + }).WithType(newFooResourceType()), + ) assert.Equal(t, expected, actual) } @@ -1552,32 +1571,37 @@ func TestExportResourceArrayValue(t *testing.T) { fooResourceType := newFooResourceType() - actual := exportValueFromScript(t, script) - expected := cadence.NewArray([]cadence.Value{ - cadence.NewResource([]cadence.Value{ - cadence.NewUInt64(0), - cadence.NewInt(1), - }).WithType(fooResourceType), - cadence.NewResource([]cadence.Value{ - cadence.NewUInt64(0), - cadence.NewInt(2), - }).WithType(fooResourceType), - }).WithType(&cadence.VariableSizedArrayType{ - ElementType: &cadence.ResourceType{ - Location: common.ScriptLocation{}, - QualifiedIdentifier: "Foo", - Fields: []cadence.Field{ - { - Identifier: "uuid", - Type: cadence.UInt64Type{}, - }, - { - Identifier: "bar", - Type: cadence.IntType{}, + actual := cadence.ValueWithCachedTypeID( + exportValueFromScript(t, script), + ) + + expected := cadence.ValueWithCachedTypeID( + cadence.NewArray([]cadence.Value{ + cadence.NewResource([]cadence.Value{ + cadence.NewUInt64(0), + cadence.NewInt(1), + }).WithType(fooResourceType), + cadence.NewResource([]cadence.Value{ + cadence.NewUInt64(0), + cadence.NewInt(2), + }).WithType(fooResourceType), + }).WithType(&cadence.VariableSizedArrayType{ + ElementType: &cadence.ResourceType{ + Location: common.ScriptLocation{}, + QualifiedIdentifier: "Foo", + Fields: []cadence.Field{ + { + Identifier: "uuid", + Type: cadence.UInt64Type{}, + }, + { + Identifier: "bar", + Type: cadence.IntType{}, + }, }, }, - }, - }) + }), + ) assert.Equal(t, expected, actual) } @@ -1605,39 +1629,44 @@ func TestExportResourceDictionaryValue(t *testing.T) { fooResourceType := newFooResourceType() - actual := exportValueFromScript(t, script) - expected := cadence.NewDictionary([]cadence.KeyValuePair{ - { - Key: cadence.String("b"), - Value: cadence.NewResource([]cadence.Value{ - cadence.NewUInt64(0), - cadence.NewInt(2), - }).WithType(fooResourceType), - }, - { - Key: cadence.String("a"), - Value: cadence.NewResource([]cadence.Value{ - cadence.NewUInt64(0), - cadence.NewInt(1), - }).WithType(fooResourceType), - }, - }).WithType(&cadence.DictionaryType{ - KeyType: cadence.StringType{}, - ElementType: &cadence.ResourceType{ - Location: common.ScriptLocation{}, - QualifiedIdentifier: "Foo", - Fields: []cadence.Field{ - { - Identifier: "uuid", - Type: cadence.UInt64Type{}, - }, - { - Identifier: "bar", - Type: cadence.IntType{}, + actual := cadence.ValueWithCachedTypeID( + exportValueFromScript(t, script), + ) + + expected := cadence.ValueWithCachedTypeID( + cadence.NewDictionary([]cadence.KeyValuePair{ + { + Key: cadence.String("b"), + Value: cadence.NewResource([]cadence.Value{ + cadence.NewUInt64(0), + cadence.NewInt(2), + }).WithType(fooResourceType), + }, + { + Key: cadence.String("a"), + Value: cadence.NewResource([]cadence.Value{ + cadence.NewUInt64(0), + cadence.NewInt(1), + }).WithType(fooResourceType), + }, + }).WithType(&cadence.DictionaryType{ + KeyType: cadence.StringType{}, + ElementType: &cadence.ResourceType{ + Location: 
common.ScriptLocation{}, + QualifiedIdentifier: "Foo", + Fields: []cadence.Field{ + { + Identifier: "uuid", + Type: cadence.UInt64Type{}, + }, + { + Identifier: "bar", + Type: cadence.IntType{}, + }, }, }, - }, - }) + }), + ) assert.Equal(t, expected, actual) } @@ -1702,14 +1731,18 @@ func TestExportNestedResourceValueFromScript(t *testing.T) { } ` - actual := exportValueFromScript(t, script) - expected := cadence.NewResource([]cadence.Value{ - cadence.NewUInt64(0), + actual := cadence.ValueWithCachedTypeID( + exportValueFromScript(t, script), + ) + expected := cadence.ValueWithCachedTypeID( cadence.NewResource([]cadence.Value{ cadence.NewUInt64(0), - cadence.NewInt(42), - }).WithType(barResourceType), - }).WithType(fooResourceType) + cadence.NewResource([]cadence.Value{ + cadence.NewUInt64(0), + cadence.NewInt(42), + }).WithType(barResourceType), + }).WithType(fooResourceType), + ) assert.Equal(t, expected, actual) } @@ -2478,15 +2511,20 @@ func TestExportCompositeValueWithFunctionValueField(t *testing.T) { }, } - actual := exportValueFromScript(t, script) - expected := cadence.NewStruct([]cadence.Value{ - cadence.NewInt(42), - cadence.Function{ - FunctionType: &cadence.FunctionType{ - ReturnType: cadence.VoidType{}, + actual := cadence.ValueWithCachedTypeID( + exportValueFromScript(t, script), + ) + + expected := cadence.ValueWithCachedTypeID( + cadence.NewStruct([]cadence.Value{ + cadence.NewInt(42), + cadence.Function{ + FunctionType: &cadence.FunctionType{ + ReturnType: cadence.VoidType{}, + }, }, - }, - }).WithType(fooStructType) + }).WithType(fooStructType), + ) assert.Equal(t, expected, actual) } @@ -2573,24 +2611,28 @@ func TestRuntimeEnumValue(t *testing.T) { t.Parallel() - enumValue := cadence.Enum{ - EnumType: &cadence.EnumType{ - Location: common.ScriptLocation{}, - QualifiedIdentifier: "Direction", - Fields: []cadence.Field{ - { - Identifier: sema.EnumRawValueFieldName, - Type: cadence.IntType{}, + newEnumValue := func() cadence.Enum { + return cadence.Enum{ + EnumType: &cadence.EnumType{ + Location: common.ScriptLocation{}, + QualifiedIdentifier: "Direction", + Fields: []cadence.Field{ + { + Identifier: sema.EnumRawValueFieldName, + Type: cadence.IntType{}, + }, }, + RawType: cadence.IntType{}, }, - RawType: cadence.IntType{}, - }, - Fields: []cadence.Value{ - cadence.NewInt(3), - }, + Fields: []cadence.Value{ + cadence.NewInt(3), + }, + } } t.Run("test export", func(t *testing.T) { + t.Parallel() + script := ` access(all) fun main(): Direction { return Direction.RIGHT @@ -2604,11 +2646,18 @@ func TestRuntimeEnumValue(t *testing.T) { } ` + expected := newEnumValue() actual := exportValueFromScript(t, script) - assert.Equal(t, enumValue, actual) + + assert.Equal(t, + cadence.ValueWithCachedTypeID(expected), + cadence.ValueWithCachedTypeID(actual), + ) }) t.Run("test import", func(t *testing.T) { + t.Parallel() + script := ` access(all) fun main(dir: Direction): Direction { if !dir.isInstance(Type()) { @@ -2626,9 +2675,11 @@ func TestRuntimeEnumValue(t *testing.T) { } ` - actual, err := executeTestScript(t, script, enumValue) + expected := newEnumValue() + actual, err := executeTestScript(t, script, expected) require.NoError(t, err) - assert.Equal(t, enumValue, actual) + + assert.Equal(t, expected, actual) }) } @@ -2833,6 +2884,11 @@ func TestRuntimeArgumentPassing(t *testing.T) { typeSignature: "Word128", exportedValue: cadence.NewWord128(42), }, + { + label: "Word256", + typeSignature: "Word256", + exportedValue: cadence.NewWord256(42), + }, { label: "Fix64", typeSignature: 
"Fix64", @@ -3192,107 +3248,119 @@ func TestRuntimeMalformedArgumentPassing(t *testing.T) { // Struct with wrong field type - malformedStructType1 := &cadence.StructType{ - Location: common.ScriptLocation{}, - QualifiedIdentifier: "Foo", - Fields: []cadence.Field{ - { - Identifier: "a", - Type: cadence.IntType{}, + newMalformedStructType1 := func() *cadence.StructType { + return &cadence.StructType{ + Location: common.ScriptLocation{}, + QualifiedIdentifier: "Foo", + Fields: []cadence.Field{ + { + Identifier: "a", + Type: cadence.IntType{}, + }, }, - }, + } } - malformedStruct1 := cadence.Struct{ - StructType: malformedStructType1, - Fields: []cadence.Value{ - cadence.NewInt(3), - }, + newMalformedStruct1 := func() cadence.Struct { + return cadence.Struct{ + StructType: newMalformedStructType1(), + Fields: []cadence.Value{ + cadence.NewInt(3), + }, + } } // Struct with wrong field name - malformedStruct2 := cadence.Struct{ - StructType: &cadence.StructType{ - Location: common.ScriptLocation{}, - QualifiedIdentifier: "Foo", - Fields: []cadence.Field{ - { - Identifier: "nonExisting", - Type: cadence.StringType{}, + newMalformedStruct2 := func() cadence.Struct { + return cadence.Struct{ + StructType: &cadence.StructType{ + Location: common.ScriptLocation{}, + QualifiedIdentifier: "Foo", + Fields: []cadence.Field{ + { + Identifier: "nonExisting", + Type: cadence.StringType{}, + }, }, }, - }, - Fields: []cadence.Value{ - cadence.String("John"), - }, + Fields: []cadence.Value{ + cadence.String("John"), + }, + } } // Struct with nested malformed array value - malformedStruct3 := cadence.Struct{ - StructType: &cadence.StructType{ - Location: common.ScriptLocation{}, - QualifiedIdentifier: "Bar", - Fields: []cadence.Field{ - { - Identifier: "a", - Type: &cadence.VariableSizedArrayType{ - ElementType: malformedStructType1, + newMalformedStruct3 := func() cadence.Struct { + return cadence.Struct{ + StructType: &cadence.StructType{ + Location: common.ScriptLocation{}, + QualifiedIdentifier: "Bar", + Fields: []cadence.Field{ + { + Identifier: "a", + Type: &cadence.VariableSizedArrayType{ + ElementType: newMalformedStructType1(), + }, }, }, }, - }, - Fields: []cadence.Value{ - cadence.NewArray([]cadence.Value{ - malformedStruct1, - }), - }, + Fields: []cadence.Value{ + cadence.NewArray([]cadence.Value{ + newMalformedStruct1(), + }), + }, + } } // Struct with nested malformed dictionary value - malformedStruct4 := cadence.Struct{ - StructType: &cadence.StructType{ - Location: common.ScriptLocation{}, - QualifiedIdentifier: "Baz", - Fields: []cadence.Field{ - { - Identifier: "a", - Type: &cadence.DictionaryType{ - KeyType: cadence.StringType{}, - ElementType: malformedStructType1, + newMalformedStruct4 := func() cadence.Struct { + return cadence.Struct{ + StructType: &cadence.StructType{ + Location: common.ScriptLocation{}, + QualifiedIdentifier: "Baz", + Fields: []cadence.Field{ + { + Identifier: "a", + Type: &cadence.DictionaryType{ + KeyType: cadence.StringType{}, + ElementType: newMalformedStructType1(), + }, }, }, }, - }, - Fields: []cadence.Value{ - cadence.NewDictionary([]cadence.KeyValuePair{ - { - Key: cadence.String("foo"), - Value: malformedStruct1, - }, - }), - }, + Fields: []cadence.Value{ + cadence.NewDictionary([]cadence.KeyValuePair{ + { + Key: cadence.String("foo"), + Value: newMalformedStruct1(), + }, + }), + }, + } } // Struct with nested array with mismatching element type - malformedStruct5 := cadence.Struct{ - StructType: &cadence.StructType{ - Location: common.ScriptLocation{}, - 
QualifiedIdentifier: "Bar", - Fields: []cadence.Field{ - { - Identifier: "a", - Type: &cadence.VariableSizedArrayType{ - ElementType: malformedStructType1, + newMalformedStruct5 := func() cadence.Struct { + return cadence.Struct{ + StructType: &cadence.StructType{ + Location: common.ScriptLocation{}, + QualifiedIdentifier: "Bar", + Fields: []cadence.Field{ + { + Identifier: "a", + Type: &cadence.VariableSizedArrayType{ + ElementType: newMalformedStructType1(), + }, }, }, }, - }, - Fields: []cadence.Value{ - cadence.NewArray([]cadence.Value{ - cadence.String("mismatching value"), - }), - }, + Fields: []cadence.Value{ + cadence.NewArray([]cadence.Value{ + cadence.String("mismatching value"), + }), + }, + } } type argumentPassingTest struct { @@ -3307,38 +3375,38 @@ func TestRuntimeMalformedArgumentPassing(t *testing.T) { { label: "Malformed Struct field type", typeSignature: "Foo", - exportedValue: malformedStruct1, + exportedValue: newMalformedStruct1(), expectedInvalidEntryPointArgumentErrType: &MalformedValueError{}, }, { label: "Malformed Struct field name", typeSignature: "Foo", - exportedValue: malformedStruct2, + exportedValue: newMalformedStruct2(), expectedInvalidEntryPointArgumentErrType: &MalformedValueError{}, }, { label: "Malformed AnyStruct", typeSignature: "AnyStruct", - exportedValue: malformedStruct1, + exportedValue: newMalformedStruct1(), expectedInvalidEntryPointArgumentErrType: &MalformedValueError{}, }, { label: "Malformed nested struct array", typeSignature: "Bar", - exportedValue: malformedStruct3, + exportedValue: newMalformedStruct3(), expectedInvalidEntryPointArgumentErrType: &MalformedValueError{}, }, { label: "Malformed nested struct dictionary", typeSignature: "Baz", - exportedValue: malformedStruct4, + exportedValue: newMalformedStruct4(), expectedInvalidEntryPointArgumentErrType: &MalformedValueError{}, }, { label: "Variable-size array with malformed element", typeSignature: "[Foo]", exportedValue: cadence.NewArray([]cadence.Value{ - malformedStruct1, + newMalformedStruct1(), }), expectedInvalidEntryPointArgumentErrType: &MalformedValueError{}, }, @@ -3346,7 +3414,7 @@ func TestRuntimeMalformedArgumentPassing(t *testing.T) { label: "Constant-size array with malformed element", typeSignature: "[Foo; 1]", exportedValue: cadence.NewArray([]cadence.Value{ - malformedStruct1, + newMalformedStruct1(), }), expectedInvalidEntryPointArgumentErrType: &MalformedValueError{}, }, @@ -3381,13 +3449,13 @@ func TestRuntimeMalformedArgumentPassing(t *testing.T) { { label: "Inner array with mismatching element", typeSignature: "Bar", - exportedValue: malformedStruct5, + exportedValue: newMalformedStruct5(), expectedInvalidEntryPointArgumentErrType: &MalformedValueError{}, }, { label: "Malformed Optional", typeSignature: "Foo?", - exportedValue: cadence.NewOptional(malformedStruct1), + exportedValue: cadence.NewOptional(newMalformedStruct1()), expectedInvalidEntryPointArgumentErrType: &MalformedValueError{}, }, { @@ -3396,7 +3464,7 @@ func TestRuntimeMalformedArgumentPassing(t *testing.T) { exportedValue: cadence.NewDictionary([]cadence.KeyValuePair{ { Key: cadence.String("foo"), - Value: malformedStruct1, + Value: newMalformedStruct1(), }, }), expectedInvalidEntryPointArgumentErrType: &MalformedValueError{}, diff --git a/runtime/coverage.go b/runtime/coverage.go index 529e6cab5b..a39e560c23 100644 --- a/runtime/coverage.go +++ b/runtime/coverage.go @@ -19,6 +19,7 @@ package runtime import ( + "bytes" "encoding/json" "fmt" "sort" @@ -64,6 +65,7 @@ func (c *LocationCoverage) 
Percentage() string { // location. coveredLines = c.Statements } + percentage := 100 * float64(coveredLines) / float64(c.Statements) return fmt.Sprintf("%0.1f%%", percentage) } @@ -236,15 +238,22 @@ func (r *CoverageReport) IsLocationInspected(location Location) bool { func (r *CoverageReport) Percentage() string { totalStatements := r.Statements() totalCoveredLines := r.Hits() + var percentage float64 = 100 + if totalStatements != 0 { + percentage = 100 * float64(totalCoveredLines) / float64(totalStatements) + } return fmt.Sprintf( "%0.1f%%", - 100*float64(totalCoveredLines)/float64(totalStatements), + percentage, ) } // String returns a human-friendly message for the covered // statements percentage. func (r *CoverageReport) String() string { + if r.Statements() == 0 { + return "There are no statements to cover" + } return fmt.Sprintf("Coverage: %v of statements", r.Percentage()) } @@ -475,3 +484,58 @@ func (r *CoverageReport) UnmarshalJSON(data []byte) error { return nil } + +// MarshalLCOV serializes each common.Location/*LocationCoverage +// key/value pair on the *CoverageReport.Coverage map, to the +// LCOV format. Currently supports only line coverage, function +// and branch coverage are not yet available. +// Description for the LCOV file format, can be found here +// https://github.com/linux-test-project/lcov/blob/master/man/geninfo.1#L948. +func (r *CoverageReport) MarshalLCOV() ([]byte, error) { + i := 0 + locations := make([]common.Location, len(r.Coverage)) + for location := range r.Coverage { // nolint:maprange + locations[i] = location + i++ + } + sort.Slice(locations, func(i, j int) bool { + return locations[i].ID() < locations[j].ID() + }) + + buf := new(bytes.Buffer) + for _, location := range locations { + coverage := r.Coverage[location] + _, err := fmt.Fprintf(buf, "TN:\nSF:%s\n", location.ID()) + if err != nil { + return nil, err + } + + i := 0 + lines := make([]int, len(coverage.LineHits)) + for line := range coverage.LineHits { // nolint:maprange + lines[i] = line + i++ + } + sort.Ints(lines) + + for _, line := range lines { + hits := coverage.LineHits[line] + _, err = fmt.Fprintf(buf, "DA:%v,%v\n", line, hits) + if err != nil { + return nil, err + } + } + + _, err = fmt.Fprintf( + buf, + "LF:%v\nLH:%v\nend_of_record\n", + coverage.Statements, + coverage.CoveredLines(), + ) + if err != nil { + return nil, err + } + } + + return buf.Bytes(), nil +} diff --git a/runtime/coverage_test.go b/runtime/coverage_test.go index 7e3ff5d548..c28c55574d 100644 --- a/runtime/coverage_test.go +++ b/runtime/coverage_test.go @@ -1648,3 +1648,208 @@ func TestRuntimeCoverageWithLocationFilter(t *testing.T) { coverageReport.String(), ) } + +func TestRuntimeCoverageWithNoStatements(t *testing.T) { + + t.Parallel() + + importedScript := []byte(` + access(all) contract FooContract { + access(all) resource interface Receiver { + } + } + `) + + script := []byte(` + import "FooContract" + access(all) fun main(): Int { + Type<@{FooContract.Receiver}>().identifier + return 42 + } + `) + + coverageReport := NewCoverageReport() + + scriptlocation := common.ScriptLocation{0x1b, 0x2c} + + runtimeInterface := &testRuntimeInterface{ + getCode: func(location Location) (bytes []byte, err error) { + switch location { + case common.StringLocation("FooContract"): + return importedScript, nil + default: + return nil, fmt.Errorf("unknown import location: %s", location) + } + }, + } + runtime := NewInterpreterRuntime(Config{ + CoverageReport: coverageReport, + }) + 
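// Usage sketch for the new MarshalLCOV (the report parameter, the helper name, and
// the output path are illustrative, not part of the change; assumes the standard
// library "os" import): serialize the coverage report to the LCOV text format so
// tools such as lcov/genhtml can render it.
func writeLCOV(report *CoverageReport) error {
	data, err := report.MarshalLCOV()
	if err != nil {
		return err
	}
	return os.WriteFile("coverage.lcov", data, 0o644)
}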
coverageReport.ExcludeLocation(scriptlocation) + value, err := runtime.ExecuteScript( + Script{ + Source: script, + }, + Context{ + Interface: runtimeInterface, + Location: scriptlocation, + CoverageReport: coverageReport, + }, + ) + require.NoError(t, err) + + assert.Equal(t, cadence.NewInt(42), value) + + _, err = json.Marshal(coverageReport) + require.NoError(t, err) + + assert.Equal( + t, + "There are no statements to cover", + coverageReport.String(), + ) + + summary := coverageReport.Summary() + + actual, err := json.Marshal(summary) + require.NoError(t, err) + + expected := ` + { + "coverage": "100.0%", + "hits": 0, + "locations": 0, + "misses": 0, + "statements": 0 + } + ` + require.JSONEq(t, expected, string(actual)) +} + +func TestCoverageReportLCOVFormat(t *testing.T) { + + t.Parallel() + + integerTraits := []byte(` + access(all) let specialNumbers: {Int: String} = { + 1729: "Harshad", + 8128: "Harmonic", + 41041: "Carmichael" + } + + access(all) fun addSpecialNumber(_ n: Int, _ trait: String) { + specialNumbers[n] = trait + } + + access(all) fun getIntegerTrait(_ n: Int): String { + if n < 0 { + return "Negative" + } else if n == 0 { + return "Zero" + } else if n < 10 { + return "Small" + } else if n < 100 { + return "Big" + } else if n < 1000 { + return "Huge" + } + + if specialNumbers.containsKey(n) { + return specialNumbers[n]! + } + + return "Enormous" + } + `) + + script := []byte(` + import "IntegerTraits" + + access(all) fun main(): Int { + let testInputs: {Int: String} = { + -1: "Negative", + 0: "Zero", + 9: "Small", + 99: "Big", + 999: "Huge", + 1001: "Enormous", + 1729: "Harshad", + 8128: "Harmonic", + 41041: "Carmichael" + } + + for input in testInputs.keys { + let result = getIntegerTrait(input) + assert(result == testInputs[input]) + } + + addSpecialNumber(78557, "Sierpinski") + assert("Sierpinski" == getIntegerTrait(78557)) + + return 42 + } + `) + + coverageReport := NewCoverageReport() + scriptlocation := common.ScriptLocation{} + coverageReport.ExcludeLocation(scriptlocation) + + runtimeInterface := &testRuntimeInterface{ + getCode: func(location Location) (bytes []byte, err error) { + switch location { + case common.StringLocation("IntegerTraits"): + return integerTraits, nil + default: + return nil, fmt.Errorf("unknown import location: %s", location) + } + }, + } + + runtime := newTestInterpreterRuntime() + runtime.defaultConfig.CoverageReport = coverageReport + + value, err := runtime.ExecuteScript( + Script{ + Source: script, + }, + Context{ + Interface: runtimeInterface, + Location: scriptlocation, + CoverageReport: coverageReport, + }, + ) + require.NoError(t, err) + + assert.Equal(t, cadence.NewInt(42), value) + + actual, err := coverageReport.MarshalLCOV() + require.NoError(t, err) + + expected := `TN: +SF:S.IntegerTraits +DA:9,1 +DA:13,10 +DA:14,1 +DA:15,9 +DA:16,1 +DA:17,8 +DA:18,1 +DA:19,7 +DA:20,1 +DA:21,6 +DA:22,1 +DA:25,5 +DA:26,4 +DA:29,1 +LF:14 +LH:14 +end_of_record +` + require.Equal(t, expected, string(actual)) + + assert.Equal( + t, + "Coverage: 100.0% of statements", + coverageReport.String(), + ) +} diff --git a/runtime/deployment_test.go b/runtime/deployment_test.go index 68c306818e..8e9202905a 100644 --- a/runtime/deployment_test.go +++ b/runtime/deployment_test.go @@ -74,16 +74,25 @@ func TestRuntimeTransactionWithContractDeployment(t *testing.T) { inter := newTestInterpreter(t) + require.Equal(t, + ImportType(inter, codeHashValue.Type()), + interpreter.ConvertSemaToStaticType(inter, 
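// Reading aid for the expected LCOV output above (a sketch; parseDARecord is a
// hypothetical helper): every DA record is "DA:<line>,<hits>", LF is the number of
// instrumented statements, and LH is the number of them hit at least once, which is
// why LF and LH are both 14 for a fully covered contract.
func parseDARecord(record string) (line, hits int, err error) {
	_, err = fmt.Sscanf(record, "DA:%d,%d", &line, &hits)
	return
}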
stdlib.AccountEventCodeHashParameter.TypeAnnotation.Type), + ) + codeHash, err := ImportValue( inter, interpreter.EmptyLocationRange, nil, codeHashValue, - sema.ByteArrayType, + stdlib.HashType, ) require.NoError(t, err) - actualCodeHash, err := interpreter.ByteArrayValueToByteSlice(inter, codeHash, interpreter.EmptyLocationRange) + actualCodeHash, err := interpreter.ByteArrayValueToByteSlice( + inter, + codeHash, + interpreter.EmptyLocationRange, + ) require.NoError(t, err) require.Equal(t, expectedCodeHash[:], actualCodeHash) diff --git a/runtime/environment.go b/runtime/environment.go index 619475b638..784b8c0f96 100644 --- a/runtime/environment.go +++ b/runtime/environment.go @@ -136,6 +136,7 @@ func (e *interpreterEnvironment) newInterpreterConfig() *interpreter.Config { AuthAccountHandler: e.newAuthAccountHandler(), OnRecordTrace: e.newOnRecordTraceHandler(), OnResourceOwnerChange: e.newResourceOwnerChangedHandler(), + CompositeTypeHandler: e.newCompositeTypeHandler(), TracingEnabled: e.config.TracingEnabled, AtreeValueValidationEnabled: e.config.AtreeValidationEnabled, // NOTE: ignore e.config.AtreeValidationEnabled here, @@ -863,6 +864,16 @@ func (e *interpreterEnvironment) newImportLocationHandler() interpreter.ImportLo } } +func (e *interpreterEnvironment) newCompositeTypeHandler() interpreter.CompositeTypeHandlerFunc { + return func(location common.Location, typeID common.TypeID) *sema.CompositeType { + if _, ok := location.(stdlib.FlowLocation); ok { + return stdlib.FlowEventTypes[typeID] + } + + return nil + } +} + func (e *interpreterEnvironment) loadContract( inter *interpreter.Interpreter, compositeType *sema.CompositeType, diff --git a/runtime/imported_values_memory_metering_test.go b/runtime/imported_values_memory_metering_test.go index 63eaaa9911..998179b922 100644 --- a/runtime/imported_values_memory_metering_test.go +++ b/runtime/imported_values_memory_metering_test.go @@ -334,6 +334,18 @@ func TestImportedValueMemoryMetering(t *testing.T) { assert.Equal(t, uint64(16), meter[common.MemoryKindBigInt]) }) + t.Run("Word256", func(t *testing.T) { + t.Parallel() + + script := []byte(` + access(all) fun main(x: Word256) {} + `) + + meter := make(map[common.MemoryKind]uint64) + executeScript(t, script, meter, cadence.NewWord256(2)) + assert.Equal(t, uint64(32), meter[common.MemoryKindBigInt]) + }) + t.Run("Fix64", func(t *testing.T) { t.Parallel() diff --git a/runtime/interpreter/config.go b/runtime/interpreter/config.go index 88092a729f..cc5262da5d 100644 --- a/runtime/interpreter/config.go +++ b/runtime/interpreter/config.go @@ -48,9 +48,11 @@ type Config struct { // AuthAccountHandler is used to handle accounts AuthAccountHandler AuthAccountHandlerFunc // UUIDHandler is used to handle the generation of UUIDs - UUIDHandler UUIDHandlerFunc - BaseActivation *VariableActivation - Debugger *Debugger + UUIDHandler UUIDHandlerFunc + // CompositeTypeHandler is used to load composite types + CompositeTypeHandler CompositeTypeHandlerFunc + BaseActivation *VariableActivation + Debugger *Debugger // OnStatement is triggered when a statement is about to be executed OnStatement OnStatementFunc // OnLoopIteration is triggered when a loop iteration is about to be executed diff --git a/runtime/interpreter/conversion.go b/runtime/interpreter/conversion.go index 525054a31e..448462acca 100644 --- a/runtime/interpreter/conversion.go +++ b/runtime/interpreter/conversion.go @@ -25,7 +25,7 @@ import ( "github.com/onflow/cadence/runtime/errors" ) -func ByteArrayValueToByteSlice(memoryGauge 
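// Sketch of how an embedder could wire the new CompositeTypeHandler directly into
// an interpreter.Config, mirroring newCompositeTypeHandler above (the constructor
// name is hypothetical). The handler is only consulted when a type cannot be
// resolved from an elaboration, and must return nil for unknown type IDs so the
// normal TypeLoadingError path still applies.
func newConfigWithFlowEventTypes() *interpreter.Config {
	return &interpreter.Config{
		CompositeTypeHandler: func(location common.Location, typeID common.TypeID) *sema.CompositeType {
			if _, ok := location.(stdlib.FlowLocation); ok {
				return stdlib.FlowEventTypes[typeID]
			}
			return nil
		},
	}
}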
common.MemoryGauge, value Value, locationRange LocationRange) ([]byte, error) { +func ByteArrayValueToByteSlice(interpreter *Interpreter, value Value, locationRange LocationRange) ([]byte, error) { array, ok := value.(*ArrayValue) if !ok { return nil, errors.NewDefaultUserError("value is not an array") @@ -38,9 +38,9 @@ func ByteArrayValueToByteSlice(memoryGauge common.MemoryGauge, value Value, loca result = make([]byte, 0, count) var err error - array.Iterate(memoryGauge, func(element Value) (resume bool) { + array.Iterate(interpreter, func(element Value) (resume bool) { var b byte - b, err = ByteValueToByte(memoryGauge, element, locationRange) + b, err = ByteValueToByte(interpreter, element, locationRange) if err != nil { return false } @@ -112,3 +112,32 @@ func ByteSliceToByteArrayValue(interpreter *Interpreter, buf []byte) *ArrayValue values..., ) } + +func ByteSliceToConstantSizedByteArrayValue(interpreter *Interpreter, buf []byte) *ArrayValue { + + common.UseMemory(interpreter, common.NewBytesMemoryUsage(len(buf))) + + var values []Value + + count := len(buf) + if count > 0 { + values = make([]Value, count) + for i, b := range buf { + values[i] = UInt8Value(b) + } + } + + constantSizedByteArrayStaticType := NewConstantSizedStaticType( + interpreter, + PrimitiveStaticTypeUInt8, + int64(len(buf)), + ) + + return NewArrayValue( + interpreter, + EmptyLocationRange, + constantSizedByteArrayStaticType, + common.ZeroAddress, + values..., + ) +} diff --git a/runtime/interpreter/conversion_test.go b/runtime/interpreter/conversion_test.go index e98d9623cf..670e39852e 100644 --- a/runtime/interpreter/conversion_test.go +++ b/runtime/interpreter/conversion_test.go @@ -153,3 +153,56 @@ func TestByteValueToByte(t *testing.T) { } }) } + +func TestByteSliceToArrayValue(t *testing.T) { + t.Parallel() + + t.Run("variable sized", func(t *testing.T) { + b := []byte{0, 1, 2} + + inter := newTestInterpreter(t) + + expectedType := VariableSizedStaticType{ + Type: PrimitiveStaticTypeUInt8, + } + + expected := NewArrayValue( + inter, + EmptyLocationRange, + expectedType, + common.ZeroAddress, + NewUnmeteredUInt8Value(0), + NewUnmeteredUInt8Value(1), + NewUnmeteredUInt8Value(2), + ) + + result := ByteSliceToByteArrayValue(inter, b) + require.Equal(t, expectedType, result.Type) + require.True(t, result.Equal(inter, EmptyLocationRange, expected)) + }) + + t.Run("const sized", func(t *testing.T) { + b := []byte{0, 1, 2} + + inter := newTestInterpreter(t) + + expectedType := ConstantSizedStaticType{ + Size: int64(len(b)), + Type: PrimitiveStaticTypeUInt8, + } + + expected := NewArrayValue( + inter, + EmptyLocationRange, + expectedType, + common.ZeroAddress, + NewUnmeteredUInt8Value(0), + NewUnmeteredUInt8Value(1), + NewUnmeteredUInt8Value(2), + ) + + result := ByteSliceToConstantSizedByteArrayValue(inter, b) + require.Equal(t, expectedType, result.Type) + require.True(t, result.Equal(inter, EmptyLocationRange, expected)) + }) +} diff --git a/runtime/interpreter/decode.go b/runtime/interpreter/decode.go index 29fb25c8c0..4c132de63a 100644 --- a/runtime/interpreter/decode.go +++ b/runtime/interpreter/decode.go @@ -291,6 +291,9 @@ func (d StorableDecoder) decodeStorable() (atree.Storable, error) { case CBORTagWord128Value: storable, err = d.decodeWord128() + case CBORTagWord256Value: + storable, err = d.decodeWord256() + // Fix* case CBORTagFix64Value: @@ -759,6 +762,28 @@ func (d StorableDecoder) decodeWord128() (Word128Value, error) { return NewUnmeteredWord128ValueFromBigInt(bigInt), nil } +func (d 
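// Usage sketch for the new constant-sized variant (the call site and helper name
// are hypothetical; types are as in the test above): unlike
// ByteSliceToByteArrayValue, the resulting array's static type is [UInt8; len(buf)]
// rather than [UInt8], which is useful for fixed-length values such as digests.
func digestAsConstantSizedArray(inter *Interpreter, digest []byte) *ArrayValue {
	return ByteSliceToConstantSizedByteArrayValue(inter, digest)
}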
StorableDecoder) decodeWord256() (Word256Value, error) { + bigInt, err := d.decodeBigInt() + if err != nil { + if e, ok := err.(*cbor.WrongTypeError); ok { + return Word256Value{}, errors.NewUnexpectedError("invalid Word256 encoding: %s", e.ActualType.String()) + } + return Word256Value{}, err + } + + if bigInt.Sign() < 0 { + return Word256Value{}, errors.NewUnexpectedError("invalid Word256: got %s, expected positive", bigInt) + } + + max := sema.Word256TypeMaxIntBig + if bigInt.Cmp(max) > 0 { + return Word256Value{}, errors.NewUnexpectedError("invalid Word256: got %s, expected max %s", bigInt, max) + } + + // NOTE: already metered by `decodeBigInt` + return NewUnmeteredWord256ValueFromBigInt(bigInt), nil +} + func (d StorableDecoder) decodeFix64() (Fix64Value, error) { value, err := decodeInt64(d) if err != nil { diff --git a/runtime/interpreter/encode.go b/runtime/interpreter/encode.go index 87b313db0a..7b8d71c359 100644 --- a/runtime/interpreter/encode.go +++ b/runtime/interpreter/encode.go @@ -159,7 +159,7 @@ const ( CBORTagWord32Value CBORTagWord64Value CBORTagWord128Value - _ // future: Word256 + CBORTagWord256Value _ // Fix* @@ -633,6 +633,23 @@ func (v Word128Value) Encode(e *atree.Encoder) error { return e.CBOR.EncodeBigInt(v.BigInt) } +// Encode encodes Word256Value as +// +// cbor.Tag{ +// Number: CBORTagWord256Value, +// Content: *big.Int(v.BigInt), +// } +func (v Word256Value) Encode(e *atree.Encoder) error { + err := e.CBOR.EncodeRawBytes([]byte{ + // tag number + 0xd8, CBORTagWord256Value, + }) + if err != nil { + return err + } + return e.CBOR.EncodeBigInt(v.BigInt) +} + // Encode encodes Fix64Value as // // cbor.Tag{ diff --git a/runtime/interpreter/encoding_test.go b/runtime/interpreter/encoding_test.go index 0f745c724b..79e30e9f43 100644 --- a/runtime/interpreter/encoding_test.go +++ b/runtime/interpreter/encoding_test.go @@ -2615,6 +2615,134 @@ func TestEncodeDecodeWord128Value(t *testing.T) { }) } +func TestEncodeDecodeWord256Value(t *testing.T) { + + t.Parallel() + + t.Run("zero", func(t *testing.T) { + t.Parallel() + + testEncodeDecode(t, + encodeDecodeTest{ + value: NewUnmeteredWord256ValueFromUint64(0), + encoded: []byte{ + 0xd8, CBORTagWord256Value, + // positive bignum + 0xc2, + // byte string, length 0 + 0x40, + }, + }, + ) + }) + + t.Run("positive", func(t *testing.T) { + t.Parallel() + + testEncodeDecode(t, + encodeDecodeTest{ + value: NewUnmeteredWord256ValueFromUint64(42), + encoded: []byte{ + 0xd8, CBORTagWord256Value, + // positive bignum + 0xc2, + // byte string, length 1 + 0x41, + 0x2a, + }, + }, + ) + }) + + t.Run("max", func(t *testing.T) { + t.Parallel() + + testEncodeDecode(t, + encodeDecodeTest{ + value: NewUnmeteredWord256ValueFromBigInt(sema.Word256TypeMaxIntBig), + encoded: []byte{ + 0xd8, CBORTagWord256Value, + // positive bignum + 0xc2, + // byte string, length 32 + 0x58, 0x20, + 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, + 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, + 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, + 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, + }, + }, + ) + }) + + t.Run("negative", func(t *testing.T) { + t.Parallel() + + testEncodeDecode(t, + encodeDecodeTest{ + encoded: []byte{ + 0xd8, CBORTagWord256Value, + // negative bignum + 0xc3, + // byte string, length 1 + 0x41, + 0x2a, + }, + invalid: true, + }, + ) + }) + + t.Run(">max", func(t *testing.T) { + t.Parallel() + + testEncodeDecode(t, + encodeDecodeTest{ + encoded: []byte{ + 0xd8, CBORTagWord256Value, + // positive bignum + 0xc2, + // byte string, length 65 
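// Sketch of the encoding asserted by the tests above, using only the standard
// library: a Word256 is a CBOR tag (0xd8, CBORTagWord256Value) wrapping a positive
// bignum (0xc2) whose payload is the minimal big-endian encoding of the integer, so
// 42 encodes as the one-byte payload 0x2a and zero as an empty byte string.
func word256BignumPayload(v uint64) []byte {
	return new(big.Int).SetUint64(v).Bytes() // empty slice for 0, {0x2a} for 42
}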
+ 0x58, 0x41, + 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, + 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, + 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, + 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, + 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, + 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, + 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, + 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, + 0xff, + }, + invalid: true, + }, + ) + }) + + t.Run("RFC", func(t *testing.T) { + + t.Parallel() + + rfcValue, ok := new(big.Int).SetString("18446744073709551616", 10) + require.True(t, ok) + + testEncodeDecode(t, + encodeDecodeTest{ + value: NewUnmeteredWord256ValueFromBigInt(rfcValue), + encoded: []byte{ + // tag + 0xd8, CBORTagWord256Value, + // positive bignum + 0xc2, + // byte string, length 9 + 0x49, + 0x01, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, + }, + }, + ) + }) +} + func TestEncodeDecodeSomeValue(t *testing.T) { t.Parallel() diff --git a/runtime/interpreter/errors.go b/runtime/interpreter/errors.go index ef6c86aed8..422a8b93cf 100644 --- a/runtime/interpreter/errors.go +++ b/runtime/interpreter/errors.go @@ -868,6 +868,19 @@ func (StorageMutatedDuringIterationError) Error() string { return "storage iteration continued after modifying storage" } +// ContainerMutatedDuringIterationError +type ContainerMutatedDuringIterationError struct { + LocationRange +} + +var _ errors.UserError = ContainerMutatedDuringIterationError{} + +func (ContainerMutatedDuringIterationError) IsUserError() {} + +func (ContainerMutatedDuringIterationError) Error() string { + return "resource container modified during iteration" +} + // InvalidHexByteError type InvalidHexByteError struct { LocationRange diff --git a/runtime/interpreter/hashablevalue.go b/runtime/interpreter/hashablevalue.go index 76a2de96fc..05983e2b8f 100644 --- a/runtime/interpreter/hashablevalue.go +++ b/runtime/interpreter/hashablevalue.go @@ -91,7 +91,7 @@ const ( HashInputTypeWord32 HashInputTypeWord64 HashInputTypeWord128 - _ // future: Word256 + HashInputTypeWord256 _ // Fix* diff --git a/runtime/interpreter/interpreter.go b/runtime/interpreter/interpreter.go index a7e1f870d8..c9da8ffb25 100644 --- a/runtime/interpreter/interpreter.go +++ b/runtime/interpreter/interpreter.go @@ -179,6 +179,9 @@ type PublicAccountHandlerFunc func( // UUIDHandlerFunc is a function that handles the generation of UUIDs. type UUIDHandlerFunc func() (uint64, error) +// CompositeTypeHandlerFunc is a function that loads composite types. +type CompositeTypeHandlerFunc func(location common.Location, typeID common.TypeID) *sema.CompositeType + // CompositeTypeCode contains the "prepared" / "callable" "code" // for the functions and the destructor of a composite // (contract, struct, resource, event). @@ -256,13 +259,13 @@ var _ ast.ExpressionVisitor[Value] = &Interpreter{} // BaseActivation is the activation which contains all base declarations. // It is reused across all interpreters. 
-var BaseActivation = func() *VariableActivation { - // No need to meter since this is only created once - activation := activations.NewActivation[*Variable](nil, nil) +var BaseActivation *VariableActivation - defineBaseFunctions(activation) - return activation -}() +func init() { + // No need to meter since this is only created once + BaseActivation = activations.NewActivation[*Variable](nil, nil) + defineBaseFunctions(BaseActivation) +} func NewInterpreter( program *Program, @@ -1639,7 +1642,7 @@ func (interpreter *Interpreter) functionWrappers( name := functionDeclaration.Identifier.Identifier functionWrapper := interpreter.functionConditionsWrapper( functionDeclaration, - functionType.ReturnTypeAnnotation.Type, + functionType, lexicalScope, ) if functionWrapper == nil { @@ -1973,6 +1976,11 @@ func (interpreter *Interpreter) convert(value Value, valueType, targetType sema. return ConvertWord128(interpreter, value, locationRange) } + case sema.Word256Type: + if !valueType.Equal(unwrappedTargetType) { + return ConvertWord256(interpreter, value, locationRange) + } + // Fix* case sema.Fix64Type: @@ -2229,7 +2237,11 @@ func (interpreter *Interpreter) declareInterface( interfaceType := interpreter.Program.Elaboration.InterfaceDeclarationType(declaration) typeID := interfaceType.ID() - initializerFunctionWrapper := interpreter.initializerFunctionWrapper(declaration.Members, lexicalScope) + initializerFunctionWrapper := interpreter.initializerFunctionWrapper( + declaration.Members, + interfaceType.InitializerParameters, + lexicalScope, + ) destructorFunctionWrapper := interpreter.destructorFunctionWrapper(declaration.Members, lexicalScope) functionWrappers := interpreter.functionWrappers(declaration.Members, lexicalScope) defaultFunctions := interpreter.defaultFunctions(declaration.Members, lexicalScope) @@ -2265,7 +2277,11 @@ func (interpreter *Interpreter) declareTypeRequirement( compositeType := interpreter.Program.Elaboration.CompositeDeclarationType(declaration) typeID := compositeType.ID() - initializerFunctionWrapper := interpreter.initializerFunctionWrapper(declaration.Members, lexicalScope) + initializerFunctionWrapper := interpreter.initializerFunctionWrapper( + declaration.Members, + compositeType.ConstructorParameters, + lexicalScope, + ) destructorFunctionWrapper := interpreter.destructorFunctionWrapper(declaration.Members, lexicalScope) functionWrappers := interpreter.functionWrappers(declaration.Members, lexicalScope) defaultFunctions := interpreter.defaultFunctions(declaration.Members, lexicalScope) @@ -2280,6 +2296,7 @@ func (interpreter *Interpreter) declareTypeRequirement( func (interpreter *Interpreter) initializerFunctionWrapper( members *ast.Members, + parameters []sema.Parameter, lexicalScope *VariableActivation, ) FunctionWrapper { @@ -2297,11 +2314,18 @@ func (interpreter *Interpreter) initializerFunctionWrapper( return interpreter.functionConditionsWrapper( firstInitializer.FunctionDeclaration, - sema.VoidType, + &sema.FunctionType{ + Parameters: parameters, + ReturnTypeAnnotation: sema.VoidTypeAnnotation, + }, lexicalScope, ) } +var voidFunctionType = &sema.FunctionType{ + ReturnTypeAnnotation: sema.VoidTypeAnnotation, +} + func (interpreter *Interpreter) destructorFunctionWrapper( members *ast.Members, lexicalScope *VariableActivation, @@ -2314,14 +2338,14 @@ func (interpreter *Interpreter) destructorFunctionWrapper( return interpreter.functionConditionsWrapper( destructor.FunctionDeclaration, - sema.VoidType, + voidFunctionType, lexicalScope, ) } func (interpreter 
*Interpreter) functionConditionsWrapper( declaration *ast.FunctionDeclaration, - returnType sema.Type, + functionType *sema.FunctionType, lexicalScope *VariableActivation, ) FunctionWrapper { @@ -2347,15 +2371,10 @@ func (interpreter *Interpreter) functionConditionsWrapper( } return func(inner FunctionValue) FunctionValue { - // Construct a raw HostFunctionValue without a type, - // instead of using NewHostFunctionValue, which requires a type. - // - // This host function value is an internally created and used function, - // and can never be passed around as a value. - // Hence, the type is not required. - - return &HostFunctionValue{ - Function: func(invocation Invocation) Value { + return NewHostFunctionValue( + interpreter, + functionType, + func(invocation Invocation) Value { // Start a new activation record. // Lexical scope: use the function declaration's activation record, // not the current one (which would be dynamic scope) @@ -2449,10 +2468,10 @@ func (interpreter *Interpreter) functionConditionsWrapper( preConditions, body, rewrittenPostConditions, - returnType, + functionType.ReturnTypeAnnotation.Type, ) }, - } + ) } } @@ -2747,6 +2766,12 @@ var fromStringFunctionValues = func() map[string]fromStringFunctionValue { } return })), + newFromStringFunction(sema.Word256Type, bigIntValueParser(func(b *big.Int) (v Value, ok bool) { + if ok = inRange(b, sema.Word256TypeMinIntBig, sema.Word256TypeMaxIntBig); ok { + v = NewUnmeteredWord256ValueFromBigInt(b) + } + return + })), // fixed-points newFromStringFunction(sema.Fix64Type, func(inter *Interpreter, input string) OptionalValue { @@ -2970,6 +2995,11 @@ var fromBigEndianBytesFunctionValues = func() map[string]fromBigEndianBytesFunct return BigEndianBytesToUnsignedBigInt(b) }) }), + newFromBigEndianBytesFunction(sema.Word256Type, 32, func(i *Interpreter, b []byte) Value { + return NewWord256ValueFromBigInt(i, func() *big.Int { + return BigEndianBytesToUnsignedBigInt(b) + }) + }), // fixed-points newFromBigEndianBytesFunction(sema.Fix64Type, 8, func(i *Interpreter, b []byte) Value { @@ -3179,6 +3209,15 @@ var ConverterDeclarations = []ValueConverterDeclaration{ min: NewUnmeteredWord128ValueFromUint64(0), max: NewUnmeteredWord128ValueFromBigInt(sema.Word128TypeMaxIntBig), }, + { + name: sema.Word256TypeName, + functionType: sema.NumberConversionFunctionType(sema.Word256Type), + convert: func(interpreter *Interpreter, value Value, locationRange LocationRange) Value { + return ConvertWord256(interpreter, value, locationRange) + }, + min: NewUnmeteredWord256ValueFromUint64(0), + max: NewUnmeteredWord256ValueFromBigInt(sema.Word256TypeMaxIntBig), + }, { name: sema.Fix64TypeName, functionType: sema.NumberConversionFunctionType(sema.Fix64Type), @@ -4317,6 +4356,50 @@ func (interpreter *Interpreter) authAccountBorrowFunction(addressValue AddressVa ) } +func (interpreter *Interpreter) authAccountCheckFunction(addressValue AddressValue) *HostFunctionValue { + + // Converted addresses can be cached and don't have to be recomputed on each function invocation + address := addressValue.ToAddress() + + return NewHostFunctionValue( + interpreter, + sema.AuthAccountTypeCheckFunctionType, + func(invocation Invocation) Value { + interpreter := invocation.Interpreter + + path, ok := invocation.Arguments[0].(PathValue) + if !ok { + panic(errors.NewUnreachableError()) + } + + domain := path.Domain.Identifier() + identifier := path.Identifier + + storageMapKey := StringStorageMapKey(identifier) + + value := interpreter.ReadStored(address, domain, 
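// Sketch of the input accepted by the Word256.fromBigEndianBytes registration above
// (the 32 presumably being the maximum payload length; the helper name is
// hypothetical): the bytes are read as an unsigned big-endian integer, so a
// nine-byte payload of {0x01, 0, 0, 0, 0, 0, 0, 0, 0} yields 2^64, the same
// RFC 8949 bignum example used in the encoding tests.
func word256FromBigEndian(b []byte) *big.Int {
	return new(big.Int).SetBytes(b)
}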
storageMapKey) + + if value == nil { + return FalseValue + } + + // If there is value stored for the given path, + // check that it satisfies the type given as the type argument. + + typeParameterPair := invocation.TypeParameterTypes.Oldest() + if typeParameterPair == nil { + panic(errors.NewUnreachableError()) + } + + ty := typeParameterPair.Value + + valueStaticType := value.StaticType(interpreter) + + return AsBoolValue(interpreter.IsSubTypeOfSemaType(valueStaticType, ty)) + }, + ) +} + func (interpreter *Interpreter) authAccountLinkFunction(addressValue AddressValue) *HostFunctionValue { // Converted addresses can be cached and don't have to be recomputed on each function invocation @@ -4977,40 +5060,40 @@ func (interpreter *Interpreter) GetCompositeType( qualifiedIdentifier string, typeID common.TypeID, ) (*sema.CompositeType, error) { + var compositeType *sema.CompositeType if location == nil { - return interpreter.getNativeCompositeType(qualifiedIdentifier) - } - - return interpreter.getUserCompositeType(location, typeID) -} - -func (interpreter *Interpreter) getUserCompositeType(location common.Location, typeID common.TypeID) (*sema.CompositeType, error) { - elaboration := interpreter.getElaboration(location) - if elaboration == nil { - return nil, TypeLoadingError{ - TypeID: typeID, + compositeType = sema.NativeCompositeTypes[qualifiedIdentifier] + if compositeType != nil { + return compositeType, nil + } + } else { + compositeType = interpreter.getUserCompositeType(location, typeID) + if compositeType != nil { + return compositeType, nil } } - ty := elaboration.CompositeType(typeID) - if ty == nil { - return nil, TypeLoadingError{ - TypeID: typeID, + config := interpreter.SharedState.Config + compositeTypeHandler := config.CompositeTypeHandler + if compositeTypeHandler != nil { + compositeType = compositeTypeHandler(location, typeID) + if compositeType != nil { + return compositeType, nil } } - return ty, nil + return nil, TypeLoadingError{ + TypeID: typeID, + } } -func (interpreter *Interpreter) getNativeCompositeType(qualifiedIdentifier string) (*sema.CompositeType, error) { - ty := sema.NativeCompositeTypes[qualifiedIdentifier] - if ty == nil { - return nil, TypeLoadingError{ - TypeID: common.TypeID(qualifiedIdentifier), - } +func (interpreter *Interpreter) getUserCompositeType(location common.Location, typeID common.TypeID) *sema.CompositeType { + elaboration := interpreter.getElaboration(location) + if elaboration == nil { + return nil } - return ty, nil + return elaboration.CompositeType(typeID) } func (interpreter *Interpreter) getInterfaceType(location common.Location, qualifiedIdentifier string) (*sema.InterfaceType, error) { @@ -5720,3 +5803,43 @@ func (interpreter *Interpreter) idCapabilityCheckFunction( }, ) } + +func (interpreter *Interpreter) validateMutation(storageID atree.StorageID, locationRange LocationRange) { + _, present := interpreter.SharedState.containerValueIteration[storageID] + if !present { + return + } + panic(ContainerMutatedDuringIterationError{ + LocationRange: locationRange, + }) +} + +func (interpreter *Interpreter) withMutationPrevention(storageID atree.StorageID, f func()) { + oldIteration, present := interpreter.SharedState.containerValueIteration[storageID] + interpreter.SharedState.containerValueIteration[storageID] = struct{}{} + + f() + + if !present { + delete(interpreter.SharedState.containerValueIteration, storageID) + } else { + interpreter.SharedState.containerValueIteration[storageID] = oldIteration + } +} + +func (interpreter 
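// Stand-alone sketch (plain maps and strings, not Cadence APIs) of the resolution
// order GetCompositeType now follows: native types for a nil location, the
// location's elaboration otherwise, then the configured CompositeTypeHandler, and
// finally a TypeLoadingError.
func resolveCompositeTypeSketch(
	native, user map[string]string,
	handler func(string) string,
	haveLocation bool,
	id string,
) (string, error) {
	if !haveLocation {
		if t, ok := native[id]; ok {
			return t, nil
		}
	} else if t, ok := user[id]; ok {
		return t, nil
	}
	if handler != nil {
		if t := handler(id); t != "" {
			return t, nil
		}
	}
	return "", fmt.Errorf("type loading error: %s", id)
}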
*Interpreter) withResourceDestruction( + storageID atree.StorageID, + locationRange LocationRange, + f func(), +) { + _, exists := interpreter.SharedState.destroyedResources[storageID] + if exists { + panic(DestroyedResourceError{ + LocationRange: locationRange, + }) + } + + interpreter.SharedState.destroyedResources[storageID] = struct{}{} + + f() +} diff --git a/runtime/interpreter/interpreter_expression.go b/runtime/interpreter/interpreter_expression.go index 192b24dd85..2abd8fe8a4 100644 --- a/runtime/interpreter/interpreter_expression.go +++ b/runtime/interpreter/interpreter_expression.go @@ -685,6 +685,9 @@ func (interpreter *Interpreter) NewIntegerValueFromBigInt(value *big.Int, intege case sema.Word128Type: // BigInt value is already metered at parser. return NewUnmeteredWord128ValueFromBigInt(value) + case sema.Word256Type: + // BigInt value is already metered at parser. + return NewUnmeteredWord256ValueFromBigInt(value) default: panic(errors.NewUnreachableError()) @@ -930,6 +933,15 @@ func (interpreter *Interpreter) visitInvocationExpressionWithImplicitArgument(in panic(errors.NewUnreachableError()) } + // Bound functions + if boundFunction, ok := function.(BoundFunctionValue); ok && boundFunction.Self != nil { + self := *boundFunction.Self + if resource, ok := self.(ReferenceTrackedResourceKindedValue); ok { + storageID := resource.StorageID() + interpreter.trackReferencedResourceKindedValue(storageID, resource) + } + } + // NOTE: evaluate all argument expressions in call-site scope, not in function body var argumentExpressions []ast.Expression diff --git a/runtime/interpreter/primitivestatictype.go b/runtime/interpreter/primitivestatictype.go index 28bcc25177..0cc1584f10 100644 --- a/runtime/interpreter/primitivestatictype.go +++ b/runtime/interpreter/primitivestatictype.go @@ -152,7 +152,7 @@ const ( PrimitiveStaticTypeWord32 PrimitiveStaticTypeWord64 PrimitiveStaticTypeWord128 - _ // future: Word256 + PrimitiveStaticTypeWord256 _ // Fix* @@ -248,6 +248,7 @@ func (t PrimitiveStaticType) elementSize() uint { PrimitiveStaticTypeInt128, PrimitiveStaticTypeInt256, PrimitiveStaticTypeWord128, + PrimitiveStaticTypeWord256, PrimitiveStaticTypeInteger, PrimitiveStaticTypeSignedInteger, PrimitiveStaticTypeNumber, @@ -397,6 +398,8 @@ func (i PrimitiveStaticType) SemaType() sema.Type { return sema.Word64Type case PrimitiveStaticTypeWord128: return sema.Word128Type + case PrimitiveStaticTypeWord256: + return sema.Word256Type // Fix* case PrimitiveStaticTypeFix64: @@ -529,6 +532,8 @@ func ConvertSemaToPrimitiveStaticType( typ = PrimitiveStaticTypeWord64 case sema.Word128Type: typ = PrimitiveStaticTypeWord128 + case sema.Word256Type: + typ = PrimitiveStaticTypeWord256 // Fix* case sema.Fix64Type: diff --git a/runtime/interpreter/primitivestatictype_string.go b/runtime/interpreter/primitivestatictype_string.go index 1a927bcd0b..639e2cff9d 100644 --- a/runtime/interpreter/primitivestatictype_string.go +++ b/runtime/interpreter/primitivestatictype_string.go @@ -45,6 +45,7 @@ func _() { _ = x[PrimitiveStaticTypeWord32-55] _ = x[PrimitiveStaticTypeWord64-56] _ = x[PrimitiveStaticTypeWord128-57] + _ = x[PrimitiveStaticTypeWord256-58] _ = x[PrimitiveStaticTypeFix64-64] _ = x[PrimitiveStaticTypeUFix64-72] _ = x[PrimitiveStaticTypePath-76] @@ -71,7 +72,7 @@ func _() { _ = x[PrimitiveStaticType_Count-105] } -const _PrimitiveStaticType_name = 
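// Self-contained sketch (int keys instead of atree.StorageID) of the guards
// introduced above: withMutationPrevention registers a container as "being
// iterated" so that any mutating entry point calling validateMutation panics with
// ContainerMutatedDuringIterationError, and withResourceDestruction applies the
// same bookkeeping idea to reject destroying a resource twice.
type iterationGuard struct {
	iterating map[int]struct{}
}

func (g *iterationGuard) withMutationPrevention(id int, f func()) {
	_, alreadyIterating := g.iterating[id]
	g.iterating[id] = struct{}{}
	f()
	if !alreadyIterating {
		delete(g.iterating, id) // only the outermost iteration clears the flag
	}
}

func (g *iterationGuard) validateMutation(id int) {
	if _, ok := g.iterating[id]; ok {
		panic("container mutated during iteration")
	}
}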
"UnknownVoidAnyNeverAnyStructAnyResourceBoolAddressStringCharacterMetaTypeBlockNumberSignedNumberIntegerSignedIntegerFixedPointSignedFixedPointIntInt8Int16Int32Int64Int128Int256UIntUInt8UInt16UInt32UInt64UInt128UInt256Word8Word16Word32Word64Word128Fix64UFix64PathCapabilityStoragePathCapabilityPathPublicPathPrivatePathAuthAccountPublicAccountDeployedContractAuthAccountContractsPublicAccountContractsAuthAccountKeysPublicAccountKeysAccountKeyAuthAccountInboxStorageCapabilityControllerAccountCapabilityControllerAuthAccountStorageCapabilitiesAuthAccountAccountCapabilitiesAuthAccountCapabilitiesPublicAccountCapabilities_Count" +const _PrimitiveStaticType_name = "UnknownVoidAnyNeverAnyStructAnyResourceBoolAddressStringCharacterMetaTypeBlockNumberSignedNumberIntegerSignedIntegerFixedPointSignedFixedPointIntInt8Int16Int32Int64Int128Int256UIntUInt8UInt16UInt32UInt64UInt128UInt256Word8Word16Word32Word64Word128Word256Fix64UFix64PathCapabilityStoragePathCapabilityPathPublicPathPrivatePathAuthAccountPublicAccountDeployedContractAuthAccountContractsPublicAccountContractsAuthAccountKeysPublicAccountKeysAccountKeyAuthAccountInboxStorageCapabilityControllerAccountCapabilityControllerAuthAccountStorageCapabilitiesAuthAccountAccountCapabilitiesAuthAccountCapabilitiesPublicAccountCapabilities_Count" var _PrimitiveStaticType_map = map[PrimitiveStaticType]string{ 0: _PrimitiveStaticType_name[0:7], @@ -111,30 +112,31 @@ var _PrimitiveStaticType_map = map[PrimitiveStaticType]string{ 55: _PrimitiveStaticType_name[228:234], 56: _PrimitiveStaticType_name[234:240], 57: _PrimitiveStaticType_name[240:247], - 64: _PrimitiveStaticType_name[247:252], - 72: _PrimitiveStaticType_name[252:258], - 76: _PrimitiveStaticType_name[258:262], - 77: _PrimitiveStaticType_name[262:272], - 78: _PrimitiveStaticType_name[272:283], - 79: _PrimitiveStaticType_name[283:297], - 80: _PrimitiveStaticType_name[297:307], - 81: _PrimitiveStaticType_name[307:318], - 90: _PrimitiveStaticType_name[318:329], - 91: _PrimitiveStaticType_name[329:342], - 92: _PrimitiveStaticType_name[342:358], - 93: _PrimitiveStaticType_name[358:378], - 94: _PrimitiveStaticType_name[378:400], - 95: _PrimitiveStaticType_name[400:415], - 96: _PrimitiveStaticType_name[415:432], - 97: _PrimitiveStaticType_name[432:442], - 98: _PrimitiveStaticType_name[442:458], - 99: _PrimitiveStaticType_name[458:485], - 100: _PrimitiveStaticType_name[485:512], - 101: _PrimitiveStaticType_name[512:542], - 102: _PrimitiveStaticType_name[542:572], - 103: _PrimitiveStaticType_name[572:595], - 104: _PrimitiveStaticType_name[595:620], - 105: _PrimitiveStaticType_name[620:626], + 58: _PrimitiveStaticType_name[247:254], + 64: _PrimitiveStaticType_name[254:259], + 72: _PrimitiveStaticType_name[259:265], + 76: _PrimitiveStaticType_name[265:269], + 77: _PrimitiveStaticType_name[269:279], + 78: _PrimitiveStaticType_name[279:290], + 79: _PrimitiveStaticType_name[290:304], + 80: _PrimitiveStaticType_name[304:314], + 81: _PrimitiveStaticType_name[314:325], + 90: _PrimitiveStaticType_name[325:336], + 91: _PrimitiveStaticType_name[336:349], + 92: _PrimitiveStaticType_name[349:365], + 93: _PrimitiveStaticType_name[365:385], + 94: _PrimitiveStaticType_name[385:407], + 95: _PrimitiveStaticType_name[407:422], + 96: _PrimitiveStaticType_name[422:439], + 97: _PrimitiveStaticType_name[439:449], + 98: _PrimitiveStaticType_name[449:465], + 99: _PrimitiveStaticType_name[465:492], + 100: _PrimitiveStaticType_name[492:519], + 101: _PrimitiveStaticType_name[519:549], + 102: _PrimitiveStaticType_name[549:579], + 103: 
_PrimitiveStaticType_name[579:602], + 104: _PrimitiveStaticType_name[602:627], + 105: _PrimitiveStaticType_name[627:633], } func (i PrimitiveStaticType) String() string { diff --git a/runtime/interpreter/sharedstate.go b/runtime/interpreter/sharedstate.go index 193a78b25e..c7529d615b 100644 --- a/runtime/interpreter/sharedstate.go +++ b/runtime/interpreter/sharedstate.go @@ -43,6 +43,8 @@ type SharedState struct { storageMutatedDuringIteration bool CapabilityControllerIterations map[AddressPath]int MutationDuringCapabilityControllerIteration bool + containerValueIteration map[atree.StorageID]struct{} + destroyedResources map[atree.StorageID]struct{} currentEntitlementMappedValue Authorization } @@ -61,6 +63,8 @@ func NewSharedState(config *Config) *SharedState { referencedResourceKindedValues: map[atree.StorageID]map[ReferenceTrackedResourceKindedValue]struct{}{}, resourceVariables: map[ResourceKindedValue]*Variable{}, CapabilityControllerIterations: map[AddressPath]int{}, + containerValueIteration: map[atree.StorageID]struct{}{}, + destroyedResources: map[atree.StorageID]struct{}{}, } } diff --git a/runtime/interpreter/statictype_test.go b/runtime/interpreter/statictype_test.go index 2eca7ead0a..560800e1e3 100644 --- a/runtime/interpreter/statictype_test.go +++ b/runtime/interpreter/statictype_test.go @@ -1213,6 +1213,11 @@ func TestStaticTypeConversion(t *testing.T) { semaType: sema.Word128Type, staticType: PrimitiveStaticTypeWord128, }, + { + name: "Word256", + semaType: sema.Word256Type, + staticType: PrimitiveStaticTypeWord256, + }, { name: "Fix64", diff --git a/runtime/interpreter/value.go b/runtime/interpreter/value.go index c50eb8bbb3..340d5f9eb3 100644 --- a/runtime/interpreter/value.go +++ b/runtime/interpreter/value.go @@ -1698,17 +1698,25 @@ func (v *ArrayValue) Accept(interpreter *Interpreter, visitor Visitor) { }) } -func (v *ArrayValue) Iterate(gauge common.MemoryGauge, f func(element Value) (resume bool)) { - err := v.array.Iterate(func(element atree.Value) (resume bool, err error) { - // atree.Array iteration provides low-level atree.Value, - // convert to high-level interpreter.Value +func (v *ArrayValue) Iterate(interpreter *Interpreter, f func(element Value) (resume bool)) { + iterate := func() { + err := v.array.Iterate(func(element atree.Value) (resume bool, err error) { + // atree.Array iteration provides low-level atree.Value, + // convert to high-level interpreter.Value - resume = f(MustConvertStoredValue(gauge, element)) + resume = f(MustConvertStoredValue(interpreter, element)) - return resume, nil - }) - if err != nil { - panic(errors.NewExternalError(err)) + return resume, nil + }) + if err != nil { + panic(errors.NewExternalError(err)) + } + } + + if v.IsResourceKinded(interpreter) { + interpreter.withMutationPrevention(v.StorageID(), iterate) + } else { + iterate() } } @@ -1762,8 +1770,6 @@ func (v *ArrayValue) Destroy(interpreter *Interpreter, locationRange LocationRan v.checkInvalidatedResourceUse(interpreter, locationRange) } - storageID := v.StorageID() - if config.TracingEnabled { startTime := time.Now() @@ -1779,9 +1785,17 @@ func (v *ArrayValue) Destroy(interpreter *Interpreter, locationRange LocationRan }() } - v.Walk(interpreter, func(element Value) { - maybeDestroy(interpreter, locationRange, element) - }) + storageID := v.StorageID() + + interpreter.withResourceDestruction( + storageID, + locationRange, + func() { + v.Walk(interpreter, func(element Value) { + maybeDestroy(interpreter, locationRange, element) + }) + }, + ) v.isDestroyed = true @@ 
-1935,6 +1949,8 @@ func (v *ArrayValue) SetKey(interpreter *Interpreter, locationRange LocationRang func (v *ArrayValue) Set(interpreter *Interpreter, locationRange LocationRange, index int, element Value) { + interpreter.validateMutation(v.StorageID(), locationRange) + // We only need to check the lower bound before converting from `int` (signed) to `uint64` (unsigned). // atree's Array.Set function will check the upper bound and report an atree.IndexOutOfBoundsError @@ -2007,6 +2023,8 @@ func (v *ArrayValue) MeteredString(memoryGauge common.MemoryGauge, seenReference func (v *ArrayValue) Append(interpreter *Interpreter, locationRange LocationRange, element Value) { + interpreter.validateMutation(v.StorageID(), locationRange) + // length increases by 1 dataSlabs, metaDataSlabs := common.AdditionalAtreeMemoryUsage( v.array.Count(), @@ -2053,6 +2071,8 @@ func (v *ArrayValue) InsertKey(interpreter *Interpreter, locationRange LocationR func (v *ArrayValue) Insert(interpreter *Interpreter, locationRange LocationRange, index int, element Value) { + interpreter.validateMutation(v.StorageID(), locationRange) + // We only need to check the lower bound before converting from `int` (signed) to `uint64` (unsigned). // atree's Array.Insert function will check the upper bound and report an atree.IndexOutOfBoundsError @@ -2106,6 +2126,8 @@ func (v *ArrayValue) RemoveKey(interpreter *Interpreter, locationRange LocationR func (v *ArrayValue) Remove(interpreter *Interpreter, locationRange LocationRange, index int) Value { + interpreter.validateMutation(v.StorageID(), locationRange) + // We only need to check the lower bound before converting from `int` (signed) to `uint64` (unsigned). // atree's Array.Remove function will check the upper bound and report an atree.IndexOutOfBoundsError @@ -12963,7 +12985,7 @@ func NewWord128ValueFromUint64(memoryGauge common.MemoryGauge, value int64) Word var Word128MemoryUsage = common.NewBigIntMemoryUsage(16) func NewWord128ValueFromBigInt(memoryGauge common.MemoryGauge, bigIntConstructor func() *big.Int) Word128Value { - common.UseMemory(memoryGauge, Int128MemoryUsage) + common.UseMemory(memoryGauge, Word128MemoryUsage) value := bigIntConstructor() return NewUnmeteredWord128ValueFromBigInt(value) } @@ -13497,85 +13519,91 @@ func (Word128Value) ChildStorables() []atree.Storable { return nil } -// FixedPointValue is a fixed-point number value -type FixedPointValue interface { - NumberValue - IntegerPart() NumberValue - Scale() int -} - -// Fix64Value -type Fix64Value int64 +// Word256Value -const Fix64MaxValue = math.MaxInt64 +type Word256Value struct { + BigInt *big.Int +} -const fix64Size = int(unsafe.Sizeof(Fix64Value(0))) +func NewWord256ValueFromUint64(memoryGauge common.MemoryGauge, value int64) Word256Value { + return NewWord256ValueFromBigInt( + memoryGauge, + func() *big.Int { + return new(big.Int).SetInt64(value) + }, + ) +} -var fix64MemoryUsage = common.NewNumberMemoryUsage(fix64Size) +var Word256MemoryUsage = common.NewBigIntMemoryUsage(32) -func NewFix64ValueWithInteger(gauge common.MemoryGauge, constructor func() int64, locationRange LocationRange) Fix64Value { - common.UseMemory(gauge, fix64MemoryUsage) - return NewUnmeteredFix64ValueWithInteger(constructor(), locationRange) +func NewWord256ValueFromBigInt(memoryGauge common.MemoryGauge, bigIntConstructor func() *big.Int) Word256Value { + common.UseMemory(memoryGauge, Word256MemoryUsage) + value := bigIntConstructor() + return NewUnmeteredWord256ValueFromBigInt(value) } -func 
NewUnmeteredFix64ValueWithInteger(integer int64, locationRange LocationRange) Fix64Value { +func NewUnmeteredWord256ValueFromUint64(value uint64) Word256Value { + return NewUnmeteredWord256ValueFromBigInt(new(big.Int).SetUint64(value)) +} - if integer < sema.Fix64TypeMinInt { - panic(UnderflowError{LocationRange: locationRange}) +func NewUnmeteredWord256ValueFromBigInt(value *big.Int) Word256Value { + return Word256Value{ + BigInt: value, } +} - if integer > sema.Fix64TypeMaxInt { - panic(OverflowError{LocationRange: locationRange}) - } +var _ Value = Word256Value{} +var _ atree.Storable = Word256Value{} +var _ NumberValue = Word256Value{} +var _ IntegerValue = Word256Value{} +var _ EquatableValue = Word256Value{} +var _ ComparableValue = Word256Value{} +var _ HashableValue = Word256Value{} +var _ MemberAccessibleValue = Word256Value{} - return NewUnmeteredFix64Value(integer * sema.Fix64Factor) -} +func (Word256Value) isValue() {} -func NewFix64Value(gauge common.MemoryGauge, valueGetter func() int64) Fix64Value { - common.UseMemory(gauge, fix64MemoryUsage) - return NewUnmeteredFix64Value(valueGetter()) +func (v Word256Value) Accept(interpreter *Interpreter, visitor Visitor) { + visitor.VisitWord256Value(interpreter, v) } -func NewUnmeteredFix64Value(integer int64) Fix64Value { - return Fix64Value(integer) +func (Word256Value) Walk(_ *Interpreter, _ func(Value)) { + // NO-OP } -var _ Value = Fix64Value(0) -var _ atree.Storable = Fix64Value(0) -var _ NumberValue = Fix64Value(0) -var _ FixedPointValue = Fix64Value(0) -var _ EquatableValue = Fix64Value(0) -var _ ComparableValue = Fix64Value(0) -var _ HashableValue = Fix64Value(0) -var _ MemberAccessibleValue = Fix64Value(0) - -func (Fix64Value) isValue() {} +func (Word256Value) StaticType(interpreter *Interpreter) StaticType { + return NewPrimitiveStaticType(interpreter, PrimitiveStaticTypeWord256) +} -func (v Fix64Value) Accept(interpreter *Interpreter, visitor Visitor) { - visitor.VisitFix64Value(interpreter, v) +func (Word256Value) IsImportable(_ *Interpreter) bool { + return true } -func (Fix64Value) Walk(_ *Interpreter, _ func(Value)) { - // NO-OP +func (v Word256Value) ToInt(locationRange LocationRange) int { + if !v.BigInt.IsInt64() { + panic(OverflowError{LocationRange: locationRange}) + } + return int(v.BigInt.Int64()) } -func (Fix64Value) StaticType(interpreter *Interpreter) StaticType { - return NewPrimitiveStaticType(interpreter, PrimitiveStaticTypeFix64) +func (v Word256Value) ByteLength() int { + return common.BigIntByteLength(v.BigInt) } -func (Fix64Value) IsImportable(_ *Interpreter) bool { - return true +func (v Word256Value) ToBigInt(memoryGauge common.MemoryGauge) *big.Int { + common.UseMemory(memoryGauge, common.NewBigIntMemoryUsage(v.ByteLength())) + return new(big.Int).Set(v.BigInt) } -func (v Fix64Value) String() string { - return format.Fix64(int64(v)) +func (v Word256Value) String() string { + return format.BigInt(v.BigInt) } -func (v Fix64Value) RecursiveString(_ SeenReferences) string { +func (v Word256Value) RecursiveString(_ SeenReferences) string { return v.String() } -func (v Fix64Value) MeteredString(memoryGauge common.MemoryGauge, _ SeenReferences) string { +func (v Word256Value) MeteredString(memoryGauge common.MemoryGauge, _ SeenReferences) string { common.UseMemory( memoryGauge, common.NewRawStringMemoryUsage( @@ -13585,118 +13613,664 @@ func (v Fix64Value) MeteredString(memoryGauge common.MemoryGauge, _ SeenReferenc return v.String() } -func (v Fix64Value) ToInt(locationRange LocationRange) int { - return 
int(v / sema.Fix64Factor) +func (v Word256Value) Negate(*Interpreter, LocationRange) NumberValue { + panic(errors.NewUnreachableError()) } -func (v Fix64Value) Negate(interpreter *Interpreter, locationRange LocationRange) NumberValue { - // INT32-C - if v == math.MinInt64 { - panic(OverflowError{LocationRange: locationRange}) +func (v Word256Value) Plus(interpreter *Interpreter, other NumberValue, locationRange LocationRange) NumberValue { + o, ok := other.(Word256Value) + if !ok { + panic(InvalidOperandsError{ + Operation: ast.OperationPlus, + LeftType: v.StaticType(interpreter), + RightType: other.StaticType(interpreter), + }) } - valueGetter := func() int64 { - return int64(-v) - } + return NewWord256ValueFromBigInt( + interpreter, + func() *big.Int { + sum := new(big.Int) + sum.Add(v.BigInt, o.BigInt) + // Given that this value is backed by an arbitrary size integer, + // we can just add and wrap around in case of overflow. + // + // Note that since v and o are both in the range [0, 2**256 - 1), + // their sum will be in range [0, 2*(2**256 - 1)). + // Hence it is sufficient to subtract 2**256 to wrap around. + // + // If Go gains a native uint256 type and we switch this value + // to be based on it, then we need to follow INT30-C: + // + // if sum < v { + // ... + // } + // + if sum.Cmp(sema.Word256TypeMaxIntBig) > 0 { + sum.Sub(sum, sema.Word256TypeMaxIntPlusOneBig) + } + return sum + }, + ) +} - return NewFix64Value(interpreter, valueGetter) +func (v Word256Value) SaturatingPlus(interpreter *Interpreter, other NumberValue, locationRange LocationRange) NumberValue { + panic(errors.NewUnreachableError()) } -func (v Fix64Value) Plus(interpreter *Interpreter, other NumberValue, locationRange LocationRange) NumberValue { - o, ok := other.(Fix64Value) +func (v Word256Value) Minus(interpreter *Interpreter, other NumberValue, locationRange LocationRange) NumberValue { + o, ok := other.(Word256Value) if !ok { panic(InvalidOperandsError{ - Operation: ast.OperationPlus, + Operation: ast.OperationMinus, LeftType: v.StaticType(interpreter), RightType: other.StaticType(interpreter), }) } - valueGetter := func() int64 { - return safeAddInt64(int64(v), int64(o), locationRange) - } + return NewWord256ValueFromBigInt( + interpreter, + func() *big.Int { + diff := new(big.Int) + diff.Sub(v.BigInt, o.BigInt) + // Given that this value is backed by an arbitrary size integer, + // we can just subtract and wrap around in case of underflow. + // + // Note that since v and o are both in the range [0, 2**256 - 1), + // their difference will be in range [-(2**256 - 1), 2**256 - 1). + // Hence it is sufficient to add 2**256 to wrap around. + // + // If Go gains a native uint256 type and we switch this value + // to be based on it, then we need to follow INT30-C: + // + // if diff > v { + // ... 
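// Stdlib-only sketch of the wrap-around addition above: because both operands are
// below 2^256, a single subtraction of 2^256 (sema.Word256TypeMaxIntPlusOneBig) is
// enough to bring an overflowing sum back into range.
func word256WrappingAdd(a, b *big.Int) *big.Int {
	two256 := new(big.Int).Lsh(big.NewInt(1), 256) // 2^256
	max := new(big.Int).Sub(two256, big.NewInt(1)) // 2^256 - 1
	sum := new(big.Int).Add(a, b)
	if sum.Cmp(max) > 0 {
		sum.Sub(sum, two256)
	}
	return sum // word256WrappingAdd(max, 1) wraps to 0
}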
+ // } + // + if diff.Sign() < 0 { + diff.Add(diff, sema.Word256TypeMaxIntPlusOneBig) + } + return diff + }, + ) +} - return NewFix64Value(interpreter, valueGetter) +func (v Word256Value) SaturatingMinus(interpreter *Interpreter, other NumberValue, locationRange LocationRange) NumberValue { + panic(errors.NewUnreachableError()) } -func (v Fix64Value) SaturatingPlus(interpreter *Interpreter, other NumberValue, locationRange LocationRange) NumberValue { - o, ok := other.(Fix64Value) +func (v Word256Value) Mod(interpreter *Interpreter, other NumberValue, locationRange LocationRange) NumberValue { + o, ok := other.(Word256Value) if !ok { panic(InvalidOperandsError{ - FunctionName: sema.NumericTypeSaturatingAddFunctionName, - LeftType: v.StaticType(interpreter), - RightType: other.StaticType(interpreter), + Operation: ast.OperationMod, + LeftType: v.StaticType(interpreter), + RightType: other.StaticType(interpreter), }) } - valueGetter := func() int64 { - // INT32-C - if (o > 0) && (v > (math.MaxInt64 - o)) { - return math.MaxInt64 - } else if (o < 0) && (v < (math.MinInt64 - o)) { - return math.MinInt64 - } - return int64(v + o) - } - - return NewFix64Value(interpreter, valueGetter) + return NewWord256ValueFromBigInt( + interpreter, + func() *big.Int { + res := new(big.Int) + if o.BigInt.Cmp(res) == 0 { + panic(DivisionByZeroError{LocationRange: locationRange}) + } + return res.Rem(v.BigInt, o.BigInt) + }, + ) } -func (v Fix64Value) Minus(interpreter *Interpreter, other NumberValue, locationRange LocationRange) NumberValue { - o, ok := other.(Fix64Value) +func (v Word256Value) Mul(interpreter *Interpreter, other NumberValue, locationRange LocationRange) NumberValue { + o, ok := other.(Word256Value) if !ok { panic(InvalidOperandsError{ - Operation: ast.OperationMinus, + Operation: ast.OperationMul, LeftType: v.StaticType(interpreter), RightType: other.StaticType(interpreter), }) } - valueGetter := func() int64 { - // INT32-C - if (o > 0) && (v < (math.MinInt64 + o)) { - panic(OverflowError{LocationRange: locationRange}) - } else if (o < 0) && (v > (math.MaxInt64 + o)) { - panic(UnderflowError{LocationRange: locationRange}) - } - - return int64(v - o) - } + return NewWord256ValueFromBigInt( + interpreter, + func() *big.Int { + res := new(big.Int) + res.Mul(v.BigInt, o.BigInt) + if res.Cmp(sema.Word256TypeMaxIntBig) > 0 { + res.Mod(res, sema.Word256TypeMaxIntPlusOneBig) + } + return res + }, + ) +} - return NewFix64Value(interpreter, valueGetter) +func (v Word256Value) SaturatingMul(interpreter *Interpreter, other NumberValue, locationRange LocationRange) NumberValue { + panic(errors.NewUnreachableError()) } -func (v Fix64Value) SaturatingMinus(interpreter *Interpreter, other NumberValue, locationRange LocationRange) NumberValue { - o, ok := other.(Fix64Value) +func (v Word256Value) Div(interpreter *Interpreter, other NumberValue, locationRange LocationRange) NumberValue { + o, ok := other.(Word256Value) if !ok { panic(InvalidOperandsError{ - FunctionName: sema.NumericTypeSaturatingSubtractFunctionName, - LeftType: v.StaticType(interpreter), - RightType: other.StaticType(interpreter), + Operation: ast.OperationDiv, + LeftType: v.StaticType(interpreter), + RightType: other.StaticType(interpreter), }) } - valueGetter := func() int64 { - // INT32-C - if (o > 0) && (v < (math.MinInt64 + o)) { - return math.MinInt64 - } else if (o < 0) && (v > (math.MaxInt64 + o)) { - return math.MaxInt64 - } - return int64(v - o) - } + return NewWord256ValueFromBigInt( + interpreter, + func() *big.Int { + res := 
new(big.Int) + if o.BigInt.Cmp(res) == 0 { + panic(DivisionByZeroError{LocationRange: locationRange}) + } + return res.Div(v.BigInt, o.BigInt) + }, + ) - return NewFix64Value(interpreter, valueGetter) } -var minInt64Big = big.NewInt(math.MinInt64) -var maxInt64Big = big.NewInt(math.MaxInt64) +func (v Word256Value) SaturatingDiv(interpreter *Interpreter, other NumberValue, locationRange LocationRange) NumberValue { + panic(errors.NewUnreachableError()) +} -func (v Fix64Value) Mul(interpreter *Interpreter, other NumberValue, locationRange LocationRange) NumberValue { - o, ok := other.(Fix64Value) +func (v Word256Value) Less(interpreter *Interpreter, other ComparableValue, locationRange LocationRange) BoolValue { + o, ok := other.(Word256Value) if !ok { panic(InvalidOperandsError{ - Operation: ast.OperationMul, + Operation: ast.OperationLess, + LeftType: v.StaticType(interpreter), + RightType: other.StaticType(interpreter), + }) + } + + cmp := v.BigInt.Cmp(o.BigInt) + return AsBoolValue(cmp == -1) +} + +func (v Word256Value) LessEqual(interpreter *Interpreter, other ComparableValue, locationRange LocationRange) BoolValue { + o, ok := other.(Word256Value) + if !ok { + panic(InvalidOperandsError{ + Operation: ast.OperationLessEqual, + LeftType: v.StaticType(interpreter), + RightType: other.StaticType(interpreter), + }) + } + + cmp := v.BigInt.Cmp(o.BigInt) + return AsBoolValue(cmp <= 0) +} + +func (v Word256Value) Greater(interpreter *Interpreter, other ComparableValue, locationRange LocationRange) BoolValue { + o, ok := other.(Word256Value) + if !ok { + panic(InvalidOperandsError{ + Operation: ast.OperationGreater, + LeftType: v.StaticType(interpreter), + RightType: other.StaticType(interpreter), + }) + } + + cmp := v.BigInt.Cmp(o.BigInt) + return AsBoolValue(cmp == 1) +} + +func (v Word256Value) GreaterEqual(interpreter *Interpreter, other ComparableValue, locationRange LocationRange) BoolValue { + o, ok := other.(Word256Value) + if !ok { + panic(InvalidOperandsError{ + Operation: ast.OperationGreaterEqual, + LeftType: v.StaticType(interpreter), + RightType: other.StaticType(interpreter), + }) + } + + cmp := v.BigInt.Cmp(o.BigInt) + return AsBoolValue(cmp >= 0) +} + +func (v Word256Value) Equal(_ *Interpreter, _ LocationRange, other Value) bool { + otherInt, ok := other.(Word256Value) + if !ok { + return false + } + cmp := v.BigInt.Cmp(otherInt.BigInt) + return cmp == 0 +} + +// HashInput returns a byte slice containing: +// - HashInputTypeWord256 (1 byte) +// - big int encoded in big endian (n bytes) +func (v Word256Value) HashInput(_ *Interpreter, _ LocationRange, scratch []byte) []byte { + b := UnsignedBigIntToBigEndianBytes(v.BigInt) + + length := 1 + len(b) + var buffer []byte + if length <= len(scratch) { + buffer = scratch[:length] + } else { + buffer = make([]byte, length) + } + + buffer[0] = byte(HashInputTypeWord256) + copy(buffer[1:], b) + return buffer +} + +func ConvertWord256(memoryGauge common.MemoryGauge, value Value, locationRange LocationRange) Value { + return NewWord256ValueFromBigInt( + memoryGauge, + func() *big.Int { + + var v *big.Int + + switch value := value.(type) { + case BigNumberValue: + v = value.ToBigInt(memoryGauge) + + case NumberValue: + v = big.NewInt(int64(value.ToInt(locationRange))) + + default: + panic(errors.NewUnreachableError()) + } + + if v.Cmp(sema.Word256TypeMaxIntBig) > 0 || v.Sign() < 0 { + // When Sign() < 0, Mod will add sema.Word256TypeMaxIntPlusOneBig + // to ensure the range is [0, sema.Word256TypeMaxIntPlusOneBig) + v.Mod(v, 
sema.Word256TypeMaxIntPlusOneBig) + } + + return v + }, + ) +} + +func (v Word256Value) BitwiseOr(interpreter *Interpreter, other IntegerValue, locationRange LocationRange) IntegerValue { + o, ok := other.(Word256Value) + if !ok { + panic(InvalidOperandsError{ + Operation: ast.OperationBitwiseOr, + LeftType: v.StaticType(interpreter), + RightType: other.StaticType(interpreter), + }) + } + + return NewWord256ValueFromBigInt( + interpreter, + func() *big.Int { + res := new(big.Int) + return res.Or(v.BigInt, o.BigInt) + }, + ) +} + +func (v Word256Value) BitwiseXor(interpreter *Interpreter, other IntegerValue, locationRange LocationRange) IntegerValue { + o, ok := other.(Word256Value) + if !ok { + panic(InvalidOperandsError{ + Operation: ast.OperationBitwiseXor, + LeftType: v.StaticType(interpreter), + RightType: other.StaticType(interpreter), + }) + } + + return NewWord256ValueFromBigInt( + interpreter, + func() *big.Int { + res := new(big.Int) + return res.Xor(v.BigInt, o.BigInt) + }, + ) +} + +func (v Word256Value) BitwiseAnd(interpreter *Interpreter, other IntegerValue, locationRange LocationRange) IntegerValue { + o, ok := other.(Word256Value) + if !ok { + panic(InvalidOperandsError{ + Operation: ast.OperationBitwiseAnd, + LeftType: v.StaticType(interpreter), + RightType: other.StaticType(interpreter), + }) + } + + return NewWord256ValueFromBigInt( + interpreter, + func() *big.Int { + res := new(big.Int) + return res.And(v.BigInt, o.BigInt) + }, + ) + +} + +func (v Word256Value) BitwiseLeftShift(interpreter *Interpreter, other IntegerValue, locationRange LocationRange) IntegerValue { + o, ok := other.(Word256Value) + if !ok { + panic(InvalidOperandsError{ + Operation: ast.OperationBitwiseLeftShift, + LeftType: v.StaticType(interpreter), + RightType: other.StaticType(interpreter), + }) + } + + return NewWord256ValueFromBigInt( + interpreter, + func() *big.Int { + res := new(big.Int) + if o.BigInt.Sign() < 0 { + panic(UnderflowError{LocationRange: locationRange}) + } + if !o.BigInt.IsUint64() { + panic(OverflowError{LocationRange: locationRange}) + } + return res.Lsh(v.BigInt, uint(o.BigInt.Uint64())) + }, + ) +} + +func (v Word256Value) BitwiseRightShift(interpreter *Interpreter, other IntegerValue, locationRange LocationRange) IntegerValue { + o, ok := other.(Word256Value) + if !ok { + panic(InvalidOperandsError{ + Operation: ast.OperationBitwiseRightShift, + LeftType: v.StaticType(interpreter), + RightType: other.StaticType(interpreter), + }) + } + + return NewWord256ValueFromBigInt( + interpreter, + func() *big.Int { + res := new(big.Int) + if o.BigInt.Sign() < 0 { + panic(UnderflowError{LocationRange: locationRange}) + } + if !o.BigInt.IsUint64() { + panic(OverflowError{LocationRange: locationRange}) + } + return res.Rsh(v.BigInt, uint(o.BigInt.Uint64())) + }, + ) +} + +func (v Word256Value) GetMember(interpreter *Interpreter, locationRange LocationRange, name string) Value { + return getNumberValueMember(interpreter, v, name, sema.Word256Type, locationRange) +} + +func (Word256Value) RemoveMember(_ *Interpreter, _ LocationRange, _ string) Value { + // Numbers have no removable members (fields / functions) + panic(errors.NewUnreachableError()) +} + +func (Word256Value) SetMember(_ *Interpreter, _ LocationRange, _ string, _ Value) bool { + // Numbers have no settable members (fields / functions) + panic(errors.NewUnreachableError()) +} + +func (v Word256Value) ToBigEndianBytes() []byte { + return UnsignedBigIntToBigEndianBytes(v.BigInt) +} + +func (v Word256Value) ConformsToStaticType( 
+ _ *Interpreter, + _ LocationRange, + _ TypeConformanceResults, +) bool { + return true +} + +func (Word256Value) IsStorable() bool { + return true +} + +func (v Word256Value) Storable(_ atree.SlabStorage, _ atree.Address, _ uint64) (atree.Storable, error) { + return v, nil +} + +func (Word256Value) NeedsStoreTo(_ atree.Address) bool { + return false +} + +func (Word256Value) IsResourceKinded(_ *Interpreter) bool { + return false +} + +func (v Word256Value) Transfer( + interpreter *Interpreter, + _ LocationRange, + _ atree.Address, + remove bool, + storable atree.Storable, +) Value { + if remove { + interpreter.RemoveReferencedSlab(storable) + } + return v +} + +func (v Word256Value) Clone(_ *Interpreter) Value { + return NewUnmeteredWord256ValueFromBigInt(v.BigInt) +} + +func (Word256Value) DeepRemove(_ *Interpreter) { + // NO-OP +} + +func (v Word256Value) ByteSize() uint32 { + return cborTagSize + getBigIntCBORSize(v.BigInt) +} + +func (v Word256Value) StoredValue(_ atree.SlabStorage) (atree.Value, error) { + return v, nil +} + +func (Word256Value) ChildStorables() []atree.Storable { + return nil +} + +// FixedPointValue is a fixed-point number value +type FixedPointValue interface { + NumberValue + IntegerPart() NumberValue + Scale() int +} + +// Fix64Value +type Fix64Value int64 + +const Fix64MaxValue = math.MaxInt64 + +const fix64Size = int(unsafe.Sizeof(Fix64Value(0))) + +var fix64MemoryUsage = common.NewNumberMemoryUsage(fix64Size) + +func NewFix64ValueWithInteger(gauge common.MemoryGauge, constructor func() int64, locationRange LocationRange) Fix64Value { + common.UseMemory(gauge, fix64MemoryUsage) + return NewUnmeteredFix64ValueWithInteger(constructor(), locationRange) +} + +func NewUnmeteredFix64ValueWithInteger(integer int64, locationRange LocationRange) Fix64Value { + + if integer < sema.Fix64TypeMinInt { + panic(UnderflowError{LocationRange: locationRange}) + } + + if integer > sema.Fix64TypeMaxInt { + panic(OverflowError{LocationRange: locationRange}) + } + + return NewUnmeteredFix64Value(integer * sema.Fix64Factor) +} + +func NewFix64Value(gauge common.MemoryGauge, valueGetter func() int64) Fix64Value { + common.UseMemory(gauge, fix64MemoryUsage) + return NewUnmeteredFix64Value(valueGetter()) +} + +func NewUnmeteredFix64Value(integer int64) Fix64Value { + return Fix64Value(integer) +} + +var _ Value = Fix64Value(0) +var _ atree.Storable = Fix64Value(0) +var _ NumberValue = Fix64Value(0) +var _ FixedPointValue = Fix64Value(0) +var _ EquatableValue = Fix64Value(0) +var _ ComparableValue = Fix64Value(0) +var _ HashableValue = Fix64Value(0) +var _ MemberAccessibleValue = Fix64Value(0) + +func (Fix64Value) isValue() {} + +func (v Fix64Value) Accept(interpreter *Interpreter, visitor Visitor) { + visitor.VisitFix64Value(interpreter, v) +} + +func (Fix64Value) Walk(_ *Interpreter, _ func(Value)) { + // NO-OP +} + +func (Fix64Value) StaticType(interpreter *Interpreter) StaticType { + return NewPrimitiveStaticType(interpreter, PrimitiveStaticTypeFix64) +} + +func (Fix64Value) IsImportable(_ *Interpreter) bool { + return true +} + +func (v Fix64Value) String() string { + return format.Fix64(int64(v)) +} + +func (v Fix64Value) RecursiveString(_ SeenReferences) string { + return v.String() +} + +func (v Fix64Value) MeteredString(memoryGauge common.MemoryGauge, _ SeenReferences) string { + common.UseMemory( + memoryGauge, + common.NewRawStringMemoryUsage( + OverEstimateNumberStringLength(memoryGauge, v), + ), + ) + return v.String() +} + +func (v Fix64Value) ToInt(locationRange 
LocationRange) int { + return int(v / sema.Fix64Factor) +} + +func (v Fix64Value) Negate(interpreter *Interpreter, locationRange LocationRange) NumberValue { + // INT32-C + if v == math.MinInt64 { + panic(OverflowError{LocationRange: locationRange}) + } + + valueGetter := func() int64 { + return int64(-v) + } + + return NewFix64Value(interpreter, valueGetter) +} + +func (v Fix64Value) Plus(interpreter *Interpreter, other NumberValue, locationRange LocationRange) NumberValue { + o, ok := other.(Fix64Value) + if !ok { + panic(InvalidOperandsError{ + Operation: ast.OperationPlus, + LeftType: v.StaticType(interpreter), + RightType: other.StaticType(interpreter), + }) + } + + valueGetter := func() int64 { + return safeAddInt64(int64(v), int64(o), locationRange) + } + + return NewFix64Value(interpreter, valueGetter) +} + +func (v Fix64Value) SaturatingPlus(interpreter *Interpreter, other NumberValue, locationRange LocationRange) NumberValue { + o, ok := other.(Fix64Value) + if !ok { + panic(InvalidOperandsError{ + FunctionName: sema.NumericTypeSaturatingAddFunctionName, + LeftType: v.StaticType(interpreter), + RightType: other.StaticType(interpreter), + }) + } + + valueGetter := func() int64 { + // INT32-C + if (o > 0) && (v > (math.MaxInt64 - o)) { + return math.MaxInt64 + } else if (o < 0) && (v < (math.MinInt64 - o)) { + return math.MinInt64 + } + return int64(v + o) + } + + return NewFix64Value(interpreter, valueGetter) +} + +func (v Fix64Value) Minus(interpreter *Interpreter, other NumberValue, locationRange LocationRange) NumberValue { + o, ok := other.(Fix64Value) + if !ok { + panic(InvalidOperandsError{ + Operation: ast.OperationMinus, + LeftType: v.StaticType(interpreter), + RightType: other.StaticType(interpreter), + }) + } + + valueGetter := func() int64 { + // INT32-C + if (o > 0) && (v < (math.MinInt64 + o)) { + panic(OverflowError{LocationRange: locationRange}) + } else if (o < 0) && (v > (math.MaxInt64 + o)) { + panic(UnderflowError{LocationRange: locationRange}) + } + + return int64(v - o) + } + + return NewFix64Value(interpreter, valueGetter) +} + +func (v Fix64Value) SaturatingMinus(interpreter *Interpreter, other NumberValue, locationRange LocationRange) NumberValue { + o, ok := other.(Fix64Value) + if !ok { + panic(InvalidOperandsError{ + FunctionName: sema.NumericTypeSaturatingSubtractFunctionName, + LeftType: v.StaticType(interpreter), + RightType: other.StaticType(interpreter), + }) + } + + valueGetter := func() int64 { + // INT32-C + if (o > 0) && (v < (math.MinInt64 + o)) { + return math.MinInt64 + } else if (o < 0) && (v > (math.MaxInt64 + o)) { + return math.MaxInt64 + } + return int64(v - o) + } + + return NewFix64Value(interpreter, valueGetter) +} + +var minInt64Big = big.NewInt(math.MinInt64) +var maxInt64Big = big.NewInt(math.MaxInt64) + +func (v Fix64Value) Mul(interpreter *Interpreter, other NumberValue, locationRange LocationRange) NumberValue { + o, ok := other.(Fix64Value) + if !ok { + panic(InvalidOperandsError{ + Operation: ast.OperationMul, LeftType: v.StaticType(interpreter), RightType: other.StaticType(interpreter), }) @@ -14764,8 +15338,6 @@ func (v *CompositeValue) Destroy(interpreter *Interpreter, locationRange Locatio v.checkInvalidatedResourceUse(interpreter, locationRange) } - storageID := v.StorageID() - if config.TracingEnabled { startTime := time.Now() @@ -14784,46 +15356,54 @@ func (v *CompositeValue) Destroy(interpreter *Interpreter, locationRange Locatio }() } - // if this type has attachments, destroy all of them before invoking the 
destructor - v.forEachAttachment(interpreter, locationRange, func(attachment *CompositeValue) { - // an attachment's destructor may make reference to `base`, so we must set the base value - // for the attachment before invoking its destructor. For other functions, this happens - // automatically when the attachment is accessed with the access expression `v[A]`, which - // is a necessary pre-requisite for calling any members of the attachment. However, in - // the case of a destructor, this is called implicitly, and thus must have its `base` - // set manually - attachment.setBaseValue(interpreter, v, attachmentBaseAuthorization(interpreter, attachment)) - attachment.Destroy(interpreter, locationRange) - }) + storageID := v.StorageID() - interpreter = v.getInterpreter(interpreter) + interpreter.withResourceDestruction( + storageID, + locationRange, + func() { + // if this type has attachments, destroy all of them before invoking the destructor + v.forEachAttachment(interpreter, locationRange, func(attachment *CompositeValue) { + // an attachment's destructor may make reference to `base`, so we must set the base value + // for the attachment before invoking its destructor. For other functions, this happens + // automatically when the attachment is accessed with the access expression `v[A]`, which + // is a necessary pre-requisite for calling any members of the attachment. However, in + // the case of a destructor, this is called implicitly, and thus must have its `base` + // set manually + attachment.setBaseValue(interpreter, v, attachmentBaseAuthorization(interpreter, attachment)) + attachment.Destroy(interpreter, locationRange) + }) - // if composite was deserialized, dynamically link in the destructor - if v.Destructor == nil { - v.Destructor = interpreter.SharedState.typeCodes.CompositeCodes[v.TypeID()].DestructorFunction - } + interpreter = v.getInterpreter(interpreter) - destructor := v.Destructor + // if composite was deserialized, dynamically link in the destructor + if v.Destructor == nil { + v.Destructor = interpreter.SharedState.typeCodes.CompositeCodes[v.TypeID()].DestructorFunction + } - if destructor != nil { - var base *EphemeralReferenceValue - var self MemberAccessibleValue = v - if v.Kind == common.CompositeKindAttachment { - base, self = attachmentBaseAndSelfValues(interpreter, locationRange, v) - } - invocation := NewInvocation( - interpreter, - &self, - base, - nil, - nil, - nil, - nil, - locationRange, - ) + destructor := v.Destructor - destructor.invoke(invocation) - } + if destructor != nil { + var base *EphemeralReferenceValue + var self MemberAccessibleValue = v + if v.Kind == common.CompositeKindAttachment { + base, self = attachmentBaseAndSelfValues(interpreter, locationRange, v) + } + invocation := NewInvocation( + interpreter, + &self, + base, + nil, + nil, + nil, + nil, + locationRange, + ) + + destructor.invoke(invocation) + } + }, + ) v.isDestroyed = true @@ -14847,6 +15427,7 @@ func (v *CompositeValue) Destroy(interpreter *Interpreter, locationRange Locatio } }, ) + } func (v *CompositeValue) getBuiltinMember(interpreter *Interpreter, locationRange LocationRange, name string) Value { @@ -16297,20 +16878,27 @@ func (v *DictionaryValue) Accept(interpreter *Interpreter, visitor Visitor) { }) } -func (v *DictionaryValue) Iterate(gauge common.MemoryGauge, f func(key, value Value) (resume bool)) { - err := v.dictionary.Iterate(func(key, value atree.Value) (resume bool, err error) { - // atree.OrderedMap iteration provides low-level atree.Value, - // convert to 
high-level interpreter.Value +func (v *DictionaryValue) Iterate(interpreter *Interpreter, f func(key, value Value) (resume bool)) { + iterate := func() { + err := v.dictionary.Iterate(func(key, value atree.Value) (resume bool, err error) { + // atree.OrderedMap iteration provides low-level atree.Value, + // convert to high-level interpreter.Value - resume = f( - MustConvertStoredValue(gauge, key), - MustConvertStoredValue(gauge, value), - ) + resume = f( + MustConvertStoredValue(interpreter, key), + MustConvertStoredValue(interpreter, value), + ) - return resume, nil - }) - if err != nil { - panic(errors.NewExternalError(err)) + return resume, nil + }) + if err != nil { + panic(errors.NewExternalError(err)) + } + } + if v.IsResourceKinded(interpreter) { + interpreter.withMutationPrevention(v.StorageID(), iterate) + } else { + iterate() } } @@ -16395,8 +16983,6 @@ func (v *DictionaryValue) Destroy(interpreter *Interpreter, locationRange Locati v.checkInvalidatedResourceUse(interpreter, locationRange) } - storageID := v.StorageID() - if config.TracingEnabled { startTime := time.Now() @@ -16412,12 +16998,21 @@ func (v *DictionaryValue) Destroy(interpreter *Interpreter, locationRange Locati }() } - v.Iterate(interpreter, func(key, value Value) (resume bool) { - // Resources cannot be keys at the moment, so should theoretically not be needed - maybeDestroy(interpreter, locationRange, key) - maybeDestroy(interpreter, locationRange, value) - return true - }) + storageID := v.StorageID() + + interpreter.withResourceDestruction( + storageID, + locationRange, + func() { + v.Iterate(interpreter, func(key, value Value) (resume bool) { + // Resources cannot be keys at the moment, so should theoretically not be needed + maybeDestroy(interpreter, locationRange, key) + maybeDestroy(interpreter, locationRange, value) + + return true + }) + }, + ) v.isDestroyed = true @@ -16463,21 +17058,29 @@ func (v *DictionaryValue) ForEachKey( ) } - err := v.dictionary.IterateKeys( - func(item atree.Value) (bool, error) { - key := MustConvertStoredValue(interpreter, item) + iterate := func() { + err := v.dictionary.IterateKeys( + func(item atree.Value) (bool, error) { + key := MustConvertStoredValue(interpreter, item) - shouldContinue, ok := procedure.invoke(iterationInvocation(key)).(BoolValue) - if !ok { - panic(errors.NewUnreachableError()) - } + shouldContinue, ok := procedure.invoke(iterationInvocation(key)).(BoolValue) + if !ok { + panic(errors.NewUnreachableError()) + } - return bool(shouldContinue), nil - }, - ) + return bool(shouldContinue), nil + }, + ) - if err != nil { - panic(errors.NewExternalError(err)) + if err != nil { + panic(errors.NewExternalError(err)) + } + } + + if v.IsResourceKinded(interpreter) { + interpreter.withMutationPrevention(v.StorageID(), iterate) + } else { + iterate() } } @@ -16554,6 +17157,8 @@ func (v *DictionaryValue) SetKey( keyValue Value, value Value, ) { + interpreter.validateMutation(v.StorageID(), locationRange) + config := interpreter.SharedState.Config if config.InvalidatedResourceValidationEnabled { @@ -16836,6 +17441,8 @@ func (v *DictionaryValue) Remove( keyValue Value, ) OptionalValue { + interpreter.validateMutation(v.StorageID(), locationRange) + valueComparator := newValueComparator(interpreter, locationRange) hashInputProvider := newHashInputProvider(interpreter, locationRange) @@ -16891,6 +17498,8 @@ func (v *DictionaryValue) Insert( keyValue, value Value, ) OptionalValue { + interpreter.validateMutation(v.StorageID(), locationRange) + // length increases by 1 
dataSlabs, metaDataSlabs := common.AdditionalAtreeMemoryUsage(v.dictionary.Count(), v.elementSize, false) common.UseMemory(interpreter, common.AtreeMapElementOverhead) @@ -17306,6 +17915,7 @@ func (v *DictionaryValue) Clone(interpreter *Interpreter) Value { } func (v *DictionaryValue) DeepRemove(interpreter *Interpreter) { + config := interpreter.SharedState.Config if config.TracingEnabled { diff --git a/runtime/interpreter/value_account.go b/runtime/interpreter/value_account.go index 14e21bbf2e..4ee802539b 100644 --- a/runtime/interpreter/value_account.go +++ b/runtime/interpreter/value_account.go @@ -68,6 +68,7 @@ func NewAuthAccountValue( var copyFunction *HostFunctionValue var saveFunction *HostFunctionValue var borrowFunction *HostFunctionValue + var checkFunction *HostFunctionValue var linkFunction *HostFunctionValue var linkAccountFunction *HostFunctionValue var unlinkFunction *HostFunctionValue @@ -184,6 +185,12 @@ func NewAuthAccountValue( } return borrowFunction + case sema.AuthAccountTypeCheckFunctionName: + if checkFunction == nil { + checkFunction = inter.authAccountCheckFunction(address) + } + return checkFunction + case sema.AuthAccountTypeLinkFunctionName: if linkFunction == nil { linkFunction = inter.authAccountLinkFunction(address) diff --git a/runtime/interpreter/value_function.go b/runtime/interpreter/value_function.go index 3e32868a8a..137c0cc943 100644 --- a/runtime/interpreter/value_function.go +++ b/runtime/interpreter/value_function.go @@ -386,7 +386,15 @@ func (f BoundFunctionValue) FunctionType() *sema.FunctionType { } func (f BoundFunctionValue) invoke(invocation Invocation) Value { - invocation.Self = f.Self + self := f.Self + invocation.Self = self + if self != nil { + if resource, ok := (*self).(ResourceKindedValue); ok && resource.IsDestroyed() { + panic(DestroyedResourceError{ + LocationRange: invocation.LocationRange, + }) + } + } invocation.Base = f.Base invocation.BoundAuthorization = f.BoundAuthorization return f.Function.invoke(invocation) diff --git a/runtime/interpreter/value_test.go b/runtime/interpreter/value_test.go index 4e4ce8203c..b4b172f2d9 100644 --- a/runtime/interpreter/value_test.go +++ b/runtime/interpreter/value_test.go @@ -962,8 +962,12 @@ func TestStringer(t *testing.T) { expected: "64", }, "Word128": { - value: NewUnmeteredWord128ValueFromUint64(64), - expected: "64", + value: NewUnmeteredWord128ValueFromUint64(128), + expected: "128", + }, + "Word256": { + value: NewUnmeteredWord256ValueFromUint64(256), + expected: "256", }, "UFix64": { value: NewUnmeteredUFix64ValueWithInteger(64, EmptyLocationRange), @@ -1480,6 +1484,18 @@ func TestGetHashInput(t *testing.T) { value: NewUnmeteredWord128ValueFromBigInt(sema.Word128TypeMaxIntBig), expected: append([]byte{byte(HashInputTypeWord128)}, sema.Word128TypeMaxIntBig.Bytes()...), }, + "Word256": { + value: NewUnmeteredWord256ValueFromUint64(256), + expected: []byte{byte(HashInputTypeWord256), 1, 0}, + }, + "Word256 min": { + value: NewUnmeteredWord256ValueFromUint64(0), + expected: append([]byte{byte(HashInputTypeWord256)}, 0), + }, + "Word256 max": { + value: NewUnmeteredWord256ValueFromBigInt(sema.Word256TypeMaxIntBig), + expected: append([]byte{byte(HashInputTypeWord256)}, sema.Word256TypeMaxIntBig.Bytes()...), + }, "UFix64": { value: NewUnmeteredUFix64ValueWithInteger(64, EmptyLocationRange), expected: []byte{byte(HashInputTypeUFix64), 0x0, 0x0, 0x0, 0x1, 0x7d, 0x78, 0x40, 0x0}, @@ -3409,6 +3425,7 @@ func TestNumberValue_Equal(t *testing.T) { "Word32": NewUnmeteredWord32Value(32), 
"Word64": NewUnmeteredWord64Value(64), "Word128": NewUnmeteredWord128ValueFromUint64(128), + "Word256": NewUnmeteredWord256ValueFromUint64(256), "UFix64": NewUnmeteredUFix64ValueWithInteger(64, EmptyLocationRange), "Fix64": NewUnmeteredFix64ValueWithInteger(-32, EmptyLocationRange), } @@ -3788,6 +3805,7 @@ func TestNumberValueIntegerConversion(t *testing.T) { sema.Word32Type: NewUnmeteredWord32Value(42), sema.Word64Type: NewUnmeteredWord64Value(42), sema.Word128Type: NewUnmeteredWord128ValueFromUint64(42), + sema.Word256Type: NewUnmeteredWord256ValueFromUint64(42), sema.Int8Type: NewUnmeteredInt8Value(42), sema.Int16Type: NewUnmeteredInt16Value(42), sema.Int32Type: NewUnmeteredInt32Value(42), @@ -4092,6 +4110,7 @@ func TestValue_ConformsToStaticType(t *testing.T) { sema.Word32Type: NewUnmeteredWord32Value(42), sema.Word64Type: NewUnmeteredWord64Value(42), sema.Word128Type: NewUnmeteredWord128ValueFromUint64(42), + sema.Word256Type: NewUnmeteredWord256ValueFromUint64(42), sema.Int8Type: NewUnmeteredInt8Value(42), sema.Int16Type: NewUnmeteredInt16Value(42), sema.Int32Type: NewUnmeteredInt32Value(42), diff --git a/runtime/interpreter/visitor.go b/runtime/interpreter/visitor.go index 6f11f15541..587a899865 100644 --- a/runtime/interpreter/visitor.go +++ b/runtime/interpreter/visitor.go @@ -45,6 +45,7 @@ type Visitor interface { VisitWord32Value(interpreter *Interpreter, value Word32Value) VisitWord64Value(interpreter *Interpreter, value Word64Value) VisitWord128Value(interpreter *Interpreter, value Word128Value) + VisitWord256Value(interpreter *Interpreter, value Word256Value) VisitFix64Value(interpreter *Interpreter, value Fix64Value) VisitUFix64Value(interpreter *Interpreter, value UFix64Value) VisitCompositeValue(interpreter *Interpreter, value *CompositeValue) bool @@ -95,6 +96,7 @@ type EmptyVisitor struct { Word32ValueVisitor func(interpreter *Interpreter, value Word32Value) Word64ValueVisitor func(interpreter *Interpreter, value Word64Value) Word128ValueVisitor func(interpreter *Interpreter, value Word128Value) + Word256ValueVisitor func(interpreter *Interpreter, value Word256Value) Fix64ValueVisitor func(interpreter *Interpreter, value Fix64Value) UFix64ValueVisitor func(interpreter *Interpreter, value UFix64Value) CompositeValueVisitor func(interpreter *Interpreter, value *CompositeValue) bool @@ -302,6 +304,13 @@ func (v EmptyVisitor) VisitWord128Value(interpreter *Interpreter, value Word128V v.Word128ValueVisitor(interpreter, value) } +func (v EmptyVisitor) VisitWord256Value(interpreter *Interpreter, value Word256Value) { + if v.Word256ValueVisitor == nil { + return + } + v.Word256ValueVisitor(interpreter, value) +} + func (v EmptyVisitor) VisitFix64Value(interpreter *Interpreter, value Fix64Value) { if v.Fix64ValueVisitor == nil { return diff --git a/runtime/literal.go b/runtime/literal.go index 59a271e33d..1a3278e6ba 100644 --- a/runtime/literal.go +++ b/runtime/literal.go @@ -269,6 +269,8 @@ func convertIntValue( return interpreter.ConvertWord64(memoryGauge, intValue, interpreter.EmptyLocationRange), nil case sema.Word128Type: return interpreter.ConvertWord128(memoryGauge, intValue, interpreter.EmptyLocationRange), nil + case sema.Word256Type: + return interpreter.ConvertWord256(memoryGauge, intValue, interpreter.EmptyLocationRange), nil default: return nil, UnsupportedLiteralError diff --git a/runtime/resource_duplicate_test.go b/runtime/resource_duplicate_test.go index 7a1eb38ba4..743cf198bf 100644 --- a/runtime/resource_duplicate_test.go +++ 
b/runtime/resource_duplicate_test.go @@ -23,15 +23,445 @@ import ( "fmt" "testing" + "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" "github.com/onflow/cadence" "github.com/onflow/cadence/encoding/json" "github.com/onflow/cadence/runtime/common" "github.com/onflow/cadence/runtime/interpreter" + "github.com/onflow/cadence/runtime/sema" + "github.com/onflow/cadence/runtime/tests/checker" . "github.com/onflow/cadence/runtime/tests/utils" ) +func TestRuntimeResourceDuplicationUsingDestructorIteration(t *testing.T) { + t.Parallel() + + t.Run("Reported error", func(t *testing.T) { + + t.Parallel() + + script := ` + // This Vault class is from Flow docs, used as our "victim" in this example + access(all) resource Vault { + // Balance of a user's Vault + // we use unsigned fixed point numbers for balances + // because they can represent decimals and do not allow negative values + access(all) var balance: UFix64 + + init(balance: UFix64) { + self.balance = balance + } + + access(all) fun withdraw(amount: UFix64): @Vault { + self.balance = self.balance - amount + return <-create Vault(balance: amount) + } + + access(all) fun deposit(from: @Vault) { + self.balance = self.balance + from.balance + destroy from + } + } + + // --- this code actually makes use of the vuln --- + access(all) resource DummyResource { + access(all) var dictRef: &{Bool: AnyResource}; + access(all) var arrRef: &[Vault]; + access(all) var victim: @Vault; + init(dictRef: &{Bool: AnyResource}, arrRef: &[Vault], victim: @Vault) { + self.dictRef = dictRef; + self.arrRef = arrRef; + self.victim <- victim; + } + + destroy() { + self.arrRef.append(<- self.victim) + self.dictRef[false] <-> self.dictRef[true]; // This screws up the destruction order + } + } + + access(all) fun duplicateResource(victim1: @Vault, victim2: @Vault): @[Vault]{ + let arr : @[Vault] <- []; + let dict: @{Bool: DummyResource} <- { } + let ref = &dict as &{Bool: AnyResource}; + let arrRef = &arr as &[Vault]; + + var v1: @DummyResource? <- create DummyResource(dictRef: ref, arrRef: arrRef, victim: <- victim1); + dict[false] <-> v1; + destroy v1; + + var v2: @DummyResource? 
<- create DummyResource(dictRef: ref, arrRef: arrRef, victim: <- victim2); + dict[true] <-> v2; + destroy v2; + + destroy dict // Trigger the destruction chain where dict[false] will be destructed twice + return <- arr; + } + + // --- end of vuln code --- + + access(all) fun main() { + + var v1 <- create Vault(balance: 1000.0); // This will be duplicated + var v2 <- create Vault(balance: 1.0); // This will be lost + var v3 <- create Vault(balance: 0.0); // We'll collect the spoils here + + // The call will return an array of [v1, v1] + var res <- duplicateResource(victim1: <- v1, victim2: <-v2) + + v3.deposit(from: <- res.removeLast()); + v3.deposit(from: <- res.removeLast()); + destroy res; + + log(v3.balance); + destroy v3; + }` + + runtime := newTestInterpreterRuntime() + + accountCodes := map[common.Location][]byte{} + + var events []cadence.Event + + signerAccount := common.MustBytesToAddress([]byte{0x1}) + + storage := newTestLedger(nil, nil) + + runtimeInterface := &testRuntimeInterface{ + getCode: func(location Location) (bytes []byte, err error) { + return accountCodes[location], nil + }, + storage: storage, + getSigningAccounts: func() ([]Address, error) { + return []Address{signerAccount}, nil + }, + resolveLocation: singleIdentifierLocationResolver(t), + getAccountContractCode: func(location common.AddressLocation) (code []byte, err error) { + return accountCodes[location], nil + }, + updateAccountContractCode: func(location common.AddressLocation, code []byte) error { + accountCodes[location] = code + return nil + }, + emitEvent: func(event cadence.Event) error { + events = append(events, event) + return nil + }, + log: func(s string) { + assert.Fail(t, "we should not reach this point") + }, + } + runtimeInterface.decodeArgument = func(b []byte, t cadence.Type) (value cadence.Value, err error) { + return json.Decode(nil, b) + } + + _, err := runtime.ExecuteScript( + Script{ + Source: []byte(script), + Arguments: [][]byte{}, + }, + Context{ + Interface: runtimeInterface, + Location: common.ScriptLocation{}, + }, + ) + + require.ErrorAs(t, err, &interpreter.ContainerMutatedDuringIterationError{}) + }) + + t.Run("simplified", func(t *testing.T) { + + t.Parallel() + + script := ` + access(all) resource Vault { + access(all) var balance: UFix64 + access(all) var dictRef: &{Bool: Vault}; + + init(balance: UFix64, _ dictRef: &{Bool: Vault}) { + self.balance = balance + self.dictRef = dictRef; + } + + access(all) fun withdraw(amount: UFix64): @Vault { + self.balance = self.balance - amount + return <-create Vault(balance: amount, self.dictRef) + } + + access(all) fun deposit(from: @Vault) { + self.balance = self.balance + from.balance + destroy from + } + + destroy() { + self.dictRef[false] <-> self.dictRef[true]; // This screws up the destruction order + } + } + + access(all) fun main(): UFix64 { + + let dict: @{Bool: Vault} <- { } + let dictRef = &dict as &{Bool: Vault}; + + var v1 <- create Vault(balance: 1000.0, dictRef); // This will be duplicated + var v2 <- create Vault(balance: 1.0, dictRef); // This will be lost + + var v1Ref = &v1 as &Vault + + destroy dict.insert(key: false, <- v1) + destroy dict.insert(key: true, <- v2) + + destroy dict; + + // v1 is not destroyed! 
+ return v1Ref.balance + }` + + runtime := newTestInterpreterRuntime() + + accountCodes := map[common.Location][]byte{} + + var events []cadence.Event + + signerAccount := common.MustBytesToAddress([]byte{0x1}) + + storage := newTestLedger(nil, nil) + + runtimeInterface := &testRuntimeInterface{ + getCode: func(location Location) (bytes []byte, err error) { + return accountCodes[location], nil + }, + storage: storage, + getSigningAccounts: func() ([]Address, error) { + return []Address{signerAccount}, nil + }, + resolveLocation: singleIdentifierLocationResolver(t), + getAccountContractCode: func(location common.AddressLocation) (code []byte, err error) { + return accountCodes[location], nil + }, + updateAccountContractCode: func(location common.AddressLocation, code []byte) error { + accountCodes[location] = code + return nil + }, + emitEvent: func(event cadence.Event) error { + events = append(events, event) + return nil + }, + } + runtimeInterface.decodeArgument = func(b []byte, t cadence.Type) (value cadence.Value, err error) { + return json.Decode(nil, b) + } + + _, err := runtime.ExecuteScript( + Script{ + Source: []byte(script), + Arguments: [][]byte{}, + }, + Context{ + Interface: runtimeInterface, + Location: common.ScriptLocation{}, + }, + ) + + var checkerErr *sema.CheckerError + require.ErrorAs(t, err, &checkerErr) + + errs := checker.RequireCheckerErrors(t, checkerErr, 1) + + assert.IsType(t, &sema.InvalidatedResourceReferenceError{}, errs[0]) + + }) + + t.Run("forEachKey", func(t *testing.T) { + + t.Parallel() + + script := ` + access(all) resource R{} + + access(all) fun main() { + var dict: @{Int: R} <- {} + + var r1: @R? <- create R() + var r2: @R? <- create R() + var r3: @R? <- create R() + + dict[0] <-> r1 + dict[1] <-> r2 + dict[2] <-> r3 + + destroy r1 + destroy r2 + destroy r3 + + let acc = getAuthAccount(0x1) + acc.save(<-dict, to: /storage/foo) + + let ref = acc.borrow<&{Int: R}>(from: /storage/foo)! + + ref.forEachKey(fun(i: Int): Bool { + var r4: @R? 
<- create R() + ref[i+1] <-> r4 + destroy r4 + return true + }) + }` + + runtime := newTestInterpreterRuntime() + + accountCodes := map[common.Location][]byte{} + + var events []cadence.Event + + signerAccount := common.MustBytesToAddress([]byte{0x1}) + + storage := newTestLedger(nil, nil) + + runtimeInterface := &testRuntimeInterface{ + getCode: func(location Location) (bytes []byte, err error) { + return accountCodes[location], nil + }, + storage: storage, + getSigningAccounts: func() ([]Address, error) { + return []Address{signerAccount}, nil + }, + resolveLocation: singleIdentifierLocationResolver(t), + getAccountContractCode: func(location common.AddressLocation) (code []byte, err error) { + return accountCodes[location], nil + }, + updateAccountContractCode: func(location common.AddressLocation, code []byte) error { + accountCodes[location] = code + return nil + }, + emitEvent: func(event cadence.Event) error { + events = append(events, event) + return nil + }, + } + runtimeInterface.decodeArgument = func(b []byte, t cadence.Type) (value cadence.Value, err error) { + return json.Decode(nil, b) + } + + _, err := runtime.ExecuteScript( + Script{ + Source: []byte(script), + Arguments: [][]byte{}, + }, + Context{ + Interface: runtimeInterface, + Location: common.ScriptLocation{}, + }, + ) + + require.ErrorAs(t, err, &interpreter.ContainerMutatedDuringIterationError{}) + }) + + t.Run("array", func(t *testing.T) { + + t.Parallel() + + script := ` + access(all) resource Vault { + access(all) var balance: UFix64 + access(all) var arrRef: &[Vault] + + init(balance: UFix64, _ arrRef: &[Vault]) { + self.balance = balance + self.arrRef = arrRef; + } + + access(all) fun withdraw(amount: UFix64): @Vault { + self.balance = self.balance - amount + return <-create Vault(balance: amount, self.arrRef) + } + + access(all) fun deposit(from: @Vault) { + self.balance = self.balance + from.balance + destroy from + } + + destroy() { + self.arrRef.append(<-create Vault(balance: 0.0, self.arrRef)) + } + } + + access(all) fun main(): UFix64 { + + let arr: @[Vault] <- [] + let arrRef = &arr as &[Vault]; + + var v1 <- create Vault(balance: 1000.0, arrRef); // This will be duplicated + var v2 <- create Vault(balance: 1.0, arrRef); // This will be lost + + var v1Ref = &v1 as &Vault + + arr.append(<- v1) + arr.append(<- v2) + + destroy arr + + // v1 is not destroyed! 
+ return v1Ref.balance + }` + + runtime := newTestInterpreterRuntime() + + accountCodes := map[common.Location][]byte{} + + var events []cadence.Event + + signerAccount := common.MustBytesToAddress([]byte{0x1}) + + storage := newTestLedger(nil, nil) + + runtimeInterface := &testRuntimeInterface{ + getCode: func(location Location) (bytes []byte, err error) { + return accountCodes[location], nil + }, + storage: storage, + getSigningAccounts: func() ([]Address, error) { + return []Address{signerAccount}, nil + }, + resolveLocation: singleIdentifierLocationResolver(t), + getAccountContractCode: func(location common.AddressLocation) (code []byte, err error) { + return accountCodes[location], nil + }, + updateAccountContractCode: func(location common.AddressLocation, code []byte) error { + accountCodes[location] = code + return nil + }, + emitEvent: func(event cadence.Event) error { + events = append(events, event) + return nil + }, + } + runtimeInterface.decodeArgument = func(b []byte, t cadence.Type) (value cadence.Value, err error) { + return json.Decode(nil, b) + } + + _, err := runtime.ExecuteScript( + Script{ + Source: []byte(script), + Arguments: [][]byte{}, + }, + Context{ + Interface: runtimeInterface, + Location: common.ScriptLocation{}, + }, + ) + RequireError(t, err) + + var checkerErr *sema.CheckerError + require.ErrorAs(t, err, &checkerErr) + + errs := checker.RequireCheckerErrors(t, checkerErr, 1) + + assert.IsType(t, &sema.InvalidatedResourceReferenceError{}, errs[0]) + }) +} + func TestRuntimeResourceDuplicationWithContractTransfer(t *testing.T) { t.Parallel() diff --git a/runtime/runtime_test.go b/runtime/runtime_test.go index ee6431a1c6..9176ab2fcf 100644 --- a/runtime/runtime_test.go +++ b/runtime/runtime_test.go @@ -149,7 +149,13 @@ func (r testInterpreterRuntime) ExecuteTransaction(script Script, context Contex func (r testInterpreterRuntime) ExecuteScript(script Script, context Context) (cadence.Value, error) { i := context.Interface.(*testRuntimeInterface) i.onScriptExecutionStart() - return r.interpreterRuntime.ExecuteScript(script, context) + value, err := r.interpreterRuntime.ExecuteScript(script, context) + // If there was a return value, let's also ensure it can be encoded + // TODO: also test CCF + if value != nil && err == nil { + _ = jsoncdc.MustEncode(value) + } + return value, err } type testRuntimeInterface struct { @@ -2764,9 +2770,11 @@ func TestRuntimeScriptReturnSpecial(t *testing.T) { } `, expected: cadence.Function{ - FunctionType: &cadence.FunctionType{ - ReturnType: cadence.IntType{}, - }, + FunctionType: cadence.TypeWithCachedTypeID( + &cadence.FunctionType{ + ReturnType: cadence.IntType{}, + }, + ).(*cadence.FunctionType), }, }, ) @@ -2784,17 +2792,19 @@ func TestRuntimeScriptReturnSpecial(t *testing.T) { } `, expected: cadence.Function{ - FunctionType: &cadence.FunctionType{ - Purity: sema.FunctionPurityView, - Parameters: []cadence.Parameter{ - { - Label: sema.ArgumentLabelNotRequired, - Identifier: "message", - Type: cadence.StringType{}, + FunctionType: cadence.TypeWithCachedTypeID( + &cadence.FunctionType{ + Purity: sema.FunctionPurityView, + Parameters: []cadence.Parameter{ + { + Label: sema.ArgumentLabelNotRequired, + Identifier: "message", + Type: cadence.StringType{}, + }, }, + ReturnType: cadence.NeverType{}, }, - ReturnType: cadence.NeverType{}, - }, + ).(*cadence.FunctionType), }, }, ) @@ -2817,9 +2827,11 @@ func TestRuntimeScriptReturnSpecial(t *testing.T) { } `, expected: cadence.Function{ - FunctionType: &cadence.FunctionType{ - 
ReturnType: cadence.VoidType{}, - }, + FunctionType: cadence.TypeWithCachedTypeID( + &cadence.FunctionType{ + ReturnType: cadence.VoidType{}, + }, + ).(*cadence.FunctionType), }, }, ) @@ -7929,7 +7941,7 @@ func TestRuntimeTypeMismatchErrorMessage(t *testing.T) { Location: nextScriptLocation(), }, ) - require.Error(t, err) + RequireError(t, err) require.ErrorContains(t, err, "expected type `A.0000000000000002.Foo.Bar`, got `A.0000000000000001.Foo.Bar`") @@ -7972,7 +7984,7 @@ func TestRuntimeErrorExcerpts(t *testing.T) { Location: common.ScriptLocation{}, }, ) - require.Error(t, err) + RequireError(t, err) errorString := `Execution failed: error: unexpectedly found nil while forcing an Optional value @@ -8024,7 +8036,7 @@ func TestRuntimeErrorExcerptsMultiline(t *testing.T) { Location: common.ScriptLocation{}, }, ) - require.Error(t, err) + RequireError(t, err) errorString := `Execution failed: error: unexpectedly found nil while forcing an Optional value @@ -8088,7 +8100,9 @@ func TestRuntimeAccountTypeEquality(t *testing.T) { require.Equal(t, cadence.Bool(true), result) } -func TestUserPanicToError(t *testing.T) { +func TestRuntimeUserPanicToError(t *testing.T) { + t.Parallel() + err := fmt.Errorf( "wrapped: %w", runtimeErrors.NewDefaultUserError("user error"), @@ -8096,3 +8110,548 @@ func TestUserPanicToError(t *testing.T) { retErr := userPanicToError(func() { panic(err) }) require.Equal(t, retErr, err) } + +func TestRuntimeDestructorReentrancyPrevention(t *testing.T) { + + t.Parallel() + + rt := newTestInterpreterRuntime() + + script := []byte(` + access(all) resource Vault { + // Balance of a user's Vault + // we use unsigned fixed point numbers for balances + // because they can represent decimals and do not allow negative values + access(all) var balance: UFix64 + + init(balance: UFix64) { + self.balance = balance + } + + access(all) fun withdraw(amount: UFix64): @Vault { + self.balance = self.balance - amount + return <-create Vault(balance: amount) + } + + access(all) fun deposit(from: @Vault) { + self.balance = self.balance + from.balance + destroy from + } + } + + // --- this code actually makes use of the vuln --- + access(all) resource InnerResource { + access(all) var victim: @Vault; + access(all) var here: Bool; + access(all) var parent: &OuterResource; + init(victim: @Vault, parent: &OuterResource) { + self.victim <- victim; + self.here = false; + self.parent = parent; + } + + destroy() { + if self.here == false { + self.here = true; + self.parent.reenter(); // will cause us to re-enter this destructor + } + self.parent.collect(from: <- self.victim); + } + } + + access(all) resource OuterResource { + access(all) var inner: @InnerResource?; + access(all) var collector: &Vault; + init(victim: @Vault, collector: &Vault) { + self.collector = collector; + self.inner <- create InnerResource(victim: <- victim, parent: &self as &OuterResource); + } + access(all) fun reenter() { + let inner <- self.inner <- nil; + destroy inner; + } + access(all) fun collect(from: @Vault) { + self.collector.deposit(from: <- from); + } + + destroy() { + destroy self.inner; + } + } + + access(all) fun doubleBalanceOfVault(vault: @Vault): @Vault { + var collector <- vault.withdraw(amount: 0.0); + var r <- create OuterResource(victim: <- vault, collector: &collector as &Vault); + destroy r; + return <- collector; + } + + // --- end of vuln code --- + + access(all) fun main(): UFix64 { + var v1 <- create Vault(balance: 1000.0); + var v2 <- doubleBalanceOfVault(vault: <- v1); + var v3 <- 
doubleBalanceOfVault(vault: <- v2); + let balance = v3.balance + destroy v3 + return balance + } + `) + + runtimeInterface := &testRuntimeInterface{ + storage: newTestLedger(nil, nil), + } + + _, err := rt.ExecuteScript( + Script{ + Source: script, + }, + Context{ + Interface: runtimeInterface, + Location: common.ScriptLocation{}, + }, + ) + RequireError(t, err) + + require.ErrorAs(t, err, &interpreter.InvalidatedResourceReferenceError{}) +} + +func TestRuntimeFlowEventTypes(t *testing.T) { + + t.Parallel() + + rt := newTestInterpreterRuntime() + + script := []byte(` + access(all) fun main(): Type? { + return CompositeType("flow.AccountContractAdded") + } + `) + + runtimeInterface := &testRuntimeInterface{ + storage: newTestLedger(nil, nil), + } + + result, err := rt.ExecuteScript( + Script{ + Source: script, + }, + Context{ + Interface: runtimeInterface, + Location: common.ScriptLocation{}, + }, + ) + require.NoError(t, err) + + accountContractAddedType := ExportType( + stdlib.AccountContractAddedEventType, + map[sema.TypeID]cadence.Type{}, + ) + + require.Equal(t, + cadence.Optional{ + Value: cadence.TypeValue{ + StaticType: accountContractAddedType, + }, + }, + result, + ) +} + +func TestInvalidatedResourceUse(t *testing.T) { + + t.Parallel() + + runtime := newTestInterpreterRuntime() + + signerAccount := common.MustBytesToAddress([]byte{0x1}) + + signers := []Address{signerAccount} + + accountCodes := map[Location][]byte{} + var events []cadence.Event + + runtimeInterface := &testRuntimeInterface{ + getCode: func(location Location) (bytes []byte, err error) { + return accountCodes[location], nil + }, + storage: newTestLedger(nil, nil), + getSigningAccounts: func() ([]Address, error) { + return signers, nil + }, + resolveLocation: singleIdentifierLocationResolver(t), + getAccountContractCode: func(location common.AddressLocation) (code []byte, err error) { + return accountCodes[location], nil + }, + updateAccountContractCode: func(location common.AddressLocation, code []byte) (err error) { + accountCodes[location] = code + return nil + }, + emitEvent: func(event cadence.Event) error { + events = append(events, event) + return nil + }, + } + + nextTransactionLocation := newTransactionLocationGenerator() + + attacker := []byte(fmt.Sprintf(` + import VictimContract from %s + + access(all) contract AttackerContract { + + access(all) resource AttackerResource { + access(all) var vault: @VictimContract.Vault + access(all) var firstCopy: @VictimContract.Vault + + init(vault: @VictimContract.Vault) { + self.vault <- vault + self.firstCopy <- self.vault.withdraw(amount: 0.0) + } + + access(all) fun shenanigans(): UFix64{ + let fullBalance = self.vault.balance + + var withdrawn <- self.vault.withdraw(amount: 0.0) + + // "Rug pull" the vault from under the in-flight + // withdrawal and deposit it into our "first copy" wallet + self.vault <-> withdrawn + self.firstCopy.deposit(from: <- withdrawn) + + // Return the pre-deposit balance for caller to withdraw + return fullBalance + } + + access(all) fun fetchfirstCopy(): @VictimContract.Vault { + var withdrawn <- self.firstCopy.withdraw(amount: 0.0) + self.firstCopy <-> withdrawn + return <- withdrawn + } + + destroy() { + destroy self.vault + destroy self.firstCopy + } + } + + access(all) fun doubleBalanceOfVault(_ victim: @VictimContract.Vault): @VictimContract.Vault { + var r <- create AttackerResource(vault: <- victim) + + // The magic happens during the execution of the following line of code + // var withdrawAmmount = r.shenanigans() + var 
secondCopy <- r.vault.withdraw(amount: r.shenanigans()) + + // Deposit the second copy of the funds as retained by the AttackerResource instance + secondCopy.deposit(from: <- r.fetchfirstCopy()) + + destroy r + return <- secondCopy + } + + access(all) fun attack() { + var v1 <- VictimContract.faucet() + var v2<- AttackerContract.doubleBalanceOfVault(<- v1) + destroy v2 + } + }`, + signerAccount.HexWithPrefix(), + )) + + victim := []byte(` + access(all) contract VictimContract { + access(all) resource Vault { + + // Balance of a user's Vault + // we use unsigned fixed point numbers for balances + // because they can represent decimals and do not allow negative values + access(all) var balance: UFix64 + + init(balance: UFix64) { + self.balance = balance + } + + access(all) fun withdraw(amount: UFix64): @Vault { + self.balance = self.balance - amount + return <-create Vault(balance: amount) + } + + access(all) fun deposit(from: @Vault) { + self.balance = self.balance + from.balance + destroy from + } + } + + access(all) fun faucet(): @VictimContract.Vault { + return <- create VictimContract.Vault(balance: 5.0) + } + } + `) + + // Deploy Victim + + deployVictim := DeploymentTransaction("VictimContract", victim) + err := runtime.ExecuteTransaction( + Script{ + Source: deployVictim, + }, + Context{ + Interface: runtimeInterface, + Location: nextTransactionLocation(), + }, + ) + require.NoError(t, err) + + // Deploy Attacker + + deployAttacker := DeploymentTransaction("AttackerContract", attacker) + + err = runtime.ExecuteTransaction( + Script{ + Source: deployAttacker, + }, + Context{ + Interface: runtimeInterface, + Location: nextTransactionLocation(), + }, + ) + require.NoError(t, err) + + // Attack + + attackTransaction := []byte(fmt.Sprintf(` + import VictimContract from %s + import AttackerContract from %s + + transaction { + execute { + AttackerContract.attack() + } + }`, + signerAccount.HexWithPrefix(), + signerAccount.HexWithPrefix(), + )) + + signers = nil + + err = runtime.ExecuteTransaction( + Script{ + Source: attackTransaction, + }, + Context{ + Interface: runtimeInterface, + Location: nextTransactionLocation(), + }, + ) + RequireError(t, err) + + var destroyedResourceErr interpreter.DestroyedResourceError + require.ErrorAs(t, err, &destroyedResourceErr) + +} + +func TestInvalidatedResourceUse2(t *testing.T) { + + t.Parallel() + + runtime := newTestInterpreterRuntime() + + signerAccount := common.MustBytesToAddress([]byte{0x1}) + + signers := []Address{signerAccount} + + accountCodes := map[Location][]byte{} + var events []cadence.Event + + runtimeInterface := &testRuntimeInterface{ + getCode: func(location Location) (bytes []byte, err error) { + return accountCodes[location], nil + }, + storage: newTestLedger(nil, nil), + getSigningAccounts: func() ([]Address, error) { + return signers, nil + }, + resolveLocation: singleIdentifierLocationResolver(t), + getAccountContractCode: func(location common.AddressLocation) (code []byte, err error) { + return accountCodes[location], nil + }, + updateAccountContractCode: func(location common.AddressLocation, code []byte) (err error) { + accountCodes[location] = code + return nil + }, + emitEvent: func(event cadence.Event) error { + events = append(events, event) + return nil + }, + } + + nextTransactionLocation := newTransactionLocationGenerator() + + attacker := []byte(fmt.Sprintf(` + import VictimContract from %s + + access(all) contract AttackerContract { + + access(all) resource InnerResource { + access(all) var name: String + access(all) 
var parent: &OuterResource? + access(all) var vault: @VictimContract.Vault? + + init(_ name: String) { + self.name = name + self.parent = nil + self.vault <- nil + } + + access(all) fun setParent(_ parent: &OuterResource) { + self.parent = parent + } + + access(all) fun setVault(_ vault: @VictimContract.Vault) { + self.vault <-! vault + } + + destroy() { + self.parent!.shenanigans() + var vault: @VictimContract.Vault <- self.vault! + self.parent!.collect(<- vault) + } + } + + access(all) resource OuterResource { + access(all) var inner1: @InnerResource + access(all) var inner2: @InnerResource + access(all) var collector: &VictimContract.Vault + + init(_ victim: @VictimContract.Vault, _ collector: &VictimContract.Vault) { + self.collector = collector + var i1 <- create InnerResource("inner1") + var i2 <- create InnerResource("inner2") + self.inner1 <- i1 + self.inner2 <- i2 + self.inner1.setVault(<- victim) + self.inner1.setParent(&self as &OuterResource) + self.inner2.setParent(&self as &OuterResource) + } + + access(all) fun shenanigans() { + self.inner1 <-> self.inner2 + } + + access(all) fun collect(_ from: @VictimContract.Vault) { + self.collector.deposit(from: <- from) + } + + destroy() { + destroy self.inner1 + // inner1 and inner2 got swapped during the above line + destroy self.inner2 + } + } + + access(all) fun doubleBalanceOfVault(_ vault: @VictimContract.Vault): @VictimContract.Vault { + var collector <- vault.withdraw(amount: 0.0) + var outer <- create OuterResource(<- vault, &collector as &VictimContract.Vault) + destroy outer + return <- collector + } + + access(all) fun attack() { + var v1 <- VictimContract.faucet() + var v2 <- AttackerContract.doubleBalanceOfVault(<- v1) + destroy v2 + } + }`, + signerAccount.HexWithPrefix(), + )) + + victim := []byte(` + access(all) contract VictimContract { + access(all) resource Vault { + + // Balance of a user's Vault + // we use unsigned fixed point numbers for balances + // because they can represent decimals and do not allow negative values + access(all) var balance: UFix64 + + init(balance: UFix64) { + self.balance = balance + } + + access(all) fun withdraw(amount: UFix64): @Vault { + self.balance = self.balance - amount + return <-create Vault(balance: amount) + } + + access(all) fun deposit(from: @Vault) { + self.balance = self.balance + from.balance + destroy from + } + } + + access(all) fun faucet(): @VictimContract.Vault { + return <- create VictimContract.Vault(balance: 5.0) + } + } + `) + + // Deploy Victim + + deployVictim := DeploymentTransaction("VictimContract", victim) + err := runtime.ExecuteTransaction( + Script{ + Source: deployVictim, + }, + Context{ + Interface: runtimeInterface, + Location: nextTransactionLocation(), + }, + ) + require.NoError(t, err) + + // Deploy Attacker + + deployAttacker := DeploymentTransaction("AttackerContract", attacker) + + err = runtime.ExecuteTransaction( + Script{ + Source: deployAttacker, + }, + Context{ + Interface: runtimeInterface, + Location: nextTransactionLocation(), + }, + ) + require.NoError(t, err) + + // Attack + + attackTransaction := []byte(fmt.Sprintf(` + import VictimContract from %s + import AttackerContract from %s + + transaction { + execute { + AttackerContract.attack() + } + }`, + signerAccount.HexWithPrefix(), + signerAccount.HexWithPrefix(), + )) + + signers = nil + + err = runtime.ExecuteTransaction( + Script{ + Source: attackTransaction, + }, + Context{ + Interface: runtimeInterface, + Location: nextTransactionLocation(), + }, + ) + + RequireError(t, err) + + 
require.ErrorAs(t, err, &interpreter.InvalidatedResourceReferenceError{}) +} diff --git a/runtime/sema/authaccount.cdc b/runtime/sema/authaccount.cdc index b9ba65ae29..e987a4094b 100644 --- a/runtime/sema/authaccount.cdc +++ b/runtime/sema/authaccount.cdc @@ -97,6 +97,14 @@ access(all) struct AuthAccount { /// The path must be a storage path, i.e., only the domain `storage` is allowed access(all) fun borrow(from: StoragePath): T? + /// Returns true if the object in account storage under the given path satisfies the given type, + /// i.e. could be borrowed using the given type. + /// + /// The given type must not necessarily be exactly the same as the type of the borrowed object. + /// + /// The path must be a storage path, i.e., only the domain `storage` is allowed. + access(all) fun check(from: StoragePath): Bool + /// **DEPRECATED**: Instead, use `capabilities.storage.issue`, and `capabilities.publish` if the path is public. /// /// Creates a capability at the given public or private path, diff --git a/runtime/sema/authaccount.gen.go b/runtime/sema/authaccount.gen.go index e326878f45..2106e5b452 100644 --- a/runtime/sema/authaccount.gen.go +++ b/runtime/sema/authaccount.gen.go @@ -319,6 +319,37 @@ The given type must not necessarily be exactly the same as the type of the borro The path must be a storage path, i.e., only the domain ` + "`storage`" + ` is allowed ` +const AuthAccountTypeCheckFunctionName = "check" + +var AuthAccountTypeCheckFunctionTypeParameterT = &TypeParameter{ + Name: "T", + TypeBound: AnyType, +} + +var AuthAccountTypeCheckFunctionType = &FunctionType{ + TypeParameters: []*TypeParameter{ + AuthAccountTypeCheckFunctionTypeParameterT, + }, + Parameters: []Parameter{ + { + Identifier: "from", + TypeAnnotation: NewTypeAnnotation(StoragePathType), + }, + }, + ReturnTypeAnnotation: NewTypeAnnotation( + BoolType, + ), +} + +const AuthAccountTypeCheckFunctionDocString = ` +Returns true if the object in account storage under the given path satisfies the given type, +i.e. could be borrowed using the given type. + +The given type must not necessarily be exactly the same as the type of the borrowed object. + +The path must be a storage path, i.e., only the domain ` + "`storage`" + ` is allowed. 
+` + const AuthAccountTypeLinkFunctionName = "link" var AuthAccountTypeLinkFunctionTypeParameterT = &TypeParameter{ @@ -1918,6 +1949,13 @@ func init() { AuthAccountTypeBorrowFunctionType, AuthAccountTypeBorrowFunctionDocString, ), + NewUnmeteredFunctionMember( + AuthAccountType, + ast.AccessAll, + AuthAccountTypeCheckFunctionName, + AuthAccountTypeCheckFunctionType, + AuthAccountTypeCheckFunctionDocString, + ), NewUnmeteredFunctionMember( AuthAccountType, ast.AccessAll, diff --git a/runtime/sema/entrypoint.go b/runtime/sema/entrypoint.go index 2c937078f1..f8d081e95f 100644 --- a/runtime/sema/entrypoint.go +++ b/runtime/sema/entrypoint.go @@ -70,5 +70,12 @@ func (checker *Checker) EntryPointParameters() []Parameter { return functionType.Parameters } + compositeDeclarations := checker.Program.CompositeDeclarations() + if len(compositeDeclarations) > 0 { + compositeDeclaration := compositeDeclarations[0] + compositeType := checker.Elaboration.CompositeDeclarationType(compositeDeclaration) + return compositeType.ConstructorParameters + } + return nil } diff --git a/runtime/sema/type.go b/runtime/sema/type.go index 9d60895b8a..5f4cd6bf16 100644 --- a/runtime/sema/type.go +++ b/runtime/sema/type.go @@ -1624,6 +1624,14 @@ var ( Word128TypeAnnotation = NewTypeAnnotation(Word128Type) + // Word256Type represents the 256-bit unsigned integer type `Word256` + // which does NOT check for overflow and underflow + Word256Type = NewNumericType(Word256TypeName). + WithTag(Word256TypeTag). + WithIntRange(Word256TypeMinIntBig, Word256TypeMaxIntBig) + + Word256TypeAnnotation = NewTypeAnnotation(Word256Type) + // FixedPointType represents the super-type of all fixed-point types FixedPointType = NewNumericType(FixedPointTypeName). WithTag(FixedPointTypeTag). @@ -1764,6 +1772,19 @@ var ( return word128TypeMax }() + // 1 << 256 + Word256TypeMaxIntPlusOneBig = func() *big.Int { + word256TypeMaxPlusOne := big.NewInt(1) + word256TypeMaxPlusOne.Lsh(word256TypeMaxPlusOne, 256) + return word256TypeMaxPlusOne + }() + Word256TypeMinIntBig = new(big.Int) + Word256TypeMaxIntBig = func() *big.Int { + word256TypeMax := new(big.Int) + word256TypeMax.Sub(Word256TypeMaxIntPlusOneBig, big.NewInt(1)) + return word256TypeMax + }() + Fix64FactorBig = new(big.Int).SetUint64(uint64(Fix64Factor)) Fix64TypeMinIntBig = fixedpoint.Fix64TypeMinIntBig @@ -3463,6 +3484,7 @@ var AllUnsignedIntegerTypes = []Type{ Word32Type, Word64Type, Word128Type, + Word256Type, } var AllIntegerTypes = append( @@ -5972,7 +5994,7 @@ func checkSubTypeWithoutEquality(subType Type, superType Type) bool { case IntegerType, SignedIntegerType, UIntType, UInt8Type, UInt16Type, UInt32Type, UInt64Type, UInt128Type, UInt256Type, - Word8Type, Word16Type, Word32Type, Word64Type, Word128Type: + Word8Type, Word16Type, Word32Type, Word64Type, Word128Type, Word256Type: return true diff --git a/runtime/sema/type_names.go b/runtime/sema/type_names.go index d156f0d43b..7ac1ac7ca2 100644 --- a/runtime/sema/type_names.go +++ b/runtime/sema/type_names.go @@ -47,6 +47,7 @@ const ( Word32TypeName = "Word32" Word64TypeName = "Word64" Word128TypeName = "Word128" + Word256TypeName = "Word256" Fix64TypeName = "Fix64" UFix64TypeName = "UFix64" diff --git a/runtime/sema/type_tags.go b/runtime/sema/type_tags.go index c7641a7a32..a0629d5210 100644 --- a/runtime/sema/type_tags.go +++ b/runtime/sema/type_tags.go @@ -165,6 +165,7 @@ const ( word32TypeMask word64TypeMask word128TypeMask + word256TypeMask _ // future: Fix8 _ // future: Fix16 @@ -206,7 +207,6 @@ const ( compositeTypeMask 
referenceTypeMask genericTypeMask - functionTypeMask // ~~ NOTE: End of limit for lower mask type. Any new type should go to upper mask. ~~ ) @@ -222,6 +222,7 @@ const ( accountCapabilityControllerTypeMask interfaceTypeMask + functionTypeMask invalidTypeMask ) @@ -251,7 +252,8 @@ var ( Or(Word16TypeTag). Or(Word32TypeTag). Or(Word64TypeTag). - Or(Word128TypeTag) + Or(Word128TypeTag). + Or(Word256TypeTag) IntegerTypeTag = newTypeTagFromLowerMask(integerTypeMask). Or(SignedIntegerTypeTag). @@ -297,6 +299,7 @@ var ( Word32TypeTag = newTypeTagFromLowerMask(word32TypeMask) Word64TypeTag = newTypeTagFromLowerMask(word64TypeMask) Word128TypeTag = newTypeTagFromLowerMask(word128TypeMask) + Word256TypeTag = newTypeTagFromLowerMask(word256TypeMask) Fix64TypeTag = newTypeTagFromLowerMask(fix64TypeMask) UFix64TypeTag = newTypeTagFromLowerMask(ufix64TypeMask) @@ -330,7 +333,7 @@ var ( CompositeTypeTag = newTypeTagFromLowerMask(compositeTypeMask) ReferenceTypeTag = newTypeTagFromLowerMask(referenceTypeMask) GenericTypeTag = newTypeTagFromLowerMask(genericTypeMask) - FunctionTypeTag = newTypeTagFromLowerMask(functionTypeMask) + FunctionTypeTag = newTypeTagFromUpperMask(functionTypeMask) InterfaceTypeTag = newTypeTagFromUpperMask(interfaceTypeMask) RestrictedTypeTag = newTypeTagFromUpperMask(restrictedTypeMask) @@ -567,6 +570,8 @@ func findSuperTypeFromLowerMask(joinedTypeTag TypeTag, types []Type) Type { return Word64Type case word128TypeMask: return Word128Type + case word256TypeMask: + return Word256Type case fix64TypeMask: return Fix64Type @@ -645,8 +650,7 @@ func findSuperTypeFromLowerMask(joinedTypeTag TypeTag, types []Type) Type { case dictionaryTypeMask: return commonSuperTypeOfDictionaries(types) case referenceTypeMask, - genericTypeMask, - functionTypeMask: + genericTypeMask: return getSuperTypeOfDerivedTypes(types) default: @@ -665,7 +669,8 @@ func findSuperTypeFromUpperMask(joinedTypeTag TypeTag, types []Type) Type { case capabilityTypeMask, restrictedTypeMask, transactionTypeMask, - interfaceTypeMask: + interfaceTypeMask, + functionTypeMask: return getSuperTypeOfDerivedTypes(types) case anyResourceAttachmentMask: diff --git a/runtime/stdlib/account.go b/runtime/stdlib/account.go index a6bca38b3b..3d9c398afd 100644 --- a/runtime/stdlib/account.go +++ b/runtime/stdlib/account.go @@ -510,7 +510,7 @@ func newAccountKeysAddFunction( handler.EmitEvent( inter, - AccountKeyAddedEventType, + AccountKeyAddedFromPublicKeyEventType, []interpreter.Value{ addressValue, publicKeyValue, @@ -782,7 +782,7 @@ func newAccountKeysRevokeFunction( handler.EmitEvent( inter, - AccountKeyRemovedEventType, + AccountKeyRemovedFromPublicKeyIndexEventType, []interpreter.Value{ addressValue, indexValue, @@ -1359,7 +1359,7 @@ func newAuthAccountContractsChangeFunction( constructorArguments := invocation.Arguments[requiredArgumentCount:] constructorArgumentTypes := invocation.ArgumentTypes[requiredArgumentCount:] - code, err := interpreter.ByteArrayValueToByteSlice(gauge, newCodeValue, locationRange) + code, err := interpreter.ByteArrayValueToByteSlice(invocation.Interpreter, newCodeValue, locationRange) if err != nil { panic(errors.NewDefaultUserError("add requires the second argument to be an array")) } @@ -2082,7 +2082,7 @@ func NewHashAlgorithmFromValue( func CodeToHashValue(inter *interpreter.Interpreter, code []byte) *interpreter.ArrayValue { codeHash := sha3.Sum256(code) - return interpreter.ByteSliceToByteArrayValue(inter, codeHash[:]) + return interpreter.ByteSliceToConstantSizedByteArrayValue(inter, codeHash[:]) } 
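+// Note: sha3.Sum256 always yields a 32-byte digest, so CodeToHashValue can
+// return a constant-sized byte array value ([UInt8; 32]) instead of a
+// variable-sized one.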
func newAuthAccountStorageCapabilitiesValue( diff --git a/runtime/stdlib/contracts/test.cdc b/runtime/stdlib/contracts/test.cdc index fcc98cf095..25392726b3 100644 --- a/runtime/stdlib/contracts/test.cdc +++ b/runtime/stdlib/contracts/test.cdc @@ -95,6 +95,38 @@ access(all) contract Test { access(all) fun useConfiguration(_ configuration: Configuration) { self.backend.useConfiguration(configuration) } + + /// Returns all the logs from the blockchain, up to the calling point. + /// + access(all) fun logs(): [String] { + return self.backend.logs() + } + + /// Returns the service account of the blockchain. Can be used to sign + /// transactions with this account. + /// + access(all) fun serviceAccount(): Account { + return self.backend.serviceAccount() + } + + /// Returns all events emitted from the blockchain. + /// + access(all) fun events(): [AnyStruct] { + return self.backend.events(nil) + } + + /// Returns all events emitted from the blockchain, + /// filtered by type. + /// + access(all) fun eventsOfType(_ type: Type): [AnyStruct] { + return self.backend.events(type) + } + + /// Resets the state of the blockchain. + /// + access(all) fun reset() { + self.backend.reset() + } } access(all) struct Matcher { @@ -258,6 +290,24 @@ access(all) contract Test { /// Overrides any existing configuration. /// access(all) fun useConfiguration(_ configuration: Configuration) + + /// Returns all the logs from the blockchain, up to the calling point. + /// + access(all) fun logs(): [String] + + /// Returns the service account of the blockchain. Can be used to sign + /// transactions with this account. + /// + access(all) fun serviceAccount(): Account + + /// Returns all events emitted from the blockchain, optionally filtered + /// by type. + /// + access(all) fun events(_ type: Type?): [AnyStruct] + + /// Resets the state of the blockchain. + /// + access(all) fun reset() } /// Returns a new matcher that negates the test of the given matcher. 
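For context, a minimal sketch of how the Blockchain helpers added above might be used from a Cadence test script (illustrative only; Foo and the test function name are placeholders, not part of the change):

    import Test

    access(all) struct Foo {}

    access(all) fun testBlockchainHelpers() {
        var blockchain = Test.newEmulatorBlockchain()

        // logs recorded up to this point
        let logs = blockchain.logs()

        // the service account can be used to sign transactions
        let serviceAccount = blockchain.serviceAccount()

        // all events, or only events of a given type
        let allEvents = blockchain.events()
        let fooEvents = blockchain.eventsOfType(Type<Foo>())

        // roll the emulator back to a clean state
        blockchain.reset()
    }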
diff --git a/runtime/stdlib/flow.go b/runtime/stdlib/flow.go index e299268ab7..d4d5b9ede9 100644 --- a/runtime/stdlib/flow.go +++ b/runtime/stdlib/flow.go @@ -131,6 +131,8 @@ func decodeFlowLocationTypeID(typeID string) (FlowLocation, string, error) { // built-in event types +var FlowEventTypes = map[common.TypeID]*sema.CompositeType{} + func newFlowEventType(identifier string, parameters ...sema.Parameter) *sema.CompositeType { eventType := &sema.CompositeType{ @@ -164,6 +166,8 @@ func newFlowEventType(identifier string, parameters ...sema.Parameter) *sema.Com ) } + FlowEventTypes[eventType.ID()] = eventType + return eventType } @@ -186,9 +190,18 @@ var AccountEventCodeHashParameter = sema.Parameter{ TypeAnnotation: HashTypeAnnotation, } -var AccountEventPublicKeyParameter = sema.Parameter{ - Identifier: "publicKey", - TypeAnnotation: sema.ByteArrayTypeAnnotation, +var AccountEventPublicKeyParameterAsCompositeType = sema.Parameter{ + Identifier: "publicKey", + TypeAnnotation: sema.NewTypeAnnotation( + sema.PublicKeyType, + ), +} + +var AccountEventPublicKeyIndexParameter = sema.Parameter{ + Identifier: "publicKey", + TypeAnnotation: sema.NewTypeAnnotation( + sema.IntType, + ), } var AccountEventContractParameter = sema.Parameter{ @@ -201,16 +214,16 @@ var AccountCreatedEventType = newFlowEventType( AccountEventAddressParameter, ) -var AccountKeyAddedEventType = newFlowEventType( +var AccountKeyAddedFromPublicKeyEventType = newFlowEventType( "AccountKeyAdded", AccountEventAddressParameter, - AccountEventPublicKeyParameter, + AccountEventPublicKeyParameterAsCompositeType, ) -var AccountKeyRemovedEventType = newFlowEventType( +var AccountKeyRemovedFromPublicKeyIndexEventType = newFlowEventType( "AccountKeyRemoved", AccountEventAddressParameter, - AccountEventPublicKeyParameter, + AccountEventPublicKeyIndexParameter, ) var AccountContractAddedEventType = newFlowEventType( diff --git a/runtime/stdlib/flow_test.go b/runtime/stdlib/flow_test.go index da19c17a20..e57c8737dc 100644 --- a/runtime/stdlib/flow_test.go +++ b/runtime/stdlib/flow_test.go @@ -37,8 +37,8 @@ func TestFlowEventTypeIDs(t *testing.T) { for _, ty := range []sema.Type{ AccountCreatedEventType, - AccountKeyAddedEventType, - AccountKeyRemovedEventType, + AccountKeyAddedFromPublicKeyEventType, + AccountKeyRemovedFromPublicKeyIndexEventType, AccountContractAddedEventType, AccountContractUpdatedEventType, AccountContractRemovedEventType, diff --git a/runtime/stdlib/rlp/rlp.go b/runtime/stdlib/rlp/rlp.go index 238bf6c8ed..bbdfc16880 100644 --- a/runtime/stdlib/rlp/rlp.go +++ b/runtime/stdlib/rlp/rlp.go @@ -97,7 +97,7 @@ func ReadSize(inp []byte, startIndex int) (isString bool, dataStartIndex, dataSi var bytesToReadForLen uint // long string mode (55+ long strings) // firstByte minus the end range of short string, returns the number of bytes to read - // for calculating the the len of data. bytesToReadForlen is at least 1 and at most 8. + // for calculating the len of data. bytesToReadForlen is at least 1 and at most 8. if firstByte >= LongStringRangeStart && firstByte <= LongStringRangeEnd { bytesToReadForLen = uint(firstByte - ShortStringRangeEnd) isString = true @@ -105,7 +105,7 @@ func ReadSize(inp []byte, startIndex int) (isString bool, dataStartIndex, dataSi // long list mode // firstByte minus the end range of short list, returns the number of bytes to read - // for calculating the the len of data. bytesToReadForlen is at least 1 and at most 8. + // for calculating the len of data. bytesToReadForlen is at least 1 and at most 8. 
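+	// (With the usual RLP byte ranges this means a first byte of 0xf8 is followed
+	// by a 1-byte length, and 0xff by an 8-byte length.)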
if firstByte >= LongListRangeStart { bytesToReadForLen = uint(firstByte - ShortListRangeEnd) isString = false diff --git a/runtime/stdlib/test-framework.go b/runtime/stdlib/test-framework.go index 6daa4727cb..dc1b770d15 100644 --- a/runtime/stdlib/test-framework.go +++ b/runtime/stdlib/test-framework.go @@ -28,7 +28,10 @@ import ( // Cadence standard library talks to test providers via this interface. // This is used as a way to inject test provider dependencies dynamically. type TestFramework interface { - RunScript(inter *interpreter.Interpreter, code string, arguments []interpreter.Value) *ScriptResult + RunScript( + inter *interpreter.Interpreter, + code string, arguments []interpreter.Value, + ) *ScriptResult CreateAccount() (*Account, error) @@ -57,6 +60,17 @@ type TestFramework interface { UseConfiguration(configuration *Configuration) StandardLibraryHandler() StandardLibraryHandler + + Logs() []string + + ServiceAccount() (*Account, error) + + Events( + inter *interpreter.Interpreter, + eventType interpreter.StaticType, + ) interpreter.Value + + Reset() } type ScriptResult struct { diff --git a/runtime/stdlib/test.go b/runtime/stdlib/test.go index 56819826b3..cbfd46c98f 100644 --- a/runtime/stdlib/test.go +++ b/runtime/stdlib/test.go @@ -116,7 +116,7 @@ func getNestedTypeConstructorValue(parent interpreter.Value, typeName string) *i return constructor } -func arrayValueToSlice(value interpreter.Value) ([]interpreter.Value, error) { +func arrayValueToSlice(inter *interpreter.Interpreter, value interpreter.Value) ([]interpreter.Value, error) { array, ok := value.(*interpreter.ArrayValue) if !ok { return nil, errors.NewDefaultUserError("value is not an array") @@ -124,7 +124,7 @@ func arrayValueToSlice(value interpreter.Value) ([]interpreter.Value, error) { result := make([]interpreter.Value, 0, array.Count()) - array.Iterate(nil, func(element interpreter.Value) (resume bool) { + array.Iterate(inter, func(element interpreter.Value) (resume bool) { result = append(result, element) return true }) @@ -185,7 +185,7 @@ func getConstructor(inter *interpreter.Interpreter, typeName string) *interprete return resultStatusConstructor } -func addressArrayValueToSlice(accountsValue interpreter.Value) []common.Address { +func addressArrayValueToSlice(inter *interpreter.Interpreter, accountsValue interpreter.Value) []common.Address { accountsArray, ok := accountsValue.(*interpreter.ArrayValue) if !ok { panic(errors.NewUnreachableError()) @@ -193,7 +193,7 @@ func addressArrayValueToSlice(accountsValue interpreter.Value) []common.Address addresses := make([]common.Address, 0) - accountsArray.Iterate(nil, func(element interpreter.Value) (resume bool) { + accountsArray.Iterate(inter, func(element interpreter.Value) (resume bool) { address, ok := element.(interpreter.AddressValue) if !ok { panic(errors.NewUnreachableError()) @@ -220,7 +220,7 @@ func accountsArrayValueToSlice( accounts := make([]*Account, 0) - accountsArray.Iterate(nil, func(element interpreter.Value) (resume bool) { + accountsArray.Iterate(inter, func(element interpreter.Value) (resume bool) { accountValue, ok := element.(interpreter.MemberAccessibleValue) if !ok { panic(errors.NewUnreachableError()) diff --git a/runtime/stdlib/test_contract.go b/runtime/stdlib/test_contract.go index 31c18615cb..6009b18a98 100644 --- a/runtime/stdlib/test_contract.go +++ b/runtime/stdlib/test_contract.go @@ -101,6 +101,75 @@ var testTypeAssertFunction = interpreter.NewUnmeteredHostFunctionValue( }, ) +// 'Test.assertEqual' function + +const 
testTypeAssertEqualFunctionDocString = ` +Fails the test-case if the given values are not equal, and +reports a message which explains how the two values differ. +` + +const testTypeAssertEqualFunctionName = "assertEqual" + +var testTypeAssertEqualFunctionType = &sema.FunctionType{ + Parameters: []sema.Parameter{ + { + Label: sema.ArgumentLabelNotRequired, + Identifier: "expected", + TypeAnnotation: sema.NewTypeAnnotation( + sema.AnyStructType, + ), + }, + { + Label: sema.ArgumentLabelNotRequired, + Identifier: "actual", + TypeAnnotation: sema.NewTypeAnnotation( + sema.AnyStructType, + ), + }, + }, + RequiredArgumentCount: sema.RequiredArgumentCount(2), + ReturnTypeAnnotation: sema.NewTypeAnnotation( + sema.VoidType, + ), +} + +var testTypeAssertEqualFunction = interpreter.NewUnmeteredHostFunctionValue( + testTypeAssertEqualFunctionType, + func(invocation interpreter.Invocation) interpreter.Value { + expected, ok := invocation.Arguments[0].(interpreter.EquatableValue) + if !ok { + panic(errors.NewUnreachableError()) + } + + inter := invocation.Interpreter + + actual, ok := invocation.Arguments[1].(interpreter.EquatableValue) + if !ok { + panic(errors.NewUnreachableError()) + } + + equal := expected.Equal( + inter, + invocation.LocationRange, + actual, + ) + + if !equal { + message := fmt.Sprintf( + "not equal: expected: %s, actual: %s", + expected, + actual, + ) + panic(AssertionError{ + Message: message, + LocationRange: invocation.LocationRange, + }) + } + + return interpreter.Void + }, +) + // 'Test.fail' function const testTypeFailFunctionDocString = ` @@ -973,6 +1042,17 @@ func newTestContractType() *TestContractType { ), ) + // Test.assertEqual() + compositeType.Members.Set( + testTypeAssertEqualFunctionName, + sema.NewUnmeteredPublicFunctionMember( + compositeType, + testTypeAssertEqualFunctionName, + testTypeAssertEqualFunctionType, + testTypeAssertEqualFunctionDocString, + ), + ) + // Test.fail() compositeType.Members.Set( testTypeFailFunctionName, @@ -1229,6 +1309,7 @@ func (t *TestContractType) NewTestContract( // Inject natively implemented function values compositeValue.Functions[testTypeAssertFunctionName] = testTypeAssertFunction + compositeValue.Functions[testTypeAssertEqualFunctionName] = testTypeAssertEqualFunction compositeValue.Functions[testTypeFailFunctionName] = testTypeFailFunction compositeValue.Functions[testTypeExpectFunctionName] = t.expectFunction compositeValue.Functions[testTypeNewEmulatorBlockchainFunctionName] = diff --git a/runtime/stdlib/test_emulatorbackend.go b/runtime/stdlib/test_emulatorbackend.go index 7760c79ebe..86e9db2038 100644 --- a/runtime/stdlib/test_emulatorbackend.go +++ b/runtime/stdlib/test_emulatorbackend.go @@ -42,6 +42,10 @@ type testEmulatorBackendType struct { commitBlockFunctionType *sema.FunctionType deployContractFunctionType *sema.FunctionType useConfigFunctionType *sema.FunctionType + logsFunctionType *sema.FunctionType + serviceAccountFunctionType *sema.FunctionType + eventsFunctionType *sema.FunctionType + resetFunctionType *sema.FunctionType } func newTestEmulatorBackendType(blockchainBackendInterfaceType *sema.InterfaceType) *testEmulatorBackendType { @@ -80,6 +84,26 @@ func newTestEmulatorBackendType(blockchainBackendInterfaceType *sema.InterfaceTy testEmulatorBackendTypeUseConfigFunctionName, ) + logsFunctionType := interfaceFunctionType( + blockchainBackendInterfaceType, + testEmulatorBackendTypeLogsFunctionName, + ) + + serviceAccountFunctionType := interfaceFunctionType( + blockchainBackendInterfaceType, + 
testEmulatorBackendTypeServiceAccountFunctionName, + ) + + eventsFunctionType := interfaceFunctionType( + blockchainBackendInterfaceType, + testEmulatorBackendTypeEventsFunctionName, + ) + + resetFunctionType := interfaceFunctionType( + blockchainBackendInterfaceType, + testEmulatorBackendTypeResetFunctionName, + ) + compositeType := &sema.CompositeType{ Identifier: testEmulatorBackendTypeName, Kind: common.CompositeKindStructure, @@ -132,6 +156,30 @@ func newTestEmulatorBackendType(blockchainBackendInterfaceType *sema.InterfaceTy useConfigFunctionType, testEmulatorBackendTypeUseConfigFunctionDocString, ), + sema.NewUnmeteredPublicFunctionMember( + compositeType, + testEmulatorBackendTypeLogsFunctionName, + logsFunctionType, + testEmulatorBackendTypeLogsFunctionDocString, + ), + sema.NewUnmeteredPublicFunctionMember( + compositeType, + testEmulatorBackendTypeServiceAccountFunctionName, + serviceAccountFunctionType, + testEmulatorBackendTypeServiceAccountFunctionDocString, + ), + sema.NewUnmeteredPublicFunctionMember( + compositeType, + testEmulatorBackendTypeEventsFunctionName, + eventsFunctionType, + testEmulatorBackendTypeEventsFunctionDocString, + ), + sema.NewUnmeteredPublicFunctionMember( + compositeType, + testEmulatorBackendTypeResetFunctionName, + resetFunctionType, + testEmulatorBackendTypeResetFunctionDocString, + ), } compositeType.Members = sema.MembersAsMap(members) @@ -146,6 +194,10 @@ func newTestEmulatorBackendType(blockchainBackendInterfaceType *sema.InterfaceTy commitBlockFunctionType: commitBlockFunctionType, deployContractFunctionType: deployContractFunctionType, useConfigFunctionType: useConfigFunctionType, + logsFunctionType: logsFunctionType, + serviceAccountFunctionType: serviceAccountFunctionType, + eventsFunctionType: eventsFunctionType, + resetFunctionType: resetFunctionType, } } @@ -162,18 +214,18 @@ func (t *testEmulatorBackendType) newExecuteScriptFunction(testFramework TestFra return interpreter.NewUnmeteredHostFunctionValue( t.executeScriptFunctionType, func(invocation interpreter.Invocation) interpreter.Value { + inter := invocation.Interpreter + script, ok := invocation.Arguments[0].(*interpreter.StringValue) if !ok { panic(errors.NewUnreachableError()) } - args, err := arrayValueToSlice(invocation.Arguments[1]) + args, err := arrayValueToSlice(inter, invocation.Arguments[1]) if err != nil { panic(errors.NewUnexpectedErrorFromCause(err)) } - inter := invocation.Interpreter - result := testFramework.RunScript(inter, script.Str, args) return newScriptResult(inter, result.Value, result) @@ -294,7 +346,7 @@ func (t *testEmulatorBackendType) newAddTransactionFunction(testFramework TestFr testTransactionTypeAuthorizersFieldName, ) - authorizers := addressArrayValueToSlice(authorizerValue) + authorizers := addressArrayValueToSlice(inter, authorizerValue) // Get signers signersValue := transactionValue.GetMember( @@ -315,13 +367,13 @@ func (t *testEmulatorBackendType) newAddTransactionFunction(testFramework TestFr locationRange, testTransactionTypeArgumentsFieldName, ) - args, err := arrayValueToSlice(argsValue) + args, err := arrayValueToSlice(inter, argsValue) if err != nil { panic(errors.NewUnexpectedErrorFromCause(err)) } err = testFramework.AddTransaction( - invocation.Interpreter, + inter, code.Str, authorizers, signerAccounts, @@ -419,7 +471,7 @@ func (t *testEmulatorBackendType) newDeployContractFunction(testFramework TestFr account := accountFromValue(inter, accountValue, invocation.LocationRange) // Contract init arguments - args, err := 
arrayValueToSlice(invocation.Arguments[3]) + args, err := arrayValueToSlice(inter, invocation.Arguments[3]) if err != nil { panic(err) } @@ -469,7 +521,7 @@ func (t *testEmulatorBackendType) newUseConfigFunction(testFramework TestFramewo mapping := make(map[string]common.Address, addresses.Count()) - addresses.Iterate(nil, func(locationValue, addressValue interpreter.Value) bool { + addresses.Iterate(inter, func(locationValue, addressValue interpreter.Value) bool { location, ok := locationValue.(*interpreter.StringValue) if !ok { panic(errors.NewUnreachableError()) @@ -494,6 +546,141 @@ func (t *testEmulatorBackendType) newUseConfigFunction(testFramework TestFramewo ) } +// 'EmulatorBackend.logs' function + +const testEmulatorBackendTypeLogsFunctionName = "logs" + +const testEmulatorBackendTypeLogsFunctionDocString = ` +Returns all the logs from the blockchain, up to the calling point. +` + +func (t *testEmulatorBackendType) newLogsFunction( + testFramework TestFramework, +) *interpreter.HostFunctionValue { + return interpreter.NewUnmeteredHostFunctionValue( + t.logsFunctionType, + func(invocation interpreter.Invocation) interpreter.Value { + logs := testFramework.Logs() + inter := invocation.Interpreter + + arrayType := interpreter.NewVariableSizedStaticType( + inter, + interpreter.NewPrimitiveStaticType( + inter, + interpreter.PrimitiveStaticTypeString, + ), + ) + + values := make([]interpreter.Value, len(logs)) + for i, log := range logs { + memoryUsage := common.NewStringMemoryUsage(len(log)) + values[i] = interpreter.NewStringValue( + inter, + memoryUsage, + func() string { + return log + }, + ) + } + + return interpreter.NewArrayValue( + inter, + invocation.LocationRange, + arrayType, + common.ZeroAddress, + values..., + ) + }, + ) +} + +// 'EmulatorBackend.serviceAccount' function + +const testEmulatorBackendTypeServiceAccountFunctionName = "serviceAccount" + +const testEmulatorBackendTypeServiceAccountFunctionDocString = ` +Returns the service account of the blockchain. Can be used to sign +transactions with this account. +` + +func (t *testEmulatorBackendType) newServiceAccountFunction( + testFramework TestFramework, +) *interpreter.HostFunctionValue { + return interpreter.NewUnmeteredHostFunctionValue( + t.serviceAccountFunctionType, + func(invocation interpreter.Invocation) interpreter.Value { + serviceAccount, err := testFramework.ServiceAccount() + if err != nil { + panic(err) + } + + return newTestAccountValue( + testFramework, + invocation.Interpreter, + invocation.LocationRange, + serviceAccount, + ) + }, + ) +} + +// 'EmulatorBackend.events' function + +const testEmulatorBackendTypeEventsFunctionName = "events" + +const testEmulatorBackendTypeEventsFunctionDocString = ` +Returns all events emitted from the blockchain, +optionally filtered by event type. 
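+If the given type is nil, all events are returned.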
+` + +func (t *testEmulatorBackendType) newEventsFunction( + testFramework TestFramework, +) *interpreter.HostFunctionValue { + return interpreter.NewUnmeteredHostFunctionValue( + t.eventsFunctionType, + func(invocation interpreter.Invocation) interpreter.Value { + var eventType interpreter.StaticType = nil + + switch value := invocation.Arguments[0].(type) { + case interpreter.NilValue: + // Do nothing + case *interpreter.SomeValue: + innerValue := value.InnerValue(invocation.Interpreter, invocation.LocationRange) + typeValue, ok := innerValue.(interpreter.TypeValue) + if !ok { + panic(errors.NewUnreachableError()) + } + + eventType = typeValue.Type + default: + panic(errors.NewUnreachableError()) + } + + return testFramework.Events(invocation.Interpreter, eventType) + }, + ) +} + +// 'EmulatorBackend.reset' function + +const testEmulatorBackendTypeResetFunctionName = "reset" + +const testEmulatorBackendTypeResetFunctionDocString = ` +Resets the state of the blockchain. +` + +func (t *testEmulatorBackendType) newResetFunction( + testFramework TestFramework, +) *interpreter.HostFunctionValue { + return interpreter.NewUnmeteredHostFunctionValue( + t.eventsFunctionType, + func(invocation interpreter.Invocation) interpreter.Value { + testFramework.Reset() + return interpreter.Void + }, + ) +} + func (t *testEmulatorBackendType) newEmulatorBackend( inter *interpreter.Interpreter, testFramework TestFramework, @@ -527,6 +714,22 @@ func (t *testEmulatorBackendType) newEmulatorBackend( Name: testEmulatorBackendTypeUseConfigFunctionName, Value: t.newUseConfigFunction(testFramework), }, + { + Name: testEmulatorBackendTypeLogsFunctionName, + Value: t.newLogsFunction(testFramework), + }, + { + Name: testEmulatorBackendTypeServiceAccountFunctionName, + Value: t.newServiceAccountFunction(testFramework), + }, + { + Name: testEmulatorBackendTypeEventsFunctionName, + Value: t.newEventsFunction(testFramework), + }, + { + Name: testEmulatorBackendTypeResetFunctionName, + Value: t.newResetFunction(testFramework), + }, } // TODO: Use SimpleCompositeValue diff --git a/runtime/stdlib/test_test.go b/runtime/stdlib/test_test.go index fe0db5e454..730cb9a117 100644 --- a/runtime/stdlib/test_test.go +++ b/runtime/stdlib/test_test.go @@ -37,6 +37,14 @@ import ( ) func newTestContractInterpreter(t *testing.T, code string) (*interpreter.Interpreter, error) { + return newTestContractInterpreterWithTestFramework(t, code, nil) +} + +func newTestContractInterpreterWithTestFramework( + t *testing.T, + code string, + testFramework TestFramework, +) (*interpreter.Interpreter, error) { program, err := parser.ParseProgram( nil, []byte(code), @@ -109,7 +117,7 @@ func newTestContractInterpreter(t *testing.T, code string) (*interpreter.Interpr return nil }, - ContractValueHandler: NewTestInterpreterContractValueHandler(nil), + ContractValueHandler: NewTestInterpreterContractValueHandler(testFramework), UUIDHandler: func() (uint64, error) { uuid++ return uuid, nil @@ -661,6 +669,291 @@ func TestTestEqualMatcher(t *testing.T) { }) } +func TestAssertEqual(t *testing.T) { + + t.Parallel() + + t.Run("success", func(t *testing.T) { + t.Parallel() + + script := ` + import Test + + access(all) fun test() { + Test.assertEqual("this string", "this string") + } + ` + + inter, err := newTestContractInterpreter(t, script) + require.NoError(t, err) + + _, err = inter.Invoke("test") + require.NoError(t, err) + }) + + t.Run("fail", func(t *testing.T) { + t.Parallel() + + script := ` + import Test + + access(all) fun test() { + Test.assertEqual(15, 
21) + } + ` + + inter, err := newTestContractInterpreter(t, script) + require.NoError(t, err) + + _, err = inter.Invoke("test") + require.Error(t, err) + assert.ErrorAs(t, err, &AssertionError{}) + assert.ErrorContains( + t, + err, + "assertion failed: not equal: expected: 15, actual: 21", + ) + }) + + t.Run("different types", func(t *testing.T) { + t.Parallel() + + script := ` + import Test + + access(all) fun test() { + Test.assertEqual(true, 1) + } + ` + + inter, err := newTestContractInterpreter(t, script) + require.NoError(t, err) + + _, err = inter.Invoke("test") + require.Error(t, err) + assert.ErrorAs(t, err, &AssertionError{}) + assert.ErrorContains( + t, + err, + "assertion failed: not equal: expected: true, actual: 1", + ) + }) + + t.Run("address with address", func(t *testing.T) { + t.Parallel() + + script := ` + import Test + + access(all) fun testEqual() { + let expected = Address(0xf8d6e0586b0a20c7) + let actual = Address(0xf8d6e0586b0a20c7) + Test.assertEqual(expected, actual) + } + + access(all) fun testNotEqual() { + let expected = Address(0xf8d6e0586b0a20c7) + let actual = Address(0xee82856bf20e2aa6) + Test.assertEqual(expected, actual) + } + ` + + inter, err := newTestContractInterpreter(t, script) + require.NoError(t, err) + + _, err = inter.Invoke("testEqual") + require.NoError(t, err) + + _, err = inter.Invoke("testNotEqual") + require.Error(t, err) + assert.ErrorAs(t, err, &AssertionError{}) + assert.ErrorContains( + t, + err, + "not equal: expected: 0xf8d6e0586b0a20c7, actual: 0xee82856bf20e2aa6", + ) + }) + + t.Run("struct with struct", func(t *testing.T) { + t.Parallel() + + script := ` + import Test + + access(all) struct Foo { + access(all) let answer: Int + + init(answer: Int) { + self.answer = answer + } + } + + access(all) fun testEqual() { + let expected = Foo(answer: 42) + let actual = Foo(answer: 42) + Test.assertEqual(expected, actual) + } + + access(all) fun testNotEqual() { + let expected = Foo(answer: 42) + let actual = Foo(answer: 420) + Test.assertEqual(expected, actual) + } + ` + + inter, err := newTestContractInterpreter(t, script) + require.NoError(t, err) + + _, err = inter.Invoke("testEqual") + require.NoError(t, err) + + _, err = inter.Invoke("testNotEqual") + require.Error(t, err) + assert.ErrorAs(t, err, &AssertionError{}) + assert.ErrorContains( + t, + err, + "not equal: expected: S.test.Foo(answer: 42), actual: S.test.Foo(answer: 420)", + ) + }) + + t.Run("array with array", func(t *testing.T) { + t.Parallel() + + script := ` + import Test + + access(all) fun testEqual() { + let expected = [1, 2, 3] + let actual = [1, 2, 3] + Test.assertEqual(expected, actual) + } + + access(all) fun testNotEqual() { + let expected = [1, 2, 3] + let actual = [1, 2] + Test.assertEqual(expected, actual) + } + ` + + inter, err := newTestContractInterpreter(t, script) + require.NoError(t, err) + + _, err = inter.Invoke("testEqual") + require.NoError(t, err) + + _, err = inter.Invoke("testNotEqual") + require.Error(t, err) + assert.ErrorAs(t, err, &AssertionError{}) + assert.ErrorContains( + t, + err, + "not equal: expected: [1, 2, 3], actual: [1, 2]", + ) + }) + + t.Run("dictionary with dictionary", func(t *testing.T) { + t.Parallel() + + script := ` + import Test + + access(all) fun testEqual() { + let expected = {1: true, 2: false, 3: true} + let actual = {1: true, 2: false, 3: true} + Test.assertEqual(expected, actual) + } + + access(all) fun testNotEqual() { + let expected = {1: true, 2: false} + let actual = {1: true, 2: true} + Test.assertEqual(expected, 
actual) + } + ` + + inter, err := newTestContractInterpreter(t, script) + require.NoError(t, err) + + _, err = inter.Invoke("testEqual") + require.NoError(t, err) + + _, err = inter.Invoke("testNotEqual") + require.Error(t, err) + assert.ErrorAs(t, err, &AssertionError{}) + assert.ErrorContains( + t, + err, + "not equal: expected: {2: false, 1: true}, actual: {2: true, 1: true}", + ) + }) + + t.Run("resource with resource matcher", func(t *testing.T) { + t.Parallel() + + script := ` + import Test + + access(all) fun test() { + let f1 <- create Foo() + let f2 <- create Foo() + Test.assertEqual(<-f1, <-f2) + } + + access(all) resource Foo {} + ` + + _, err := newTestContractInterpreter(t, script) + + errs := checker.RequireCheckerErrors(t, err, 2) + assert.IsType(t, &sema.TypeMismatchError{}, errs[0]) + assert.IsType(t, &sema.TypeMismatchError{}, errs[1]) + }) + + t.Run("resource with struct matcher", func(t *testing.T) { + t.Parallel() + + script := ` + import Test + + access(all) fun test() { + let foo <- create Foo() + let bar = Bar() + Test.assertEqual(<-foo, bar) + } + + access(all) resource Foo {} + access(all) struct Bar {} + ` + + _, err := newTestContractInterpreter(t, script) + + errs := checker.RequireCheckerErrors(t, err, 1) + assert.IsType(t, &sema.TypeMismatchError{}, errs[0]) + }) + + t.Run("struct with resource matcher", func(t *testing.T) { + t.Parallel() + + script := ` + import Test + + access(all) fun test() { + let foo = Foo() + let bar <- create Bar() + Test.expect(foo, Test.equal(<-bar)) + } + + access(all) struct Foo {} + access(all) resource Bar {} + ` + + _, err := newTestContractInterpreter(t, script) + + errs := checker.RequireCheckerErrors(t, err, 1) + assert.IsType(t, &sema.TypeMismatchError{}, errs[0]) + }) +} + func TestTestBeSucceededMatcher(t *testing.T) { t.Parallel() @@ -1654,3 +1947,235 @@ func TestTestExpectFailure(t *testing.T) { assert.IsType(t, &sema.TypeMismatchError{}, errs[0]) }) } + +func TestBlockchain(t *testing.T) { + + t.Parallel() + + t.Run("all events, empty", func(t *testing.T) { + t.Parallel() + + script := ` + import Test + + access(all) fun test(): [AnyStruct] { + var blockchain = Test.newEmulatorBlockchain() + return blockchain.events() + } + ` + + eventsInvoked := false + + testFramework := &mockedTestFramework{ + events: func(inter *interpreter.Interpreter, eventType interpreter.StaticType) interpreter.Value { + eventsInvoked = true + assert.Nil(t, eventType) + return interpreter.NewArrayValue( + inter, + interpreter.EmptyLocationRange, + interpreter.NewVariableSizedStaticType(inter, interpreter.PrimitiveStaticTypeAnyStruct), + common.Address{}, + ) + }, + } + + inter, err := newTestContractInterpreterWithTestFramework(t, script, testFramework) + require.NoError(t, err) + + _, err = inter.Invoke("test") + require.NoError(t, err) + + assert.True(t, eventsInvoked) + }) + + t.Run("typed events, empty", func(t *testing.T) { + t.Parallel() + + script := ` + import Test + + access(all) fun test(): [AnyStruct] { + var blockchain = Test.newEmulatorBlockchain() + + // 'Foo' is not an event-type. + // But we just need to test the API, so it doesn't really matter. 
+ var typ = Type() + + return blockchain.eventsOfType(typ) + } + + access(all) struct Foo {} + ` + + eventsInvoked := false + + testFramework := &mockedTestFramework{ + events: func(inter *interpreter.Interpreter, eventType interpreter.StaticType) interpreter.Value { + eventsInvoked = true + assert.NotNil(t, eventType) + + require.IsType(t, interpreter.CompositeStaticType{}, eventType) + compositeType := eventType.(interpreter.CompositeStaticType) + assert.Equal(t, "Foo", compositeType.QualifiedIdentifier) + + return interpreter.NewArrayValue( + inter, + interpreter.EmptyLocationRange, + interpreter.NewVariableSizedStaticType(inter, interpreter.PrimitiveStaticTypeAnyStruct), + common.Address{}, + ) + }, + } + + inter, err := newTestContractInterpreterWithTestFramework(t, script, testFramework) + require.NoError(t, err) + + _, err = inter.Invoke("test") + require.NoError(t, err) + + assert.True(t, eventsInvoked) + }) + + // TODO: Add more tests for the remaining functions. +} + +type mockedTestFramework struct { + runScript func(inter *interpreter.Interpreter, code string, arguments []interpreter.Value) + createAccount func() (*Account, error) + addTransaction func(inter *interpreter.Interpreter, code string, authorizers []common.Address, signers []*Account, arguments []interpreter.Value) error + executeTransaction func() *TransactionResult + commitBlock func() error + deployContract func(inter *interpreter.Interpreter, name string, code string, account *Account, arguments []interpreter.Value) error + readFile func(s string) (string, error) + useConfiguration func(configuration *Configuration) + stdlibHandler func() StandardLibraryHandler + logs func() []string + serviceAccount func() (*Account, error) + events func(inter *interpreter.Interpreter, eventType interpreter.StaticType) interpreter.Value + reset func() +} + +var _ TestFramework = &mockedTestFramework{} + +func (m mockedTestFramework) RunScript( + inter *interpreter.Interpreter, + code string, + arguments []interpreter.Value, +) *ScriptResult { + if m.runScript == nil { + panic("'RunScript' is not implemented") + } + + return m.RunScript(inter, code, arguments) +} + +func (m mockedTestFramework) CreateAccount() (*Account, error) { + if m.createAccount == nil { + panic("'CreateAccount' is not implemented") + } + + return m.createAccount() +} + +func (m mockedTestFramework) AddTransaction( + inter *interpreter.Interpreter, + code string, + authorizers []common.Address, + signers []*Account, + arguments []interpreter.Value, +) error { + if m.addTransaction == nil { + panic("'AddTransaction' is not implemented") + } + + return m.addTransaction(inter, code, authorizers, signers, arguments) +} + +func (m mockedTestFramework) ExecuteNextTransaction() *TransactionResult { + if m.executeTransaction == nil { + panic("'ExecuteNextTransaction' is not implemented") + } + + return m.executeTransaction() +} + +func (m mockedTestFramework) CommitBlock() error { + if m.commitBlock == nil { + panic("'CommitBlock' is not implemented") + } + + return m.commitBlock() +} + +func (m mockedTestFramework) DeployContract( + inter *interpreter.Interpreter, + name string, + code string, + account *Account, + arguments []interpreter.Value, +) error { + if m.deployContract == nil { + panic("'DeployContract' is not implemented") + } + + return m.deployContract(inter, name, code, account, arguments) +} + +func (m mockedTestFramework) ReadFile(fileName string) (string, error) { + if m.readFile == nil { + panic("'ReadFile' is not implemented") + } + + return 
m.readFile(fileName) +} + +func (m mockedTestFramework) UseConfiguration(configuration *Configuration) { + if m.useConfiguration == nil { + panic("'UseConfiguration' is not implemented") + } + + m.useConfiguration(configuration) +} + +func (m mockedTestFramework) StandardLibraryHandler() StandardLibraryHandler { + if m.stdlibHandler == nil { + panic("'StandardLibraryHandler' is not implemented") + } + + return m.stdlibHandler() +} + +func (m mockedTestFramework) Logs() []string { + if m.logs == nil { + panic("'Logs' is not implemented") + } + + return m.logs() +} + +func (m mockedTestFramework) ServiceAccount() (*Account, error) { + if m.serviceAccount == nil { + panic("'ServiceAccount' is not implemented") + } + + return m.serviceAccount() +} + +func (m mockedTestFramework) Events( + inter *interpreter.Interpreter, + eventType interpreter.StaticType, +) interpreter.Value { + if m.events == nil { + panic("'Events' is not implemented") + } + + return m.events(inter, eventType) +} + +func (m mockedTestFramework) Reset() { + if m.reset == nil { + panic("'Reset' is not implemented") + } + + m.reset() +} diff --git a/runtime/tests/checker/entrypoint_test.go b/runtime/tests/checker/entrypoint_test.go index cf1547ae31..4bea123456 100644 --- a/runtime/tests/checker/entrypoint_test.go +++ b/runtime/tests/checker/entrypoint_test.go @@ -193,4 +193,50 @@ func TestEntryPointParameters(t *testing.T) { require.Empty(t, parameters) }) + + t.Run("contract with init params", func(t *testing.T) { + + t.Parallel() + + checker, err := ParseAndCheck(t, ` + access(all) contract SimpleContract { + access(all) let v: Int + init(a: Int) { + self.v = a + } + } + `) + + require.NoError(t, err) + + parameters := checker.EntryPointParameters() + + require.Equal(t, + []sema.Parameter{ + { + Label: "", + Identifier: "a", + TypeAnnotation: sema.NewTypeAnnotation(sema.IntType), + }, + }, + parameters, + ) + }) + + t.Run("contract init empty", func(t *testing.T) { + + t.Parallel() + + checker, err := ParseAndCheck(t, ` + access(all) contract SimpleContract { + init() {} + } + `) + + require.NoError(t, err) + + parameters := checker.EntryPointParameters() + + require.Empty(t, parameters) + }) } diff --git a/runtime/tests/checker/reference_test.go b/runtime/tests/checker/reference_test.go index 7eae3465a5..4cb952c402 100644 --- a/runtime/tests/checker/reference_test.go +++ b/runtime/tests/checker/reference_test.go @@ -2769,3 +2769,40 @@ func TestCheckReferenceUseAfterCopy(t *testing.T) { assert.ErrorAs(t, errs[0], &invalidatedRefError) }) } + +func TestCheckResourceReferenceMethodInvocationAfterMove(t *testing.T) { + + t.Parallel() + + _, err := ParseAndCheck(t, ` + resource Foo { + + let id: UInt8 + + init() { + self.id = 12 + } + + access(all) fun something() {} + } + + fun main() { + var foo <- create Foo() + var fooRef = &foo as &Foo + + // Invocation should not un-track the reference + fooRef.something() + + // Moving the resource should update the tracking + var newFoo <- foo + + fooRef.id + + destroy newFoo + } + `) + + errs := RequireCheckerErrors(t, err, 1) + invalidatedRefError := &sema.InvalidatedResourceReferenceError{} + assert.ErrorAs(t, errs[0], &invalidatedRefError) +} diff --git a/runtime/tests/interpreter/account_test.go b/runtime/tests/interpreter/account_test.go index f16f4beaac..c301e9c9a2 100644 --- a/runtime/tests/interpreter/account_test.go +++ b/runtime/tests/interpreter/account_test.go @@ -667,6 +667,10 @@ func TestInterpretAuthAccount_borrow(t *testing.T) { account.save(<-r, to: /storage/r) } + fun 
checkR(): Bool { + return account.check<@R>(from: /storage/r) + } + fun borrowR(): &R? { return account.borrow<&R>(from: /storage/r) } @@ -675,10 +679,18 @@ func TestInterpretAuthAccount_borrow(t *testing.T) { return account.borrow<&R>(from: /storage/r)!.foo } + fun checkR2(): Bool { + return account.check<@R2>(from: /storage/r) + } + fun borrowR2(): &R2? { return account.borrow<&R2>(from: /storage/r) } + fun checkR2WithInvalidPath(): Bool { + return account.check<@R2>(from: /storage/wrongpath) + } + fun changeAfterBorrow(): Int { let ref = account.borrow<&R>(from: /storage/r)! @@ -703,7 +715,15 @@ func TestInterpretAuthAccount_borrow(t *testing.T) { t.Run("borrow R ", func(t *testing.T) { - // first borrow + // first check & borrow + checkRes, err := inter.Invoke("checkR") + require.NoError(t, err) + AssertValuesEqual( + t, + inter, + interpreter.AsBoolValue(true), + checkRes, + ) value, err := inter.Invoke("borrowR") require.NoError(t, err) @@ -734,7 +754,15 @@ func TestInterpretAuthAccount_borrow(t *testing.T) { // TODO: should fail, i.e. return nil - // second borrow + // second check & borrow + checkRes, err = inter.Invoke("checkR") + require.NoError(t, err) + AssertValuesEqual( + t, + inter, + interpreter.AsBoolValue(true), + checkRes, + ) value, err = inter.Invoke("borrowR") require.NoError(t, err) @@ -750,8 +778,16 @@ func TestInterpretAuthAccount_borrow(t *testing.T) { }) t.Run("borrow R2", func(t *testing.T) { + checkRes, err := inter.Invoke("checkR2") + require.NoError(t, err) + AssertValuesEqual( + t, + inter, + interpreter.AsBoolValue(false), + checkRes, + ) - _, err := inter.Invoke("borrowR2") + _, err = inter.Invoke("borrowR2") RequireError(t, err) require.ErrorAs(t, err, &interpreter.ForceCastTypeMismatchError{}) @@ -767,6 +803,17 @@ func TestInterpretAuthAccount_borrow(t *testing.T) { require.ErrorAs(t, err, &interpreter.DereferenceError{}) }) + + t.Run("check R2 with wrong path", func(t *testing.T) { + checkRes, err := inter.Invoke("checkR2WithInvalidPath") + require.NoError(t, err) + AssertValuesEqual( + t, + inter, + interpreter.AsBoolValue(false), + checkRes, + ) + }) }) t.Run("struct", func(t *testing.T) { @@ -801,6 +848,10 @@ func TestInterpretAuthAccount_borrow(t *testing.T) { account.save(s, to: /storage/s) } + fun checkS(): Bool { + return account.check(from: /storage/s) + } + fun borrowS(): &S? { return account.borrow<&S>(from: /storage/s) } @@ -808,8 +859,12 @@ func TestInterpretAuthAccount_borrow(t *testing.T) { fun foo(): Int { return account.borrow<&S>(from: /storage/s)!.foo } - - fun borrowS2(): &S2? { + + fun checkS2(): Bool { + return account.check(from: /storage/s) + } + + fun borrowS2(): &S2? { return account.borrow<&S2>(from: /storage/s) } @@ -844,7 +899,15 @@ func TestInterpretAuthAccount_borrow(t *testing.T) { t.Run("borrow S", func(t *testing.T) { - // first borrow + // first check & borrow + checkRes, err := inter.Invoke("checkS") + require.NoError(t, err) + AssertValuesEqual( + t, + inter, + interpreter.AsBoolValue(true), + checkRes, + ) value, err := inter.Invoke("borrowS") require.NoError(t, err) @@ -875,7 +938,15 @@ func TestInterpretAuthAccount_borrow(t *testing.T) { // TODO: should fail, i.e. 
return nil - // second borrow + // second check & borrow + checkRes, err = inter.Invoke("checkS") + require.NoError(t, err) + AssertValuesEqual( + t, + inter, + interpreter.AsBoolValue(true), + checkRes, + ) value, err = inter.Invoke("borrowS") require.NoError(t, err) @@ -891,6 +962,14 @@ func TestInterpretAuthAccount_borrow(t *testing.T) { }) t.Run("borrow S2", func(t *testing.T) { + checkRes, err := inter.Invoke("checkS2") + require.NoError(t, err) + AssertValuesEqual( + t, + inter, + interpreter.AsBoolValue(false), + checkRes, + ) _, err = inter.Invoke("borrowS2") RequireError(t, err) diff --git a/runtime/tests/interpreter/arithmetic_test.go b/runtime/tests/interpreter/arithmetic_test.go index 23a9d975bf..d05273f1be 100644 --- a/runtime/tests/interpreter/arithmetic_test.go +++ b/runtime/tests/interpreter/arithmetic_test.go @@ -54,6 +54,7 @@ var integerTestValues = map[string]interpreter.NumberValue{ "Word32": interpreter.NewUnmeteredWord32Value(60), "Word64": interpreter.NewUnmeteredWord64Value(60), "Word128": interpreter.NewUnmeteredWord128ValueFromUint64(60), + "Word256": interpreter.NewUnmeteredWord256ValueFromUint64(60), } func init() { diff --git a/runtime/tests/interpreter/bitwise_test.go b/runtime/tests/interpreter/bitwise_test.go index 00f9d430d1..ad44a095ca 100644 --- a/runtime/tests/interpreter/bitwise_test.go +++ b/runtime/tests/interpreter/bitwise_test.go @@ -88,6 +88,9 @@ var bitwiseTestValueFunctions = map[string]func(int) interpreter.NumberValue{ "Word128": func(v int) interpreter.NumberValue { return interpreter.NewUnmeteredWord128ValueFromUint64(uint64(v)) }, + "Word256": func(v int) interpreter.NumberValue { + return interpreter.NewUnmeteredWord256ValueFromUint64(uint64(v)) + }, } func init() { diff --git a/runtime/tests/interpreter/builtinfunctions_test.go b/runtime/tests/interpreter/builtinfunctions_test.go index a3ad98ffb4..84a521072f 100644 --- a/runtime/tests/interpreter/builtinfunctions_test.go +++ b/runtime/tests/interpreter/builtinfunctions_test.go @@ -510,6 +510,15 @@ func TestInterpretToBigEndianBytes(t *testing.T) { "170141183460469231731687303715884105728": {128, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0}, "340282366920938463463374607431768211455": {255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255}, }, + "Word256": { + "0": {0}, + "42": {42}, + "127": {127}, + "128": {128}, + "57896044618658097711785492504343953926634992332820282019728792003956564819967": {127, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255}, + "57896044618658097711785492504343953926634992332820282019728792003956564819968": {128, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0}, + "115792089237316195423570985008687907853269984665640564039457584007913129639935": {255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255}, + }, // Fix* "Fix64": { "0.0": {0, 0, 0, 0, 0, 0, 0, 0}, @@ -715,6 +724,7 @@ func TestInterpretFromBigEndianBytes(t *testing.T) { "[127]": interpreter.NewUnmeteredUInt128ValueFromBigInt(big.NewInt(127)), "[128]": interpreter.NewUnmeteredUInt128ValueFromBigInt(big.NewInt(128)), "[200]": interpreter.NewUnmeteredUInt128ValueFromBigInt(big.NewInt(200)), + "[255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255]": 
interpreter.NewUnmeteredUInt128ValueFromBigInt(big.NewInt(0).SetBytes([]byte{255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255})), }, "UInt256": { "[0]": interpreter.NewUnmeteredUInt256ValueFromBigInt(big.NewInt(0)), @@ -722,6 +732,7 @@ func TestInterpretFromBigEndianBytes(t *testing.T) { "[127]": interpreter.NewUnmeteredUInt256ValueFromBigInt(big.NewInt(127)), "[128]": interpreter.NewUnmeteredUInt256ValueFromBigInt(big.NewInt(128)), "[200]": interpreter.NewUnmeteredUInt256ValueFromBigInt(big.NewInt(200)), + "[255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255]": interpreter.NewUnmeteredUInt256ValueFromBigInt(big.NewInt(0).SetBytes([]byte{255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255})), }, // Word* "Word8": { @@ -761,6 +772,15 @@ func TestInterpretFromBigEndianBytes(t *testing.T) { "[127]": interpreter.NewUnmeteredWord128ValueFromBigInt(big.NewInt(127)), "[128]": interpreter.NewUnmeteredWord128ValueFromBigInt(big.NewInt(128)), "[200]": interpreter.NewUnmeteredWord128ValueFromBigInt(big.NewInt(200)), + "[255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255]": interpreter.NewUnmeteredWord128ValueFromBigInt(big.NewInt(0).SetBytes([]byte{255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255})), + }, + "Word256": { + "[0]": interpreter.NewUnmeteredWord256ValueFromBigInt(big.NewInt(0)), + "[42]": interpreter.NewUnmeteredWord256ValueFromBigInt(big.NewInt(42)), + "[127]": interpreter.NewUnmeteredWord256ValueFromBigInt(big.NewInt(127)), + "[128]": interpreter.NewUnmeteredWord256ValueFromBigInt(big.NewInt(128)), + "[200]": interpreter.NewUnmeteredWord256ValueFromBigInt(big.NewInt(200)), + "[255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255]": interpreter.NewUnmeteredWord256ValueFromBigInt(big.NewInt(0).SetBytes([]byte{255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255})), }, // Fix* "Fix64": { @@ -853,6 +873,10 @@ func TestInterpretFromBigEndianBytes(t *testing.T) { "[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]", "[0, 22, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]", }, + "Word256": { + "[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]", + "[0, 22, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]", + }, // Fix* "Fix64": { "[0, 0, 0, 0, 0, 0, 0, 0, 0]", diff --git a/runtime/tests/interpreter/dynamic_casting_test.go b/runtime/tests/interpreter/dynamic_casting_test.go index 99c099b082..dd63709f96 100644 --- a/runtime/tests/interpreter/dynamic_casting_test.go +++ b/runtime/tests/interpreter/dynamic_casting_test.go @@ -72,6 +72,7 @@ func TestInterpretDynamicCastingNumber(t *testing.T) { {sema.Word32Type, "42", interpreter.NewUnmeteredWord32Value(42)}, {sema.Word64Type, "42", interpreter.NewUnmeteredWord64Value(42)}, {sema.Word128Type, "42", interpreter.NewUnmeteredWord128ValueFromUint64(42)}, + {sema.Word256Type, "42", interpreter.NewUnmeteredWord256ValueFromUint64(42)}, {sema.Fix64Type, "1.23", interpreter.NewUnmeteredFix64Value(123000000)}, {sema.UFix64Type, "1.23", 
interpreter.NewUnmeteredUFix64Value(123000000)}, } diff --git a/runtime/tests/interpreter/equality_test.go b/runtime/tests/interpreter/equality_test.go index 6cc0bff12a..c5307e3e88 100644 --- a/runtime/tests/interpreter/equality_test.go +++ b/runtime/tests/interpreter/equality_test.go @@ -229,6 +229,7 @@ func TestInterpretEqualityOnNumericSuperTypes(t *testing.T) { interpreter.PrimitiveStaticTypeWord32, interpreter.PrimitiveStaticTypeWord64, interpreter.PrimitiveStaticTypeWord128, + interpreter.PrimitiveStaticTypeWord256, } for _, subtype := range intSubtypes { diff --git a/runtime/tests/interpreter/fixedpoint_test.go b/runtime/tests/interpreter/fixedpoint_test.go index b8c4bc6145..4575a32ab9 100644 --- a/runtime/tests/interpreter/fixedpoint_test.go +++ b/runtime/tests/interpreter/fixedpoint_test.go @@ -388,6 +388,7 @@ func TestInterpretFixedPointConversions(t *testing.T) { bigIntegerTypes := []sema.Type{ sema.Word64Type, sema.Word128Type, + sema.Word256Type, sema.UInt64Type, sema.UInt128Type, sema.UInt256Type, diff --git a/runtime/tests/interpreter/integers_test.go b/runtime/tests/interpreter/integers_test.go index 6b8461dc10..01b19a161d 100644 --- a/runtime/tests/interpreter/integers_test.go +++ b/runtime/tests/interpreter/integers_test.go @@ -57,6 +57,7 @@ var testIntegerTypesAndValues = map[string]interpreter.Value{ "Word32": interpreter.NewUnmeteredWord32Value(50), "Word64": interpreter.NewUnmeteredWord64Value(50), "Word128": interpreter.NewUnmeteredWord128ValueFromUint64(50), + "Word256": interpreter.NewUnmeteredWord256ValueFromUint64(50), } func init() { @@ -127,6 +128,7 @@ func TestInterpretWordOverflowConversions(t *testing.T) { "Word32": sema.UInt32TypeMaxInt, "Word64": sema.UInt64TypeMaxInt, "Word128": sema.UInt128TypeMaxIntBig, + "Word256": sema.UInt256TypeMaxIntBig, } for typeName, value := range words { @@ -163,6 +165,7 @@ func TestInterpretWordUnderflowConversions(t *testing.T) { "Word32": sema.UInt32TypeMaxInt, "Word64": sema.UInt64TypeMaxInt, "Word128": sema.Word128TypeMaxIntBig, + "Word256": sema.Word256TypeMaxIntBig, } for typeName, value := range words { @@ -662,6 +665,11 @@ func TestInterpretIntegerConversion(t *testing.T) { min: interpreter.NewUnmeteredWord128ValueFromUint64(0), max: interpreter.NewUnmeteredWord128ValueFromBigInt(sema.Word128TypeMaxIntBig), }, + sema.Word256Type: { + fortyTwo: interpreter.NewUnmeteredWord256ValueFromUint64(42), + min: interpreter.NewUnmeteredWord256ValueFromUint64(0), + max: interpreter.NewUnmeteredWord256ValueFromBigInt(sema.Word256TypeMaxIntBig), + }, sema.Int8Type: { fortyTwo: interpreter.NewUnmeteredInt8Value(42), min: interpreter.NewUnmeteredInt8Value(math.MinInt8), @@ -722,7 +730,8 @@ func TestInterpretIntegerConversion(t *testing.T) { sema.Word16Type, sema.Word32Type, sema.Word64Type, - sema.Word128Type: + sema.Word128Type, + sema.Word256Type: default: t.Run("underflow", func(t *testing.T) { test(t, sourceType, targetType, sourceValues.min, nil, interpreter.UnderflowError{}) @@ -748,7 +757,8 @@ func TestInterpretIntegerConversion(t *testing.T) { sema.Word16Type, sema.Word32Type, sema.Word64Type, - sema.Word128Type: + sema.Word128Type, + sema.Word256Type: default: t.Run("overflow", func(t *testing.T) { test(t, sourceType, targetType, sourceValues.max, nil, interpreter.OverflowError{}) @@ -848,6 +858,10 @@ func TestInterpretIntegerMinMax(t *testing.T) { min: interpreter.NewUnmeteredWord128ValueFromUint64(0), max: interpreter.NewUnmeteredWord128ValueFromBigInt(sema.Word128TypeMaxIntBig), }, + sema.Word256Type: { + min: 
interpreter.NewUnmeteredWord256ValueFromUint64(0), + max: interpreter.NewUnmeteredWord256ValueFromBigInt(sema.Word256TypeMaxIntBig), + }, sema.Int8Type: { min: interpreter.NewUnmeteredInt8Value(math.MinInt8), max: interpreter.NewUnmeteredInt8Value(math.MaxInt8), diff --git a/runtime/tests/interpreter/interpreter_test.go b/runtime/tests/interpreter/interpreter_test.go index bb27dac614..77715e8155 100644 --- a/runtime/tests/interpreter/interpreter_test.go +++ b/runtime/tests/interpreter/interpreter_test.go @@ -636,7 +636,7 @@ func TestInterpretArrayEquality(t *testing.T) { for _, opStr := range []string{"==", "!="} { op := opStr testname := fmt.Sprintf("test variable size array %s at nesting level %d", op, nestingLevel) - code := fmt.Sprintf(` + code := fmt.Sprintf(` let xs = %s return xs %s xs `, @@ -4336,8 +4336,8 @@ func TestInterpretDictionaryIndexingType(t *testing.T) { resource TestResource {} let x: {Type: String} = { - Type(): "a", - Type(): "b", + Type(): "a", + Type(): "b", Type(): "c", Type<@TestResource>(): "f" } @@ -7264,6 +7264,10 @@ func TestInterpretEmitEventParameterTypes(t *testing.T) { value: interpreter.NewUnmeteredWord128ValueFromUint64(42), ty: sema.Word128Type, }, + "Word256": { + value: interpreter.NewUnmeteredWord256ValueFromUint64(42), + ty: sema.Word256Type, + }, // Fix* "Fix64": { value: interpreter.NewUnmeteredFix64Value(123000000), @@ -10309,3 +10313,107 @@ func TestInterpretReferenceUpAndDowncast(t *testing.T) { testVariableDeclaration(tc) } } + +func TestInterpretCompositeTypeHandler(t *testing.T) { + + t.Parallel() + + testType := &sema.CompositeType{} + + inter, err := parseCheckAndInterpretWithOptions(t, + ` + fun test(): Type? { + return CompositeType("TEST") + } + `, + ParseCheckAndInterpretOptions{ + Config: &interpreter.Config{ + CompositeTypeHandler: func(location common.Location, typeID common.TypeID) *sema.CompositeType { + if typeID == "TEST" { + return testType + } + + return nil + }, + }, + }, + ) + require.NoError(t, err) + + value, err := inter.Invoke("test") + require.NoError(t, err) + + testStaticType := interpreter.ConvertSemaToStaticType(nil, testType) + + require.Equal(t, + interpreter.NewUnmeteredSomeValueNonCopying(interpreter.NewUnmeteredTypeValue(testStaticType)), + value, + ) +} + +func TestInterpretConditionsWrapperFunctionType(t *testing.T) { + + t.Parallel() + + t.Run("interface", func(t *testing.T) { + + t.Parallel() + + inter := parseCheckAndInterpret(t, ` + struct interface SI { + fun test(x: Int) { + pre { true } + } + } + + struct S: SI { + fun test(x: Int) {} + } + + fun test(): fun (Int): Void { + let s = S() + return s.test + } + `) + + _, err := inter.Invoke("test") + require.NoError(t, err) + }) + + t.Run("type requirement", func(t *testing.T) { + + t.Parallel() + + inter, err := parseCheckAndInterpretWithOptions(t, + ` + contract interface CI { + struct S { + fun test(x: Int) { + pre { true } + } + } + } + + contract C: CI { + struct S { + fun test(x: Int) {} + } + } + + fun test(): fun (Int): Void { + let s = C.S() + return s.test + } + `, + ParseCheckAndInterpretOptions{ + Config: &interpreter.Config{ + ContractValueHandler: makeContractValueHandler(nil, nil, nil), + }, + }, + ) + require.NoError(t, err) + + _, err = inter.Invoke("test") + require.NoError(t, err) + }) +} diff --git a/runtime/tests/interpreter/reference_test.go b/runtime/tests/interpreter/reference_test.go index 0aca7b6001..cad8d1892e 100644 --- a/runtime/tests/interpreter/reference_test.go +++ b/runtime/tests/interpreter/reference_test.go @@ -1282,5 
+1282,50 @@ func TestInterpretResourceReferenceInvalidationOnDestroy(t *testing.T) { _, err := inter.Invoke("test") RequireError(t, err) require.ErrorAs(t, err, &interpreter.DestroyedResourceError{}) + }) } + +func TestInterpretReferenceTrackingOnInvocation(t *testing.T) { + t.Parallel() + + inter := parseCheckAndInterpret(t, ` + access(all) resource Foo { + + access(all) let id: UInt8 + + init() { + self.id = 12 + } + + access(all) fun something() {} + } + + fun returnSameRef(_ ref: &Foo): &Foo { + return ref + } + + fun main() { + var foo <- create Foo() + var fooRef = &foo as &Foo + + // Invocation should not un-track the reference + fooRef.something() + + // just to trick the checker + fooRef = returnSameRef(fooRef) + + // Moving the resource should update the tracking + var newFoo <- foo + + fooRef.id + + destroy newFoo + } + `) + + _, err := inter.Invoke("main") + require.Error(t, err) + + require.ErrorAs(t, err, &interpreter.InvalidatedResourceReferenceError{}) +} diff --git a/runtime/tests/interpreter/resources_test.go b/runtime/tests/interpreter/resources_test.go index 8748b222b1..a74b00b14b 100644 --- a/runtime/tests/interpreter/resources_test.go +++ b/runtime/tests/interpreter/resources_test.go @@ -2446,3 +2446,348 @@ func TestInterpretResourceDestroyedInPreCondition(t *testing.T) { require.NoError(t, err) require.True(t, didError) } + +func TestInterpretInvalidReentrantResourceDestruction(t *testing.T) { + + t.Parallel() + + t.Run("composite", func(t *testing.T) { + + t.Parallel() + + inter := parseCheckAndInterpret(t, ` + + resource Inner { + let outer: &Outer + + init(outer: &Outer) { + self.outer = outer + } + + destroy() { + self.outer.reenter() + } + } + + resource Outer { + var inner: @Inner? + + init() { + self.inner <-! create Inner(outer: &self as &Outer) + } + + fun reenter() { + let inner <- self.inner <- nil + destroy inner + } + + destroy() { + destroy self.inner + } + } + + fun test() { + let outer <- create Outer() + + destroy outer + } + `) + + _, err := inter.Invoke("test") + RequireError(t, err) + + require.ErrorAs(t, err, &interpreter.InvalidatedResourceReferenceError{}) + }) + + t.Run("array", func(t *testing.T) { + + t.Parallel() + + inter := parseCheckAndInterpret(t, ` + + resource Inner { + let outer: &Outer + + init(outer: &Outer) { + self.outer = outer + } + + destroy() { + self.outer.reenter() + } + } + + resource Outer { + var inner: @[Inner] + + init() { + self.inner <- [<-create Inner(outer: &self as &Outer)] + } + + fun reenter() { + let inner <- self.inner <- [] + destroy inner + } + + destroy() { + destroy self.inner + } + } + + fun test() { + let outer <- create Outer() + + destroy outer + } + `) + + _, err := inter.Invoke("test") + RequireError(t, err) + + require.ErrorAs(t, err, &interpreter.InvalidatedResourceReferenceError{}) + }) + + t.Run("dictionary", func(t *testing.T) { + + t.Parallel() + + inter := parseCheckAndInterpret(t, ` + + resource Inner { + let outer: &Outer + + init(outer: &Outer) { + self.outer = outer + } + + destroy() { + self.outer.reenter() + } + } + + resource Outer { + var inner: @{Int: Inner} + + init() { + self.inner <- {0: <-create Inner(outer: &self as &Outer)} + } + + fun reenter() { + let inner <- self.inner <- {} + destroy inner + } + + destroy() { + destroy self.inner + } + } + + fun test() { + let outer <- create Outer() + + destroy outer + } + `) + + _, err := inter.Invoke("test") + RequireError(t, err) + + require.ErrorAs(t, err, &interpreter.InvalidatedResourceReferenceError{}) + }) +} + +func 
TestInterpretResourceFunctionInvocationAfterDestruction(t *testing.T) { + + t.Parallel() + + inter := parseCheckAndInterpret(t, ` + access(all) resource Vault { + access(all) fun foo(_ ignored: Bool) {} + } + + access(all) resource Attacker { + access(all) var vault: @Vault + + init() { + self.vault <- create Vault() + } + + access(all) fun shenanigans(): Bool { + var temp <- create Vault() + self.vault <-> temp + destroy temp + return true + } + + destroy() { + destroy self.vault + } + } + + access(all) fun main() { + let a <- create Attacker() + a.vault.foo(a.shenanigans()) + destroy a + } + `) + + _, err := inter.Invoke("main") + RequireError(t, err) + + require.ErrorAs(t, err, &interpreter.DestroyedResourceError{}) +} + +func TestInterpretResourceFunctionReferenceValidity(t *testing.T) { + + t.Parallel() + + inter := parseCheckAndInterpret(t, ` + access(all) resource Vault { + access(all) fun foo(_ ref: &Vault): &Vault { + return ref + } + } + + access(all) resource Attacker { + access(all) var vault: @Vault + + init() { + self.vault <- create Vault() + } + + access(all) fun shenanigans1(): &Vault { + // Create a reference in a nested call + return &self.vault as &Vault + } + + access(all) fun shenanigans2(_ ref: &Vault): &Vault { + return ref + } + + destroy() { + destroy self.vault + } + } + + access(all) fun main() { + let a <- create Attacker() + + // A reference to receiver get created inside the nested call 'shenanigans1()'. + // Same reference is returned eventually. + var vaultRef1 = a.vault.foo(a.shenanigans1()) + // Reference must be still valid, even after the invalidation of the bound function receiver. + vaultRef1.foo(vaultRef1) + + // A reference to receiver is explicitly created as a parameter. + // Same reference is returned eventually. + var vaultRef2 = a.vault.foo(a.shenanigans2(&a.vault as &Vault)) + // Reference must be still valid, even after the invalidation of the bound function receiver. + vaultRef2.foo(vaultRef2) + + destroy a + } + `) + + _, err := inter.Invoke("main") + require.NoError(t, err) +} + +func TestInterpretResourceFunctionResourceFunctionValidity(t *testing.T) { + + t.Parallel() + + inter := parseCheckAndInterpret(t, ` + access(all) resource Vault { + access(all) fun foo(_ dummy: Bool): Bool { + return dummy + } + } + + access(all) resource Attacker { + access(all) var vault: @Vault + + init() { + self.vault <- create Vault() + } + + access(all) fun shenanigans(_ n: Int): Bool { + if n > 0 { + return self.vault.foo(self.shenanigans(n - 1)) + } + return true + } + + destroy() { + destroy self.vault + } + } + + access(all) fun main() { + let a <- create Attacker() + + a.vault.foo(a.shenanigans(10)) + + destroy a + } + `) + + _, err := inter.Invoke("main") + require.NoError(t, err) +} + +func TestInterpretInnerResourceDestruction(t *testing.T) { + + t.Parallel() + + inter := parseCheckAndInterpret(t, ` + access(all) resource InnerResource { + access(all) var name: String + access(all) var parent: &OuterResource? 
+ + init(_ name: String) { + self.name = name + self.parent = nil + } + + access(all) fun setParent(_ parent: &OuterResource) { + self.parent = parent + } + + destroy() { + self.parent!.shenanigans() + } + } + + access(all) resource OuterResource { + access(all) var inner1: @InnerResource + access(all) var inner2: @InnerResource + + init() { + self.inner1 <- create InnerResource("inner1") + self.inner2 <- create InnerResource("inner2") + + self.inner1.setParent(&self as &OuterResource) + self.inner2.setParent(&self as &OuterResource) + } + + access(all) fun shenanigans() { + self.inner1 <-> self.inner2 + } + + destroy() { + destroy self.inner1 + destroy self.inner2 + } + } + + access(all) fun main() { + let a <- create OuterResource() + destroy a + }`, + ) + + _, err := inter.Invoke("main") + RequireError(t, err) + + require.ErrorAs(t, err, &interpreter.InvalidatedResourceReferenceError{}) +} diff --git a/runtime/tests/interpreter/values_test.go b/runtime/tests/interpreter/values_test.go index 7530ee3c00..917affd710 100644 --- a/runtime/tests/interpreter/values_test.go +++ b/runtime/tests/interpreter/values_test.go @@ -1221,6 +1221,8 @@ func generateRandomHashableValue(inter *interpreter.Interpreter, n int) interpre return interpreter.NewUnmeteredWord64Value(rand.Uint64()) case Word128: return interpreter.NewUnmeteredWord128ValueFromUint64(rand.Uint64()) + case Word256: + return interpreter.NewUnmeteredWord256ValueFromUint64(rand.Uint64()) // Fixed point case Fix64: @@ -1490,6 +1492,8 @@ func intSubtype(n int) sema.Type { return sema.Word64Type case Word128: return sema.Word128Type + case Word256: + return sema.Word256Type default: panic(fmt.Sprintf("unsupported: %d", n)) @@ -1522,6 +1526,7 @@ const ( Word32 Word64 Word128 + Word256 Fix64 UFix64 diff --git a/tools/analysis/diagnostic.go b/tools/analysis/diagnostic.go index 2d45664f4e..d273fa75c4 100644 --- a/tools/analysis/diagnostic.go +++ b/tools/analysis/diagnostic.go @@ -23,10 +23,21 @@ import ( "github.com/onflow/cadence/runtime/common" ) +type SuggestedFix struct { + Message string + TextEdits []TextEdit +} + +type TextEdit struct { + Replacement string + ast.Range +} + type Diagnostic struct { Location common.Location Category string Message string SecondaryMessage string + SuggestedFixes []SuggestedFix ast.Range } diff --git a/tools/unkeyed/analyzer.go b/tools/unkeyed/analyzer.go index 00a2120ab3..e9222f9132 100644 --- a/tools/unkeyed/analyzer.go +++ b/tools/unkeyed/analyzer.go @@ -64,7 +64,6 @@ func run(pass *analysis.Pass) (interface{}, error) { continue } - // check if the struct contains an unkeyed field allKeyValue := true var suggestedFixAvailable = len(cl.Elts) == strct.NumFields() diff --git a/tools/update/config.yaml b/tools/update/config.yaml index 278f545535..f5104c3f2b 100644 --- a/tools/update/config.yaml +++ b/tools/update/config.yaml @@ -45,13 +45,12 @@ repos: - repo: onflow/flow-cli needsRelease: false mods: - - path: pkg/flowkit + - path: flowkit deps: - onflow/cadence - onflow/flow-go-sdk - onflow/flow-go - onflow/flow-emulator - - onflow/cadence-tools/test - repo: onflow/cadence-tools needsRelease: true @@ -60,8 +59,6 @@ repos: deps: - onflow/cadence - onflow/flow-go-sdk - - onflow/flow-go - - onflow/flow-cli/pkg/flowkit - repo: onflow/cadence-tools needsRelease: true diff --git a/types.go b/types.go index 5c34bf0335..6fb0e1a742 100644 --- a/types.go +++ b/types.go @@ -20,6 +20,7 @@ package cadence import ( "fmt" + "reflect" "strings" "sync" @@ -845,6 +846,26 @@ func (t Word128Type) Equal(other Type) bool { 
return t == other } +// Word256Type + +type Word256Type struct{} + +var TheWord256Type = Word256Type{} + +func NewWord256Type() Word256Type { + return TheWord256Type +} + +func (Word256Type) isType() {} + +func (Word256Type) ID() string { + return "Word256" +} + +func (t Word256Type) Equal(other Type) bool { + return t == other +} + // Fix64Type type Fix64Type struct{} @@ -1055,6 +1076,231 @@ func NewField(identifier string, typ Type) Field { } } +type HasFields interface { + GetFields() []Field + GetFieldValues() []Value +} + +func GetFieldByName(v HasFields, fieldName string) Value { + fieldValues := v.GetFieldValues() + fields := v.GetFields() + + if fieldValues == nil || fields == nil { + return nil + } + + for i, field := range v.GetFields() { + if field.Identifier == fieldName { + return v.GetFieldValues()[i] + } + } + return nil +} + +func GetFieldsMappedByName(v HasFields) map[string]Value { + fieldValues := v.GetFieldValues() + fields := v.GetFields() + + if fieldValues == nil || fields == nil { + return nil + } + + fieldsMap := make(map[string]Value, len(fields)) + for i, field := range fields { + fieldsMap[field.Identifier] = fieldValues[i] + } + return fieldsMap +} + +// DecodeFields decodes a HasFields into a struct +func DecodeFields(hasFields HasFields, s interface{}) error { + v := reflect.ValueOf(s) + if !v.IsValid() || v.Kind() != reflect.Ptr || v.Elem().Kind() != reflect.Struct { + return fmt.Errorf("s must be a pointer to a struct") + } + + v = v.Elem() + t := v.Type() + + fieldsMap := GetFieldsMappedByName(hasFields) + + for i := 0; i < v.NumField(); i++ { + structField := t.Field(i) + tag := structField.Tag + fieldValue := v.Field(i) + + cadenceFieldNameTag := tag.Get("cadence") + if cadenceFieldNameTag == "" { + continue + } + + if !fieldValue.IsValid() || !fieldValue.CanSet() { + return fmt.Errorf("cannot set field %s", structField.Name) + } + + cadenceField := fieldsMap[cadenceFieldNameTag] + if cadenceField == nil { + return fmt.Errorf("%s field not found", cadenceFieldNameTag) + } + + cadenceFieldValue := reflect.ValueOf(cadenceField) + + var decodeSpecialFieldFunc func(p reflect.Type, value Value) (*reflect.Value, error) + + switch fieldValue.Kind() { + case reflect.Ptr: + decodeSpecialFieldFunc = decodeOptional + case reflect.Map: + decodeSpecialFieldFunc = decodeDict + case reflect.Array, reflect.Slice: + decodeSpecialFieldFunc = decodeSlice + } + + if decodeSpecialFieldFunc != nil { + cadenceFieldValuePtr, err := decodeSpecialFieldFunc(fieldValue.Type(), cadenceField) + if err != nil { + return fmt.Errorf("cannot decode %s field %s: %w", fieldValue.Kind(), structField.Name, err) + } + cadenceFieldValue = *cadenceFieldValuePtr + } + + if !cadenceFieldValue.CanConvert(fieldValue.Type()) { + return fmt.Errorf( + "cannot convert cadence field %s of type %s to struct field %s of type %s", + cadenceFieldNameTag, + cadenceField.Type().ID(), + structField.Name, + fieldValue.Type(), + ) + } + + fieldValue.Set(cadenceFieldValue.Convert(fieldValue.Type())) + } + + return nil +} + +func decodeOptional(valueType reflect.Type, cadenceField Value) (*reflect.Value, error) { + optional, ok := cadenceField.(Optional) + if !ok { + return nil, fmt.Errorf("field is not an optional") + } + + // if optional is nil, skip and default the field to nil + if optional.ToGoValue() == nil { + zeroValue := reflect.Zero(valueType) + return &zeroValue, nil + } + + optionalValue := reflect.ValueOf(optional.Value) + + // Check the type + if valueType.Elem() != optionalValue.Type() && 
valueType.Elem().Kind() != reflect.Interface { + return nil, fmt.Errorf("cannot set field: expected %v, got %v", + valueType.Elem(), optionalValue.Type()) + } + + if valueType.Elem().Kind() == reflect.Interface { + newInterfaceVal := reflect.New(reflect.TypeOf((*interface{})(nil)).Elem()) + newInterfaceVal.Elem().Set(optionalValue) + + return &newInterfaceVal, nil + } + + // Create a new pointer for optionalValue + newPtr := reflect.New(optionalValue.Type()) + newPtr.Elem().Set(optionalValue) + + return &newPtr, nil +} + +func decodeDict(valueType reflect.Type, cadenceField Value) (*reflect.Value, error) { + dict, ok := cadenceField.(Dictionary) + if !ok { + return nil, fmt.Errorf("field is not a dictionary") + } + + mapKeyType := valueType.Key() + mapValueType := valueType.Elem() + + mapValue := reflect.MakeMap(valueType) + for _, pair := range dict.Pairs { + + // Convert key and value to their Go counterparts + var key, value reflect.Value + if mapKeyType.Kind() == reflect.Ptr { + return nil, fmt.Errorf("map key cannot be a pointer (optional) type") + } + key = reflect.ValueOf(pair.Key) + + if mapValueType.Kind() == reflect.Ptr { + // If the map value is a pointer type, unwrap it from optional + valueOptional, err := decodeOptional(mapValueType, pair.Value) + if err != nil { + return nil, fmt.Errorf("cannot decode optional map value for key %s: %w", pair.Key.String(), err) + } + value = *valueOptional + } else { + value = reflect.ValueOf(pair.Value) + } + + if mapKeyType != key.Type() { + return nil, fmt.Errorf("map key type mismatch: expected %v, got %v", mapKeyType, key.Type()) + } + if mapValueType != value.Type() && mapValueType.Kind() != reflect.Interface { + return nil, fmt.Errorf("map value type mismatch: expected %v, got %v", mapValueType, value.Type()) + } + + // Add key-value pair to the map + mapValue.SetMapIndex(key, value) + } + + return &mapValue, nil +} + +func decodeSlice(valueType reflect.Type, cadenceField Value) (*reflect.Value, error) { + array, ok := cadenceField.(Array) + if !ok { + return nil, fmt.Errorf("field is not an array") + } + + var arrayValue reflect.Value + + constantSizeArray, ok := array.ArrayType.(*ConstantSizedArrayType) + if ok { + arrayValue = reflect.New(reflect.ArrayOf(int(constantSizeArray.Size), valueType.Elem())).Elem() + } else { + // If the array is not constant sized, create a slice + arrayValue = reflect.MakeSlice(valueType, len(array.Values), len(array.Values)) + } + + for i, value := range array.Values { + var elementValue reflect.Value + if valueType.Elem().Kind() == reflect.Ptr { + // If the array value is a pointer type, unwrap it from optional + valueOptional, err := decodeOptional(valueType.Elem(), value) + if err != nil { + return nil, fmt.Errorf("error decoding array element optional: %w", err) + } + elementValue = *valueOptional + } else { + elementValue = reflect.ValueOf(value) + } + if elementValue.Type() != valueType.Elem() && valueType.Elem().Kind() != reflect.Interface { + return nil, fmt.Errorf( + "array element type mismatch at index %d: expected %v, got %v", + i, + valueType.Elem(), + elementValue.Type(), + ) + } + + arrayValue.Index(i).Set(elementValue) + } + + return &arrayValue, nil +} + // Parameter type Parameter struct { diff --git a/types_test.go b/types_test.go index bd29e8663a..89d16107d3 100644 --- a/types_test.go +++ b/types_test.go @@ -22,6 +22,7 @@ import ( "testing" "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" "github.com/onflow/cadence/runtime/common" 
"github.com/onflow/cadence/runtime/tests/utils" @@ -65,6 +66,7 @@ func TestType_ID(t *testing.T) { {Word32Type{}, "Word32"}, {Word64Type{}, "Word64"}, {Word128Type{}, "Word128"}, + {Word256Type{}, "Word256"}, {UFix64Type{}, "UFix64"}, {Fix64Type{}, "Fix64"}, {VoidType{}, "Void"}, @@ -299,6 +301,7 @@ func TestTypeEquality(t *testing.T) { Word32Type{}, Word64Type{}, Word128Type{}, + Word256Type{}, UFix64Type{}, Fix64Type{}, VoidType{}, @@ -1938,3 +1941,328 @@ func TestTypeEquality(t *testing.T) { }) } + +func TestDecodeFields(t *testing.T) { + t.Parallel() + + simpleEvent := NewEvent( + []Value{ + NewInt(1), + String("foo"), + NewOptional(nil), + NewOptional(NewInt(2)), + NewDictionary([]KeyValuePair{ + {Key: String("k"), Value: NewInt(3)}, + }), + NewDictionary([]KeyValuePair{ + {Key: String("k"), Value: NewOptional(NewInt(4))}, + {Key: String("nilK"), Value: NewOptional(nil)}, + }), + NewDictionary([]KeyValuePair{ + {Key: String("k"), Value: NewInt(3)}, + {Key: String("k2"), Value: String("foo")}, + }), + NewDictionary([]KeyValuePair{ + {Key: String("k"), Value: NewOptional(NewInt(4))}, + {Key: String("k2"), Value: NewOptional(String("foo"))}, + {Key: String("nilK"), Value: NewOptional(nil)}, + }), + NewOptional(NewInt(2)), + String("bar"), + Array{ + ArrayType: NewVariableSizedArrayType(IntType{}), + Values: []Value{NewInt(1), NewInt(2)}, + }, + Array{ + ArrayType: NewVariableSizedArrayType(&OptionalType{Type: IntType{}}), + Values: []Value{ + NewOptional(NewInt(1)), + NewOptional(NewInt(2)), + NewOptional(nil), + }, + }, + Array{ + ArrayType: NewConstantSizedArrayType(2, IntType{}), + Values: []Value{NewInt(1), NewInt(2)}, + }, + Array{ + ArrayType: NewVariableSizedArrayType(AnyStructType{}), + Values: []Value{ + NewInt(3), + String("foo"), + }, + }, + Array{ + ArrayType: NewVariableSizedArrayType(&OptionalType{Type: AnyStructType{}}), + Values: []Value{ + NewOptional(NewInt(1)), + NewOptional(nil), + }, + }, + }, + ).WithType(&EventType{ + Location: utils.TestLocation, + QualifiedIdentifier: "SimpleEvent", + Fields: []Field{ + { + Identifier: "intField", + Type: IntType{}, + }, + { + Identifier: "stringField", + Type: StringType{}, + }, + { + Identifier: "nilOptionalIntField", + Type: &OptionalType{Type: IntType{}}, + }, + { + Identifier: "optionalIntField", + Type: &OptionalType{Type: IntType{}}, + }, + { + Identifier: "dictField", + Type: &DictionaryType{KeyType: StringType{}, ElementType: IntType{}}, + }, + { + Identifier: "dictOptionalField", + Type: &OptionalType{Type: &DictionaryType{KeyType: StringType{}, ElementType: &OptionalType{Type: IntType{}}}}, + }, + { + Identifier: "dictAnyStructField", + Type: &DictionaryType{KeyType: StringType{}, ElementType: AnyStructType{}}, + }, + { + Identifier: "dictOptionalAnyStructField", + Type: &DictionaryType{KeyType: StringType{}, ElementType: &OptionalType{Type: AnyStructType{}}}, + }, + { + Identifier: "optionalAnyStructField", + Type: &OptionalType{Type: AnyStructType{}}, + }, + { + Identifier: "anyStructField", + Type: AnyStructType{}, + }, + { + Identifier: "variableArrayIntField", + Type: NewVariableSizedArrayType(IntType{}), + }, + { + Identifier: "variableArrayOptionalIntField", + Type: NewVariableSizedArrayType(&OptionalType{Type: IntType{}}), + }, + { + Identifier: "fixedArrayIntField", + Type: NewConstantSizedArrayType(2, IntType{}), + }, + { + Identifier: "variableArrayAnyStructField", + Type: NewVariableSizedArrayType(AnyStructType{}), + }, + { + Identifier: "variableArrayOptionalAnyStructField", + Type: 
NewVariableSizedArrayType(&OptionalType{Type: AnyStructType{}}), + }, + }, + }) + + type eventStruct struct { + Int Int `cadence:"intField"` + String String `cadence:"stringField"` + NilOptionalInt *Int `cadence:"nilOptionalIntField"` + OptionalInt *Int `cadence:"optionalIntField"` + DictAnyStruct map[String]interface{} `cadence:"dictAnyStructField"` + DictOptionalAnyStruct map[String]*interface{} `cadence:"dictOptionalAnyStructField"` + Dict map[String]Int `cadence:"dictField"` + DictOptional map[String]*Int `cadence:"dictOptionalField"` + OptionalAnyStruct *interface{} `cadence:"optionalAnyStructField"` + AnyStructString interface{} `cadence:"anyStructField"` + ArrayInt []Int `cadence:"variableArrayIntField"` + VariableArrayOptional []*Int `cadence:"variableArrayOptionalIntField"` + FixedArrayInt [2]Int `cadence:"fixedArrayIntField"` + VariableArrayAnyStruct []interface{} `cadence:"variableArrayAnyStructField"` + VariableArrayOptionalAnyStruct []*interface{} `cadence:"variableArrayOptionalAnyStructField"` + NonCadenceField Int + } + + evt := eventStruct{} + err := DecodeFields(simpleEvent, &evt) + require.NoError(t, err) + + int1 := NewInt(1) + int2 := NewInt(2) + int3 := NewInt(3) + int4 := NewInt(4) + + assert.Nil(t, evt.NilOptionalInt) + + require.NotNil(t, evt.OptionalInt) + assert.EqualValues(t, int2, *evt.OptionalInt) + + assert.Equal(t, Int{}, evt.NonCadenceField) + + assert.EqualValues(t, map[String]Int{"k": int3}, evt.Dict) + + assert.EqualValues(t, map[String]*Int{"k": &int4, "nilK": nil}, evt.DictOptional) + + assert.EqualValues(t, map[String]interface{}{ + "k": int3, + "k2": String("foo"), + }, evt.DictAnyStruct) + + evtOptionalAnyStruct := evt.OptionalAnyStruct + require.NotNil(t, evtOptionalAnyStruct) + assert.EqualValues(t, int2, *evtOptionalAnyStruct) + + assert.Equal(t, String("bar"), evt.AnyStructString) + + assert.EqualValues(t, []Int{int1, int2}, evt.ArrayInt) + + assert.EqualValues(t, []*Int{&int1, &int2, nil}, evt.VariableArrayOptional) + + assert.Equal(t, [2]Int{int1, int2}, evt.FixedArrayInt) + + assert.Equal(t, []interface{}{int3, String("foo")}, evt.VariableArrayAnyStruct) + + require.NotNil(t, evt.VariableArrayOptionalAnyStruct[0]) + assert.Equal(t, int1, *evt.VariableArrayOptionalAnyStruct[0]) + assert.Nil(t, evt.VariableArrayOptionalAnyStruct[1]) + + require.NotNil(t, evt.DictOptionalAnyStruct["k"]) + assert.Equal(t, int4, *evt.DictOptionalAnyStruct["k"]) + assert.Equal(t, String("foo"), *evt.DictOptionalAnyStruct["k2"]) + require.NotNil(t, evt.DictOptionalAnyStruct["k2"]) + assert.Nil(t, evt.DictOptionalAnyStruct["nilK"]) + + type ErrCases struct { + Value interface{} + ExpectedErr string + Description string + } + + var v interface{} + var i int + var ints []int + + errCases := []ErrCases{ + {Value: nil, + ExpectedErr: "s must be a pointer to a struct", + Description: "should err when mapping to nil", + }, + {Value: v, + ExpectedErr: "s must be a pointer to a struct", + Description: "should err when mapping to nil interface", + }, + {Value: &v, + ExpectedErr: "s must be a pointer to a struct", + Description: "should err when mapping to pointer to nil interface", + }, + {Value: i, + ExpectedErr: "s must be a pointer to a struct", + Description: "should err when mapping to non-struct (int)", + }, + {Value: &i, + ExpectedErr: "s must be a pointer to a struct", + Description: "should err when mapping to pointer to non-struct (&int)", + }, + {Value: ints, + ExpectedErr: "s must be a pointer to a struct", + Description: "should err when mapping to non-struct 
([]int)", + }, + {Value: &ints, + ExpectedErr: "s must be a pointer to a struct", + Description: "should err when mapping to pointer to non-struct (&[]int)", + }, + {Value: struct { + A Int `cadence:"intField"` + }{}, + ExpectedErr: "s must be a pointer to a struct", + Description: "should err when mapping to non-pointer", + }, + {Value: &struct { + A String `cadence:"intField"` + }{}, + ExpectedErr: "cannot convert cadence field intField of type Int to struct field A of type cadence.String", + Description: "should err when mapping to invalid type", + }, + {Value: &struct { + a Int `cadence:"intField"` // nolint: unused + }{}, + ExpectedErr: "cannot set field a", + Description: "should err when mapping to private field", + }, + {Value: &struct { + A Int `cadence:"notFoundField"` + }{}, + ExpectedErr: "notFoundField field not found", + Description: "should err when mapping to non-existing field", + }, + {Value: &struct { + O *String `cadence:"optionalIntField"` + }{}, + ExpectedErr: "cannot decode ptr field O: cannot set field: expected cadence.String, got cadence.Int", + Description: "should err when mapping to optional field with wrong type", + }, + {Value: &struct { + DOptional map[*String]*Int `cadence:"dictOptionalField"` + }{}, + ExpectedErr: "cannot decode map field DOptional: map key cannot be a pointer (optional) type", + Description: "should err when mapping to dictionary field with ptr key type", + }, + {Value: &struct { + D map[String]String `cadence:"dictField"` + }{}, + ExpectedErr: "cannot decode map field D: map value type mismatch: expected cadence.String, got cadence.Int", + Description: "should err when mapping to dictionary field with wrong value type", + }, + {Value: &struct { + A []String `cadence:"intField"` + }{}, + ExpectedErr: "cannot decode slice field A: field is not an array", + Description: "should err when mapping to array field with wrong type", + }, + {Value: &struct { + A []String `cadence:"variableArrayIntField"` + }{}, + ExpectedErr: "cannot decode slice field A: array element type mismatch at index 0: expected cadence.String, got cadence.Int", + Description: "should err when mapping to array field with wrong element type", + }, + {Value: &struct { + A []*String `cadence:"variableArrayOptionalIntField"` + }{}, + ExpectedErr: "cannot decode slice field A: error decoding array element optional: cannot set field: expected cadence.String, got cadence.Int", + Description: "should err when mapping to array field with wrong type", + }, + {Value: &struct { + A map[Int]Int `cadence:"dictField"` + }{}, + ExpectedErr: "cannot decode map field A: map key type mismatch: expected cadence.Int, got cadence.String", + Description: "should err when mapping to map field with mismatching key type", + }, + {Value: &struct { + A map[String]*String `cadence:"dictOptionalField"` + }{}, + ExpectedErr: "cannot decode map field A: cannot decode optional map value for key \"k\": cannot set field: expected cadence.String, got cadence.Int", + Description: "should err when mapping to map field with mismatching value type", + }, + {Value: &struct { + A map[String]Int `cadence:"intField"` + }{}, + ExpectedErr: "cannot decode map field A: field is not a dictionary", + Description: "should err when mapping to map with mismatching field type", + }, + {Value: &struct { + A *Int `cadence:"intField"` + }{}, + ExpectedErr: "cannot decode ptr field A: field is not an optional", + Description: "should err when mapping to optional field with mismatching type", + }, + } + for _, errCase := range 
errCases { + t.Run(errCase.Description, func(t *testing.T) { + err := DecodeFields(simpleEvent, errCase.Value) + assert.Equal(t, errCase.ExpectedErr, err.Error()) + }) + } +} diff --git a/values.go b/values.go index 1714c025e8..7de1c86625 100644 --- a/values.go +++ b/values.go @@ -1297,6 +1297,74 @@ func (v Word128) String() string { return format.BigInt(v.Value) } +// Word256 + +type Word256 struct { + Value *big.Int +} + +var _ Value = Word256{} + +var Word256MemoryUsage = common.NewCadenceBigIntMemoryUsage(32) + +func NewWord256(i uint) Word256 { + return Word256{ + Value: big.NewInt(int64(i)), + } +} + +var word256NegativeError = errors.NewDefaultUserError("invalid negative value for Word256") +var word256MaxExceededError = errors.NewDefaultUserError("value exceeds max of Word256") + +func NewWord256FromBig(i *big.Int) (Word256, error) { + if i.Sign() < 0 { + return Word256{}, word256NegativeError + } + if i.Cmp(sema.Word256TypeMaxIntBig) > 0 { + return Word256{}, word256MaxExceededError + } + return Word256{Value: i}, nil +} + +func NewMeteredWord256FromBig( + memoryGauge common.MemoryGauge, + bigIntConstructor func() *big.Int, +) (Word256, error) { + common.UseMemory(memoryGauge, Word256MemoryUsage) + value := bigIntConstructor() + return NewWord256FromBig(value) +} + +func (Word256) isValue() {} + +func (Word256) Type() Type { + return TheWord256Type +} + +func (v Word256) MeteredType(common.MemoryGauge) Type { + return v.Type() +} + +func (v Word256) ToGoValue() any { + return v.Big() +} + +func (v Word256) Int() int { + return int(v.Value.Uint64()) +} + +func (v Word256) Big() *big.Int { + return v.Value +} + +func (v Word256) ToBigEndianBytes() []byte { + return interpreter.UnsignedBigIntToBigEndianBytes(v.Value) +} + +func (v Word256) String() string { + return format.BigInt(v.Value) +} + // Fix64 type Fix64 int64 @@ -1655,6 +1723,18 @@ func (v Struct) String() string { return formatComposite(v.StructType.ID(), v.StructType.Fields, v.Fields) } +func (v Struct) GetFields() []Field { + if v.StructType == nil { + return nil + } + + return v.StructType.Fields +} + +func (v Struct) GetFieldValues() []Value { + return v.Fields +} + func formatComposite(typeID string, fields []Field, values []Value) string { preparedFields := make([]struct { Name string @@ -1738,6 +1818,18 @@ func (v Resource) String() string { return formatComposite(v.ResourceType.ID(), v.ResourceType.Fields, v.Fields) } +func (v Resource) GetFields() []Field { + if v.ResourceType == nil { + return nil + } + + return v.ResourceType.Fields +} + +func (v Resource) GetFieldValues() []Value { + return v.Fields +} + // Attachment type Attachment struct { @@ -1800,6 +1892,18 @@ func (v Attachment) String() string { return formatComposite(v.AttachmentType.ID(), v.AttachmentType.Fields, v.Fields) } +func (v Attachment) GetFields() []Field { + if v.AttachmentType == nil { + return nil + } + + return v.AttachmentType.Fields +} + +func (v Attachment) GetFieldValues() []Value { + return v.Fields +} + // Event type Event struct { @@ -1861,6 +1965,18 @@ func (v Event) String() string { return formatComposite(v.EventType.ID(), v.EventType.Fields, v.Fields) } +func (v Event) GetFields() []Field { + if v.EventType == nil { + return nil + } + + return v.EventType.Fields +} + +func (v Event) GetFieldValues() []Value { + return v.Fields +} + // Contract type Contract struct { @@ -1923,6 +2039,18 @@ func (v Contract) String() string { return formatComposite(v.ContractType.ID(), v.ContractType.Fields, v.Fields) } +func (v Contract) 
GetFields() []Field { + if v.ContractType == nil { + return nil + } + + return v.ContractType.Fields +} + +func (v Contract) GetFieldValues() []Value { + return v.Fields +} + // PathLink type PathLink struct { @@ -2286,6 +2414,18 @@ func (v Enum) String() string { return formatComposite(v.EnumType.ID(), v.EnumType.Fields, v.Fields) } +func (v Enum) GetFields() []Field { + if v.EnumType == nil { + return nil + } + + return v.EnumType.Fields +} + +func (v Enum) GetFieldValues() []Value { + return v.Fields +} + // Function type Function struct { FunctionType *FunctionType diff --git a/values_test.go b/values_test.go index 0ba975e323..d8a2182224 100644 --- a/values_test.go +++ b/values_test.go @@ -152,6 +152,11 @@ func newValueTestCases() map[string]valueTestCase { string: "128", expectedType: Word128Type{}, }, + "Word256": { + value: NewWord256(256), + string: "256", + expectedType: Word256Type{}, + }, "UFix64": { value: ufix64, string: "64.01000000", @@ -459,9 +464,15 @@ func TestNumberValue_ToBigEndianBytes(t *testing.T) { uint128LargeValueTestCase, _ := NewUInt128FromBig(new(big.Int).SetBytes([]byte{127, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255})) uint128MaxValue, _ := NewUInt128FromBig(sema.UInt128TypeMaxIntBig) + uint256LargeValueTestCase, _ := NewUInt256FromBig(new(big.Int).SetBytes([]byte{127, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255})) + uint256MaxValue, _ := NewUInt256FromBig(sema.UInt256TypeMaxIntBig) + word128LargeValueTestCase, _ := NewWord128FromBig(new(big.Int).SetBytes([]byte{127, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255})) word128MaxValue, _ := NewWord128FromBig(sema.Word128TypeMaxIntBig) + word256LargeValueTestCase, _ := NewWord256FromBig(new(big.Int).SetBytes([]byte{127, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255})) + word256MaxValue, _ := NewWord256FromBig(sema.Word256TypeMaxIntBig) + typeTests := map[string]map[NumberValue][]byte{ // Int* "Int": { @@ -590,12 +601,13 @@ func TestNumberValue_ToBigEndianBytes(t *testing.T) { uint128MaxValue: {255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255}, }, "UInt256": { - NewUInt256(0): {0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0}, - NewUInt256(42): {0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 42}, - NewUInt256(127): {0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 127}, - NewUInt256(128): {0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 128}, - NewUInt256(200): {0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 200}, - UInt256{sema.UInt256TypeMaxIntBig}: {255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255}, + NewUInt256(0): {0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0}, + NewUInt256(42): {0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 42}, + NewUInt256(127): {0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 127}, + NewUInt256(128): {0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 128}, + NewUInt256(200): {0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 200}, + uint256LargeValueTestCase: {127, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255}, + uint256MaxValue: {255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255}, }, // Word* "Word8": { @@ -634,6 +646,14 @@ func TestNumberValue_ToBigEndianBytes(t *testing.T) { word128LargeValueTestCase: {127, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255}, word128MaxValue: {255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255}, }, + "Word256": { + NewWord256(0): {0}, + NewWord256(42): {42}, + NewWord256(127): {127}, + NewWord256(200): {200}, + word256LargeValueTestCase: {127, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255}, + word256MaxValue: {255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255}, + }, // Fix* "Fix64": { Fix64(0): {0, 0, 0, 0, 0, 0, 0, 0}, @@ -833,6 +853,24 @@ func TestNewWord128FromBig(t *testing.T) { require.Error(t, err) } +func TestNewWord256FromBig(t *testing.T) { + t.Parallel() + + _, err := NewWord256FromBig(big.NewInt(1)) + require.NoError(t, err) + + belowMin := big.NewInt(-1) + _, err = NewWord256FromBig(belowMin) + require.Error(t, err) + + aboveMax := new(big.Int).Add( + sema.Word256TypeMaxIntBig, + big.NewInt(1), + ) + _, err = NewWord256FromBig(aboveMax) + require.Error(t, err) +} + func TestValue_Type(t *testing.T) { t.Parallel() @@ -897,3 +935,68 @@ func TestValue_Type(t *testing.T) { test(name, testCase) } } + +func TestValue_HasFields(t *testing.T) { + t.Parallel() + + test := func(name string, testCase valueTestCase) { + + t.Run(name, func(t *testing.T) { + value := testCase.value + switch value.(type) { + case Event, Struct, Contract, Enum, Resource, Attachment: + valueWithType := testCase.withType(value, testCase.exampleType) + assert.Implements(t, (*HasFields)(nil), valueWithType) + fieldedValueWithType := valueWithType.(HasFields) + assert.NotNil(t, fieldedValueWithType.GetFieldValues()) + assert.NotNil(t, fieldedValueWithType.GetFields()) + + fieldedValue := value.(HasFields) + + assert.Nil(t, fieldedValue.GetFields()) + } + }) + + } + + for name, testCase := range newValueTestCases() { + test(name, testCase) + } +} + +func TestEvent_GetFieldByName(t *testing.T) { + t.Parallel() + + simpleEvent := NewEvent( + []Value{ + NewInt(1), + String("foo"), + }, + ) + assert.Nil(t, GetFieldsMappedByName(simpleEvent)) + assert.Nil(t, GetFieldByName(simpleEvent, "a")) + + simpleEventWithType := simpleEvent.WithType(&EventType{ + Location: utils.TestLocation, + QualifiedIdentifier: "SimpleEvent", + Fields: []Field{ + { + Identifier: "a", + Type: IntType{}, + }, + { + Identifier: "b", + Type: StringType{}, + }, + }, + }) + + assert.Equal(t, NewInt(1), GetFieldByName(simpleEventWithType, "a").(Int)) + assert.Equal(t, String("foo"), GetFieldByName(simpleEventWithType, "b").(String)) + assert.Nil(t, GetFieldByName(simpleEventWithType, "c")) + + assert.Equal(t, map[string]Value{ + "a": NewInt(1), + "b": String("foo"), 
+ }, GetFieldsMappedByName(simpleEventWithType)) +} diff --git a/version.go b/version.go index d1f72bb1fa..ab5ca8fdf7 100644 --- a/version.go +++ b/version.go @@ -21,4 +21,4 @@ package cadence -const Version = "v0.38.0" +const Version = "v0.39.12"