diff --git a/.cargo/config.toml b/.cargo/config.toml new file mode 100644 index 000000000..b9533c52f --- /dev/null +++ b/.cargo/config.toml @@ -0,0 +1,2 @@ +[patch.crates-io] +cc = { git = "https://github.com/rust-lang/cc-rs", rev = "e5bbdfa" } diff --git a/.github/workflows/rust.yml b/.github/workflows/rust.yml index 6a418cd0b..b84aceea4 100644 --- a/.github/workflows/rust.yml +++ b/.github/workflows/rust.yml @@ -39,14 +39,15 @@ name: Rust # so you're not wasting money unless several cores are sitting idle for long. on: + # Relevant docs: + # - https://docs.github.com/en/repositories/configuring-branches-and-merges-in-your-repository/configuring-pull-request-merges/managing-a-merge-queue#how-merge-queues-work + # - https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#merge_group + merge_group: + types: ["checks_requested"] push: branches: ["nightly", "stable"] - paths-ignore: - - "**.md" pull_request: branches: ["nightly", "stable"] - paths-ignore: - - "**.md" env: CARGO_TERM_COLOR: always @@ -56,13 +57,14 @@ env: # Source: concurrency: group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }} - cancel-in-progress: true + # Except in `nightly` and `stable` branches! Any cancelled job will cause the + # CI run to fail, and we want to keep a clean history for major branches. + cancel-in-progress: ${{ (github.ref != 'refs/heads/nightly') && (github.ref != 'refs/heads/stable') }} jobs: check: name: check - runs-on: - group: 8-cores_32GB_Ubuntu Group + runs-on: buildjet-4vcpu-ubuntu-2204 timeout-minutes: 60 steps: - uses: actions/checkout@v3 @@ -71,6 +73,7 @@ jobs: run: rustup show - uses: Swatinem/rust-cache@v2 with: + cache-provider: "buildjet" shared-key: cargo-check save-if: ${{ github.ref == 'refs/heads/nightly' }} workspaces: | @@ -89,8 +92,7 @@ jobs: # building dependencies, only chceking them, so we can share caches # effectively. needs: check - runs-on: - group: 8-cores_32GB_Ubuntu Group + runs-on: buildjet-4vcpu-ubuntu-2204 timeout-minutes: 60 steps: - uses: actions/checkout@v3 @@ -101,6 +103,7 @@ jobs: uses: taiki-e/install-action@cargo-hack - uses: Swatinem/rust-cache@v2 with: + cache-provider: "buildjet" shared-key: cargo-check save-if: ${{ github.ref == 'refs/heads/nightly' }} workspaces: | @@ -110,8 +113,7 @@ jobs: - name: cargo hack run: make check-features test: - runs-on: - group: 8-cores_32GB_Ubuntu Group + runs-on: buildjet-4vcpu-ubuntu-2204 timeout-minutes: 60 steps: - uses: actions/checkout@v3 @@ -122,6 +124,7 @@ jobs: - uses: taiki-e/install-action@nextest - uses: Swatinem/rust-cache@v2 with: + cache-provider: "buildjet" shared-key: cargo-build save-if: ${{ github.ref == 'refs/heads/nightly' }} workspaces: | @@ -136,8 +139,7 @@ jobs: # `test` has already built dependencies, so we can share # caches (the profile is `dev` in both cases). 
needs: test - runs-on: - group: 8-cores_32GB_Ubuntu Group + runs-on: buildjet-8vcpu-ubuntu-2204 steps: - uses: actions/checkout@v3 - uses: rui314/setup-mold@v1 @@ -145,6 +147,7 @@ jobs: run: rustup show - uses: Swatinem/rust-cache@v2 with: + cache-provider: "buildjet" shared-key: cargo-build save-if: ${{ github.ref == 'refs/heads/nightly' }} workspaces: | @@ -163,7 +166,7 @@ jobs: SECONDS=0 while ((SECONDS <= 1200)) do - if curl -f -s -X POST -H "Content-Type: application/json" -d '{"jsonrpc":"2.0","method":"bank_supplyOf","params":["sov1zdwj8thgev2u3yyrrlekmvtsz4av4tp3m7dm5mx5peejnesga27svq9m72"],"id":1}' http://127.0.0.1:12345; then + if curl -f -s -X POST -H "Content-Type: application/json" -d '{"jsonrpc":"2.0","method":"bank_supplyOf","params":["sov16m8fxq0x5wc5aw75fx9rus2p7g2l22zf4re72c3m058g77cdjemsavg2ft"],"id":1}' http://127.0.0.1:12345; then echo "demo-rollup is up" exit 0 fi @@ -183,7 +186,7 @@ jobs: SECONDS=0 while ((SECONDS <= 300)) do - if curl -f -s -X POST -H "Content-Type: application/json" -d '{"jsonrpc":"2.0","method":"bank_supplyOf","params":["sov1zdwj8thgev2u3yyrrlekmvtsz4av4tp3m7dm5mx5peejnesga27svq9m72"],"id":1}' http://127.0.0.1:12345 | grep -q 1000; then + if curl -f -s -X POST -H "Content-Type: application/json" -d '{"jsonrpc":"2.0","method":"bank_supplyOf","params":["sov16m8fxq0x5wc5aw75fx9rus2p7g2l22zf4re72c3m058g77cdjemsavg2ft"],"id":1}' http://127.0.0.1:12345 | grep -q 1000; then echo "demo-rollup test succeeded" exit 0 fi @@ -200,8 +203,7 @@ jobs: # profile from the rest of the codebase, so caches can't be shared. check-demo-prover: name: check demo prover - runs-on: - group: 8-cores_32GB_Ubuntu Group + runs-on: buildjet-4vcpu-ubuntu-2204 timeout-minutes: 90 steps: - uses: actions/checkout@v3 @@ -210,6 +212,7 @@ jobs: run: rustup show - uses: Swatinem/rust-cache@v2 with: + cache-provider: "buildjet" save-if: ${{ github.ref == 'refs/heads/nightly' }} workspaces: | . @@ -225,8 +228,7 @@ jobs: exit 1 fi coverage: - runs-on: - group: 8-cores_32GB_Ubuntu Group + runs-on: buildjet-8vcpu-ubuntu-2204 timeout-minutes: 90 steps: - uses: actions/checkout@v3 @@ -241,6 +243,7 @@ jobs: uses: taiki-e/install-action@cargo-llvm-cov - uses: Swatinem/rust-cache@v2 with: + cache-provider: "buildjet" save-if: ${{ github.ref == 'refs/heads/nightly' }} workspaces: | . 
@@ -277,6 +280,7 @@ jobs: run: rustup show - uses: Swatinem/rust-cache@v2 with: + cache-provider: "buildjet" shared-key: cargo-check save-if: ${{ github.ref == 'refs/heads/nightly' }} workspaces: | diff --git a/.gitignore b/.gitignore index caf30d0b0..6cdeba8c5 100644 --- a/.gitignore +++ b/.gitignore @@ -3,6 +3,7 @@ .idea/ target/ +fuzz/Cargo.lock .DS_Store diff --git a/Cargo.lock b/Cargo.lock index e0dbfb8c1..59c5012a5 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -32,15 +32,6 @@ dependencies = [ "gimli 0.26.2", ] -[[package]] -name = "addr2line" -version = "0.19.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a76fd60b23679b7d19bd066031410fb7e458ccc5e958eb5c325888ce4baedc97" -dependencies = [ - "gimli 0.27.3", -] - [[package]] name = "addr2line" version = "0.20.0" @@ -99,6 +90,23 @@ dependencies = [ "memchr", ] +[[package]] +name = "allocator-api2" +version = "0.2.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0942ffc6dcaadf03badf6e6a2d0228460359d5e34b57ccdc720b7382dfbd5ec5" + +[[package]] +name = "alloy-rlp" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f938f00332d63a5b0ac687bd6f46d03884638948921d9f8b50c59563d421ae25" +dependencies = [ + "arrayvec 0.7.4", + "bytes", + "smol_str", +] + [[package]] name = "android-tzdata" version = "0.1.1" @@ -194,10 +202,143 @@ dependencies = [ "itertools 0.10.5", "proc-macro-error", "proc-macro2 1.0.66", - "quote 1.0.32", + "quote 1.0.33", + "syn 1.0.109", +] + +[[package]] +name = "arbitrary" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e2d098ff73c1ca148721f37baad5ea6a465a13f9573aba8641fbbbae8164a54e" +dependencies = [ + "derive_arbitrary", +] + +[[package]] +name = "ark-ff" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6b3235cc41ee7a12aaaf2c575a2ad7b46713a8a50bda2fc3b003a04845c05dd6" +dependencies = [ + "ark-ff-asm 0.3.0", + "ark-ff-macros 0.3.0", + "ark-serialize 0.3.0", + "ark-std 0.3.0", + "derivative", + "num-bigint 0.4.3", + "num-traits", + "paste", + "rustc_version 0.3.3", + "zeroize", +] + +[[package]] +name = "ark-ff" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec847af850f44ad29048935519032c33da8aa03340876d351dfab5660d2966ba" +dependencies = [ + "ark-ff-asm 0.4.2", + "ark-ff-macros 0.4.2", + "ark-serialize 0.4.2", + "ark-std 0.4.0", + "derivative", + "digest 0.10.7", + "itertools 0.10.5", + "num-bigint 0.4.3", + "num-traits", + "paste", + "rustc_version 0.4.0", + "zeroize", +] + +[[package]] +name = "ark-ff-asm" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "db02d390bf6643fb404d3d22d31aee1c4bc4459600aef9113833d17e786c6e44" +dependencies = [ + "quote 1.0.33", + "syn 1.0.109", +] + +[[package]] +name = "ark-ff-asm" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3ed4aa4fe255d0bc6d79373f7e31d2ea147bcf486cba1be5ba7ea85abdb92348" +dependencies = [ + "quote 1.0.33", + "syn 1.0.109", +] + +[[package]] +name = "ark-ff-macros" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "db2fd794a08ccb318058009eefdf15bcaaaaf6f8161eb3345f907222bac38b20" +dependencies = [ + "num-bigint 0.4.3", + "num-traits", + "quote 1.0.33", + "syn 1.0.109", +] + +[[package]] +name = "ark-ff-macros" +version = "0.4.2" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "7abe79b0e4288889c4574159ab790824d0033b9fdcb2a112a3182fac2e514565" +dependencies = [ + "num-bigint 0.4.3", + "num-traits", + "proc-macro2 1.0.66", + "quote 1.0.33", "syn 1.0.109", ] +[[package]] +name = "ark-serialize" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1d6c2b318ee6e10f8c2853e73a83adc0ccb88995aa978d8a3408d492ab2ee671" +dependencies = [ + "ark-std 0.3.0", + "digest 0.9.0", +] + +[[package]] +name = "ark-serialize" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "adb7b85a02b83d2f22f89bd5cac66c9c89474240cb6207cb1efc16d098e822a5" +dependencies = [ + "ark-std 0.4.0", + "digest 0.10.7", + "num-bigint 0.4.3", +] + +[[package]] +name = "ark-std" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1df2c09229cbc5a028b1d70e00fdb2acee28b1055dfb5ca73eea49c5a25c4e7c" +dependencies = [ + "num-traits", + "rand 0.8.5", +] + +[[package]] +name = "ark-std" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "94893f1e0c6eeab764ade8dc4c0db24caf4fe7cbbaafc0eba0a9030f447b5185" +dependencies = [ + "num-traits", + "rand 0.8.5", +] + [[package]] name = "array-bytes" version = "4.2.0" @@ -256,7 +397,7 @@ version = "1.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a3203e79f4dd9bdda415ed03cf14dae5a2bf775c683a00f94e9cd1faf0f596e5" dependencies = [ - "quote 1.0.32", + "quote 1.0.33", "syn 1.0.109", ] @@ -317,15 +458,15 @@ dependencies = [ "polling", "rustix 0.37.23", "slab", - "socket2", + "socket2 0.4.9", "waker-fn", ] [[package]] name = "async-lock" -version = "2.7.0" +version = "2.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fa24f727524730b077666307f2734b4a1a1c57acb79193127dcc8914d5242dd7" +checksum = "287272293e9d8c41773cec55e365490fe034813a2f172f502d6ddcf75b2f582b" dependencies = [ "event-listener", ] @@ -370,8 +511,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cc6dde6e4ed435a4c1ee4e73592f5ba9da2151af10076cc04858746af9352d09" dependencies = [ "proc-macro2 1.0.66", - "quote 1.0.32", - "syn 2.0.27", + "quote 1.0.33", + "syn 2.0.28", ] [[package]] @@ -382,7 +523,7 @@ checksum = "b6d7b9decdf35d8908a7e3ef02f64c5e9b1695e230154c0e8de3969142d9b94c" dependencies = [ "futures", "pharos", - "rustc_version", + "rustc_version 0.4.0", ] [[package]] @@ -428,7 +569,7 @@ checksum = "fee3da8ef1276b0bee5dd1c7258010d8fffd31801447323115a25560e1327b89" dependencies = [ "proc-macro-error", "proc-macro2 1.0.66", - "quote 1.0.32", + "quote 1.0.33", "syn 1.0.109", ] @@ -448,7 +589,7 @@ dependencies = [ "hex", "serde", "serde_json", - "sp-core 16.0.0", + "sp-core", "sp-keyring", "structopt", "subxt", @@ -464,14 +605,14 @@ dependencies = [ "derive_more", "futures", "hex", - "jsonrpsee 0.16.2", + "jsonrpsee 0.16.3", "num_enum 0.5.11", "parity-scale-codec", "scale-info", "schnorrkel", "serde", "serde-hex", - "sp-core 16.0.0", + "sp-core", "structopt", "subxt", "tokio", @@ -531,6 +672,16 @@ dependencies = [ "serde", ] +[[package]] +name = "bcs" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4bd3ffe8b19a604421a5d461d4a70346223e535903fbc3067138bddbebddcf77" +dependencies = [ + "serde", + "thiserror", +] + [[package]] name = "bech32" version = "0.7.3" @@ -575,11 +726,11 @@ dependencies = [ "peeking_take_while", "prettyplease 
0.2.12", "proc-macro2 1.0.66", - "quote 1.0.32", + "quote 1.0.33", "regex", "rustc-hash", "shlex", - "syn 2.0.27", + "syn 2.0.28", ] [[package]] @@ -718,7 +869,7 @@ dependencies = [ [[package]] name = "boa_ast" version = "0.17.0" -source = "git+https://github.com/boa-dev/boa#33e8c51fc644354ba5510e221257e3fb0d292ae2" +source = "git+https://github.com/boa-dev/boa#9665f8be3be60f475d816ca10430631f43d6c962" dependencies = [ "bitflags 2.3.3", "boa_interner", @@ -731,7 +882,7 @@ dependencies = [ [[package]] name = "boa_engine" version = "0.17.0" -source = "git+https://github.com/boa-dev/boa#33e8c51fc644354ba5510e221257e3fb0d292ae2" +source = "git+https://github.com/boa-dev/boa#9665f8be3be60f475d816ca10430631f43d6c962" dependencies = [ "bitflags 2.3.3", "boa_ast", @@ -769,17 +920,18 @@ dependencies = [ [[package]] name = "boa_gc" version = "0.17.0" -source = "git+https://github.com/boa-dev/boa#33e8c51fc644354ba5510e221257e3fb0d292ae2" +source = "git+https://github.com/boa-dev/boa#9665f8be3be60f475d816ca10430631f43d6c962" dependencies = [ "boa_macros", "boa_profiler", + "hashbrown 0.14.0", "thin-vec", ] [[package]] name = "boa_icu_provider" version = "0.17.0" -source = "git+https://github.com/boa-dev/boa#33e8c51fc644354ba5510e221257e3fb0d292ae2" +source = "git+https://github.com/boa-dev/boa#9665f8be3be60f475d816ca10430631f43d6c962" dependencies = [ "icu_collections", "icu_normalizer", @@ -792,7 +944,7 @@ dependencies = [ [[package]] name = "boa_interner" version = "0.17.0" -source = "git+https://github.com/boa-dev/boa#33e8c51fc644354ba5510e221257e3fb0d292ae2" +source = "git+https://github.com/boa-dev/boa#9665f8be3be60f475d816ca10430631f43d6c962" dependencies = [ "boa_gc", "boa_macros", @@ -807,18 +959,18 @@ dependencies = [ [[package]] name = "boa_macros" version = "0.17.0" -source = "git+https://github.com/boa-dev/boa#33e8c51fc644354ba5510e221257e3fb0d292ae2" +source = "git+https://github.com/boa-dev/boa#9665f8be3be60f475d816ca10430631f43d6c962" dependencies = [ "proc-macro2 1.0.66", - "quote 1.0.32", - "syn 2.0.27", + "quote 1.0.33", + "syn 2.0.28", "synstructure 0.13.0", ] [[package]] name = "boa_parser" version = "0.17.0" -source = "git+https://github.com/boa-dev/boa#33e8c51fc644354ba5510e221257e3fb0d292ae2" +source = "git+https://github.com/boa-dev/boa#9665f8be3be60f475d816ca10430631f43d6c962" dependencies = [ "bitflags 2.3.3", "boa_ast", @@ -838,7 +990,7 @@ dependencies = [ [[package]] name = "boa_profiler" version = "0.17.0" -source = "git+https://github.com/boa-dev/boa#33e8c51fc644354ba5510e221257e3fb0d292ae2" +source = "git+https://github.com/boa-dev/boa#9665f8be3be60f475d816ca10430631f43d6c962" [[package]] name = "bonsai-sdk" @@ -882,7 +1034,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "afb438156919598d2c7bad7e1c0adf3d26ed3840dbc010db1a882a65583ca2fb" dependencies = [ "proc-macro2 1.0.66", - "quote 1.0.32", + "quote 1.0.33", "syn 1.0.109", ] @@ -893,7 +1045,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "634205cc43f74a1b9046ef87c4540ebda95696ec0f315024860cad7c5b0f5ccd" dependencies = [ "proc-macro2 1.0.66", - "quote 1.0.32", + "quote 1.0.33", "syn 1.0.109", ] @@ -962,8 +1114,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fdde5c9cd29ebd706ce1b35600920a33550e402fc998a2e53ad3b42c3c47a192" dependencies = [ "proc-macro2 1.0.66", - "quote 1.0.32", - "syn 2.0.27", + "quote 1.0.33", + "syn 2.0.28", ] [[package]] @@ -1028,7 +1180,7 @@ checksum = 
"eee4243f1f26fc7a42710e7439c149e2b10b05472f88090acce52632f231a73a" dependencies = [ "camino", "cargo-platform", - "semver", + "semver 1.0.18", "serde", "serde_json", "thiserror", @@ -1042,11 +1194,45 @@ checksum = "37b2a672a2cb129a2e41c10b1224bb368f9f37a2b16b612598138befd7b37eb5" [[package]] name = "cc" -version = "1.0.79" +version = "1.0.82" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "50d30906286121d95be3d479533b458f87493b30a4b5f79a607db8f5d11aa91f" +checksum = "305fe645edc1442a0fa8b6726ba61d422798d37a52e12eaecf4b022ebbb88f01" dependencies = [ "jobserver", + "libc", +] + +[[package]] +name = "celestia" +version = "0.1.0" +dependencies = [ + "anyhow", + "async-trait", + "base64 0.21.2", + "bech32 0.9.1", + "borsh", + "hex", + "hex-literal 0.4.1", + "jsonrpsee 0.18.2", + "nmt-rs", + "postcard", + "proptest", + "prost", + "prost-build", + "prost-types", + "risc0-zkvm", + "risc0-zkvm-platform", + "serde", + "serde_json", + "sha2 0.10.7", + "sov-rollup-interface", + "tendermint", + "tendermint-proto", + "thiserror", + "tokio", + "tracing", + "wiremock", + "zk-cycle-macros", ] [[package]] @@ -1171,8 +1357,8 @@ checksum = "54a9bb5758fc5dfe728d1019941681eccaf0cf8a4189b692a0ee2f2ecf90a050" dependencies = [ "heck 0.4.1", "proc-macro2 1.0.66", - "quote 1.0.32", - "syn 2.0.27", + "quote 1.0.33", + "syn 2.0.28", ] [[package]] @@ -1190,14 +1376,14 @@ checksum = "67ba02a97a2bd10f4b59b25c7973101c79642302776489e030cd13cdab09ed15" [[package]] name = "codecs-derive" version = "0.1.0-alpha.4" -source = "git+https://github.com/paradigmxyz/reth#4ab924c5d361bbfdcdad9f997d16d67b4a1730b7" +source = "git+https://github.com/paradigmxyz/reth?rev=4ab924c5d361bbfdcdad9f997d16d67b4a1730b7#4ab924c5d361bbfdcdad9f997d16d67b4a1730b7" dependencies = [ "convert_case 0.6.0", "parity-scale-codec", "proc-macro2 1.0.66", - "quote 1.0.32", + "quote 1.0.33", "serde", - "syn 2.0.27", + "syn 2.0.28", ] [[package]] @@ -1351,15 +1537,6 @@ dependencies = [ "serde", ] -[[package]] -name = "cranelift-entity" -version = "0.95.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "40099d38061b37e505e63f89bab52199037a72b931ad4868d9089ff7268660b0" -dependencies = [ - "serde", -] - [[package]] name = "crc" version = "3.0.1" @@ -1573,6 +1750,34 @@ dependencies = [ "zeroize", ] +[[package]] +name = "curve25519-dalek" +version = "4.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f711ade317dd348950a9910f81c5947e3d8907ebd2b83f76203ff1807e6a2bc2" +dependencies = [ + "cfg-if", + "cpufeatures", + "curve25519-dalek-derive", + "digest 0.10.7", + "fiat-crypto", + "platforms", + "rustc_version 0.4.0", + "subtle", + "zeroize", +] + +[[package]] +name = "curve25519-dalek-derive" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "83fdaf97f4804dcebfa5862639bc9ce4121e82140bec2a987ac5140294865b5b" +dependencies = [ + "proc-macro2 1.0.66", + "quote 1.0.33", + "syn 2.0.28", +] + [[package]] name = "curve25519-dalek-ng" version = "4.1.1" @@ -1615,7 +1820,7 @@ dependencies = [ "fnv", "ident_case", "proc-macro2 1.0.66", - "quote 1.0.32", + "quote 1.0.33", "strsim 0.10.0", "syn 1.0.109", ] @@ -1629,9 +1834,9 @@ dependencies = [ "fnv", "ident_case", "proc-macro2 1.0.66", - "quote 1.0.32", + "quote 1.0.33", "strsim 0.10.0", - "syn 2.0.27", + "syn 2.0.28", ] [[package]] @@ -1641,7 +1846,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"a4aab4dbc9f7611d8b55048a3a16d2d010c2c8334e46304b40ac1cc14bf3b48e" dependencies = [ "darling_core 0.14.4", - "quote 1.0.32", + "quote 1.0.33", "syn 1.0.109", ] @@ -1652,8 +1857,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "836a9bbc7ad63342d6d6e7b815ccab164bc77a2d95d84bc3117a8c0d5c98e2d5" dependencies = [ "darling_core 0.20.3", - "quote 1.0.32", - "syn 2.0.27", + "quote 1.0.33", + "syn 2.0.28", ] [[package]] @@ -1700,7 +1905,10 @@ version = "0.1.0" dependencies = [ "anyhow", "borsh", + "demo-nft-module", + "jsonrpsee 0.18.2", "serde", + "sov-data-generators", "sov-modules-api", "sov-rollup-interface", "sov-state", @@ -1726,6 +1934,7 @@ dependencies = [ "borsh", "clap 4.3.19", "const-rollup-config", + "demo-stf", "hex", "jsonrpsee 0.18.2", "rand 0.8.5", @@ -1733,7 +1942,10 @@ dependencies = [ "serde_json", "sov-accounts", "sov-bank", - "sov-election", + "sov-blob-storage", + "sov-chain-state", + "sov-cli", + "sov-data-generators", "sov-evm", "sov-modules-api", "sov-modules-stf-template", @@ -1759,6 +1971,15 @@ dependencies = [ "zeroize", ] +[[package]] +name = "deranged" +version = "0.3.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7684a49fb1af197853ef7b2ee694bc1f5b4179556f1e5710e1760c5db6f5e929" +dependencies = [ + "serde", +] + [[package]] name = "derivative" version = "2.2.0" @@ -1766,10 +1987,21 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fcc3dd5e9e9c0b295d6e1e4d811fb6f157d5ffd784b8d202fc62eac8035a770b" dependencies = [ "proc-macro2 1.0.66", - "quote 1.0.32", + "quote 1.0.33", "syn 1.0.109", ] +[[package]] +name = "derive_arbitrary" +version = "1.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "53e0efad4403bfc52dc201159c4b842a246a14b98c64b55dfd0f2d89729dfeb8" +dependencies = [ + "proc-macro2 1.0.66", + "quote 1.0.33", + "syn 2.0.28", +] + [[package]] name = "derive_more" version = "0.99.17" @@ -1778,8 +2010,8 @@ checksum = "4fb810d30a7c1953f91334de7244731fc3f3c10d7fe163338a35b9f640960321" dependencies = [ "convert_case 0.4.0", "proc-macro2 1.0.66", - "quote 1.0.32", - "rustc_version", + "quote 1.0.33", + "rustc_version 0.4.0", "syn 1.0.109", ] @@ -1868,8 +2100,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "487585f4d0c6655fe74905e2504d8ad6908e4db67f744eb140876906c2f3175d" dependencies = [ "proc-macro2 1.0.66", - "quote 1.0.32", - "syn 2.0.27", + "quote 1.0.33", + "syn 2.0.28", ] [[package]] @@ -1901,7 +2133,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "558e40ea573c374cf53507fd240b7ee2f5477df7cfebdb97323ec61c719399c5" dependencies = [ "proc-macro2 1.0.66", - "quote 1.0.32", + "quote 1.0.33", "syn 1.0.109", ] @@ -1926,7 +2158,7 @@ version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0e217c6c1435ebf9b88662354589d339192b8eaf506edd22951e75e045c8e8bd" dependencies = [ - "quote 1.0.32", + "quote 1.0.33", "syn 1.0.109", ] @@ -1960,6 +2192,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5fb04eee5d9d907f29e80ee6b0e78f7e2c82342c63e3580d8c4f69d9d5aad963" dependencies = [ "pkcs8", + "serde", "signature 2.1.0", ] @@ -1984,12 +2217,24 @@ checksum = "c762bae6dcaf24c4c84667b8579785430908723d5c889f469d76a41d59cc7a9d" dependencies = [ "curve25519-dalek 3.2.0", "ed25519 1.5.3", - "rand 0.7.3", - "serde", "sha2 0.9.9", "zeroize", ] +[[package]] +name = "ed25519-dalek" +version = "2.0.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "7277392b266383ef8396db7fdeb1e77b6c52fed775f5df15bb24f35b72156980" +dependencies = [ + "curve25519-dalek 4.0.0", + "ed25519 2.2.1", + "rand_core 0.6.4", + "serde", + "sha2 0.10.7", + "zeroize", +] + [[package]] name = "ed25519-zebra" version = "3.1.0" @@ -2012,7 +2257,7 @@ checksum = "079044df30bb07de7d846d41a184c4b00e66ebdac93ee459253474f3a47e50ae" dependencies = [ "enum-ordinalize", "proc-macro2 1.0.66", - "quote 1.0.32", + "quote 1.0.33", "syn 1.0.109", ] @@ -2098,8 +2343,8 @@ dependencies = [ "num-bigint 0.4.3", "num-traits", "proc-macro2 1.0.66", - "quote 1.0.32", - "syn 2.0.27", + "quote 1.0.33", + "syn 2.0.28", ] [[package]] @@ -2109,8 +2354,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b893c4eb2dc092c811165f84dc7447fae16fb66521717968c34c509b39b1a5c5" dependencies = [ "proc-macro2 1.0.66", - "quote 1.0.32", - "syn 2.0.27", + "quote 1.0.33", + "syn 2.0.28", ] [[package]] @@ -2136,9 +2381,9 @@ dependencies = [ [[package]] name = "errno" -version = "0.3.1" +version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4bcfec3a70f97c962c307b2d2c56e358cf1d00b558d74262b5f929ee8cc7e73a" +checksum = "6b30f669a7961ef1631673d2766cc92f52d64f7ef354d4fe0ddfd30ed52f0f4f" dependencies = [ "errno-dragonfly", "libc", @@ -2287,12 +2532,12 @@ dependencies = [ "hex", "prettyplease 0.2.12", "proc-macro2 1.0.66", - "quote 1.0.32", + "quote 1.0.33", "regex", "reqwest", "serde", "serde_json", - "syn 2.0.27", + "syn 2.0.28", "toml 0.7.6", "walkdir", ] @@ -2308,9 +2553,9 @@ dependencies = [ "ethers-core", "hex", "proc-macro2 1.0.66", - "quote 1.0.32", + "quote 1.0.33", "serde_json", - "syn 2.0.27", + "syn 2.0.28", ] [[package]] @@ -2336,7 +2581,7 @@ dependencies = [ "serde", "serde_json", "strum 0.25.0", - "syn 2.0.27", + "syn 2.0.28", "tempfile", "thiserror", "tiny-keccak", @@ -2351,7 +2596,7 @@ checksum = "22b3a8269d3df0ed6364bc05b4735b95f4bf830ce3aef87d5e760fb0e93e5b91" dependencies = [ "ethers-core", "reqwest", - "semver", + "semver 1.0.18", "serde", "serde_json", "thiserror", @@ -2459,7 +2704,7 @@ dependencies = [ "path-slash", "rayon", "regex", - "semver", + "semver 1.0.18", "serde", "serde_json", "solang-parser", @@ -2521,6 +2766,17 @@ version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6999dc1837253364c2ebb0704ba97994bd874e8f195d665c50b7548f6ea92764" +[[package]] +name = "fastrlp" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "139834ddba373bbdd213dffe02c8d110508dcf1726c2be27e8d1f7d7e1856418" +dependencies = [ + "arrayvec 0.7.4", + "auto_impl", + "bytes", +] + [[package]] name = "ff" version = "0.13.0" @@ -2546,10 +2802,16 @@ dependencies = [ "num-integer", "num-traits", "proc-macro2 1.0.66", - "quote 1.0.32", + "quote 1.0.33", "syn 1.0.109", ] +[[package]] +name = "fiat-crypto" +version = "0.1.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e825f6987101665dea6ec934c09ec6d721de7bc1bf92248e1d5810c8cd636b77" + [[package]] name = "fixed-hash" version = "0.8.0" @@ -2727,8 +2989,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "89ca545a94061b6365f2c7355b4b32bd20df3ff95f02da9329b34ccc3bd6ee72" dependencies = [ "proc-macro2 1.0.66", - "quote 1.0.32", - "syn 2.0.27", + "quote 1.0.33", + "syn 2.0.28", ] [[package]] @@ -2839,11 +3101,6 @@ name = "gimli" version = "0.27.3" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "b6c80984affa11d98d1b88b66ac8853f143217b399d3c74116778ff8fdb4ed2e" -dependencies = [ - "fallible-iterator", - "indexmap 1.9.3", - "stable_deref_trait", -] [[package]] name = "glob" @@ -2853,9 +3110,9 @@ checksum = "d2fabcfbdc87f4758337ca535fb41a6d701b65693ce38287d856d1674551ec9b" [[package]] name = "globset" -version = "0.4.12" +version = "0.4.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aca8bbd8e0707c1887a8bbb7e6b40e228f251ff5d62c8220a4a7a53c73aff006" +checksum = "759c97c1e17c55525b57192c06a267cda0ac5210b222d6b82189a2338fa1c13d" dependencies = [ "aho-corasick", "bstr", @@ -2951,12 +3208,6 @@ version = "0.15.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d23bd4e7b5eda0d0f3a307e8b381fdc8ba9000f26fbe912250c0a4cc3956364a" -[[package]] -name = "hash-db" -version = "0.16.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8e7d7786361d7425ae2fe4f9e407eb0efaa0840f5212d109cc018c40c35c6ab4" - [[package]] name = "hash256-std-hasher" version = "0.15.2" @@ -2999,6 +3250,10 @@ name = "hashbrown" version = "0.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2c6201b9ff9fd90a5a3bac2e56a830d0caa509576f0e503818ee82c181b3437a" +dependencies = [ + "ahash 0.8.3", + "allocator-api2", +] [[package]] name = "hashers" @@ -3017,7 +3272,7 @@ checksum = "db04bc24a18b9ea980628ecf00e6c0264f3c1426dac36c00cb49b6fbad8b0743" dependencies = [ "atomic-polyfill 0.1.11", "hash32", - "rustc_version", + "rustc_version 0.4.0", "serde", "spin 0.9.8", "stable_deref_trait", @@ -3195,29 +3450,13 @@ dependencies = [ "httpdate", "itoa", "pin-project-lite", - "socket2", + "socket2 0.4.9", "tokio", "tower-service", "tracing", "want", ] -[[package]] -name = "hyper-rustls" -version = "0.23.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1788965e61b367cd03a62950836d5cd41560c3577d90e40e0819373194d1661c" -dependencies = [ - "http", - "hyper", - "log", - "rustls 0.20.8", - "rustls-native-certs", - "tokio", - "tokio-rustls 0.23.4", - "webpki-roots 0.22.6", -] - [[package]] name = "hyper-rustls" version = "0.24.1" @@ -3228,10 +3467,11 @@ dependencies = [ "http", "hyper", "log", - "rustls 0.21.5", + "rustls", "rustls-native-certs", "tokio", - "tokio-rustls 0.24.1", + "tokio-rustls", + "webpki-roots 0.23.1", ] [[package]] @@ -3365,7 +3605,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "dd8b728b9421e93eff1d9f8681101b78fa745e0748c95c655c83f337044a7e10" dependencies = [ "proc-macro2 1.0.66", - "quote 1.0.32", + "quote 1.0.33", "syn 1.0.109", ] @@ -3419,7 +3659,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "11d7a9f6330b71fea57921c9b61c47ee6e84f72d394754eff6163ae67e7395eb" dependencies = [ "proc-macro2 1.0.66", - "quote 1.0.32", + "quote 1.0.33", "syn 1.0.109", ] @@ -3439,7 +3679,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b139284b5cf57ecfa712bcc66950bb635b31aff41c188e8a4cfc758eca374a3f" dependencies = [ "proc-macro2 1.0.66", - "quote 1.0.32", + "quote 1.0.33", ] [[package]] @@ -3509,10 +3749,15 @@ version = "0.1.0" dependencies = [ "anyhow", "borsh", + "sov-chain-state", + "sov-data-generators", "sov-modules-api", + "sov-modules-macros", + "sov-modules-stf-template", "sov-rollup-interface", "sov-schema-db", "sov-state", + "sov-value-setter", "tempfile", ] @@ -3546,7 +3791,7 @@ source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "cb0889898416213fab133e1d33a0e5858a48177452750691bde3666d0fdbaf8b" dependencies = [ "hermit-abi 0.3.2", - "rustix 0.38.4", + "rustix 0.38.7", "windows-sys 0.48.0", ] @@ -3576,12 +3821,13 @@ checksum = "af150ab688ff2122fcef229be89cb50dd66af9e01a4ff320cc137eecc9bacc38" [[package]] name = "jmt" -version = "0.6.0" +version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e1a302f0defd323b833c9848c20ab40c3156128f50d7bf8eebeed2ef58167258" +checksum = "9e49c5d2c13e15f77f22cee3df3dc822b46051b217112035d72687cb57a9cbde" dependencies = [ "anyhow", "borsh", + "digest 0.10.7", "hashbrown 0.13.2", "hex", "ics23", @@ -3615,17 +3861,17 @@ dependencies = [ [[package]] name = "jsonrpsee" -version = "0.16.2" +version = "0.16.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7d291e3a5818a2384645fd9756362e6d89cf0541b0b916fa7702ea4a9833608e" +checksum = "367a292944c07385839818bb71c8d76611138e2dedb0677d035b8da21d29c78b" dependencies = [ - "jsonrpsee-client-transport", - "jsonrpsee-core 0.16.2", - "jsonrpsee-http-client 0.16.2", - "jsonrpsee-proc-macros 0.16.2", - "jsonrpsee-types 0.16.2", - "jsonrpsee-wasm-client", - "jsonrpsee-ws-client", + "jsonrpsee-client-transport 0.16.3", + "jsonrpsee-core 0.16.3", + "jsonrpsee-http-client 0.16.3", + "jsonrpsee-proc-macros 0.16.3", + "jsonrpsee-types 0.16.3", + "jsonrpsee-wasm-client 0.16.3", + "jsonrpsee-ws-client 0.16.3", "tracing", ] @@ -3635,19 +3881,22 @@ version = "0.18.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1822d18e4384a5e79d94dc9e4d1239cfa9fad24e55b44d2efeff5b394c9fece4" dependencies = [ + "jsonrpsee-client-transport 0.18.2", "jsonrpsee-core 0.18.2", "jsonrpsee-http-client 0.18.2", "jsonrpsee-proc-macros 0.18.2", "jsonrpsee-server", "jsonrpsee-types 0.18.2", + "jsonrpsee-wasm-client 0.18.2", + "jsonrpsee-ws-client 0.18.2", "tracing", ] [[package]] name = "jsonrpsee-client-transport" -version = "0.16.2" +version = "0.16.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "965de52763f2004bc91ac5bcec504192440f0b568a5d621c59d9dbd6f886c3fb" +checksum = "c8b3815d9f5d5de348e5f162b316dc9cdf4548305ebb15b4eb9328e66cf27d7a" dependencies = [ "anyhow", "futures-channel", @@ -3655,24 +3904,46 @@ dependencies = [ "futures-util", "gloo-net", "http", - "jsonrpsee-core 0.16.2", - "jsonrpsee-types 0.16.2", + "jsonrpsee-core 0.16.3", + "jsonrpsee-types 0.16.3", + "pin-project", + "rustls-native-certs", + "soketto", + "thiserror", + "tokio", + "tokio-rustls", + "tokio-util", + "tracing", + "webpki-roots 0.25.2", +] + +[[package]] +name = "jsonrpsee-client-transport" +version = "0.18.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "11aa5766d5c430b89cb26a99b88f3245eb91534be8126102cea9e45ee3891b22" +dependencies = [ + "futures-channel", + "futures-util", + "gloo-net", + "http", + "jsonrpsee-core 0.18.2", "pin-project", "rustls-native-certs", "soketto", "thiserror", "tokio", - "tokio-rustls 0.23.4", + "tokio-rustls", "tokio-util", "tracing", - "webpki-roots 0.22.6", + "webpki-roots 0.23.1", ] [[package]] name = "jsonrpsee-core" -version = "0.16.2" +version = "0.16.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a4e70b4439a751a5de7dd5ed55eacff78ebf4ffe0fc009cb1ebb11417f5b536b" +checksum = "2b5dde66c53d6dcdc8caea1874a45632ec0fcf5b437789f1e45766a1512ce803" dependencies = [ "anyhow", "async-lock", @@ -3682,7 +3953,7 @@ 
dependencies = [ "futures-timer", "futures-util", "hyper", - "jsonrpsee-types 0.16.2", + "jsonrpsee-types 0.16.3", "rustc-hash", "serde", "serde_json", @@ -3699,8 +3970,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "64c6832a55f662b5a6ecc844db24b8b9c387453f923de863062c60ce33d62b81" dependencies = [ "anyhow", + "async-lock", "async-trait", "beef", + "futures-timer", "futures-util", "globset", "hyper", @@ -3713,20 +3986,22 @@ dependencies = [ "soketto", "thiserror", "tokio", + "tokio-stream", "tracing", + "wasm-bindgen-futures", ] [[package]] name = "jsonrpsee-http-client" -version = "0.16.2" +version = "0.16.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cc345b0a43c6bc49b947ebeb936e886a419ee3d894421790c969cc56040542ad" +checksum = "7e5f9fabdd5d79344728521bb65e3106b49ec405a78b66fbff073b72b389fa43" dependencies = [ "async-trait", "hyper", - "hyper-rustls 0.23.2", - "jsonrpsee-core 0.16.2", - "jsonrpsee-types 0.16.2", + "hyper-rustls", + "jsonrpsee-core 0.16.3", + "jsonrpsee-types 0.16.3", "rustc-hash", "serde", "serde_json", @@ -3743,7 +4018,7 @@ checksum = "1705c65069729e3dccff6fd91ee431d5d31cabcf00ce68a62a2c6435ac713af9" dependencies = [ "async-trait", "hyper", - "hyper-rustls 0.24.1", + "hyper-rustls", "jsonrpsee-core 0.18.2", "jsonrpsee-types 0.18.2", "serde", @@ -3756,14 +4031,14 @@ dependencies = [ [[package]] name = "jsonrpsee-proc-macros" -version = "0.16.2" +version = "0.16.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "baa6da1e4199c10d7b1d0a6e5e8bd8e55f351163b6f4b3cbb044672a69bd4c1c" +checksum = "44e8ab85614a08792b9bff6c8feee23be78c98d0182d4c622c05256ab553892a" dependencies = [ "heck 0.4.1", "proc-macro-crate 1.3.1", "proc-macro2 1.0.66", - "quote 1.0.32", + "quote 1.0.33", "syn 1.0.109", ] @@ -3776,7 +4051,7 @@ dependencies = [ "heck 0.4.1", "proc-macro-crate 1.3.1", "proc-macro2 1.0.66", - "quote 1.0.32", + "quote 1.0.33", "syn 1.0.109", ] @@ -3802,9 +4077,9 @@ dependencies = [ [[package]] name = "jsonrpsee-types" -version = "0.16.2" +version = "0.16.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5bd522fe1ce3702fd94812965d7bb7a3364b1c9aba743944c5a00529aae80f8c" +checksum = "245ba8e5aa633dd1c1e4fae72bce06e71f42d34c14a2767c6b4d173b57bee5e5" dependencies = [ "anyhow", "beef", @@ -3830,25 +4105,48 @@ dependencies = [ [[package]] name = "jsonrpsee-wasm-client" -version = "0.16.2" +version = "0.16.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "18e5df77c8f625d36e4cfb583c5a674eccebe32403fcfe42f7ceff7fac9324dd" +dependencies = [ + "jsonrpsee-client-transport 0.16.3", + "jsonrpsee-core 0.16.3", + "jsonrpsee-types 0.16.3", +] + +[[package]] +name = "jsonrpsee-wasm-client" +version = "0.18.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "34e6ea7c6d862e60f8baebd946c037b70c6808a4e4e31e792a4029184e3ce13a" +dependencies = [ + "jsonrpsee-client-transport 0.18.2", + "jsonrpsee-core 0.18.2", + "jsonrpsee-types 0.18.2", +] + +[[package]] +name = "jsonrpsee-ws-client" +version = "0.16.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a77310456f43c6c89bcba1f6b2fc2a28300da7c341f320f5128f8c83cc63232d" +checksum = "4e1b3975ed5d73f456478681a417128597acd6a2487855fdb7b4a3d4d195bf5e" dependencies = [ - "jsonrpsee-client-transport", - "jsonrpsee-core 0.16.2", - "jsonrpsee-types 0.16.2", + "http", + "jsonrpsee-client-transport 0.16.3", + "jsonrpsee-core 0.16.3", + 
"jsonrpsee-types 0.16.3", ] [[package]] name = "jsonrpsee-ws-client" -version = "0.16.2" +version = "0.18.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0b83daeecfc6517cfe210df24e570fb06213533dfb990318fae781f4c7119dd9" +checksum = "a64b2589680ba1ad7863f279cd2d5083c1dc0a7c0ea959d22924553050f8ab9f" dependencies = [ "http", - "jsonrpsee-client-transport", - "jsonrpsee-core 0.16.2", - "jsonrpsee-types 0.16.2", + "jsonrpsee-client-transport 0.18.2", + "jsonrpsee-core 0.18.2", + "jsonrpsee-types 0.18.2", ] [[package]] @@ -3865,36 +4163,6 @@ dependencies = [ "simple_asn1", ] -[[package]] -name = "jupiter" -version = "0.1.0" -dependencies = [ - "anyhow", - "async-trait", - "base64 0.21.2", - "bech32 0.9.1", - "borsh", - "hex", - "hex-literal 0.4.1", - "jsonrpsee 0.16.2", - "nmt-rs", - "postcard", - "proptest", - "prost", - "prost-build", - "prost-types", - "serde", - "serde_json", - "sha2 0.10.7", - "sov-rollup-interface", - "tendermint", - "tendermint-proto", - "thiserror", - "tokio", - "tracing", - "wiremock", -] - [[package]] name = "k256" version = "0.13.1" @@ -3973,7 +4241,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8edfc11b8f56ce85e207e62ea21557cfa09bb24a8f6b04ae181b086ff8611c22" dependencies = [ "proc-macro2 1.0.66", - "quote 1.0.32", + "quote 1.0.33", "regex", "syn 1.0.109", ] @@ -4104,9 +4372,9 @@ checksum = "ef53942eb7bf7ff43a617b3e2c1c4a5ecf5944a7c1bc12d7ee39bbb15e5c1519" [[package]] name = "linux-raw-sys" -version = "0.4.3" +version = "0.4.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "09fc20d2ca12cb9f044c93e3bd6d32d523e6e2ec3db4f7b2939cd99026ecd3f0" +checksum = "57bcfdad1b858c2db7c38303a6d2ad4dfaf5eb53dfeb0910128b2c26d6158503" [[package]] name = "litemap" @@ -4161,6 +4429,15 @@ dependencies = [ "regex-automata 0.1.10", ] +[[package]] +name = "matchers" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8263075bb86c5a1b1427b5ae862e8889656f126e9f77c484496e8b47cf5c5558" +dependencies = [ + "regex-automata 0.1.10", +] + [[package]] name = "matrixmultiply" version = "0.3.7" @@ -4210,15 +4487,6 @@ dependencies = [ "autocfg", ] -[[package]] -name = "memoffset" -version = "0.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d61c719bcfbcf5d62b3a09efa6088de8c54bc0bfcd3ea7ae39fcc186108b8de1" -dependencies = [ - "autocfg", -] - [[package]] name = "memoffset" version = "0.9.0" @@ -4234,7 +4502,7 @@ version = "0.31.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5e0c7cba9ce19ac7ffd2053ac9f49843bbd3f4318feedfd74e85c19d5fb0ba66" dependencies = [ - "hash-db 0.15.2", + "hash-db", "hashbrown 0.12.3", ] @@ -4274,7 +4542,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "731f8ecebd9f3a4aa847dfe75455e4757a45da40a7793d2f0b1f9b6ed18b23f3" dependencies = [ "proc-macro2 1.0.66", - "quote 1.0.32", + "quote 1.0.33", "syn 1.0.109", ] @@ -4333,7 +4601,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5a7d5f7076603ebc68de2dc6a650ec331a062a13abaa346975be747bbfa4b789" dependencies = [ "proc-macro2 1.0.66", - "quote 1.0.32", + "quote 1.0.33", "syn 1.0.109", ] @@ -4343,6 +4611,7 @@ version = "0.1.0" dependencies = [ "anyhow", "borsh", + "module-template", "schemars", "serde", "serde_json", @@ -4494,7 +4763,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"876a53fff98e03a936a674b29568b0e605f06b29372c2489ff4de23f1949743d" dependencies = [ "proc-macro2 1.0.66", - "quote 1.0.32", + "quote 1.0.33", "syn 1.0.109", ] @@ -4505,8 +4774,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9e6a0fd4f737c707bd9086cc16c925f294943eb62eb71499e9fd4cf71f8b9f4e" dependencies = [ "proc-macro2 1.0.66", - "quote 1.0.32", - "syn 2.0.27", + "quote 1.0.33", + "syn 2.0.28", ] [[package]] @@ -4598,7 +4867,7 @@ checksum = "dcbff9bc912032c62bf65ef1d5aea88983b420f4f839db1e9b0c281a25c9c799" dependencies = [ "proc-macro-crate 1.3.1", "proc-macro2 1.0.66", - "quote 1.0.32", + "quote 1.0.33", "syn 1.0.109", ] @@ -4610,8 +4879,8 @@ checksum = "96667db765a921f7b295ffee8b60472b686a51d4f21c2ee4ffdb94c7013b65a6" dependencies = [ "proc-macro-crate 1.3.1", "proc-macro2 1.0.66", - "quote 1.0.32", - "syn 2.0.27", + "quote 1.0.33", + "syn 2.0.28", ] [[package]] @@ -4635,18 +4904,6 @@ dependencies = [ "memchr", ] -[[package]] -name = "object" -version = "0.30.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "03b4680b86d9cfafba8fc491dc9b6df26b68cf40e9e6cd73909194759a63c385" -dependencies = [ - "crc32fast", - "hashbrown 0.13.2", - "indexmap 1.9.3", - "memchr", -] - [[package]] name = "object" version = "0.31.1" @@ -4705,15 +4962,15 @@ checksum = "003b2be5c6c53c1cfeb0a238b8a1c3915cd410feb684457a36c10038f764bb1c" dependencies = [ "bytes", "proc-macro2 1.0.66", - "quote 1.0.32", + "quote 1.0.33", "syn 1.0.109", ] [[package]] name = "openssl" -version = "0.10.55" +version = "0.10.56" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "345df152bc43501c5eb9e4654ff05f794effb78d4efe3d53abc158baddc0703d" +checksum = "729b745ad4a5575dd06a3e1af1414bd330ee561c01b3899eb584baeaa8def17e" dependencies = [ "bitflags 1.3.2", "cfg-if", @@ -4731,8 +4988,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c" dependencies = [ "proc-macro2 1.0.66", - "quote 1.0.32", - "syn 2.0.27", + "quote 1.0.33", + "syn 2.0.28", ] [[package]] @@ -4743,9 +5000,9 @@ checksum = "ff011a302c396a5197692431fc1948019154afc178baf7d8e37367442a4601cf" [[package]] name = "openssl-sys" -version = "0.9.90" +version = "0.9.91" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "374533b0e45f3a7ced10fcaeccca020e66656bc03dac384f852e4e5a7a8104a6" +checksum = "866b5f16f90776b9bb8dc1e1802ac6f0513de3a7a7465867bfbc563dc737faac" dependencies = [ "cc", "libc", @@ -4798,7 +5055,7 @@ checksum = "2a296c3079b5fefbc499e1de58dc26c09b1b9a5952d26694ee89f04a43ebbb3e" dependencies = [ "proc-macro-crate 1.3.1", "proc-macro2 1.0.66", - "quote 1.0.32", + "quote 1.0.33", "syn 1.0.109", ] @@ -4947,6 +5204,16 @@ version = "2.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9b2a4787296e9989611394c33f193f676704af1686e70b8f8033ab5ba9a35a94" +[[package]] +name = "pest" +version = "2.7.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1acb4a4365a13f749a93f1a094a7805e5cfa0955373a9de860d962eaa3a5fe5a" +dependencies = [ + "thiserror", + "ucd-trie", +] + [[package]] name = "petgraph" version = "0.6.3" @@ -4964,7 +5231,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e9567389417feee6ce15dd6527a8a1ecac205ef62c2932bcf3d9f6fc5b78b414" dependencies = [ "futures", - "rustc_version", + "rustc_version 0.4.0", ] [[package]] @@ -4996,8 +5263,8 @@ dependencies = [ "phf_generator", 
"phf_shared 0.11.2", "proc-macro2 1.0.66", - "quote 1.0.32", - "syn 2.0.27", + "quote 1.0.33", + "syn 2.0.28", ] [[package]] @@ -5020,29 +5287,29 @@ dependencies = [ [[package]] name = "pin-project" -version = "1.1.2" +version = "1.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "030ad2bc4db10a8944cb0d837f158bdfec4d4a4873ab701a95046770d11f8842" +checksum = "fda4ed1c6c173e3fc7a83629421152e01d7b1f9b7f65fb301e490e8cfc656422" dependencies = [ "pin-project-internal", ] [[package]] name = "pin-project-internal" -version = "1.1.2" +version = "1.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ec2e072ecce94ec471b13398d5402c188e76ac03cf74dd1a975161b23a3f6d9c" +checksum = "4359fd9c9171ec6e8c62926d6faaf553a8dc3f64e1507e76da7911b4f6a04405" dependencies = [ "proc-macro2 1.0.66", - "quote 1.0.32", - "syn 2.0.27", + "quote 1.0.33", + "syn 2.0.28", ] [[package]] name = "pin-project-lite" -version = "0.2.10" +version = "0.2.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4c40d25201921e5ff0c862a505c6557ea88568a4e3ace775ab55e93f2f4f9d57" +checksum = "2c516611246607d0c04186886dbb3a754368ef82c79e9827a802c6d836dd111c" [[package]] name = "pin-utils" @@ -5075,6 +5342,12 @@ dependencies = [ "crunchy", ] +[[package]] +name = "platforms" +version = "3.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4503fa043bf02cee09a9582e9554b4c6403b2ef55e4612e96561d294419429f8" + [[package]] name = "plotters" version = "0.3.5" @@ -5131,14 +5404,14 @@ version = "0.3.20" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e30165d31df606f5726b090ec7592c308a0eaf61721ff64c9a3018e344a8753e" dependencies = [ - "portable-atomic 1.4.1", + "portable-atomic 1.4.2", ] [[package]] name = "portable-atomic" -version = "1.4.1" +version = "1.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "edc55135a600d700580e406b4de0d59cb9ad25e344a3a091a97ded2622ec4ec6" +checksum = "f32154ba0af3a075eefa1eda8bb414ee928f62303a54ea85b8d6638ff1a6ee9e" [[package]] name = "postcard" @@ -5170,6 +5443,7 @@ dependencies = [ "anyhow", "async-trait", "avail-subxt", + "borsh", "bytes", "parity-scale-codec", "primitive-types", @@ -5177,7 +5451,9 @@ dependencies = [ "serde", "serde_json", "sov-rollup-interface", - "sp-core 21.0.0", + "sp-core", + "sp-core-hashing 10.0.0", + "sp-keyring", "subxt", "thiserror", "tokio", @@ -5202,7 +5478,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6c64d9ba0963cdcea2e1b2230fbae2bab30eb25a174be395c41e764bfb65dd62" dependencies = [ "proc-macro2 1.0.66", - "syn 2.0.27", + "syn 2.0.28", ] [[package]] @@ -5260,7 +5536,7 @@ checksum = "da25490ff9892aab3fcf7c36f08cfb902dd3e71ca0f9f9517bea02a73a5ce38c" dependencies = [ "proc-macro-error-attr", "proc-macro2 1.0.66", - "quote 1.0.32", + "quote 1.0.33", "syn 1.0.109", "version_check", ] @@ -5272,7 +5548,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a1be40180e52ecc98ad80b184934baf3d0d29f979574e439af5a55274b35f869" dependencies = [ "proc-macro2 1.0.66", - "quote 1.0.32", + "quote 1.0.33", "version_check", ] @@ -5395,7 +5671,7 @@ dependencies = [ "anyhow", "itertools 0.10.5", "proc-macro2 1.0.66", - "quote 1.0.32", + "quote 1.0.33", "syn 1.0.109", ] @@ -5440,9 +5716,9 @@ dependencies = [ [[package]] name = "quote" -version = "1.0.32" +version = "1.0.33" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"50f3b39ccfb720540debaa0164757101c08ecb8d326b15358ce76a62c7e85965" +checksum = "5267fca4496028628a95160fc423a33e8b2e6af8a5302579e322e4b520293cae" dependencies = [ "proc-macro2 1.0.66", ] @@ -5598,33 +5874,33 @@ dependencies = [ [[package]] name = "ref-cast" -version = "1.0.19" +version = "1.0.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "61ef7e18e8841942ddb1cf845054f8008410030a3997875d9e49b7a363063df1" +checksum = "acde58d073e9c79da00f2b5b84eed919c8326832648a5b109b3fce1bb1175280" dependencies = [ "ref-cast-impl", ] [[package]] name = "ref-cast-impl" -version = "1.0.19" +version = "1.0.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2dfaf0c85b766276c797f3791f5bc6d5bd116b41d53049af2789666b0c0bc9fa" +checksum = "7f7473c2cfcf90008193dd0e3e16599455cb601a9fce322b5bb55de799664925" dependencies = [ "proc-macro2 1.0.66", - "quote 1.0.32", - "syn 2.0.27", + "quote 1.0.33", + "syn 2.0.28", ] [[package]] name = "regex" -version = "1.9.1" +version = "1.9.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b2eae68fc220f7cf2532e4494aded17545fce192d59cd996e0fe7887f4ceb575" +checksum = "81bc1d4caf89fac26a70747fe603c130093b53c773888797a6329091246d651a" dependencies = [ "aho-corasick", "memchr", - "regex-automata 0.3.3", + "regex-automata 0.3.6", "regex-syntax 0.7.4", ] @@ -5639,9 +5915,9 @@ dependencies = [ [[package]] name = "regex-automata" -version = "0.3.3" +version = "0.3.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "39354c10dd07468c2e73926b23bb9c2caca74c5501e38a35da70406f1d923310" +checksum = "fed1ceff11a1dddaee50c9dc8e4938bd106e9d89ae372f192311e7da498e3b69" dependencies = [ "aho-corasick", "memchr", @@ -5672,9 +5948,9 @@ dependencies = [ [[package]] name = "reqwest" -version = "0.11.18" +version = "0.11.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cde824a14b7c14f85caff81225f411faacc04a2013f41670f41443742b1c1c55" +checksum = "3e9ad3fe7488d7e34558a2033d45a0c90b72d97b4f80705666fea71472e2e6a1" dependencies = [ "base64 0.21.2", "bytes", @@ -5685,7 +5961,7 @@ dependencies = [ "http", "http-body", "hyper", - "hyper-rustls 0.24.1", + "hyper-rustls", "hyper-tls", "ipnet", "js-sys", @@ -5695,20 +5971,20 @@ dependencies = [ "once_cell", "percent-encoding", "pin-project-lite", - "rustls 0.21.5", + "rustls", "rustls-pemfile", "serde", "serde_json", "serde_urlencoded", "tokio", "tokio-native-tls", - "tokio-rustls 0.24.1", + "tokio-rustls", "tower-service", "url", "wasm-bindgen", "wasm-bindgen-futures", "web-sys", - "webpki-roots 0.22.6", + "webpki-roots 0.25.2", "winreg", ] @@ -5721,7 +5997,7 @@ checksum = "4389f1d5789befaf6029ebd9f7dac4af7f7e3d61b69d4f30e2ac02b57e7712b0" [[package]] name = "reth-beacon-consensus" version = "0.1.0-alpha.4" -source = "git+https://github.com/paradigmxyz/reth#4ab924c5d361bbfdcdad9f997d16d67b4a1730b7" +source = "git+https://github.com/paradigmxyz/reth?rev=4ab924c5d361bbfdcdad9f997d16d67b4a1730b7#4ab924c5d361bbfdcdad9f997d16d67b4a1730b7" dependencies = [ "futures", "reth-consensus-common", @@ -5745,7 +6021,7 @@ dependencies = [ [[package]] name = "reth-codecs" version = "0.1.0-alpha.4" -source = "git+https://github.com/paradigmxyz/reth#4ab924c5d361bbfdcdad9f997d16d67b4a1730b7" +source = "git+https://github.com/paradigmxyz/reth?rev=4ab924c5d361bbfdcdad9f997d16d67b4a1730b7#4ab924c5d361bbfdcdad9f997d16d67b4a1730b7" dependencies = [ "bytes", "codecs-derive", @@ -5755,7 +6031,7 @@ dependencies = [ [[package]] name = 
"reth-consensus-common" version = "0.1.0-alpha.4" -source = "git+https://github.com/paradigmxyz/reth#4ab924c5d361bbfdcdad9f997d16d67b4a1730b7" +source = "git+https://github.com/paradigmxyz/reth?rev=4ab924c5d361bbfdcdad9f997d16d67b4a1730b7#4ab924c5d361bbfdcdad9f997d16d67b4a1730b7" dependencies = [ "reth-interfaces", "reth-primitives", @@ -5765,7 +6041,7 @@ dependencies = [ [[package]] name = "reth-db" version = "0.1.0-alpha.4" -source = "git+https://github.com/paradigmxyz/reth#4ab924c5d361bbfdcdad9f997d16d67b4a1730b7" +source = "git+https://github.com/paradigmxyz/reth?rev=4ab924c5d361bbfdcdad9f997d16d67b4a1730b7#4ab924c5d361bbfdcdad9f997d16d67b4a1730b7" dependencies = [ "bytes", "derive_more", @@ -5792,7 +6068,7 @@ dependencies = [ [[package]] name = "reth-ecies" version = "0.1.0-alpha.4" -source = "git+https://github.com/paradigmxyz/reth#4ab924c5d361bbfdcdad9f997d16d67b4a1730b7" +source = "git+https://github.com/paradigmxyz/reth?rev=4ab924c5d361bbfdcdad9f997d16d67b4a1730b7#4ab924c5d361bbfdcdad9f997d16d67b4a1730b7" dependencies = [ "aes", "block-padding 0.3.3", @@ -5823,7 +6099,7 @@ dependencies = [ [[package]] name = "reth-eth-wire" version = "0.1.0-alpha.4" -source = "git+https://github.com/paradigmxyz/reth#4ab924c5d361bbfdcdad9f997d16d67b4a1730b7" +source = "git+https://github.com/paradigmxyz/reth?rev=4ab924c5d361bbfdcdad9f997d16d67b4a1730b7#4ab924c5d361bbfdcdad9f997d16d67b4a1730b7" dependencies = [ "async-trait", "bytes", @@ -5848,7 +6124,7 @@ dependencies = [ [[package]] name = "reth-interfaces" version = "0.1.0-alpha.4" -source = "git+https://github.com/paradigmxyz/reth#4ab924c5d361bbfdcdad9f997d16d67b4a1730b7" +source = "git+https://github.com/paradigmxyz/reth?rev=4ab924c5d361bbfdcdad9f997d16d67b4a1730b7#4ab924c5d361bbfdcdad9f997d16d67b4a1730b7" dependencies = [ "async-trait", "auto_impl", @@ -5872,7 +6148,7 @@ dependencies = [ [[package]] name = "reth-libmdbx" version = "0.1.0-alpha.4" -source = "git+https://github.com/paradigmxyz/reth#4ab924c5d361bbfdcdad9f997d16d67b4a1730b7" +source = "git+https://github.com/paradigmxyz/reth?rev=4ab924c5d361bbfdcdad9f997d16d67b4a1730b7#4ab924c5d361bbfdcdad9f997d16d67b4a1730b7" dependencies = [ "bitflags 2.3.3", "byteorder", @@ -5887,7 +6163,7 @@ dependencies = [ [[package]] name = "reth-mdbx-sys" version = "0.1.0-alpha.4" -source = "git+https://github.com/paradigmxyz/reth#4ab924c5d361bbfdcdad9f997d16d67b4a1730b7" +source = "git+https://github.com/paradigmxyz/reth?rev=4ab924c5d361bbfdcdad9f997d16d67b4a1730b7#4ab924c5d361bbfdcdad9f997d16d67b4a1730b7" dependencies = [ "bindgen", "cc", @@ -5897,7 +6173,7 @@ dependencies = [ [[package]] name = "reth-metrics" version = "0.1.0-alpha.4" -source = "git+https://github.com/paradigmxyz/reth#4ab924c5d361bbfdcdad9f997d16d67b4a1730b7" +source = "git+https://github.com/paradigmxyz/reth?rev=4ab924c5d361bbfdcdad9f997d16d67b4a1730b7#4ab924c5d361bbfdcdad9f997d16d67b4a1730b7" dependencies = [ "metrics", "reth-metrics-derive", @@ -5906,19 +6182,19 @@ dependencies = [ [[package]] name = "reth-metrics-derive" version = "0.1.0-alpha.4" -source = "git+https://github.com/paradigmxyz/reth#4ab924c5d361bbfdcdad9f997d16d67b4a1730b7" +source = "git+https://github.com/paradigmxyz/reth?rev=4ab924c5d361bbfdcdad9f997d16d67b4a1730b7#4ab924c5d361bbfdcdad9f997d16d67b4a1730b7" dependencies = [ "once_cell", "proc-macro2 1.0.66", - "quote 1.0.32", + "quote 1.0.33", "regex", - "syn 2.0.27", + "syn 2.0.28", ] [[package]] name = "reth-net-common" version = "0.1.0-alpha.4" -source = 
"git+https://github.com/paradigmxyz/reth#4ab924c5d361bbfdcdad9f997d16d67b4a1730b7" +source = "git+https://github.com/paradigmxyz/reth?rev=4ab924c5d361bbfdcdad9f997d16d67b4a1730b7#4ab924c5d361bbfdcdad9f997d16d67b4a1730b7" dependencies = [ "pin-project", "reth-primitives", @@ -5928,7 +6204,7 @@ dependencies = [ [[package]] name = "reth-network-api" version = "0.1.0-alpha.4" -source = "git+https://github.com/paradigmxyz/reth#4ab924c5d361bbfdcdad9f997d16d67b4a1730b7" +source = "git+https://github.com/paradigmxyz/reth?rev=4ab924c5d361bbfdcdad9f997d16d67b4a1730b7#4ab924c5d361bbfdcdad9f997d16d67b4a1730b7" dependencies = [ "async-trait", "reth-eth-wire", @@ -5942,7 +6218,7 @@ dependencies = [ [[package]] name = "reth-payload-builder" version = "0.1.0-alpha.4" -source = "git+https://github.com/paradigmxyz/reth#4ab924c5d361bbfdcdad9f997d16d67b4a1730b7" +source = "git+https://github.com/paradigmxyz/reth?rev=4ab924c5d361bbfdcdad9f997d16d67b4a1730b7#4ab924c5d361bbfdcdad9f997d16d67b4a1730b7" dependencies = [ "futures-util", "reth-interfaces", @@ -5962,7 +6238,7 @@ dependencies = [ [[package]] name = "reth-primitives" version = "0.1.0-alpha.4" -source = "git+https://github.com/paradigmxyz/reth#4ab924c5d361bbfdcdad9f997d16d67b4a1730b7" +source = "git+https://github.com/paradigmxyz/reth?rev=4ab924c5d361bbfdcdad9f997d16d67b4a1730b7#4ab924c5d361bbfdcdad9f997d16d67b4a1730b7" dependencies = [ "bytes", "crc", @@ -5970,7 +6246,7 @@ dependencies = [ "derive_more", "ethers-core", "fixed-hash", - "hash-db 0.15.2", + "hash-db", "hex", "hex-literal 0.3.4", "impl-serde", @@ -6002,7 +6278,7 @@ dependencies = [ [[package]] name = "reth-provider" version = "0.1.0-alpha.4" -source = "git+https://github.com/paradigmxyz/reth#4ab924c5d361bbfdcdad9f997d16d67b4a1730b7" +source = "git+https://github.com/paradigmxyz/reth?rev=4ab924c5d361bbfdcdad9f997d16d67b4a1730b7#4ab924c5d361bbfdcdad9f997d16d67b4a1730b7" dependencies = [ "auto_impl", "derive_more", @@ -6023,7 +6299,7 @@ dependencies = [ [[package]] name = "reth-prune" version = "0.1.0-alpha.4" -source = "git+https://github.com/paradigmxyz/reth#4ab924c5d361bbfdcdad9f997d16d67b4a1730b7" +source = "git+https://github.com/paradigmxyz/reth?rev=4ab924c5d361bbfdcdad9f997d16d67b4a1730b7#4ab924c5d361bbfdcdad9f997d16d67b4a1730b7" dependencies = [ "itertools 0.10.5", "rayon", @@ -6038,7 +6314,7 @@ dependencies = [ [[package]] name = "reth-revm" version = "0.1.0-alpha.4" -source = "git+https://github.com/paradigmxyz/reth#4ab924c5d361bbfdcdad9f997d16d67b4a1730b7" +source = "git+https://github.com/paradigmxyz/reth?rev=4ab924c5d361bbfdcdad9f997d16d67b4a1730b7#4ab924c5d361bbfdcdad9f997d16d67b4a1730b7" dependencies = [ "reth-consensus-common", "reth-interfaces", @@ -6053,7 +6329,7 @@ dependencies = [ [[package]] name = "reth-revm-inspectors" version = "0.1.0-alpha.4" -source = "git+https://github.com/paradigmxyz/reth#4ab924c5d361bbfdcdad9f997d16d67b4a1730b7" +source = "git+https://github.com/paradigmxyz/reth?rev=4ab924c5d361bbfdcdad9f997d16d67b4a1730b7#4ab924c5d361bbfdcdad9f997d16d67b4a1730b7" dependencies = [ "boa_engine", "boa_gc", @@ -6070,7 +6346,7 @@ dependencies = [ [[package]] name = "reth-revm-primitives" version = "0.1.0-alpha.4" -source = "git+https://github.com/paradigmxyz/reth#4ab924c5d361bbfdcdad9f997d16d67b4a1730b7" +source = "git+https://github.com/paradigmxyz/reth?rev=4ab924c5d361bbfdcdad9f997d16d67b4a1730b7#4ab924c5d361bbfdcdad9f997d16d67b4a1730b7" dependencies = [ "reth-primitives", "revm", @@ -6079,7 +6355,7 @@ dependencies = [ [[package]] name = "reth-rlp" version = 
"0.1.0-alpha.4" -source = "git+https://github.com/paradigmxyz/reth#4ab924c5d361bbfdcdad9f997d16d67b4a1730b7" +source = "git+https://github.com/paradigmxyz/reth?rev=4ab924c5d361bbfdcdad9f997d16d67b4a1730b7#4ab924c5d361bbfdcdad9f997d16d67b4a1730b7" dependencies = [ "arrayvec 0.7.4", "auto_impl", @@ -6093,17 +6369,17 @@ dependencies = [ [[package]] name = "reth-rlp-derive" version = "0.1.0-alpha.4" -source = "git+https://github.com/paradigmxyz/reth#4ab924c5d361bbfdcdad9f997d16d67b4a1730b7" +source = "git+https://github.com/paradigmxyz/reth?rev=4ab924c5d361bbfdcdad9f997d16d67b4a1730b7#4ab924c5d361bbfdcdad9f997d16d67b4a1730b7" dependencies = [ "proc-macro2 1.0.66", - "quote 1.0.32", - "syn 2.0.27", + "quote 1.0.33", + "syn 2.0.28", ] [[package]] name = "reth-rpc" version = "0.1.0-alpha.4" -source = "git+https://github.com/paradigmxyz/reth#4ab924c5d361bbfdcdad9f997d16d67b4a1730b7" +source = "git+https://github.com/paradigmxyz/reth?rev=4ab924c5d361bbfdcdad9f997d16d67b4a1730b7#4ab924c5d361bbfdcdad9f997d16d67b4a1730b7" dependencies = [ "async-trait", "bytes", @@ -6149,7 +6425,7 @@ dependencies = [ [[package]] name = "reth-rpc-api" version = "0.1.0-alpha.4" -source = "git+https://github.com/paradigmxyz/reth#4ab924c5d361bbfdcdad9f997d16d67b4a1730b7" +source = "git+https://github.com/paradigmxyz/reth?rev=4ab924c5d361bbfdcdad9f997d16d67b4a1730b7#4ab924c5d361bbfdcdad9f997d16d67b4a1730b7" dependencies = [ "jsonrpsee 0.18.2", "reth-primitives", @@ -6160,7 +6436,7 @@ dependencies = [ [[package]] name = "reth-rpc-engine-api" version = "0.1.0-alpha.4" -source = "git+https://github.com/paradigmxyz/reth#4ab924c5d361bbfdcdad9f997d16d67b4a1730b7" +source = "git+https://github.com/paradigmxyz/reth?rev=4ab924c5d361bbfdcdad9f997d16d67b4a1730b7#4ab924c5d361bbfdcdad9f997d16d67b4a1730b7" dependencies = [ "async-trait", "jsonrpsee-core 0.18.2", @@ -6181,7 +6457,7 @@ dependencies = [ [[package]] name = "reth-rpc-types" version = "0.1.0-alpha.4" -source = "git+https://github.com/paradigmxyz/reth#4ab924c5d361bbfdcdad9f997d16d67b4a1730b7" +source = "git+https://github.com/paradigmxyz/reth?rev=4ab924c5d361bbfdcdad9f997d16d67b4a1730b7#4ab924c5d361bbfdcdad9f997d16d67b4a1730b7" dependencies = [ "jsonrpsee-types 0.18.2", "reth-primitives", @@ -6194,7 +6470,7 @@ dependencies = [ [[package]] name = "reth-stages" version = "0.1.0-alpha.4" -source = "git+https://github.com/paradigmxyz/reth#4ab924c5d361bbfdcdad9f997d16d67b4a1730b7" +source = "git+https://github.com/paradigmxyz/reth?rev=4ab924c5d361bbfdcdad9f997d16d67b4a1730b7#4ab924c5d361bbfdcdad9f997d16d67b4a1730b7" dependencies = [ "aquamarine", "async-trait", @@ -6220,7 +6496,7 @@ dependencies = [ [[package]] name = "reth-tasks" version = "0.1.0-alpha.4" -source = "git+https://github.com/paradigmxyz/reth#4ab924c5d361bbfdcdad9f997d16d67b4a1730b7" +source = "git+https://github.com/paradigmxyz/reth?rev=4ab924c5d361bbfdcdad9f997d16d67b4a1730b7#4ab924c5d361bbfdcdad9f997d16d67b4a1730b7" dependencies = [ "dyn-clone", "futures-util", @@ -6234,7 +6510,7 @@ dependencies = [ [[package]] name = "reth-transaction-pool" version = "0.1.0-alpha.4" -source = "git+https://github.com/paradigmxyz/reth#4ab924c5d361bbfdcdad9f997d16d67b4a1730b7" +source = "git+https://github.com/paradigmxyz/reth?rev=4ab924c5d361bbfdcdad9f997d16d67b4a1730b7#4ab924c5d361bbfdcdad9f997d16d67b4a1730b7" dependencies = [ "aquamarine", "async-trait", @@ -6261,7 +6537,7 @@ dependencies = [ [[package]] name = "reth-trie" version = "0.1.0-alpha.4" -source = 
"git+https://github.com/paradigmxyz/reth#4ab924c5d361bbfdcdad9f997d16d67b4a1730b7" +source = "git+https://github.com/paradigmxyz/reth?rev=4ab924c5d361bbfdcdad9f997d16d67b4a1730b7#4ab924c5d361bbfdcdad9f997d16d67b4a1730b7" dependencies = [ "derive_more", "hex", @@ -6372,11 +6648,16 @@ version = "0.1.0" dependencies = [ "anyhow", "bincode", + "bytemuck", + "once_cell", + "parking_lot 0.12.1", "risc0-circuit-rv32im", "risc0-zkp", "risc0-zkvm", + "risc0-zkvm-platform", "serde", "sov-rollup-interface", + "zk-cycle-utils", ] [[package]] @@ -6533,7 +6814,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e33d7b2abe0c340d8797fe2907d3f20d3b5ea5908683618bfe80df7f621f672a" dependencies = [ "proc-macro2 1.0.66", - "quote 1.0.32", + "quote 1.0.33", "syn 1.0.109", ] @@ -6559,15 +6840,25 @@ dependencies = [ [[package]] name = "ruint" -version = "1.9.0" +version = "1.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "77e1574d439643c8962edf612a888e7cc5581bcdf36cb64e6bc88466b03b2daa" +checksum = "95294d6e3a6192f3aabf91c38f56505a625aa495533442744185a36d75a790c4" dependencies = [ + "alloy-rlp", + "ark-ff 0.3.0", + "ark-ff 0.4.2", + "bytes", + "fastrlp", + "num-bigint 0.4.3", + "parity-scale-codec", "primitive-types", + "proptest", + "rand 0.8.5", "rlp", "ruint-macro", "serde", - "thiserror", + "valuable", + "zeroize", ] [[package]] @@ -6594,13 +6885,22 @@ version = "2.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3e75f6a532d0fd9f7f13144f392b6ad56a32696bfcd9c78f797f16bbb6f072d6" +[[package]] +name = "rustc_version" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f0dfe2087c51c460008730de8b57e6a320782fbfb312e1f4d520e6c6fae155ee" +dependencies = [ + "semver 0.11.0", +] + [[package]] name = "rustc_version" version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bfa0f585226d2e68097d4f95d113b15b83a82e819ab25717ec0590d9584ef366" dependencies = [ - "semver", + "semver 1.0.18", ] [[package]] @@ -6633,34 +6933,22 @@ dependencies = [ [[package]] name = "rustix" -version = "0.38.4" +version = "0.38.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0a962918ea88d644592894bc6dc55acc6c0956488adcebbfb6e273506b7fd6e5" +checksum = "172891ebdceb05aa0005f533a6cbfca599ddd7d966f6f5d4d9b2e70478e70399" dependencies = [ "bitflags 2.3.3", "errno", "libc", - "linux-raw-sys 0.4.3", + "linux-raw-sys 0.4.5", "windows-sys 0.48.0", ] [[package]] name = "rustls" -version = "0.20.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fff78fc74d175294f4e83b28343315ffcfb114b156f0185e9741cb5570f50e2f" -dependencies = [ - "log", - "ring", - "sct", - "webpki", -] - -[[package]] -name = "rustls" -version = "0.21.5" +version = "0.21.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "79ea77c539259495ce8ca47f53e66ae0330a8819f67e23ac96ca02f50e7b7d36" +checksum = "1d1feddffcfcc0b33f5c6ce9a29e341e4cd59c3f78e7ee45f4a40c038b1d6cbb" dependencies = [ "log", "ring", @@ -6802,7 +7090,7 @@ checksum = "912e55f6d20e0e80d63733872b40e1227c0bce1e1ab81ba67d696339bfd7fd29" dependencies = [ "proc-macro-crate 1.3.1", "proc-macro2 1.0.66", - "quote 1.0.32", + "quote 1.0.33", "syn 1.0.109", ] @@ -6851,7 +7139,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "109da1e6b197438deb6db99952990c7f959572794b80ff93707d55a232545e7c" dependencies = [ "proc-macro2 1.0.66", - "quote 
1.0.32", + "quote 1.0.33", "serde_derive_internals", "syn 1.0.109", ] @@ -6996,6 +7284,15 @@ dependencies = [ "libc", ] +[[package]] +name = "semver" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f301af10236f6df4160f7c3f04eec6dbc70ace82d23326abad5edee88801c6b6" +dependencies = [ + "semver-parser", +] + [[package]] name = "semver" version = "1.0.18" @@ -7005,6 +7302,15 @@ dependencies = [ "serde", ] +[[package]] +name = "semver-parser" +version = "0.10.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "00b0bef5b7f9e0df16536d3961cfb6e84331c065b4066afb39768d0e319411f7" +dependencies = [ + "pest", +] + [[package]] name = "send_wrapper" version = "0.4.0" @@ -7019,9 +7325,9 @@ checksum = "cd0b0ec5f1c1ca621c432a25813d8d60c88abe6d3e08a3eb9cf37d97a0fe3d73" [[package]] name = "serde" -version = "1.0.176" +version = "1.0.188" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "76dc28c9523c5d70816e393136b86d48909cfb27cecaa902d338c19ed47164dc" +checksum = "cf9e0fcba69a370eed61bcf2b728575f726b50b55cba78064753d708ddc7549e" dependencies = [ "serde_derive", ] @@ -7048,13 +7354,13 @@ dependencies = [ [[package]] name = "serde_derive" -version = "1.0.176" +version = "1.0.188" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a4e7b8c5dc823e3b90651ff1d3808419cd14e5ad76de04feaf37da114e7a306f" +checksum = "4eca7ac642d82aa35b60049a6eccb4be6be75e599bd2e9adb5f875a737654af2" dependencies = [ "proc-macro2 1.0.66", - "quote 1.0.32", - "syn 2.0.27", + "quote 1.0.33", + "syn 2.0.28", ] [[package]] @@ -7064,7 +7370,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "85bf8229e7920a9f636479437026331ce11aa132b4dde37d121944a44d6e5f3c" dependencies = [ "proc-macro2 1.0.66", - "quote 1.0.32", + "quote 1.0.33", "syn 1.0.109", ] @@ -7097,8 +7403,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8725e1dfadb3a50f7e5ce0b1a540466f6ed3fe7a0fca2ac2b8b831d31316bd00" dependencies = [ "proc-macro2 1.0.66", - "quote 1.0.32", - "syn 2.0.27", + "quote 1.0.33", + "syn 2.0.28", ] [[package]] @@ -7146,8 +7452,8 @@ checksum = "881b6f881b17d13214e5d494c939ebab463d01264ce1811e9d4ac3a882e7695f" dependencies = [ "darling 0.20.3", "proc-macro2 1.0.66", - "quote 1.0.32", - "syn 2.0.27", + "quote 1.0.33", + "syn 2.0.28", ] [[package]] @@ -7327,6 +7633,16 @@ dependencies = [ "winapi", ] +[[package]] +name = "socket2" +version = "0.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2538b18701741680e0322a2302176d3253a35388e2e62f172f64f4f16605f877" +dependencies = [ + "libc", + "windows-sys 0.48.0", +] + [[package]] name = "soketto" version = "0.7.1" @@ -7362,6 +7678,7 @@ name = "sov-accounts" version = "0.1.0" dependencies = [ "anyhow", + "arbitrary", "borsh", "clap 4.3.19", "jsonrpsee 0.18.2", @@ -7374,6 +7691,26 @@ dependencies = [ "thiserror", ] +[[package]] +name = "sov-attester-incentives" +version = "0.1.0" +dependencies = [ + "anyhow", + "borsh", + "jmt", + "serde", + "serde_json", + "sov-attester-incentives", + "sov-bank", + "sov-chain-state", + "sov-modules-api", + "sov-modules-macros", + "sov-rollup-interface", + "sov-state", + "tempfile", + "thiserror", +] + [[package]] name = "sov-bank" version = "0.1.0" @@ -7399,10 +7736,41 @@ version = "0.1.0" dependencies = [ "anyhow", "bincode", + "borsh", + "clap 4.3.19", + "hex", + "jsonrpsee 0.18.2", + "schemars", + "serde", + "serde_json", + "sov-bank", + 
"sov-modules-api", + "sov-modules-macros", + "sov-rollup-interface", + "sov-sequencer-registry", + "sov-state", + "tempfile", + "tracing", +] + +[[package]] +name = "sov-chain-state" +version = "0.1.0" +dependencies = [ + "anyhow", + "borsh", + "jsonrpsee 0.18.2", + "serde", + "serde_json", + "sov-bank", + "sov-chain-state", + "sov-data-generators", "sov-modules-api", "sov-modules-macros", + "sov-modules-stf-template", "sov-rollup-interface", "sov-state", + "sov-value-setter", "tempfile", ] @@ -7414,9 +7782,30 @@ dependencies = [ "borsh", "demo-stf", "directories", + "hex", + "jsonrpsee 0.18.2", "serde", "serde_json", + "sov-accounts", + "sov-bank", "sov-modules-api", + "sov-rollup-interface", + "tempfile", + "tokio", +] + +[[package]] +name = "sov-data-generators" +version = "0.1.0" +dependencies = [ + "borsh", + "proptest", + "sov-bank", + "sov-modules-api", + "sov-modules-stf-template", + "sov-rollup-interface", + "sov-state", + "sov-value-setter", ] [[package]] @@ -7433,6 +7822,7 @@ dependencies = [ "sov-rollup-interface", "sov-schema-db", "tempfile", + "tokio", ] [[package]] @@ -7443,34 +7833,44 @@ dependencies = [ "async-trait", "borsh", "bytes", + "celestia", "clap 4.3.19", "const-rollup-config", "criterion", "demo-stf", + "ethereum-types", + "ethers", + "ethers-contract", + "ethers-core", + "ethers-middleware", + "ethers-providers", + "ethers-signers", "futures", "hex", "jmt", "jsonrpsee 0.18.2", - "jupiter", + "presence", "prettytable-rs", "prometheus 0.11.0", "proptest", "reqwest", + "revm", "risc0-adapter", "serde", "serde_json", "sha2 0.10.7", "sov-bank", + "sov-cli", "sov-db", - "sov-election", + "sov-demo-rollup", "sov-ethereum", + "sov-evm", "sov-modules-api", "sov-modules-stf-template", "sov-rollup-interface", "sov-sequencer", "sov-state", "sov-stf-runner", - "sov-value-setter", "tempfile", "tendermint", "tokio", @@ -7483,65 +7883,25 @@ name = "sov-demo-rollup-avail" version = "0.1.0" dependencies = [ "anyhow", - "borsh", - "bytes", - "const-rollup-config", "demo-stf", - "futures", - "hex", - "jsonrpsee 0.18.2", "presence", - "proptest", - "reqwest", - "risc0-adapter", - "serde", - "serde_json", - "sha2 0.10.7", - "sov-bank", - "sov-db", - "sov-election", - "sov-modules-api", - "sov-modules-stf-template", + "sov-demo-rollup", "sov-rollup-interface", - "sov-sequencer", - "sov-state", - "sov-stf-runner", - "sov-value-setter", - "tempfile", - "tendermint", "tokio", - "tracing", "tracing-subscriber 0.3.17", ] -[[package]] -name = "sov-election" -version = "0.1.0" -dependencies = [ - "anyhow", - "borsh", - "clap 4.3.19", - "hex", - "jsonrpsee 0.18.2", - "schemars", - "serde", - "serde_json", - "sov-modules-api", - "sov-rollup-interface", - "sov-state", - "tempfile", -] - [[package]] name = "sov-ethereum" version = "0.1.0" dependencies = [ + "anyhow", "borsh", + "celestia", "const-rollup-config", "demo-stf", "ethers", "jsonrpsee 0.18.2", - "jupiter", "reth-primitives", "reth-rpc", "serde_json", @@ -7558,14 +7918,13 @@ dependencies = [ "anyhow", "borsh", "bytes", - "clap", + "clap 4.3.19", "derive_more", "ethereum-types", "ethers", "ethers-contract", "ethers-core", "ethers-middleware", - "ethers-providers", "ethers-signers", "hex", "jsonrpsee 0.18.2", @@ -7576,8 +7935,10 @@ dependencies = [ "reth-rpc-types", "revm", "schemars", + "secp256k1 0.27.0", "serde", "serde_json", + "sov-evm", "sov-modules-api", "sov-state", "tempfile", @@ -7600,7 +7961,6 @@ version = "0.1.0" dependencies = [ "sov-accounts", "sov-bank", - "sov-election", "sov-evm", "sov-modules-api", 
"sov-prover-incentives", @@ -7614,23 +7974,29 @@ name = "sov-modules-api" version = "0.1.0" dependencies = [ "anyhow", + "arbitrary", "bech32 0.9.1", "bincode", "borsh", "clap 4.3.19", "derive_more", - "ed25519-dalek", + "ed25519-dalek 2.0.0", "hex", "jsonrpsee 0.18.2", - "rand 0.7.3", + "rand 0.8.5", + "risc0-zkvm", + "risc0-zkvm-platform", "schemars", "serde", "serde_json", "sha2 0.10.7", + "sov-modules-api", "sov-modules-macros", "sov-rollup-interface", + "sov-sequencer", "sov-state", "thiserror", + "zk-cycle-macros", ] [[package]] @@ -7642,14 +8008,16 @@ dependencies = [ "clap 4.3.19", "jsonrpsee 0.18.2", "proc-macro2 1.0.66", - "quote 1.0.32", + "quote 1.0.33", "schemars", "serde", "serde_json", "sov-bank", "sov-modules-api", + "sov-modules-macros", "sov-state", "syn 1.0.109", + "tempfile", "trybuild", ] @@ -7661,11 +8029,16 @@ dependencies = [ "borsh", "hex", "jmt", + "risc0-zkvm", + "risc0-zkvm-platform", "serde", "sov-modules-api", "sov-rollup-interface", "sov-state", + "thiserror", "tracing", + "zk-cycle-macros", + "zk-cycle-utils", ] [[package]] @@ -7680,6 +8053,7 @@ dependencies = [ "serde_json", "sov-bank", "sov-modules-api", + "sov-prover-incentives", "sov-rollup-interface", "sov-state", "tempfile", @@ -7691,6 +8065,7 @@ version = "0.1.0" dependencies = [ "anyhow", "async-trait", + "bincode", "borsh", "bytes", "digest 0.10.7", @@ -7700,6 +8075,7 @@ dependencies = [ "serde", "serde_json", "sha2 0.10.7", + "tokio", ] [[package]] @@ -7726,7 +8102,6 @@ dependencies = [ "hex", "jsonrpsee 0.18.2", "serde", - "sov-modules-api", "sov-rollup-interface", "tokio", "tracing", @@ -7740,14 +8115,19 @@ dependencies = [ "borsh", "clap 4.3.19", "jsonrpsee 0.18.2", + "risc0-zkvm", + "risc0-zkvm-platform", "schemars", "serde", "serde_json", "sov-bank", "sov-modules-api", "sov-rollup-interface", + "sov-sequencer-registry", "sov-state", "tempfile", + "zk-cycle-macros", + "zk-cycle-utils", ] [[package]] @@ -7755,9 +8135,13 @@ name = "sov-state" version = "0.1.0" dependencies = [ "anyhow", + "arbitrary", + "bcs", "borsh", "hex", "jmt", + "risc0-zkvm", + "risc0-zkvm-platform", "serde", "sha2 0.10.7", "sov-db", @@ -7765,6 +8149,7 @@ dependencies = [ "sov-rollup-interface", "tempfile", "thiserror", + "zk-cycle-macros", ] [[package]] @@ -7773,17 +8158,16 @@ version = "0.1.0" dependencies = [ "anyhow", "borsh", + "celestia", "futures", "hex", "jsonrpsee 0.18.2", - "jupiter", "rand 0.8.5", "serde", "serde_json", "sov-accounts", "sov-bank", "sov-db", - "sov-election", "sov-modules-api", "sov-modules-stf-template", "sov-rollup-interface", @@ -7815,6 +8199,24 @@ dependencies = [ "thiserror", ] +[[package]] +name = "sov-vec-setter" +version = "0.1.0" +dependencies = [ + "anyhow", + "borsh", + "clap 4.3.19", + "jsonrpsee 0.18.2", + "schemars", + "serde", + "serde_json", + "sov-modules-api", + "sov-rollup-interface", + "sov-state", + "tempfile", + "thiserror", +] + [[package]] name = "sp-application-crypto" version = "17.0.0" @@ -7824,9 +8226,9 @@ dependencies = [ "parity-scale-codec", "scale-info", "serde", - "sp-core 16.0.0", + "sp-core", "sp-io", - "sp-std 6.0.0", + "sp-std", ] [[package]] @@ -7840,7 +8242,7 @@ dependencies = [ "parity-scale-codec", "scale-info", "serde", - "sp-std 6.0.0", + "sp-std", "static_assertions", ] @@ -7858,7 +8260,7 @@ dependencies = [ "dyn-clonable", "ed25519-zebra", "futures", - "hash-db 0.15.2", + "hash-db", "hash256-std-hasher", "impl-serde", "lazy_static", @@ -7876,56 +8278,11 @@ dependencies = [ "secrecy", "serde", "sp-core-hashing 6.0.0", - "sp-debug-derive 6.0.0", - 
"sp-externalities 0.17.0", - "sp-runtime-interface 13.0.0", - "sp-std 6.0.0", - "sp-storage 11.0.0", - "ss58-registry", - "substrate-bip39", - "thiserror", - "tiny-bip39", - "zeroize", -] - -[[package]] -name = "sp-core" -version = "21.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f18d9e2f67d8661f9729f35347069ac29d92758b59135176799db966947a7336" -dependencies = [ - "array-bytes", - "bitflags 1.3.2", - "blake2", - "bounded-collections", - "bs58", - "dyn-clonable", - "ed25519-zebra", - "futures", - "hash-db 0.16.0", - "hash256-std-hasher", - "impl-serde", - "lazy_static", - "libsecp256k1", - "log", - "merlin", - "parity-scale-codec", - "parking_lot 0.12.1", - "paste", - "primitive-types", - "rand 0.8.5", - "regex", - "scale-info", - "schnorrkel", - "secp256k1 0.24.3", - "secrecy", - "serde", - "sp-core-hashing 9.0.0", - "sp-debug-derive 8.0.0", - "sp-externalities 0.19.0", - "sp-runtime-interface 17.0.0", - "sp-std 8.0.0", - "sp-storage 13.0.0", + "sp-debug-derive", + "sp-externalities", + "sp-runtime-interface", + "sp-std", + "sp-storage", "ss58-registry", "substrate-bip39", "thiserror", @@ -7944,22 +8301,21 @@ dependencies = [ "digest 0.10.7", "sha2 0.10.7", "sha3", - "sp-std 6.0.0", + "sp-std", "twox-hash", ] [[package]] name = "sp-core-hashing" -version = "9.0.0" +version = "10.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2ee599a8399448e65197f9a6cee338ad192e9023e35e31f22382964c3c174c68" +checksum = "e360755a2706a76886d58776665cad0db793dece3c7d390455b28e8a1efd6285" dependencies = [ "blake2b_simd", "byteorder", "digest 0.10.7", "sha2 0.10.7", "sha3", - "sp-std 8.0.0", "twox-hash", ] @@ -7970,21 +8326,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "66fb9dc63d54de7d7bed62a505b6e0bd66c122525ea1abb348f6564717c3df2d" dependencies = [ "proc-macro2 1.0.66", - "quote 1.0.32", + "quote 1.0.33", "syn 1.0.109", ] -[[package]] -name = "sp-debug-derive" -version = "8.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c7f531814d2f16995144c74428830ccf7d94ff4a7749632b83ad8199b181140c" -dependencies = [ - "proc-macro2 1.0.66", - "quote 1.0.32", - "syn 2.0.27", -] - [[package]] name = "sp-externalities" version = "0.17.0" @@ -7993,20 +8338,8 @@ checksum = "57052935c9c9b070ea6b339ef0da3bf241b7e065fc37f9c551669ee83ecfc3c1" dependencies = [ "environmental", "parity-scale-codec", - "sp-std 6.0.0", - "sp-storage 11.0.0", -] - -[[package]] -name = "sp-externalities" -version = "0.19.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a0f71c671e01a8ca60da925d43a1b351b69626e268b8837f8371e320cf1dd100" -dependencies = [ - "environmental", - "parity-scale-codec", - "sp-std 8.0.0", - "sp-storage 13.0.0", + "sp-std", + "sp-storage", ] [[package]] @@ -8017,19 +8350,19 @@ checksum = "578959f9a7e44fd2dd96e8b8bc893cea04fcd7c00a4ffbb0b91c5013899dd02b" dependencies = [ "bytes", "ed25519 1.5.3", - "ed25519-dalek", + "ed25519-dalek 1.0.1", "futures", "libsecp256k1", "log", "parity-scale-codec", "secp256k1 0.24.3", - "sp-core 16.0.0", - "sp-externalities 0.17.0", + "sp-core", + "sp-externalities", "sp-keystore", - "sp-runtime-interface 13.0.0", + "sp-runtime-interface", "sp-state-machine", - "sp-std 6.0.0", - "sp-tracing 8.0.0", + "sp-std", + "sp-tracing", "sp-trie", "tracing", "tracing-core", @@ -8042,7 +8375,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cc13a168cdc15e185db5cbe8644e3eaafa534e229593768b3044b60bea00fc8c" 
dependencies = [ "lazy_static", - "sp-core 16.0.0", + "sp-core", "sp-runtime", "strum 0.24.1", ] @@ -8059,8 +8392,8 @@ dependencies = [ "parity-scale-codec", "parking_lot 0.12.1", "schnorrkel", - "sp-core 16.0.0", - "sp-externalities 0.17.0", + "sp-core", + "sp-externalities", "thiserror", ] @@ -8092,9 +8425,9 @@ dependencies = [ "serde", "sp-application-crypto", "sp-arithmetic", - "sp-core 16.0.0", + "sp-core", "sp-io", - "sp-std 6.0.0", + "sp-std", "sp-weights", ] @@ -8108,31 +8441,12 @@ dependencies = [ "impl-trait-for-tuples", "parity-scale-codec", "primitive-types", - "sp-externalities 0.17.0", - "sp-runtime-interface-proc-macro 9.0.0", - "sp-std 6.0.0", - "sp-storage 11.0.0", - "sp-tracing 8.0.0", - "sp-wasm-interface 10.0.0", - "static_assertions", -] - -[[package]] -name = "sp-runtime-interface" -version = "17.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6e676128182f90015e916f806cba635c8141e341e7abbc45d25525472e1bbce8" -dependencies = [ - "bytes", - "impl-trait-for-tuples", - "parity-scale-codec", - "primitive-types", - "sp-externalities 0.19.0", - "sp-runtime-interface-proc-macro 11.0.0", - "sp-std 8.0.0", - "sp-storage 13.0.0", - "sp-tracing 10.0.0", - "sp-wasm-interface 14.0.0", + "sp-externalities", + "sp-runtime-interface-proc-macro", + "sp-std", + "sp-storage", + "sp-tracing", + "sp-wasm-interface", "static_assertions", ] @@ -8145,39 +8459,26 @@ dependencies = [ "Inflector", "proc-macro-crate 1.3.1", "proc-macro2 1.0.66", - "quote 1.0.32", + "quote 1.0.33", "syn 1.0.109", ] -[[package]] -name = "sp-runtime-interface-proc-macro" -version = "11.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a5d5bd5566fe5633ec48dfa35ab152fd29f8a577c21971e1c6db9f28afb9bbb9" -dependencies = [ - "Inflector", - "proc-macro-crate 1.3.1", - "proc-macro2 1.0.66", - "quote 1.0.32", - "syn 2.0.27", -] - [[package]] name = "sp-state-machine" version = "0.22.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4c957b8b4c252507c12674948db427c5e34fd1760ce256922f1ec5f89f781a4f" dependencies = [ - "hash-db 0.15.2", + "hash-db", "log", "parity-scale-codec", "parking_lot 0.12.1", "rand 0.8.5", "smallvec 1.11.0", - "sp-core 16.0.0", - "sp-externalities 0.17.0", + "sp-core", + "sp-externalities", "sp-panic-handler", - "sp-std 6.0.0", + "sp-std", "sp-trie", "thiserror", "tracing", @@ -8189,12 +8490,6 @@ version = "6.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "af0ee286f98455272f64ac5bb1384ff21ac029fbb669afbaf48477faff12760e" -[[package]] -name = "sp-std" -version = "8.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "53458e3c57df53698b3401ec0934bea8e8cfce034816873c0b0abbd83d7bac0d" - [[package]] name = "sp-storage" version = "11.0.0" @@ -8205,22 +8500,8 @@ dependencies = [ "parity-scale-codec", "ref-cast", "serde", - "sp-debug-derive 6.0.0", - "sp-std 6.0.0", -] - -[[package]] -name = "sp-storage" -version = "13.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "94294be83f11d4958cfea89ed5798f0b6605f5defc3a996948848458abbcc18e" -dependencies = [ - "impl-serde", - "parity-scale-codec", - "ref-cast", - "serde", - "sp-debug-derive 8.0.0", - "sp-std 8.0.0", + "sp-debug-derive", + "sp-std", ] [[package]] @@ -8230,20 +8511,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e46bd547da89a9cda69b4ce4c91a5b7e1f86915190d83cd407b715d0c6bac042" dependencies = [ "parity-scale-codec", - "sp-std 
6.0.0", - "tracing", - "tracing-core", - "tracing-subscriber 0.2.25", -] - -[[package]] -name = "sp-tracing" -version = "10.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "357f7591980dd58305956d32f8f6646d0a8ea9ea0e7e868e46f53b68ddf00cec" -dependencies = [ - "parity-scale-codec", - "sp-std 8.0.0", + "sp-std", "tracing", "tracing-core", "tracing-subscriber 0.2.25", @@ -8256,7 +8524,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8efbe5b6d29a18fea7c2f52e0098135f2f864b31d335d5105b40a349866ba874" dependencies = [ "ahash 0.8.3", - "hash-db 0.15.2", + "hash-db", "hashbrown 0.12.3", "lazy_static", "memory-db", @@ -8265,8 +8533,8 @@ dependencies = [ "parking_lot 0.12.1", "scale-info", "schnellru", - "sp-core 16.0.0", - "sp-std 6.0.0", + "sp-core", + "sp-std", "thiserror", "tracing", "trie-db", @@ -8283,23 +8551,9 @@ dependencies = [ "impl-trait-for-tuples", "log", "parity-scale-codec", - "sp-std 6.0.0", + "sp-std", "wasmi", - "wasmtime 5.0.1", -] - -[[package]] -name = "sp-wasm-interface" -version = "14.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a19c122609ca5d8246be6386888596320d03c7bc880959eaa2c36bcd5acd6846" -dependencies = [ - "anyhow", - "impl-trait-for-tuples", - "log", - "parity-scale-codec", - "sp-std 8.0.0", - "wasmtime 8.0.1", + "wasmtime", ] [[package]] @@ -8313,9 +8567,9 @@ dependencies = [ "serde", "smallvec 1.11.0", "sp-arithmetic", - "sp-core 16.0.0", - "sp-debug-derive 6.0.0", - "sp-std 6.0.0", + "sp-core", + "sp-debug-derive", + "sp-std", ] [[package]] @@ -8351,14 +8605,14 @@ checksum = "3b9b39299b249ad65f3b7e96443bad61c02ca5cd3589f46cb6d610a0fd6c0d6a" [[package]] name = "ss58-registry" -version = "1.41.0" +version = "1.43.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bfc443bad666016e012538782d9e3006213a7db43e9fb1dda91657dc06a6fa08" +checksum = "5e6915280e2d0db8911e5032a5c275571af6bdded2916abd691a659be25d3439" dependencies = [ "Inflector", "num-format", "proc-macro2 1.0.66", - "quote 1.0.32", + "quote 1.0.33", "serde", "serde_json", "unicode-xid 0.2.4", @@ -8421,7 +8675,7 @@ dependencies = [ "heck 0.3.3", "proc-macro-error", "proc-macro2 1.0.66", - "quote 1.0.32", + "quote 1.0.33", "syn 1.0.109", ] @@ -8440,7 +8694,7 @@ version = "0.25.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "290d54ea6f91c969195bdbcd7442c8c2a2ba87da8bf60a7ee86a235d4bc1e125" dependencies = [ - "strum_macros 0.25.1", + "strum_macros 0.25.2", ] [[package]] @@ -8451,22 +8705,22 @@ checksum = "1e385be0d24f186b4ce2f9982191e7101bb737312ad61c1f2f984f34bcf85d59" dependencies = [ "heck 0.4.1", "proc-macro2 1.0.66", - "quote 1.0.32", + "quote 1.0.33", "rustversion", "syn 1.0.109", ] [[package]] name = "strum_macros" -version = "0.25.1" +version = "0.25.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6069ca09d878a33f883cc06aaa9718ede171841d3832450354410b718b097232" +checksum = "ad8d03b598d3d0fff69bf533ee3ef19b8eeb342729596df84bcc7e1f96ec4059" dependencies = [ "heck 0.4.1", "proc-macro2 1.0.66", - "quote 1.0.32", + "quote 1.0.33", "rustversion", - "syn 2.0.27", + "syn 2.0.28", ] [[package]] @@ -8530,7 +8784,7 @@ dependencies = [ "getrandom 0.2.10", "hex", "impl-serde", - "jsonrpsee 0.16.2", + "jsonrpsee 0.16.3", "parity-scale-codec", "parking_lot 0.12.1", "primitive-types", @@ -8540,7 +8794,7 @@ dependencies = [ "scale-value", "serde", "serde_json", - "sp-core 16.0.0", + "sp-core", "sp-core-hashing 6.0.0", 
"sp-runtime", "subxt-macro", @@ -8559,11 +8813,11 @@ dependencies = [ "frame-metadata", "heck 0.4.1", "hex", - "jsonrpsee 0.16.2", + "jsonrpsee 0.16.3", "parity-scale-codec", "proc-macro-error", "proc-macro2 1.0.66", - "quote 1.0.32", + "quote 1.0.33", "scale-info", "subxt-metadata", "syn 1.0.109", @@ -8614,7 +8868,7 @@ dependencies = [ "home", "once_cell", "reqwest", - "semver", + "semver 1.0.18", "serde", "serde_json", "sha2 0.10.7", @@ -8641,18 +8895,18 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237" dependencies = [ "proc-macro2 1.0.66", - "quote 1.0.32", + "quote 1.0.33", "unicode-ident", ] [[package]] name = "syn" -version = "2.0.27" +version = "2.0.28" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b60f673f44a8255b9c8c657daf66a596d435f2da81a555b06dc644d080ba45e0" +checksum = "04361975b3f5e348b2189d8dc55bc942f278b2d482a6a0365de5bdd62d351567" dependencies = [ "proc-macro2 1.0.66", - "quote 1.0.32", + "quote 1.0.33", "unicode-ident", ] @@ -8663,7 +8917,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f36bdaa60a83aca3921b5259d5400cbf5e90fc51931376a9bd4a0eb79aa7210f" dependencies = [ "proc-macro2 1.0.66", - "quote 1.0.32", + "quote 1.0.33", "syn 1.0.109", "unicode-xid 0.2.4", ] @@ -8675,8 +8929,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "285ba80e733fac80aa4270fbcdf83772a79b80aa35c97075320abfee4a915b06" dependencies = [ "proc-macro2 1.0.66", - "quote 1.0.32", - "syn 2.0.27", + "quote 1.0.33", + "syn 2.0.28", "unicode-xid 0.2.4", ] @@ -8694,14 +8948,14 @@ checksum = "9d0e916b1148c8e263850e1ebcbd046f333e0683c724876bb0da63ea4373dc8a" [[package]] name = "tempfile" -version = "3.7.0" +version = "3.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5486094ee78b2e5038a6382ed7645bc084dc2ec433426ca4c3cb61e2007b8998" +checksum = "dc02fddf48964c42031a0b3fe0428320ecf3a73c401040fc0096f97794310651" dependencies = [ "cfg-if", "fastrand 2.0.0", "redox_syscall 0.3.5", - "rustix 0.38.4", + "rustix 0.38.7", "windows-sys 0.48.0", ] @@ -8803,8 +9057,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "090198534930841fab3a5d1bb637cde49e339654e606195f8d9c76eeb081dc96" dependencies = [ "proc-macro2 1.0.66", - "quote 1.0.32", - "syn 2.0.27", + "quote 1.0.33", + "syn 2.0.28", ] [[package]] @@ -8819,10 +9073,11 @@ dependencies = [ [[package]] name = "time" -version = "0.3.23" +version = "0.3.25" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "59e399c068f43a5d116fedaf73b203fa4f9c519f17e2b34f63221d3792f81446" +checksum = "b0fdd63d58b18d663fbdf70e049f00a22c8e42be082203be7f26589213cd75ea" dependencies = [ + "deranged", "itoa", "libc", "num_threads", @@ -8839,9 +9094,9 @@ checksum = "7300fbefb4dadc1af235a9cef3737cea692a9d97e1b9cbcd4ebdae6f8868e6fb" [[package]] name = "time-macros" -version = "0.2.10" +version = "0.2.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "96ba15a897f3c86766b757e5ac7221554c6750054d74d5b28844fce5fb36a6c4" +checksum = "eb71511c991639bb078fd5bf97757e03914361c48100d52878b8e52b46fb92cd" dependencies = [ "time-core", ] @@ -8911,11 +9166,10 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" [[package]] name = "tokio" -version = "1.29.1" +version = "1.32.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"532826ff75199d5833b9d2c5fe410f29235e25704ee5f0ef599fb51c21f4a4da" +checksum = "17ed6077ed6cd6c74735e21f37eb16dc3935f96878b1fe961074089cc80893f9" dependencies = [ - "autocfg", "backtrace", "bytes", "libc", @@ -8924,7 +9178,7 @@ dependencies = [ "parking_lot 0.12.1", "pin-project-lite", "signal-hook-registry", - "socket2", + "socket2 0.5.3", "tokio-macros", "windows-sys 0.48.0", ] @@ -8936,8 +9190,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "630bdcf245f78637c13ec01ffae6187cca34625e8c63150d424b59e55af2675e" dependencies = [ "proc-macro2 1.0.66", - "quote 1.0.32", - "syn 2.0.27", + "quote 1.0.33", + "syn 2.0.28", ] [[package]] @@ -8950,24 +9204,13 @@ dependencies = [ "tokio", ] -[[package]] -name = "tokio-rustls" -version = "0.23.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c43ee83903113e03984cb9e5cebe6c04a5116269e900e3ddba8f068a62adda59" -dependencies = [ - "rustls 0.20.8", - "tokio", - "webpki", -] - [[package]] name = "tokio-rustls" version = "0.24.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c28327cf380ac148141087fbfb9de9d7bd4e84ab5d2c28fbc911d753de8a7081" dependencies = [ - "rustls 0.21.5", + "rustls", "tokio", ] @@ -8991,9 +9234,9 @@ checksum = "ec509ac96e9a0c43427c74f003127d953a265737636129424288d27cb5c4b12c" dependencies = [ "futures-util", "log", - "rustls 0.21.5", + "rustls", "tokio", - "tokio-rustls 0.24.1", + "tokio-rustls", "tungstenite", "webpki-roots 0.23.1", ] @@ -9103,8 +9346,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5f4f31f56159e98206da9efd823404b79b6ef3143b4a7ab76e67b1751b25a4ab" dependencies = [ "proc-macro2 1.0.66", - "quote 1.0.32", - "syn 2.0.27", + "quote 1.0.33", + "syn 2.0.28", ] [[package]] @@ -9157,7 +9400,7 @@ dependencies = [ "ansi_term", "chrono", "lazy_static", - "matchers", + "matchers 0.0.1", "regex", "serde", "serde_json", @@ -9176,10 +9419,14 @@ version = "0.3.17" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "30a651bc37f915e81f087d86e62a18eec5f79550c7faff886f7090b4ea757c77" dependencies = [ + "matchers 0.1.0", "nu-ansi-term", + "once_cell", + "regex", "sharded-slab", "smallvec 1.11.0", "thread_local", + "tracing", "tracing-core", "tracing-log", ] @@ -9190,7 +9437,7 @@ version = "0.24.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "004e1e8f92535694b4cb1444dc5a8073ecf0815e3357f729638b9f8fc4062908" dependencies = [ - "hash-db 0.15.2", + "hash-db", "hashbrown 0.12.3", "log", "rustc-hex", @@ -9203,7 +9450,7 @@ version = "0.17.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9a36c5ca3911ed3c9a5416ee6c679042064b93fc637ded67e25f92e68d783891" dependencies = [ - "hash-db 0.15.2", + "hash-db", ] [[package]] @@ -9212,7 +9459,7 @@ version = "0.8.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a1631b201eb031b563d2e85ca18ec8092508e262a3196ce9bd10a67ec87b9f5c" dependencies = [ - "hash-db 0.15.2", + "hash-db", "rlp", ] @@ -9250,7 +9497,7 @@ dependencies = [ "httparse", "log", "rand 0.8.5", - "rustls 0.21.5", + "rustls", "sha1", "thiserror", "url", @@ -9296,10 +9543,16 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "30c49a6815b4f8379c36f06618bc1b80ca77aaf8a3fd4d8549dca6fdb016000f" dependencies = [ "proc-macro2 1.0.66", - "quote 1.0.32", - "syn 2.0.27", + "quote 1.0.33", + "syn 2.0.28", ] +[[package]] +name = "ucd-trie" +version = "0.1.6" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "ed646292ffc8188ef8ea4d1e0e0150fb15a5c2e12ad9b8fc191ae7a8a7f3c4b9" + [[package]] name = "uint" version = "0.9.5" @@ -9522,8 +9775,8 @@ dependencies = [ "log", "once_cell", "proc-macro2 1.0.66", - "quote 1.0.32", - "syn 2.0.27", + "quote 1.0.33", + "syn 2.0.28", "wasm-bindgen-shared", ] @@ -9545,7 +9798,7 @@ version = "0.2.87" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "dee495e55982a3bd48105a7b947fd2a9b4a8ae3010041b9e0faab3f9cd028f1d" dependencies = [ - "quote 1.0.32", + "quote 1.0.33", "wasm-bindgen-macro-support", ] @@ -9556,8 +9809,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "54681b18a46765f095758388f2d0cf16eb8d4169b639ab575a8f5693af210c7b" dependencies = [ "proc-macro2 1.0.66", - "quote 1.0.32", - "syn 2.0.27", + "quote 1.0.33", + "syn 2.0.28", "wasm-bindgen-backend", "wasm-bindgen-shared", ] @@ -9611,16 +9864,6 @@ dependencies = [ "url", ] -[[package]] -name = "wasmparser" -version = "0.102.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "48134de3d7598219ab9eaf6b91b15d8e50d31da76b8519fe4ecfcec2cf35104b" -dependencies = [ - "indexmap 1.9.3", - "url", -] - [[package]] name = "wasmtime" version = "5.0.1" @@ -9639,38 +9882,13 @@ dependencies = [ "psm", "serde", "target-lexicon", - "wasmparser 0.96.0", - "wasmtime-environ 5.0.1", - "wasmtime-jit 5.0.1", - "wasmtime-runtime 5.0.1", + "wasmparser", + "wasmtime-environ", + "wasmtime-jit", + "wasmtime-runtime", "windows-sys 0.42.0", ] -[[package]] -name = "wasmtime" -version = "8.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f907fdead3153cb9bfb7a93bbd5b62629472dc06dee83605358c64c52ed3dda9" -dependencies = [ - "anyhow", - "bincode", - "cfg-if", - "indexmap 1.9.3", - "libc", - "log", - "object 0.30.4", - "once_cell", - "paste", - "psm", - "serde", - "target-lexicon", - "wasmparser 0.102.0", - "wasmtime-environ 8.0.1", - "wasmtime-jit 8.0.1", - "wasmtime-runtime 8.0.1", - "windows-sys 0.45.0", -] - [[package]] name = "wasmtime-asm-macros" version = "5.0.1" @@ -9680,15 +9898,6 @@ dependencies = [ "cfg-if", ] -[[package]] -name = "wasmtime-asm-macros" -version = "8.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d3b9daa7c14cd4fa3edbf69de994408d5f4b7b0959ac13fa69d465f6597f810d" -dependencies = [ - "cfg-if", -] - [[package]] name = "wasmtime-environ" version = "5.0.1" @@ -9696,7 +9905,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9350c919553cddf14f78f9452119c8004d7ef6bfebb79a41a21819ed0c5604d8" dependencies = [ "anyhow", - "cranelift-entity 0.92.1", + "cranelift-entity", "gimli 0.26.2", "indexmap 1.9.3", "log", @@ -9704,27 +9913,8 @@ dependencies = [ "serde", "target-lexicon", "thiserror", - "wasmparser 0.96.0", - "wasmtime-types 5.0.1", -] - -[[package]] -name = "wasmtime-environ" -version = "8.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a990198cee4197423045235bf89d3359e69bd2ea031005f4c2d901125955c949" -dependencies = [ - "anyhow", - "cranelift-entity 0.95.1", - "gimli 0.27.3", - "indexmap 1.9.3", - "log", - "object 0.30.4", - "serde", - "target-lexicon", - "thiserror", - "wasmparser 0.102.0", - "wasmtime-types 8.0.1", + "wasmparser", + "wasmtime-types", ] [[package]] @@ -9744,35 +9934,12 @@ dependencies = [ "rustc-demangle", "serde", "target-lexicon", - "wasmtime-environ 5.0.1", - "wasmtime-jit-icache-coherence 5.0.1", - 
"wasmtime-runtime 5.0.1", + "wasmtime-environ", + "wasmtime-jit-icache-coherence", + "wasmtime-runtime", "windows-sys 0.42.0", ] -[[package]] -name = "wasmtime-jit" -version = "8.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0de48df552cfca1c9b750002d3e07b45772dd033b0b206d5c0968496abf31244" -dependencies = [ - "addr2line 0.19.0", - "anyhow", - "bincode", - "cfg-if", - "cpp_demangle", - "gimli 0.27.3", - "log", - "object 0.30.4", - "rustc-demangle", - "serde", - "target-lexicon", - "wasmtime-environ 8.0.1", - "wasmtime-jit-icache-coherence 8.0.1", - "wasmtime-runtime 8.0.1", - "windows-sys 0.45.0", -] - [[package]] name = "wasmtime-jit-debug" version = "5.0.1" @@ -9782,15 +9949,6 @@ dependencies = [ "once_cell", ] -[[package]] -name = "wasmtime-jit-debug" -version = "8.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6e0554b84c15a27d76281d06838aed94e13a77d7bf604bbbaf548aa20eb93846" -dependencies = [ - "once_cell", -] - [[package]] name = "wasmtime-jit-icache-coherence" version = "5.0.1" @@ -9802,17 +9960,6 @@ dependencies = [ "windows-sys 0.42.0", ] -[[package]] -name = "wasmtime-jit-icache-coherence" -version = "8.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aecae978b13f7f67efb23bd827373ace4578f2137ec110bbf6a4a7cde4121bbd" -dependencies = [ - "cfg-if", - "libc", - "windows-sys 0.45.0", -] - [[package]] name = "wasmtime-runtime" version = "5.0.1" @@ -9831,58 +9978,22 @@ dependencies = [ "paste", "rand 0.8.5", "rustix 0.36.15", - "wasmtime-asm-macros 5.0.1", - "wasmtime-environ 5.0.1", - "wasmtime-jit-debug 5.0.1", + "wasmtime-asm-macros", + "wasmtime-environ", + "wasmtime-jit-debug", "windows-sys 0.42.0", ] -[[package]] -name = "wasmtime-runtime" -version = "8.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "658cf6f325232b6760e202e5255d823da5e348fdea827eff0a2a22319000b441" -dependencies = [ - "anyhow", - "cc", - "cfg-if", - "indexmap 1.9.3", - "libc", - "log", - "mach", - "memfd", - "memoffset 0.8.0", - "paste", - "rand 0.8.5", - "rustix 0.36.15", - "wasmtime-asm-macros 8.0.1", - "wasmtime-environ 8.0.1", - "wasmtime-jit-debug 8.0.1", - "windows-sys 0.45.0", -] - [[package]] name = "wasmtime-types" version = "5.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "86e1e4f66a2b9a114f9def450ab9971828c968db6ea6fccd613724b771fa4913" dependencies = [ - "cranelift-entity 0.92.1", + "cranelift-entity", "serde", "thiserror", - "wasmparser 0.96.0", -] - -[[package]] -name = "wasmtime-types" -version = "8.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a4f6fffd2a1011887d57f07654dd112791e872e3ff4a2e626aee8059ee17f06f" -dependencies = [ - "cranelift-entity 0.95.1", - "serde", - "thiserror", - "wasmparser 0.102.0", + "wasmparser", ] [[package]] @@ -9907,21 +10018,18 @@ dependencies = [ [[package]] name = "webpki-roots" -version = "0.22.6" +version = "0.23.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b6c71e40d7d2c34a5106301fb632274ca37242cd0c9d3e64dbece371a40a2d87" +checksum = "b03058f88386e5ff5310d9111d53f48b17d732b401aeb83a8d5190f2ac459338" dependencies = [ - "webpki", + "rustls-webpki 0.100.1", ] [[package]] name = "webpki-roots" -version = "0.23.1" +version = "0.25.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b03058f88386e5ff5310d9111d53f48b17d732b401aeb83a8d5190f2ac459338" -dependencies = [ - "rustls-webpki 0.100.1", -] 
+checksum = "14247bb57be4f377dfb94c72830b8ce8fc6beac03cf4bf7b9732eadd414123fc" [[package]] name = "which" @@ -10123,20 +10231,21 @@ checksum = "1a515f5799fe4961cb532f983ce2b23082366b898e52ffbce459c86f67c8378a" [[package]] name = "winnow" -version = "0.5.1" +version = "0.5.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "25b5872fa2e10bd067ae946f927e726d7d603eaeb6e02fa6a350e0722d2b8c11" +checksum = "acaaa1190073b2b101e15083c38ee8ec891b5e05cbee516521e94ec008f61e64" dependencies = [ "memchr", ] [[package]] name = "winreg" -version = "0.10.1" +version = "0.50.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "80d0f4e272c85def139476380b12f9ac60926689dd2e01d4923222f40580869d" +checksum = "524e57b2c537c0f9b1e69f1965311ec12182b4122e45035b1508cd24d2adadb1" dependencies = [ - "winapi", + "cfg-if", + "windows-sys 0.48.0", ] [[package]] @@ -10184,7 +10293,7 @@ dependencies = [ "js-sys", "log", "pharos", - "rustc_version", + "rustc_version 0.4.0", "send_wrapper 0.6.0", "thiserror", "wasm-bindgen", @@ -10232,7 +10341,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "af46c169923ed7516eef0aa32b56d2651b229f57458ebe46b49ddd6efef5b7a2" dependencies = [ "proc-macro2 1.0.66", - "quote 1.0.32", + "quote 1.0.33", "syn 1.0.109", "synstructure 0.12.6", ] @@ -10253,7 +10362,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b4eae7c1f7d4b8eafce526bc0771449ddc2f250881ae31c50d22c032b5a1c499" dependencies = [ "proc-macro2 1.0.66", - "quote 1.0.32", + "quote 1.0.33", "syn 1.0.109", "synstructure 0.12.6", ] @@ -10274,8 +10383,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ce36e65b0d2999d2aafac989fb249189a141aee1f53c612c1f37d72631959f69" dependencies = [ "proc-macro2 1.0.66", - "quote 1.0.32", - "syn 2.0.27", + "quote 1.0.33", + "syn 2.0.28", ] [[package]] @@ -10296,7 +10405,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "486558732d5dde10d0f8cb2936507c1bb21bc539d924c949baf5f36a58e51bac" dependencies = [ "proc-macro2 1.0.66", - "quote 1.0.32", + "quote 1.0.33", "syn 1.0.109", "synstructure 0.12.6", ] @@ -10321,6 +10430,30 @@ dependencies = [ "zstd 0.11.2+zstd.1.5.2", ] +[[package]] +name = "zk-cycle-macros" +version = "0.1.0" +dependencies = [ + "anyhow", + "borsh", + "proc-macro2 1.0.66", + "quote 1.0.33", + "risc0-zkvm", + "risc0-zkvm-platform", + "syn 1.0.109", + "trybuild", + "zk-cycle-macros", + "zk-cycle-utils", +] + +[[package]] +name = "zk-cycle-utils" +version = "0.1.0" +dependencies = [ + "risc0-zkvm", + "risc0-zkvm-platform", +] + [[package]] name = "zstd" version = "0.11.2+zstd.1.5.2" @@ -10369,3 +10502,8 @@ dependencies = [ "libc", "pkg-config", ] + +[[patch.unused]] +name = "cc" +version = "1.0.79" +source = "git+https://github.com/rust-lang/cc-rs?rev=e5bbdfa#e5bbdfa1fa468c028cb38fee6c35a3cf2e5a2736" diff --git a/Cargo.toml b/Cargo.toml index 109bda5da..f56472ddc 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -11,11 +11,15 @@ members = [ "examples/demo-rollup-avail", "examples/demo-nft-module", "examples/avail-helper", + "full-node/db/sov-db", "full-node/sov-sequencer", "full-node/sov-ethereum", "full-node/sov-stf-runner", - + + "utils/zk-cycle-macros", + "utils/zk-cycle-utils", + "module-system/sov-cli", "module-system/sov-modules-stf-template", "module-system/sov-modules-macros", @@ -23,15 +27,18 @@ members = [ "module-system/sov-modules-api", "module-system/module-schemas", 
"module-system/utils/sov-first-read-last-write-cache", + "module-system/utils/sov-data-generators", "module-system/module-implementations/sov-accounts", "module-system/module-implementations/sov-bank", + "module-system/module-implementations/sov-chain-state", "module-system/module-implementations/sov-blob-storage", "module-system/module-implementations/sov-evm", "module-system/module-implementations/sov-prover-incentives", + "module-system/module-implementations/sov-attester-incentives", "module-system/module-implementations/sov-sequencer-registry", "module-system/module-implementations/module-template", "module-system/module-implementations/examples/sov-value-setter", - "module-system/module-implementations/examples/sov-election", + "module-system/module-implementations/examples/sov-vec-setter", "module-system/module-implementations/integration-tests", ] exclude = [ @@ -50,16 +57,18 @@ repository = "https://github.com/sovereign-labs/sovereign-sdk" rust-version = "1.66" [workspace.dependencies] -# Dependencies maintained by sovereign -jmt = "0.6.0" +# Dependencies maintained by Sovereign +jmt = { version = "0.7.0" } # External dependencies async-trait = "0.1.71" anyhow = "1.0.68" +arbitrary = { version = "1.3.0", features = ["derive"] } borsh = { version = "0.10.3", features = ["rc", "bytes"] } # TODO: Consider replacing this serialization format # https://github.com/Sovereign-Labs/sovereign-sdk/issues/283 bincode = "1.3.3" +bcs = "0.1.5" byteorder = "1.4.3" bytes = "1.2.1" hex = "0.4.3" @@ -70,7 +79,7 @@ proptest-derive = "0.3.0" rand = "0.8" rayon = "1.5.2" rocksdb = { version = "0.21.0", features = ["lz4"] } -serde = { version = "1.0.137", features = ["derive", "rc"] } +serde = { version = "1.0.188", features = ["derive", "rc"] } serde_json = { version = "1.0" } sha2 = "0.10.6" digest = "0.10.6" @@ -81,7 +90,7 @@ bech32 = "0.9.1" derive_more = "0.99.11" clap = { version = "4.2.7", features = ["derive"] } toml = "0.7.3" -jsonrpsee = {version = "0.18.2", features = ["jsonrpsee-types"] } +jsonrpsee = { version = "0.18.2", features = ["jsonrpsee-types"] } schemars = { version = "0.8.12", features = ["derive"] } tempfile = "3.5" tokio = { version = "1", features = ["full"] } @@ -95,14 +104,16 @@ ethers-providers = "=2.0.8" ethers-signers = { version = "=2.0.8", default-features = false } ethers-middleware = "=2.0.8" -reth-primitives = { git = "https://github.com/paradigmxyz/reth", version = "0.1.0-alpha.4"} -reth-rpc-types = { git = "https://github.com/paradigmxyz/reth", version = "0.1.0-alpha.4"} -reth-rpc = { git = "https://github.com/paradigmxyz/reth", version = "0.1.0-alpha.4"} -reth-revm = { git = "https://github.com/paradigmxyz/reth", version = "0.1.0-alpha.4"} +reth-primitives = { git = "https://github.com/paradigmxyz/reth", rev = "4ab924c5d361bbfdcdad9f997d16d67b4a1730b7" } +reth-rpc-types = { git = "https://github.com/paradigmxyz/reth", rev = "4ab924c5d361bbfdcdad9f997d16d67b4a1730b7" } +reth-rpc = { git = "https://github.com/paradigmxyz/reth", rev = "4ab924c5d361bbfdcdad9f997d16d67b4a1730b7" } +reth-revm = { git = "https://github.com/paradigmxyz/reth", rev = "4ab924c5d361bbfdcdad9f997d16d67b4a1730b7" } revm = { git = "https://github.com/bluealloy/revm/", branch = "release/v25" } revm-primitives = { git = "https://github.com/bluealloy/revm/", branch = "release/v25" } +secp256k1 = { version = "0.27.0", default-features = false, features = ["global-context", "rand-std", "recovery"] } + [patch.crates-io] # See reth: https://github.com/paradigmxyz/reth/blob/main/Cargo.toml#L79 revm = { 
git = "https://github.com/bluealloy/revm/", branch = "release/v25" } diff --git a/Makefile b/Makefile index 6b7eb4f4a..96e05665a 100644 --- a/Makefile +++ b/Makefile @@ -9,14 +9,18 @@ build: ## Build the the project clean: ## Cleans compiled @cargo clean -test: ## Runs test suite with output from tests printed +test-legacy: ## Runs test suite with output from tests printed @cargo test -- --nocapture -Zunstable-options --report-time +test: ## Runs test suite using next test + @cargo nextest run --workspace --all-features + install-dev-tools: ## Installs all necessary cargo helpers cargo install cargo-llvm-cov cargo install cargo-hack cargo install cargo-udeps cargo install flaky-finder + cargo install cargo-nextest --locked lint: ## cargo check and clippy ## fmt first, because it's the cheapest @@ -29,8 +33,11 @@ lint-fix: ## cargo fmt, fix and clippy cargo fix --allow-dirty cargo clippy --fix --allow-dirty -check-features: ## Checks that project compiles with all combinations of features - cargo hack --feature-powerset check +check-features: ## Checks that project compiles with all combinations of features. default is not needed because we never check `cfg(default)`, we only use it as an alias. + cargo hack check --workspace --feature-powerset --exclude-features default + +check-fuzz: ## Checks that fuzz member compiles + $(MAKE) -C fuzz check find-unused-deps: ## Prints unused dependencies for project. Note: requires nightly cargo udeps --all-targets --all-features diff --git a/README.md b/README.md index 11df5ec7b..c07d20258 100644 --- a/README.md +++ b/README.md @@ -14,17 +14,17 @@ ## What is the Sovereign SDK? -The Sovereign SDK is a free and open-source toolkit for building zk-rollups **that is currently under development**. Sovereign SDK consists of three +The Sovereign SDK is a free and open-source toolkit for building rollups (both ZK and optimistic) **that is currently under development**. Sovereign SDK consists of three logical components: -1. The Rollup Interface, a minimal set of interfaces that defines a zk-rollup +1. The Rollup Interface, a minimal set of interfaces that defines a rollup 1. The Module System, an opinionated framework for building a rollup with the Rollup Interface 1. The Full Node, a client implementation capable of running any rollup which implements the Rollup Interface. ### The Rollup Interface At the heart of the Sovereign SDK is [the Rollup Interface](./rollup-interface/specs/overview.md), which defines the _interfaces_ that rollups -must implement. In the Sovereign SDK, we define a zk-rollup as the combination of three components: +must implement. In the Sovereign SDK, we define a rollup as the combination of three components: 1. A [State Transition Function](./rollup-interface/specs/interfaces/stf.md) ("STF") which defines the "business logic" of the rollup 1. A [Data Availability Layer](./rollup-interface/specs/interfaces/da.md) ("DA layer") which determines the set of transactions that are fed @@ -80,16 +80,19 @@ issue! All of the core developers can be reached via [Discord](https://discord.g ## Getting Started -### Building a Rollup +### Running a demo rollup -The easiest way to build a rollup is to use the Module System. You can find a tutorial [here](./examples/demo-nft-module/README.md). +Experience how a simple rollup works by exploring our [`demo-rollup`](./examples/demo-rollup/README.md) tutorial. 
-We also provide two examples - [`demo-stf`](./examples/demo-stf/README.md), which shows how to use the Module System to implement a -state transition, and [`demo-rollup`](./examples/demo-rollup/README.md), which shows how to combine the demo STF with a DA layer and a ZKVM to -get a complete rollup implementation. +### Building a rollup -If you want even more control over your rollup's functionality, you can implement a completely custom State Transition Function -without using the Module System. You can find a tutorial [here](./examples/demo-simple-stf/README.md). +- #### Using the Module System +This is the most straightforward way to create a rollup. + +To construct a single module, follow our [module building tutorial](./examples/demo-nft-module/README.md). +To combine multiple modules into a State Transition Function (STF), see the [`demo-stf`](./examples/demo-stf/README.md) guide. +- #### By building a custom State Transition Function +If you want even more control over your rollup's functionality, you can implement a completely custom State Transition Function without using the Module System. Our dedicated tutorial for this approach can be found [here](./examples/demo-simple-stf/README.md). ### Adding a new Data Availability Layer @@ -102,6 +105,11 @@ Adapters contain the logic integrating 3rd party codebases into the Sovereign SD to have adapters for almost all Data Availability Layers and LLVM-compatible proof systems. Currently, we maintain adapters for [`Risc0`](https://www.risczero.com) (a ZKVM) and [`Celestia`](https://www.celestia.org) (a DA layer). The Avail project also maintains an adapter for their DA layer, which can be found [here](https://github.com/availproject/avail-sovereign-da-adapter). +The [Chainway](https://chainway.xyz/) team also maintains an open-source Bitcoin DA adapter for their Sovereign Rollup on Bitcoin, which can be found [here](https://github.com/chainwayxyz/bitcoin-da). + +## Testing + +Fuzzing support based on LLVM's libFuzzer is available; see [fuzz/README.md](./fuzz/README.md) for details. 
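A minimal usage sketch tying the new Testing section to the Makefile targets introduced earlier in this patch. It is illustrative only and not part of the diff itself; the target names are taken verbatim from the Makefile hunk above, and the comments describe what each target runs there.

```sh
# Illustrative workflow for the test-related Make targets added/updated in this patch.
make install-dev-tools   # installs cargo-nextest, cargo-hack, cargo-llvm-cov, cargo-udeps, flaky-finder
make test                # runs the suite via `cargo nextest run --workspace --all-features`
make test-legacy         # falls back to plain `cargo test` with per-test output
make check-features      # `cargo hack check` over the feature powerset, excluding `default`
make check-fuzz          # checks that the fuzz crate described in fuzz/README.md compiles
```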
## Warning diff --git a/adapters/avail/Cargo.toml b/adapters/avail/Cargo.toml index 8d9dd481f..e6ca2d16d 100644 --- a/adapters/avail/Cargo.toml +++ b/adapters/avail/Cargo.toml @@ -4,10 +4,11 @@ version = "0.1.0" edition = "2021" [dependencies] +borsh = { workspace = true, features = ["bytes"] } sov-rollup-interface = { path = "../../rollup-interface" } bytes = { version = "1.2.1", features = ["serde"]} -sp-core = { version = "21.0.0", optional = true } primitive-types = { version = "0.12.1", features = ["serde"]} +sp-core-hashing = "10.0.0" subxt = { version = "0.27.1", optional = true } avail-subxt = { git = "https://github.com/availproject/avail.git", tag = "v1.6.0", features = ["std"], optional = true } codec = { package = "parity-scale-codec", version = "3.0.0", default-features = false, features = ["derive", "full", "bit-vec"], optional = true } @@ -22,8 +23,10 @@ serde = { workspace = true } serde_json = { workspace = true } reqwest = { version = "0.11", features = ["json"], optional = true } thiserror = { workspace = true } +sp-keyring = { version = "18.0.0", optional = true } +sp-core = { version = "16.0.0", optional = true } [features] default = ["native"] -native = ["dep:tokio", "dep:codec", "dep:reqwest", "dep:avail-subxt", "dep:subxt", "dep:sp-core"] +native = ["dep:tokio", "dep:codec", "dep:reqwest", "dep:avail-subxt", "dep:subxt", "dep:sp-keyring", "dep:sp-core"] verifier = [] diff --git a/adapters/avail/src/service.rs b/adapters/avail/src/service.rs index 440471e93..8c30ef5d8 100644 --- a/adapters/avail/src/service.rs +++ b/adapters/avail/src/service.rs @@ -3,11 +3,17 @@ use core::time::Duration; use anyhow::anyhow; use async_trait::async_trait; use avail_subxt::AvailConfig; +use avail_subxt::api; +use avail_subxt::primitives::AvailExtrinsicParams; +use avail_subxt::api::runtime_types::sp_core::bounded::bounded_vec::BoundedVec; use reqwest::StatusCode; use serde::{Deserialize, Serialize}; use sov_rollup_interface::da::DaSpec; use sov_rollup_interface::services::da::DaService; +use sp_keyring::sr25519::sr25519::{self, Pair}; +use sp_core::crypto::Pair as PairTrait; use subxt::OnlineClient; +use subxt::tx::PairSigner; use tracing::info; use crate::avail::{Confidence, ExtrinsicsData}; @@ -16,10 +22,20 @@ use crate::spec::header::AvailHeader; use crate::spec::transaction::AvailBlobTransaction; use crate::spec::DaLayerSpec; -#[derive(Debug, Clone)] +/// Runtime configuration for the DA service +#[derive(Clone, PartialEq, serde::Deserialize, serde::Serialize)] +pub struct DaServiceConfig { + pub light_client_url: String, + pub node_client_url: String, + //TODO: Safer strategy to load seed so it is not accidentally revealed. 
+ pub seed: String, +} + +#[derive(Clone)] pub struct DaProvider { pub node_client: OnlineClient, pub light_client_url: String, + signer: PairSigner, } impl DaProvider { @@ -32,18 +48,23 @@ impl DaProvider { let light_client_url = self.light_client_url.clone(); format!("{light_client_url}/v1/confidence/{block_num}") } -} -#[derive(Serialize, Deserialize, Debug, Clone)] -pub struct RuntimeConfig { - light_client_url: String, - #[serde(skip)] - node_client: Option>, -} + pub async fn new( + config: DaServiceConfig, + ) -> Self { + let pair = Pair::from_string_with_seed(&config.seed, None).unwrap(); + let signer = PairSigner::::new(pair.0.clone()); -impl PartialEq for RuntimeConfig { - fn eq(&self, other: &Self) -> bool { - self.light_client_url == other.light_client_url + let node_client = avail_subxt::build_client(config.node_client_url.to_string(), false) + .await + .unwrap(); + let light_client_url = config.light_client_url; + + DaProvider { + node_client, + light_client_url, + signer, + } } } @@ -107,8 +128,6 @@ async fn wait_for_appdata(appdata_url: &str, block: u32) -> anyhow::Result::ChainParams, - ) -> Self { - let node_client = config.node_client.unwrap(); - let light_client_url = config.light_client_url; + async fn send_transaction(&self, blob: &[u8]) -> Result<(), Self::Error> { + let data_transfer = api::tx() + .data_availability() + .submit_data(BoundedVec(blob.to_vec())); + + let extrinsic_params = AvailExtrinsicParams::new_with_app_id(7.into()); - DaProvider { - node_client, - light_client_url, - } - } + let h = self.node_client + .tx() + .sign_and_submit_then_watch(&data_transfer, &self.signer, extrinsic_params) + .await?; + + println!("Transaction submitted: {:#?}", h.extrinsic_hash()); - async fn send_transaction(&self, _blob: &[u8]) -> Result<(), Self::Error> { - unimplemented!("The avail light client does not currently support sending transactions"); + Ok(()) } } diff --git a/adapters/avail/src/spec/address.rs b/adapters/avail/src/spec/address.rs index b13622b08..0190b4073 100644 --- a/adapters/avail/src/spec/address.rs +++ b/adapters/avail/src/spec/address.rs @@ -1,14 +1,14 @@ use core::fmt::{Display, Formatter}; +use std::str::FromStr; use std::hash::Hash; use primitive_types::H256; use serde::{Deserialize, Serialize}; -use sov_rollup_interface::AddressTrait; #[derive(Serialize, Deserialize, Debug, PartialEq, Clone, Eq, Hash)] pub struct AvailAddress(pub [u8; 32]); -impl AddressTrait for AvailAddress {} +impl sov_rollup_interface::BasicAddress for AvailAddress {} impl Display for AvailAddress { fn fmt(&self, f: &mut Formatter) -> core::fmt::Result { @@ -29,6 +29,17 @@ impl From<[u8; 32]> for AvailAddress { } } +impl FromStr for AvailAddress { + type Err = ::Err; + + fn from_str(s: &str) -> Result { + let h_256 = H256::from_str(s)?; + + + Ok(Self(h_256.to_fixed_bytes())) + } +} + impl<'a> TryFrom<&'a [u8]> for AvailAddress { type Error = anyhow::Error; diff --git a/adapters/avail/src/spec/block.rs b/adapters/avail/src/spec/block.rs index 839a4bc13..1fb230940 100644 --- a/adapters/avail/src/spec/block.rs +++ b/adapters/avail/src/spec/block.rs @@ -1,6 +1,7 @@ use serde::{Deserialize, Serialize}; use sov_rollup_interface::da::BlockHeaderTrait; use sov_rollup_interface::services::da::SlotData; +use crate::verifier::ChainValidityCondition; use super::header::AvailHeader; use super::transaction::AvailBlobTransaction; @@ -13,12 +14,27 @@ pub struct AvailBlock { impl SlotData for AvailBlock { type BlockHeader = AvailHeader; + type Cond = ChainValidityCondition; fn hash(&self) -> 
[u8; 32] { - self.header.hash().0 .0 + self.header.hash().0.0 } fn header(&self) -> &Self::BlockHeader { &self.header } + + fn validity_condition(&self) -> ChainValidityCondition { + let mut txs_commitment: [u8; 32] = [0u8; 32]; + + for tx in &self.transactions { + txs_commitment = tx.combine_hash(txs_commitment); + } + + ChainValidityCondition { + prev_hash: *self.header().prev_hash().inner(), + block_hash: ::hash(self), + txs_commitment, + } + } } diff --git a/adapters/avail/src/spec/header.rs b/adapters/avail/src/spec/header.rs index 2ec031ca5..740e02fdc 100644 --- a/adapters/avail/src/spec/header.rs +++ b/adapters/avail/src/spec/header.rs @@ -11,6 +11,7 @@ pub struct Header { pub number: u32, pub state_root: H256, pub extrinsics_root: H256, + pub data_root: H256, } use super::hash::AvailHash; @@ -31,10 +32,15 @@ impl AvailHeader { parent_hash: header.parent_hash, number: header.number, state_root: header.state_root, + data_root: header.data_root(), extrinsics_root: header.extrinsics_root, }, } } + + pub fn data_root(&self) -> AvailHash { + self.data_root().clone() + } } impl BlockHeaderTrait for AvailHeader { diff --git a/adapters/avail/src/spec/mod.rs b/adapters/avail/src/spec/mod.rs index 551d3cf2b..43600195f 100644 --- a/adapters/avail/src/spec/mod.rs +++ b/adapters/avail/src/spec/mod.rs @@ -1,14 +1,18 @@ use sov_rollup_interface::da::DaSpec; +use crate::verifier::ChainValidityCondition; -mod address; +pub mod address; pub mod block; mod hash; pub mod header; pub mod transaction; +#[derive(serde::Serialize, serde::Deserialize)] pub struct DaLayerSpec; impl DaSpec for DaLayerSpec { + type ValidityCondition = ChainValidityCondition; + type SlotHash = hash::AvailHash; type ChainParams = (); diff --git a/adapters/avail/src/spec/transaction.rs b/adapters/avail/src/spec/transaction.rs index de3c53189..6a74d29c3 100644 --- a/adapters/avail/src/spec/transaction.rs +++ b/adapters/avail/src/spec/transaction.rs @@ -13,7 +13,7 @@ use sov_rollup_interface::da::{BlobReaderTrait, CountedBufReader}; use super::address::AvailAddress; #[derive(Serialize, Deserialize, Clone, PartialEq, Debug)] -//pub struct AvailBlobTransaction(pub AppUncheckedExtrinsic); + pub struct AvailBlobTransaction { blob: CountedBufReader, hash: [u8; 32], @@ -42,8 +42,8 @@ impl BlobReaderTrait for AvailBlobTransaction { } } -#[cfg(feature = "native")] impl AvailBlobTransaction { + #[cfg(feature = "native")] pub fn new(unchecked_extrinsic: &AppUncheckedExtrinsic) -> Self { let address = match &unchecked_extrinsic.signature { Some((subxt::utils::MultiAddress::Id(id), _, _)) => AvailAddress(id.clone().0), @@ -57,9 +57,17 @@ impl AvailBlobTransaction { }; AvailBlobTransaction { - hash: H256::from(sp_core::blake2_256(&unchecked_extrinsic.encode())).to_fixed_bytes(), + hash: sp_core_hashing::blake2_256(&unchecked_extrinsic.encode()), address, blob, } } + + pub fn combine_hash(&self, hash: [u8; 32]) -> [u8; 32] { + let mut combined_hashes: Vec = Vec::with_capacity(64); + combined_hashes.extend_from_slice(hash.as_ref()); + combined_hashes.extend_from_slice(self.hash().as_ref()); + + sp_core_hashing::blake2_256(&combined_hashes) + } } diff --git a/adapters/avail/src/verifier.rs b/adapters/avail/src/verifier.rs index 82551195c..9b20a938c 100644 --- a/adapters/avail/src/verifier.rs +++ b/adapters/avail/src/verifier.rs @@ -1,6 +1,7 @@ use serde::{Deserialize, Serialize}; use sov_rollup_interface::da::{BlockHeaderTrait, DaSpec, DaVerifier}; use sov_rollup_interface::zk::ValidityCondition; +use borsh::{BorshDeserialize, BorshSerialize}; use 
thiserror::Error; use crate::spec::DaLayerSpec; @@ -11,22 +12,34 @@ pub enum ValidityConditionError { BlocksNotConsecutive, } -#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Copy, BorshDeserialize, BorshSerialize)] /// A validity condition expressing that a chain of DA layer blocks is contiguous and canonical pub struct ChainValidityCondition { pub prev_hash: [u8; 32], pub block_hash: [u8; 32], + //Chained or batch txs commitment. + pub txs_commitment: [u8; 32], } impl ValidityCondition for ChainValidityCondition { type Error = ValidityConditionError; fn combine(&self, rhs: Self) -> Result { + let mut combined_hashes: Vec = Vec::with_capacity(64); + combined_hashes.extend_from_slice(self.txs_commitment.as_ref()); + combined_hashes.extend_from_slice(rhs.txs_commitment.as_ref()); + + let combined_root = sp_core_hashing::blake2_256(&combined_hashes); + if self.block_hash != rhs.prev_hash { return Err(ValidityConditionError::BlocksNotConsecutive); } - Ok(rhs) + Ok(Self { + prev_hash: rhs.prev_hash, + block_hash: rhs.block_hash, + txs_commitment: combined_root, + }) } } @@ -37,20 +50,25 @@ impl DaVerifier for Verifier { type Error = ValidityConditionError; - type ValidityCondition = ChainValidityCondition; - // Verify that the given list of blob transactions is complete and correct. // NOTE: Function return unit since application client already verifies application data. fn verify_relevant_tx_list( &self, block_header: &::BlockHeader, - _txs: &[::BlobTransaction], + txs: &[::BlobTransaction], _inclusion_proof: ::InclusionMultiProof, _completeness_proof: ::CompletenessProof, - ) -> Result { + ) -> Result<::ValidityCondition, Self::Error> { + let mut txs_commitment: [u8; 32] = [0u8; 32]; + + for tx in txs { + txs_commitment = tx.combine_hash(txs_commitment); + } + let validity_condition = ChainValidityCondition { prev_hash: *block_header.prev_hash().inner(), block_hash: *block_header.hash().inner(), + txs_commitment, }; Ok(validity_condition) diff --git a/adapters/celestia/Cargo.toml b/adapters/celestia/Cargo.toml index afdb440a4..dae3ab31b 100644 --- a/adapters/celestia/Cargo.toml +++ b/adapters/celestia/Cargo.toml @@ -1,5 +1,5 @@ [package] -name = "jupiter" +name = "celestia" version = { workspace = true } edition = { workspace = true } license = { workspace = true } @@ -11,6 +11,8 @@ borsh = { workspace = true, features = ["bytes"] } bech32 = { workspace = true } prost = "0.11" prost-types = "0.11" +# I keep this commented as a reminder to opportunity to optimze this crate for non native compilation +#tendermint = { version = "0.32", default-features = false, features = ["std"] } tendermint = "0.32" tendermint-proto = "0.32" @@ -19,27 +21,33 @@ async-trait = { workspace = true } anyhow = { workspace = true } sha2 = { workspace = true } base64 = "0.21.2" -hex = { version = "0.4.3", features = ["serde"] } +hex = { workspace = true, features = ["serde"] } hex-literal = "0.4.1" -jsonrpsee = { version = "0.16.2", features = ["http-client"], optional = true } +jsonrpsee = { workspace = true, features = ["http-client"], optional = true } serde = { workspace = true } -serde_json = { workspace = true } +serde_json = { workspace = true, optional = true } tokio = { workspace = true, optional = true } thiserror = { workspace = true } tracing = { workspace = true } +zk-cycle-macros = { path = "../../utils/zk-cycle-macros", optional = true } +risc0-zkvm = { version = "0.16", default-features = false, features = ["std"], optional 
= true } +risc0-zkvm-platform = { version = "0.16", optional = true } sov-rollup-interface = { path = "../../rollup-interface" } nmt-rs = { git = "https://github.com/Sovereign-Labs/nmt-rs.git", rev = "dd37588444fca72825d11fe4a46838f66525c49f", features = ["serde", "borsh"] } + [dev-dependencies] postcard = { version = "1", features = ["use-std"] } proptest = { version = "1.2" } wiremock = "0.5" + [build-dependencies] prost-build = { version = "0.11" } [features] -default = ["native"] -native = ["dep:tokio", "dep:jsonrpsee"] +default = [] +native = ["dep:tokio", "dep:jsonrpsee", "dep:serde_json", "tendermint/default"] +bench = ["zk-cycle-macros/bench", "risc0-zkvm", "risc0-zkvm-platform"] verifier = [] diff --git a/adapters/celestia/README.md b/adapters/celestia/README.md index fe8ff8293..f255b864f 100644 --- a/adapters/celestia/README.md +++ b/adapters/celestia/README.md @@ -41,9 +41,9 @@ about compatibility with these proof systems, then `no_std` isn't a requirement. **Jupiter's DA Verifier** -In Celestia, checking _completeness_ of data is pretty simple. Celestia provides a a "data availability header", +In Celestia, checking _completeness_ of data is pretty simple. Celestia provides a "data availability header", containing the roots of many namespaced merkle tree. The union of the data in each of these namespaced merkle trees -is the data for this Celestia block. So, to prove compleness, we just have to iterate over these roots. At each step, +is the data for this Celestia block. So, to prove completeness, we just have to iterate over these roots. At each step, we verify a "namespace proof" showing the presence (or absence) of data from our namespace in that row. Then, we check that the blob(s) corresponding to that data appear next in the provided list of blobs. @@ -51,7 +51,7 @@ Checking _correctness_, is a bit more complicated. Unfortunately, Celestia does way to associate a blob of data with its sender - so we have to be pretty creative with our solution. (Recall that the Sovereign SDK requires blobs to be attributable to a particular sender for DOS protection). We have to read all of the data from a special reserved namespace on Celestia which contains the Cosmos SDK transactions associated -with the current block. (We accomplish this using the same technique of iterating over the row roots that we descibed previously). Then, we associate each relevant data blob from our rollup namespace with a transaction, using the +with the current block. (We accomplish this using the same technique of iterating over the row roots that we described previously). Then, we associate each relevant data blob from our rollup namespace with a transaction, using the [`share commitment`](https://github.com/celestiaorg/celestia-app/blob/main/proto/celestia/blob/v1/tx.proto#L25) field. 
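Both adapters in this diff model "this span of DA blocks is contiguous and canonical" as a `ChainValidityCondition` holding a previous hash and a block hash, and the Avail `combine` implementation above rejects two conditions whose spans do not meet. Below is a standalone sketch of that rule with simplified types; it is not the SDK's actual `ValidityCondition` trait and omits the Avail adapter's chained `txs_commitment`.

```rust
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
struct ChainCondition {
    prev_hash: [u8; 32],
    block_hash: [u8; 32],
}

#[derive(Debug, PartialEq, Eq)]
enum CombineError {
    /// The right-hand condition does not start where the left-hand one ends.
    BlocksNotConsecutive,
}

impl ChainCondition {
    /// Combine two conditions covering adjacent spans of blocks.
    /// Mirroring the adapters above, the combined condition keeps the
    /// right-hand side's hashes.
    fn combine(&self, rhs: Self) -> Result<Self, CombineError> {
        if self.block_hash != rhs.prev_hash {
            return Err(CombineError::BlocksNotConsecutive);
        }
        Ok(rhs)
    }
}

fn main() {
    let a = ChainCondition { prev_hash: [0; 32], block_hash: [1; 32] };
    let b = ChainCondition { prev_hash: [1; 32], block_hash: [2; 32] };
    let c = ChainCondition { prev_hash: [9; 32], block_hash: [3; 32] };

    assert_eq!(a.combine(b), Ok(b));
    assert_eq!(a.combine(c), Err(CombineError::BlocksNotConsecutive));
}
```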
### The DaService Trait diff --git a/adapters/celestia/src/celestia.rs b/adapters/celestia/src/celestia.rs index 370dd9118..67f5a476a 100644 --- a/adapters/celestia/src/celestia.rs +++ b/adapters/celestia/src/celestia.rs @@ -1,6 +1,4 @@ -use std::fmt::{Display, Formatter}; use std::ops::Range; -use std::str::FromStr; use std::sync::{Arc, Mutex}; use base64::engine::general_purpose::STANDARD as B64_ENGINE; @@ -11,7 +9,7 @@ use prost::bytes::Buf; use prost::Message; use serde::{Deserialize, Serialize}; use sov_rollup_interface::da::{BlockHeaderTrait as BlockHeader, CountedBufReader}; -use sov_rollup_interface::AddressTrait; +use sov_rollup_interface::services::da::SlotData; pub use tendermint::block::Header as TendermintHeader; use tendermint::block::Height; use tendermint::crypto::default::Sha256; @@ -29,7 +27,7 @@ use crate::pfb::{BlobTx, MsgPayForBlobs, Tx}; use crate::shares::{read_varint, BlobIterator, BlobRefIterator, NamespaceGroup}; use crate::utils::BoxError; use crate::verifier::address::CelestiaAddress; -use crate::verifier::{TmHash, PFB_NAMESPACE}; +use crate::verifier::{ChainValidityCondition, TmHash, PFB_NAMESPACE}; #[derive(Deserialize, Serialize, PartialEq, Debug, Clone)] pub struct MarshalledDataAvailabilityHeader { @@ -303,6 +301,30 @@ impl BlockHeader for CelestiaHeader { } } +/// We implement [`SlotData`] for [`CelestiaHeader`] in a similar fashion as for [`FilteredCelestiaBlock`] +impl SlotData for CelestiaHeader { + type BlockHeader = CelestiaHeader; + type Cond = ChainValidityCondition; + + fn hash(&self) -> [u8; 32] { + match self.header.hash() { + tendermint::Hash::Sha256(h) => h, + tendermint::Hash::None => unreachable!("tendermint::Hash::None should not be possible"), + } + } + + fn header(&self) -> &Self::BlockHeader { + self + } + + fn validity_condition(&self) -> ChainValidityCondition { + ChainValidityCondition { + prev_hash: *self.header().prev_hash().inner(), + block_hash: ::hash(self), + } + } +} + #[derive(Deserialize, Serialize, PartialEq, Debug, Clone)] pub struct CelestiaVersion { pub block: u32, @@ -323,56 +345,6 @@ impl AsRef<[u8]> for Sha2Hash { } } -#[derive(Deserialize, Serialize, PartialEq, Debug, Clone, Eq, Hash)] -pub struct H160(#[serde(deserialize_with = "hex::deserialize")] pub [u8; 20]); - -impl AsRef<[u8]> for H160 { - fn as_ref(&self) -> &[u8] { - self.0.as_ref() - } -} - -impl<'a> TryFrom<&'a [u8]> for H160 { - type Error = anyhow::Error; - - fn try_from(value: &'a [u8]) -> Result { - if value.len() == 20 { - let mut addr = [0u8; 20]; - addr.copy_from_slice(value); - return Ok(Self(addr)); - } - anyhow::bail!("Adress is not exactly 20 bytes"); - } -} - -impl From<[u8; 32]> for H160 { - fn from(value: [u8; 32]) -> Self { - let mut addr = [0u8; 20]; - addr.copy_from_slice(&value[12..]); - Self(addr) - } -} - -impl FromStr for H160 { - type Err = hex::FromHexError; - - fn from_str(s: &str) -> Result { - // Remove the "0x" prefix, if it exists. 
- let s = s.strip_prefix("0x").unwrap_or(s); - let mut output = [0u8; 20]; - hex::decode_to_slice(s, &mut output)?; - Ok(H160(output)) - } -} - -impl Display for H160 { - fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { - write!(f, "0x{}", hex::encode(self.0)) - } -} - -impl AddressTrait for H160 {} - pub fn parse_pfb_namespace( group: NamespaceGroup, ) -> Result, BoxError> { diff --git a/adapters/celestia/src/da_service.rs b/adapters/celestia/src/da_service.rs index c547701e9..e319d2646 100644 --- a/adapters/celestia/src/da_service.rs +++ b/adapters/celestia/src/da_service.rs @@ -114,17 +114,8 @@ const fn default_request_timeout_seconds() -> u64 { 60 } -#[async_trait] -impl DaService for CelestiaService { - type RuntimeConfig = DaServiceConfig; - - type Spec = CelestiaSpec; - - type FilteredBlock = FilteredCelestiaBlock; - - type Error = BoxError; - - async fn new(config: Self::RuntimeConfig, chain_params: RollupParams) -> Self { +impl CelestiaService { + pub async fn new(config: DaServiceConfig, chain_params: RollupParams) -> Self { let client = { let mut headers = HeaderMap::new(); headers.insert( @@ -136,7 +127,7 @@ impl DaService for CelestiaService { jsonrpsee::http_client::HttpClientBuilder::default() .set_headers(headers) - .max_request_body_size(config.max_celestia_response_body_size) // 100 MB + .max_request_size(config.max_celestia_response_body_size) .request_timeout(std::time::Duration::from_secs( config.celestia_rpc_timeout_seconds, )) @@ -146,6 +137,15 @@ impl DaService for CelestiaService { Self::with_client(client, chain_params.namespace) } +} + +#[async_trait] +impl DaService for CelestiaService { + type Spec = CelestiaSpec; + + type FilteredBlock = FilteredCelestiaBlock; + + type Error = BoxError; async fn get_finalized_at(&self, height: u64) -> Result { let client = self.client.clone(); @@ -153,7 +153,7 @@ impl DaService for CelestiaService { let _span = span!(Level::TRACE, "fetching finalized block", height = height); // Fetch the header and relevant shares via RPC - info!("Fetching header at height={}...", height); + debug!("Fetching header at height={}...", height); let header = client .request::("header.GetByHeight", vec![height]) .await?; @@ -260,7 +260,7 @@ impl DaService for CelestiaService { async fn send_transaction(&self, blob: &[u8]) -> Result<(), Self::Error> { // https://node-rpc-docs.celestia.org/ let client = self.client.clone(); - info!("Sending {} bytes of raw data to Celestia.", blob.len()); + debug!("Sending {} bytes of raw data to Celestia.", blob.len()); let fee: u64 = 2000; let namespace = self.rollup_namespace.0.to_vec(); let blob = blob.to_vec(); @@ -304,7 +304,7 @@ struct CelestiaBasicResponse { } impl CelestiaBasicResponse { - /// We assume that absence of `code` indicates that request was successfull + /// We assume that absence of `code` indicates that request was successful pub fn is_success(&self) -> bool { self.error_code.is_none() } diff --git a/adapters/celestia/src/lib.rs b/adapters/celestia/src/lib.rs index 23e284287..7638de48e 100644 --- a/adapters/celestia/src/lib.rs +++ b/adapters/celestia/src/lib.rs @@ -1,11 +1,13 @@ pub mod celestia; pub mod shares; -pub use celestia::*; +pub use crate::celestia::*; #[cfg(feature = "native")] -pub mod da_service; +mod da_service; pub mod pfb; pub mod share_commit; pub mod types; mod utils; pub mod verifier; +#[cfg(feature = "native")] +pub use da_service::{CelestiaService, DaServiceConfig}; diff --git a/adapters/celestia/src/shares.rs b/adapters/celestia/src/shares.rs index 
44ea1b799..3f543bfca 100644 --- a/adapters/celestia/src/shares.rs +++ b/adapters/celestia/src/shares.rs @@ -278,7 +278,13 @@ impl std::error::Error for ShareParsingError {} impl NamespaceGroup { pub fn from_b64(b64: &str) -> Result { + if b64.is_empty() { + error!("Empty input"); + return Err(ShareParsingError::ErrWrongLength); + } + let mut decoded = Vec::with_capacity((b64.len() + 3) / 4 * 3); + // unsafe { decoded.set_len((b64.len() / 4 * 3)) } if let Err(err) = B64_ENGINE.decode_slice(b64, &mut decoded) { info!("Error decoding NamespaceGroup from base64: {}", err); diff --git a/adapters/celestia/src/types.rs b/adapters/celestia/src/types.rs index 7e5481927..951fca0d7 100644 --- a/adapters/celestia/src/types.rs +++ b/adapters/celestia/src/types.rs @@ -6,6 +6,7 @@ use base64::Engine; use borsh::{BorshDeserialize, BorshSerialize}; pub use nmt_rs::NamespaceId; use serde::{Deserialize, Serialize}; +use sov_rollup_interface::da::BlockHeaderTrait; use sov_rollup_interface::services::da::SlotData; use sov_rollup_interface::Bytes; use tendermint::crypto::default::Sha256; @@ -14,7 +15,7 @@ use tendermint::merkle; use crate::pfb::MsgPayForBlobs; use crate::shares::{NamespaceGroup, Share}; use crate::utils::BoxError; -use crate::verifier::PARITY_SHARES_NAMESPACE; +use crate::verifier::{ChainValidityCondition, PARITY_SHARES_NAMESPACE}; use crate::{CelestiaHeader, TxPosition}; #[derive(Debug, PartialEq, Clone, Deserialize, Serialize)] @@ -81,6 +82,7 @@ pub struct FilteredCelestiaBlock { impl SlotData for FilteredCelestiaBlock { type BlockHeader = CelestiaHeader; + type Cond = ChainValidityCondition; fn hash(&self) -> [u8; 32] { match self.header.header.hash() { @@ -92,6 +94,13 @@ impl SlotData for FilteredCelestiaBlock { fn header(&self) -> &Self::BlockHeader { &self.header } + + fn validity_condition(&self) -> ChainValidityCondition { + ChainValidityCondition { + prev_hash: *self.header().prev_hash().inner(), + block_hash: self.hash(), + } + } } impl FilteredCelestiaBlock { @@ -123,6 +132,7 @@ pub enum ValidationError { MissingTx, InvalidRowProof, InvalidSigner, + IncompleteData, } impl CelestiaHeader { diff --git a/adapters/celestia/src/verifier/address.rs b/adapters/celestia/src/verifier/address.rs index 77bacaa6c..f8ea135d9 100644 --- a/adapters/celestia/src/verifier/address.rs +++ b/adapters/celestia/src/verifier/address.rs @@ -1,19 +1,24 @@ use std::fmt::{Display, Formatter}; use std::str::FromStr; +use bech32::WriteBase32; use borsh::{BorshDeserialize, BorshSerialize}; use serde::{Deserialize, Serialize}; -use sov_rollup_interface::AddressTrait; use thiserror::Error; +/// Human Readable Part: "celestia" for Celestia network const HRP: &str = "celestia"; +/// Bech32 variant is used for Celestia and CosmosSDK +const VARIANT: bech32::Variant = bech32::Variant::Bech32; +/// Representation of the address in the Celestia network +/// https://github.com/celestiaorg/celestia-specs/blob/e59efd63a2165866584833e91e1cb8a6ed8c8203/src/specs/data_structures.md#address +/// Spec says: "Addresses have a length of 32 bytes.", but in reality it is 32 `u5` elements, which can be compressed as 20 bytes. 
+/// TODO: Switch to bech32::u5 when it has repr transparent: https://github.com/Sovereign-Labs/sovereign-sdk/issues/646 #[derive( Debug, PartialEq, Clone, Eq, Serialize, Deserialize, BorshDeserialize, BorshSerialize, Hash, )] -// Raw ASCII bytes, including HRP -// TODO: https://github.com/Sovereign-Labs/sovereign-sdk/issues/469 -pub struct CelestiaAddress(Vec); +pub struct CelestiaAddress([u8; 32]); impl AsRef<[u8]> for CelestiaAddress { fn as_ref(&self) -> &[u8] { @@ -21,36 +26,54 @@ impl AsRef<[u8]> for CelestiaAddress { } } +/// Decodes slice of bytes into CelestiaAddress +/// Treats it as string if it starts with HRP and the rest is valid ASCII +/// Otherwise just checks if it contains valid `u5` elements and has the correct length. impl<'a> TryFrom<&'a [u8]> for CelestiaAddress { type Error = anyhow::Error; fn try_from(value: &'a [u8]) -> Result { - Ok(Self(value.to_vec())) - } -} - -impl From<[u8; 32]> for CelestiaAddress { - fn from(value: [u8; 32]) -> Self { - // TODO: This is completely broken with current implementation. - // https://github.com/Sovereign-Labs/sovereign-sdk/issues/469 - Self(value.to_vec()) + if value.starts_with(HRP.as_bytes()) && value.is_ascii() { + // safety, because we checked that it is ASCII + let s = unsafe { std::str::from_utf8_unchecked(value) }; + return CelestiaAddress::from_str(s).map_err(|e| anyhow::anyhow!("{}", e)); + } + if value.len() != 32 { + anyhow::bail!("An address must be 32 u5 long"); + } + let mut raw_address = [0u8; 32]; + for (idx, &item) in value.iter().enumerate() { + bech32::u5::try_from_u8(item) + .map_err(|e| anyhow::anyhow!("Element at {} is not u5: {}", idx, e))?; + raw_address[idx] = item; + } + Ok(Self(raw_address)) } } impl Display for CelestiaAddress { fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { - let ascii_string = String::from_utf8_lossy(&self.0); - write!(f, "{}", ascii_string) + let mut w = bech32::Bech32Writer::new(HRP, VARIANT, f)?; + for elem in self.0.iter() { + // It is ok to unwrap, because we always sanitize data + w.write_u5(bech32::u5::try_from_u8(*elem).unwrap())?; + } + w.finalize() } } -#[derive(Clone, Debug, Error)] +#[derive(Clone, Debug, Error, PartialEq)] /// An error which occurs while decoding a `CelestialAddress` from a string. pub enum CelestiaAddressFromStrError { - /// The address has an invalid human readable prefix. Valid addresses must start with the prefix 'celestia'. - #[error("The address has an invalid human readable prefix. Valid addresses must start with the prefix 'celestia', but this one began with {0}")] + /// The address has an invalid human-readable prefix. + /// Valid addresses must start with the prefix 'celestia'. + #[error("The address has an invalid human-readable prefix. Valid addresses must start with the prefix 'celestia', but this one began with {0}")] InvalidHumanReadablePrefix(String), - /// The address could note be decoded as valid bech32 + /// The address has an invalid human-readable prefix. + /// Valid addresses must start with the prefix 'celestia'. + #[error("The address has an invalid bech32 variant. 
Valid addresses must be encoded in Bech32, but this is encoded in Bech32m")] + InvalidVariant, + /// The address could not be decoded as valid bech32 #[error("The address could not be decoded as valid bech32: {0}")] InvalidBech32(#[from] bech32::Error), } @@ -59,25 +82,42 @@ impl FromStr for CelestiaAddress { type Err = CelestiaAddressFromStrError; fn from_str(s: &str) -> Result { - // This could be the way to save memory: - let (hrp, _raw_address_u5, _variant) = bech32::decode(s)?; + let (hrp, raw_address_u5, variant) = bech32::decode(s)?; if hrp != HRP { return Err(CelestiaAddressFromStrError::InvalidHumanReadablePrefix(hrp)); } - let value = s.as_bytes().to_vec(); + if variant != VARIANT { + return Err(CelestiaAddressFromStrError::InvalidVariant); + } + if raw_address_u5.len() != 32 { + return Err(CelestiaAddressFromStrError::InvalidBech32( + bech32::Error::InvalidLength, + )); + } + + let mut value: [u8; 32] = [0; 32]; + + for (idx, &item) in raw_address_u5.iter().enumerate() { + value[idx] = item.to_u8(); + } Ok(Self(value)) } } -impl AddressTrait for CelestiaAddress {} +impl sov_rollup_interface::BasicAddress for CelestiaAddress {} #[cfg(test)] mod tests { + use std::hint::black_box; + + use bech32::ToBase32; + use proptest::prelude::*; + use super::*; #[test] fn test_address_display_from_string() { - let raw_address_str = "celestia1w7wcupk5gswj25c0khnkey5fwmlndx6t5aarmk"; + let raw_address_str = "celestia1hvp2nfz3r6nqt8mlrzqf9ctwle942tkr0wql75"; let address = CelestiaAddress::from_str(raw_address_str).unwrap(); let output = format!("{}", address); assert_eq!(raw_address_str, output); @@ -91,4 +131,94 @@ mod tests { let output = format!("{}", address); assert_eq!(raw_address_str, output); } + + #[test] + fn test_from_str_and_from_slice_same() { + let raw_address_str = "celestia1w7wcupk5gswj25c0khnkey5fwmlndx6t5aarmk"; + let raw_address_array = *b"celestia1w7wcupk5gswj25c0khnkey5fwmlndx6t5aarmk"; + + let address_from_str = CelestiaAddress::from_str(raw_address_str).unwrap(); + let address_from_slice = CelestiaAddress::try_from(&raw_address_array[..]).unwrap(); + + assert_eq!(address_from_str, address_from_slice); + } + + // 20 u8 -> 32 u5 + fn check_from_bytes_as_ascii(input: [u8; 20]) { + let encoded = bech32::encode("celestia", input.to_base32(), VARIANT).unwrap(); + let bytes = encoded.as_bytes(); + let address = CelestiaAddress::try_from(bytes); + assert!(address.is_ok()); + let address = address.unwrap(); + let output = format!("{}", address); + assert_eq!(encoded, output); + } + + // 20 u8 -> 32 u5 + fn check_from_as_ref(input: [u8; 20]) { + let encoded = bech32::encode("celestia", input.to_base32(), VARIANT).unwrap(); + let address1 = CelestiaAddress::from_str(&encoded).unwrap(); + let bytes = address1.as_ref(); + let address = CelestiaAddress::try_from(bytes); + assert!(address.is_ok()); + let address = address.unwrap(); + let output = format!("{}", address); + assert_eq!(encoded, output); + } + + // 20 u8 -> 32 u5 + fn check_borsh(input: [u8; 20]) { + let address_str = bech32::encode("celestia", input.to_base32(), VARIANT).unwrap(); + + let address = CelestiaAddress::from_str(&address_str).unwrap(); + let serialized = BorshSerialize::try_to_vec(&address).unwrap(); + let deserialized = CelestiaAddress::try_from_slice(&serialized).unwrap(); + + assert_eq!(deserialized, address); + + let address_str2 = format!("{}", deserialized); + assert_eq!(address_str2, address_str); + } + + proptest! 
{ + #[test] + fn test_try_from_any_slice(input in prop::collection::vec(any::(), 0..100)) { + let _ = black_box(CelestiaAddress::try_from(&input[..])); + } + + #[test] + fn test_from_str_anything(input in "\\PC*") { + let _ = black_box(CelestiaAddress::from_str(&input)); + } + + #[test] + // According to spec, alphanumeric characters excluding "1" "b" "i" and "o" + fn test_from_str_lowercase_ascii(input in "celestia1[023456789ac-hj-np-z]{38}") { + let result = CelestiaAddress::from_str(&input); + match result { + Ok(address) => { + let output = format!("{}", address); + assert_eq!(input, output); + } + Err(err) => { + assert_eq!(CelestiaAddressFromStrError::InvalidBech32(bech32::Error::InvalidChecksum), err); + }, + } + } + + #[test] + fn test_try_from_ascii_slice(input in proptest::array::uniform20(0u8..=255)) { + check_from_bytes_as_ascii(input); + } + + #[test] + fn test_try_as_ref_from(input in proptest::array::uniform20(0u8..=255)) { + check_from_as_ref(input); + } + + #[test] + fn test_borsh(input in proptest::array::uniform20(0u8..=255)) { + check_borsh(input); + } + } } diff --git a/adapters/celestia/src/verifier/mod.rs b/adapters/celestia/src/verifier/mod.rs index 819244243..d2e906cc7 100644 --- a/adapters/celestia/src/verifier/mod.rs +++ b/adapters/celestia/src/verifier/mod.rs @@ -1,3 +1,4 @@ +use borsh::{BorshDeserialize, BorshSerialize}; use nmt_rs::NamespaceId; use serde::{Deserialize, Serialize}; use sov_rollup_interface::da::{ @@ -12,6 +13,8 @@ pub mod address; pub mod proofs; use proofs::*; +#[cfg(all(target_os = "zkvm", feature = "bench"))] +use zk_cycle_macros::cycle_tracker; use self::address::CelestiaAddress; use crate::share_commit::recreate_commitment; @@ -83,6 +86,7 @@ impl AsRef for tendermint::Hash { impl BlockHash for TmHash {} +#[derive(serde::Serialize, serde::Deserialize)] pub struct CelestiaSpec; impl DaSpec for CelestiaSpec { @@ -92,6 +96,8 @@ impl DaSpec for CelestiaSpec { type BlobTransaction = BlobWithSender; + type ValidityCondition = ChainValidityCondition; + type InclusionMultiProof = Vec; type CompletenessProof = Vec; @@ -104,7 +110,18 @@ pub struct RollupParams { pub namespace: NamespaceId, } -#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Hash)] +#[derive( + Debug, + Clone, + Copy, + PartialEq, + Eq, + Serialize, + Deserialize, + Hash, + BorshDeserialize, + BorshSerialize, +)] /// A validity condition expressing that a chain of DA layer blocks is contiguous and canonical pub struct ChainValidityCondition { pub prev_hash: [u8; 32], @@ -131,21 +148,20 @@ impl da::DaVerifier for CelestiaVerifier { type Error = ValidationError; - type ValidityCondition = ChainValidityCondition; - fn new(params: ::ChainParams) -> Self { Self { rollup_namespace: params.namespace, } } + #[cfg_attr(all(target_os = "zkvm", feature = "bench"), cycle_tracker)] fn verify_relevant_tx_list( &self, block_header: &::BlockHeader, txs: &[::BlobTransaction], inclusion_proof: ::InclusionMultiProof, completeness_proof: ::CompletenessProof, - ) -> Result { + ) -> Result<::ValidityCondition, Self::Error> { // Validate that the provided DAH is well-formed block_header.validate_dah()?; let validity_condition = ChainValidityCondition { @@ -227,7 +243,7 @@ impl da::DaVerifier for CelestiaVerifier { continue; } let tx: &BlobWithSender = tx_iter.next().ok_or(ValidationError::MissingTx)?; - if tx.sender.as_ref() != pfb.signer.as_bytes() { + if tx.sender.to_string() != pfb.signer { return Err(ValidationError::InvalidSigner); } @@ -236,9 +252,18 @@ impl da::DaVerifier for 
CelestiaVerifier { let mut blob_iter = blob_ref.data(); let mut blob_data = vec![0; blob_iter.remaining()]; blob_iter.copy_to_slice(blob_data.as_mut_slice()); - let tx_data = tx.data().acc(); - assert_eq!(blob_data, *tx_data); + let tx_data = tx.data().accumulator(); + + match tx_data { + da::Accumulator::Completed(tx_data) => { + assert_eq!(blob_data, *tx_data); + } + // For now we bail and return, maybe want to change that behaviour in the future + da::Accumulator::InProgress(_) => { + return Err(ValidationError::IncompleteData); + } + } // Link blob commitment to e-tx commitment let expected_commitment = diff --git a/adapters/risc0/Cargo.toml b/adapters/risc0/Cargo.toml index e30c457e2..19a43ccc0 100644 --- a/adapters/risc0/Cargo.toml +++ b/adapters/risc0/Cargo.toml @@ -14,14 +14,18 @@ readme = "README.md" [dependencies] anyhow = { workspace = true } bincode = { workspace = true } -risc0-zkvm = { version = "0.16", default-features = false, features = ['std'] } +risc0-zkvm = { version = "0.16", default-features = false, features = ["std"] } +risc0-zkvm-platform = { version = "0.16" } risc0-zkp = { version = "0.16", optional = true } risc0-circuit-rv32im = { version = "0.16", optional = true } serde = { workspace = true } - +bytemuck = "1.13.1" +once_cell = { version = "1.7.2", optional = true } +parking_lot = { version = "0.12.1", optional = true } +zk-cycle-utils = { path = "../../utils/zk-cycle-utils" } sov-rollup-interface = { path = "../../rollup-interface" } - [features] -default = ["native"] +default = [] native = ["risc0-zkvm/prove", "dep:risc0-zkp", "dep:risc0-circuit-rv32im"] +bench = ["once_cell", "parking_lot"] diff --git a/adapters/risc0/src/guest.rs b/adapters/risc0/src/guest.rs index f664e0ff2..acc7e3d8f 100644 --- a/adapters/risc0/src/guest.rs +++ b/adapters/risc0/src/guest.rs @@ -37,7 +37,7 @@ impl Zkvm for Risc0Guest { _serialized_proof: &'a [u8], _code_commitment: &Self::CodeCommitment, ) -> Result<&'a [u8], Self::Error> { - // Implement this method once risc0 supports recursion - todo!() + // Implement this method once risc0 supports recursion: issue #633 + todo!("Implement once risc0 supports recursion: https://github.com/Sovereign-Labs/sovereign-sdk/issues/633") } } diff --git a/adapters/risc0/src/host.rs b/adapters/risc0/src/host.rs index e7ca82a93..0829435bc 100644 --- a/adapters/risc0/src/host.rs +++ b/adapters/risc0/src/host.rs @@ -6,7 +6,11 @@ use risc0_zkvm::{ Executor, ExecutorEnvBuilder, LocalExecutor, SegmentReceipt, Session, SessionReceipt, }; use sov_rollup_interface::zk::{Zkvm, ZkvmHost}; +#[cfg(feature = "bench")] +use zk_cycle_utils::{cycle_count_callback, get_syscall_name, get_syscall_name_cycles}; +#[cfg(feature = "bench")] +use crate::metrics::metrics_callback; use crate::Risc0MethodId; pub struct Risc0Host<'a> { @@ -15,9 +19,28 @@ pub struct Risc0Host<'a> { } impl<'a> Risc0Host<'a> { + #[cfg(not(feature = "bench"))] pub fn new(elf: &'a [u8]) -> Self { + let default_env = ExecutorEnvBuilder::default(); + + Self { + env: RefCell::new(default_env), + elf, + } + } + + #[cfg(feature = "bench")] + pub fn new(elf: &'a [u8]) -> Self { + let mut default_env = ExecutorEnvBuilder::default(); + + let metrics_syscall_name = get_syscall_name(); + default_env.io_callback(metrics_syscall_name, metrics_callback); + + let cycles_syscall_name = get_syscall_name_cycles(); + default_env.io_callback(cycles_syscall_name, cycle_count_callback); + Self { - env: RefCell::new(ExecutorEnvBuilder::default()), + env: RefCell::new(default_env), elf, } } diff --git 
a/adapters/risc0/src/lib.rs b/adapters/risc0/src/lib.rs index ee3d48bdc..76c694a2c 100644 --- a/adapters/risc0/src/lib.rs +++ b/adapters/risc0/src/lib.rs @@ -6,6 +6,9 @@ pub mod guest; #[cfg(feature = "native")] pub mod host; +#[cfg(feature = "bench")] +pub mod metrics; + #[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] pub struct Risc0MethodId([u32; 8]); diff --git a/adapters/risc0/src/metrics.rs b/adapters/risc0/src/metrics.rs new file mode 100644 index 000000000..6dd56b0e5 --- /dev/null +++ b/adapters/risc0/src/metrics.rs @@ -0,0 +1,33 @@ +use std::collections::HashMap; + +use once_cell::sync::Lazy; +use parking_lot::Mutex; + +pub static GLOBAL_HASHMAP: Lazy>> = + Lazy::new(|| Mutex::new(HashMap::new())); + +pub fn add_value(metric: String, value: u64) { + let mut hashmap = GLOBAL_HASHMAP.lock(); + hashmap + .entry(metric) + .and_modify(|(sum, count)| { + *sum += value; + *count += 1; + }) + .or_insert((value, 1)); +} + +pub fn deserialize_custom(serialized: &[u8]) -> (String, u64) { + let null_pos = serialized.iter().position(|&b| b == 0).unwrap(); + let (string_bytes, size_bytes_with_null) = serialized.split_at(null_pos); + let size_bytes = &size_bytes_with_null[1..]; // Skip the null terminator + let string = String::from_utf8(string_bytes.to_vec()).unwrap(); + let size = u64::from_ne_bytes(size_bytes.try_into().unwrap()); // Convert bytes back into usize + (string, size) +} + +pub fn metrics_callback(input: &[u8]) -> Vec { + let met_tuple = deserialize_custom(input); + add_value(met_tuple.0, met_tuple.1); + vec![] +} diff --git a/docker/DEMO.md b/docker/DEMO.md new file mode 100644 index 000000000..a7d52222c --- /dev/null +++ b/docker/DEMO.md @@ -0,0 +1,15 @@ + + +```bash +# Sent to node 2, should fail +target/debug/sov-cli submit-transaction examples/test-data/keys/token_deployer_private_key.json Bank examples/test-data/requests/create_token.json 0 http://127.0.0.1:12346 +target/debug/sov-cli publish-batch http://127.0.0.1:12346 + +# Registering second sequencer +target/debug/sov-cli submit-transaction examples/test-data/keys/token_deployer_private_key.json SequencerRegistry examples/test-data/requests/register_sequencer.json 0 http://127.0.0.1:12345 +target/debug/sov-cli publish-batch http://127.0.0.1:12345 + +# Try on second sequencer again +target/debug/sov-cli submit-transaction examples/test-data/keys/token_deployer_private_key.json Bank examples/test-data/requests/transfer.json 1 http://127.0.0.1:12346 +target/debug/sov-cli publish-batch http://127.0.0.1:12346 +``` \ No newline at end of file diff --git a/docker/Makefile b/docker/Makefile new file mode 100644 index 000000000..10a4d5eab --- /dev/null +++ b/docker/Makefile @@ -0,0 +1,21 @@ + +COMPOSE_FILE=docker-compose.celestia.yaml +MOUNT_FOLDER=keyring-test +NODE_1_KEY_FILE=bridge_1_key.txt + +up: + docker-compose --file $(COMPOSE_FILE) up -d + + +down: + docker-compose --file "$(COMPOSE_FILE)" down + rm -rf $(MOUNT_FOLDER)/*.txt + rm -rf config_*.toml + +restart: down up generate_configs + +generate_configs: + bash ./generate_configs.sh + +logs: + docker-compose --file "$(COMPOSE_FILE)" logs --follow \ No newline at end of file diff --git a/docker/docker-compose.celestia.yaml b/docker/docker-compose.celestia.yaml new file mode 100644 index 000000000..704e2a16b --- /dev/null +++ b/docker/docker-compose.celestia.yaml @@ -0,0 +1,142 @@ +version: '3' + +services: + validator: + container_name: sov-celestia-validator + image: ghcr.io/celestiaorg/celestia-app:v0.13.2 + healthcheck: + test: [ "CMD", "curl", "-f", 
"http://127.0.0.1:26657/block?height=1" ] + interval: 30s + timeout: 10s + retries: 5 + environment: + - VALIDATOR_NAME=validator + - KEY_NAME_1=validator + - KEY_NAME_2=node1 + - KEY_NAME_3=node2 + - CHAIN_ID=test +# - CHAIN_ID=sov-testnet + - CELES_AMOUNT=12300000000000000000000000utia + - STAKING_AMOUNT=1000000000utia + - DEBIAN_FRONTEND=noninteractive + ports: + - "9090:9090" + - "36656:26656" + - "36657:26657" + - "36658:26658" + entrypoint: + - /bin/sh + - -c + - | + apk update -qq && apk add -qq curl && \ + /bin/celestia-appd init $$VALIDATOR_NAME --chain-id $$CHAIN_ID && \ + if [ ! -f "/root/keyring-test/$$KEY_NAME_1.info" ]; then \ + echo "CREATING NEW KEYS" && \ + /bin/celestia-appd keys add $$KEY_NAME_1 --keyring-backend test && \ + /bin/celestia-appd keys add $$KEY_NAME_2 --keyring-backend test && \ + /bin/celestia-appd keys add $$KEY_NAME_3 --keyring-backend test && \ + mkdir -p /root/keyring-test && cp /root/.celestia-app/keyring-test/* /root/keyring-test/; \ + else \ + echo "USING EXISTING KEYS" && \ + mkdir -p /root/.celestia-app/keyring-test/ && \ + cp /root/keyring-test/* /root/.celestia-app/keyring-test/; \ + fi; \ + echo "AVAILABLE KEYS:" && \ + /bin/celestia-appd keys list --keyring-backend test && \ + /bin/celestia-appd add-genesis-account $$KEY_NAME_1 $$CELES_AMOUNT --keyring-backend test && \ + /bin/celestia-appd add-genesis-account $$KEY_NAME_2 $$CELES_AMOUNT --keyring-backend test && \ + /bin/celestia-appd add-genesis-account $$KEY_NAME_3 $$CELES_AMOUNT --keyring-backend test && \ + /bin/celestia-appd gentx $$KEY_NAME_1 $$STAKING_AMOUNT --chain-id $$CHAIN_ID --keyring-backend test --evm-address 0x966e6f22781EF6a6A82BBB4DB3df8E225DfD9488 && \ + /bin/celestia-appd collect-gentxs && \ + echo "NODE_ID:" && \ + /bin/celestia-appd tendermint show-node-id && \ + + /bin/celestia-appd start --rpc.laddr tcp://0.0.0.0:26657 --proxy_app tcp://0.0.0.0:26658 + volumes: + - ./keyring-test:/root/keyring-test/ + + + # cat /root/.celestia-app/config/genesis.json && \ + + bridge: + image: ghcr.io/celestiaorg/celestia-node:v0.7.1 + container_name: sov-celestia-bridge + environment: + - KEY_NAME=validator + - CHAIN_ID=test + - STAKING_AMOUNT=1000000000utia + - DEBIAN_FRONTEND=noninteractive + depends_on: + - validator + ports: + - "26656:26656" + - "26657:26657" + - "26658:26658" + entrypoint: + - /bin/sh + - -c + - | + apt-get -qq update -qq && apt -qq install -y curl jq && rm -rf /var/lib/apt/lists/* && \ + sleep 20 && \ + until curl http://validator:26657/block?height=1; do echo "Waiting for validator..."; sleep 5; done && \ + mkdir -p /bridge/keys/keyring-test/ && cp -r /root/keyring-test/* /bridge/keys/keyring-test/ && \ + /celestia bridge init --node.store /bridge && \ + until curl http://validator:26657/block?height=1; do echo "Waiting for validator..."; sleep 5; done && \ + export GENESIS=$(curl http://validator:26657/block?height=1 | jq '.result.block_id.hash' | tr -d '"') && \ + export CELESTIA_CUSTOM="$$CHAIN_ID:$$GENESIS" && \ + echo "Starting bridge with option=$$CELESTIA_CUSTOM and key name $$KEY_NAME. 
API KEY:" && \ + /celestia bridge auth admin --node.store /bridge --log.level=ERROR && echo " " && \ + echo "$(/celestia bridge auth admin --node.store /bridge --log.level=ERROR)" > /root/keyring-test/bridge_1_key.txt && \ + /celestia bridge start --node.store /bridge --gateway --gateway.addr 0.0.0.0 --rpc.addr 0.0.0.0 --core.ip validator --keyring.accname $$KEY_NAME + volumes: + - ./keyring-test:/root/keyring-test + + bridge-2: + image: ghcr.io/celestiaorg/celestia-node:v0.7.1 + container_name: sov-celestia-bridge-2 + environment: + - KEY_NAME=node1 + - CHAIN_ID=test + - STAKING_AMOUNT=1000000000utia + - DEBIAN_FRONTEND=noninteractive + depends_on: + - validator + ports: + - "46656:26656" + - "46657:26657" + - "46658:26658" + entrypoint: + - /bin/sh + - -c + - | + apt-get -qq update -qq && apt -qq install -y curl jq && rm -rf /var/lib/apt/lists/* && \ + sleep 20 && \ + until curl http://validator:26657/block?height=1; do echo "Waiting for validator..."; sleep 5; done && \ + mkdir -p /bridge/keys/keyring-test/ && cp -r /root/keyring-test/* /bridge/keys/keyring-test/ && \ + /celestia bridge init --node.store /bridge && \ + until curl http://validator:26657/block?height=1; do echo "Waiting for validator..."; sleep 5; done && \ + export GENESIS=$(curl http://validator:26657/block?height=1 | jq '.result.block_id.hash' | tr -d '"') && \ + export CELESTIA_CUSTOM="$$CHAIN_ID:$$GENESIS" && \ + echo "Starting bridge with option=$$CELESTIA_CUSTOM and key name $$KEY_NAME. API KEY:" && \ + /celestia bridge auth admin --node.store /bridge --log.level=ERROR && echo " " && \ + echo "$(/celestia bridge auth admin --node.store /bridge --log.level=ERROR)" > /root/keyring-test/bridge_2_key.txt && \ + /celestia bridge start --node.store /bridge --gateway --gateway.addr 0.0.0.0 --rpc.addr 0.0.0.0 --core.ip validator --keyring.accname $$KEY_NAME + volumes: + - ./keyring-test:/root/keyring-test + + + +# TODO: +# * +Create keys only if keyring-test does not exist +# * ~Export validator API key to file. Leftovers - generate rollup_config.toml +# * +Multiple bridges +# * Bridge health-check +# * Timeoutable wait for block_height +# * Quite install script +# * Print genesis.json + + + + +# GENESIS=""; CNT=0; MAX=30; while [ "${#GENESIS}" -le 4 -a $CNT -ne $MAX ]; do GENESIS=$(curl -s http://127.0.0.1:26657/block?height=1 | jq '.result.block_id.hash' | tr -d '"'); ((CNT++)); sleep 1; done +# GENESIS=""; CNT=0; MAX=30; while [ "${#GENESIS}" -le 4 -a $CNT -ne $MAX ]; do GENESIS=$(curl -s http://127.0.0.1:26657/block?height=1 | jq '.result.block_id.hash' | tr -d '"'); ((CNT++)); sleep 1; done && \ \ No newline at end of file diff --git a/docker/generate_configs.sh b/docker/generate_configs.sh new file mode 100755 index 000000000..8d3f4bc29 --- /dev/null +++ b/docker/generate_configs.sh @@ -0,0 +1,19 @@ +MOUNT_FOLDER=keyring-test +NODE_1_KEY_FILE=bridge_1_key.txt +NODE_2_KEY_FILE=bridge_2_key.txt + +count=0; while [[ ! -f "$MOUNT_FOLDER/$NODE_1_KEY_FILE" && $count -lt 300 ]]; do sleep 1; ((count++)); done + +NODE_1_KEY="$(cat "$MOUNT_FOLDER/$NODE_1_KEY_FILE" | egrep -v '^$|^WARNING|^\*\*DO NOT')"; +sed "s/^celestia_rpc_auth_token = .*/celestia_rpc_auth_token = \"$NODE_1_KEY\"/g" template.toml | \ + sed "s/^path = .*/path = \"demo_data_1\"/g" \ + > config_1.toml; + +count=0; while [[ ! 
-f "$MOUNT_FOLDER/$NODE_2_KEY_FILE" && $count -lt 300 ]]; do sleep 1; ((count++)); done + +NODE_1_KEY="$(cat "$MOUNT_FOLDER/$NODE_2_KEY_FILE" | egrep -v '^$|^WARNING|^\*\*DO NOT')"; +sed "s/^celestia_rpc_auth_token = .*/celestia_rpc_auth_token = \"$NODE_1_KEY\"/g" template.toml | \ + sed "s/^path = .*/path = \"demo_data_2\"/g" | \ + sed "s/^celestia_rpc_address = .*/celestia_rpc_address = \"http:\/\/127.0.0.1:46658\"/g" | \ + sed "s/^bind_port = .*/bind_port = 12346/g" \ + > config_2.toml; \ No newline at end of file diff --git a/docker/keyring-test/00958f6e5478febcc9119b15f5dc492868cbdc8d.address b/docker/keyring-test/00958f6e5478febcc9119b15f5dc492868cbdc8d.address new file mode 100644 index 000000000..e7e0293e7 --- /dev/null +++ b/docker/keyring-test/00958f6e5478febcc9119b15f5dc492868cbdc8d.address @@ -0,0 +1 @@ +eyJhbGciOiJQQkVTMi1IUzI1NitBMTI4S1ciLCJjcmVhdGVkIjoiMjAyMy0wOC0xOCAxNToxNDozOC4wNDYxNzcwMDcgKzAwMDAgVVRDIG09KzAuMDY5MjIxOTE3IiwiZW5jIjoiQTI1NkdDTSIsInAyYyI6ODE5MiwicDJzIjoiTW5CSTAxLWp3MlMxd1U5bCJ9.yFz0NVVhsX379F1XQQC69JLmMT5JOFlRUey6_L4XzqUk6qPrdkmy6g.5jY9L2FkpGJ70RU7.Q8hsXaRmXDLw0crcz4lUX1U00Y4Mt52tzXC3_hz6GeXsNJc8zSfZTA9RtCWpbEl4I0-BqYdfS4yjAy39Q86EhxhsmQn7HX5mhJdfXwFTh69-3JNpmNEbWRenSFB1zWuwnfJM0rz-TbA72J0tKu6lsWW9YZNxpUL-Ii6_rTTe-_SVhFbwYqy88z_XnYybnaEFJhPos6wk7gHYz6zsXUYLk0V8TCb08oSstistoXz4KbR7EeePLlw.4xpT0Rk6Bc4LzZrDkq_Q1A \ No newline at end of file diff --git a/docker/keyring-test/6973fe78bba1baa7768cb366102d2fbb5c14ed2e.address b/docker/keyring-test/6973fe78bba1baa7768cb366102d2fbb5c14ed2e.address new file mode 100644 index 000000000..5f07bf4a9 --- /dev/null +++ b/docker/keyring-test/6973fe78bba1baa7768cb366102d2fbb5c14ed2e.address @@ -0,0 +1 @@ +eyJhbGciOiJQQkVTMi1IUzI1NitBMTI4S1ciLCJjcmVhdGVkIjoiMjAyMy0wOC0xOCAxNToxNDozNy45NzA5NzQ5NjUgKzAwMDAgVVRDIG09KzAuMDY4MTc4MDg1IiwiZW5jIjoiQTI1NkdDTSIsInAyYyI6ODE5MiwicDJzIjoiWGh6OTQ1UGhad29xU0JreSJ9.vzf7oKrTeEfCcw5gzPVL2D7aTLkzbZfBcB0W3haILTp5r2MBrV2-iQ.48VwpB_BvhYUFhs4.FwpwIIDrQb6capMXtJYtUNcFMX6jpGwILxweWaaIScQkRT7S3wkQnIg5lrYnh9Dnt_jbkQ3QgdqrdyA5YNMWGYQ2FQbpV1M7YVQJ9VEZXbws6R61SDQQMyfDrWUHkmdrtd-s1F9G4g5wThN3YOUOdgugMxpal5m-X2nUuRqMNzKN3leeYS_xuh5KPzOJ-ohdIISu6LfCg5Kab36ncJoYxfmurDID2B6Yy6NXAv5stQSlirzvWrk._EBC1TTZUhKwFCodNdWCkQ \ No newline at end of file diff --git a/docker/keyring-test/779d8e06d4441d25530fb5e76c928976ff369b4b.address b/docker/keyring-test/779d8e06d4441d25530fb5e76c928976ff369b4b.address new file mode 100644 index 000000000..fa91aec5e --- /dev/null +++ b/docker/keyring-test/779d8e06d4441d25530fb5e76c928976ff369b4b.address @@ -0,0 +1 @@ +eyJhbGciOiJQQkVTMi1IUzI1NitBMTI4S1ciLCJjcmVhdGVkIjoiMjAyMy0wNi0wNSAwOToxMTo1My40OTQ4NDQ4MzYgKzAwMDAgVVRDIG09KzExLjAwMzk0NDI1NSIsImVuYyI6IkEyNTZHQ00iLCJwMmMiOjgxOTIsInAycyI6Im1RNi03eFpYYlI5REIzMVIifQ.r8PyyoLcrQqziDGTX3aysFEtR57UyfzmIAMAsuGDz53SgbjMepmqaQ.OqrZtJXHdYE3xp9n.5TMEqd_eyHFTWzcJDzDgcEajOfkN2nHSzJfhDHfGHvk7uu61sZRM1iaqO4-F2nIumILtX1L8j5q31AvtegfZnYCkgqkTi7i1Has-IvFMo6bA1Jj5MYofLWLwoW1A7TCmCN4IEFnoE743Z3xfDidqsXfyp30YTUpnWTxignxUyvcRGO_DFeAFYMOq6rTJMm2nGYDLbxpOO5DtsYNrx13kIDQHw-kU4JMPpPsqFGeb4DapqMCSM2Hu1Atw.2eYgHAYuNe3KMgDWPCItkA \ No newline at end of file diff --git a/docker/keyring-test/a73f3d1c9988502b80e6082cf70a0f352baec2cd.address b/docker/keyring-test/a73f3d1c9988502b80e6082cf70a0f352baec2cd.address new file mode 100644 index 000000000..567c584ab --- /dev/null +++ b/docker/keyring-test/a73f3d1c9988502b80e6082cf70a0f352baec2cd.address @@ -0,0 +1 @@ 
+eyJhbGciOiJQQkVTMi1IUzI1NitBMTI4S1ciLCJjcmVhdGVkIjoiMjAyMy0wOC0xOCAxNToxNjo0NC44NzIxOTk2MzUgKzAwMDAgVVRDIG09KzAuMTAwMzM5NDYwIiwiZW5jIjoiQTI1NkdDTSIsInAyYyI6ODE5MiwicDJzIjoiWVp2V3E1VEp1UnlCTFVraiJ9.zPIhQ_XLX81N7xjbmnjHAhuiG9Mm6szNmJ-e_sdklGlMPCia_kyi7w.mKDtRZg7cAcqKM9A.tcHEUzMh4UvyZphj1vwnQfLVX-xCzh0NtpEzVF3qF-kduy7uPrkLAYalgoWvqRF9b8l1ku7EmVuMoPEYglWRKCauiMAoBmSvljJJ1hdA3VRkcEtauk56FiFXURgAdb7Noeq56FDJHyGH2QQfUXBaB9hCBB60sDYvb3YWONyvQR8X6vGZ119qSekP05ol9p8VwZNfNujnesv6O_ER3lhDg4ZPmyvgC7Tt2Hh-M628oITCdSCjtoo_scIGCcIVSg.XySLfGathevNdEtK1nzoeg \ No newline at end of file diff --git a/docker/keyring-test/my_celes_key.info b/docker/keyring-test/my_celes_key.info new file mode 100644 index 000000000..21e24cb7e --- /dev/null +++ b/docker/keyring-test/my_celes_key.info @@ -0,0 +1 @@ +eyJhbGciOiJQQkVTMi1IUzI1NitBMTI4S1ciLCJjcmVhdGVkIjoiMjAyMy0wOC0xOCAxNToxNjo0NC44NzA4NzQzMDIgKzAwMDAgVVRDIG09KzAuMDk5MDE0MTI2IiwiZW5jIjoiQTI1NkdDTSIsInAyYyI6ODE5MiwicDJzIjoic1l2UnRGQjVPS21mOHBtWiJ9.eJ1kkpI4FjAAJKcKiJAFHrBH1TKOaIHFiPQv_ScKJFHE7xU02LaZ1w.u5eKGvbYWkgINQkg.H4oRpMcC6qi3DimQOfVt2-8gpaN6ru-6SUSMqtOls8HhopQ0M-hpcdJbzK3OAJyLZ_z5WOLVsFXQLTCf6sdjbkAXdlGn2Hps660cxyDnuy3bcoAvYJAyagxrSSZVib0sfdrYYzVVl-VS9wrCe5hvka4LBzEXsFIEoxuQhRqC228W71hniKX1NgKXL87HURjUX42pzNk7SyrvTI30ngeuIQGQ3LecDsW2OKQ2uh3a799RIz1howGmtAIQGnV_xNI2-_RqX1UsM-KUwikNPZXBnLigjtPLLntj6eBTcxyvFznSx75OtxPhRMnNqm0ARYN3Zuqt7wrwVCkrRw_PpwGK331LuTzg02zVjTsM1YMgEn196MFWXM8z25qaPFoZFUFwmhUnCm8SZDjz8nAGybNTw0ZV3TwjyE8e8V4xrM4zNsdXqQgQCcDdPW460XNET2csW4a99VSBXYME2H8Cwv9g.4tmPQ3IF0FxwvRytzqH31g \ No newline at end of file diff --git a/docker/keyring-test/node1.info b/docker/keyring-test/node1.info new file mode 100644 index 000000000..2562e47e2 --- /dev/null +++ b/docker/keyring-test/node1.info @@ -0,0 +1 @@ +eyJhbGciOiJQQkVTMi1IUzI1NitBMTI4S1ciLCJjcmVhdGVkIjoiMjAyMy0wOC0xOCAxNToxNDozNy45Njk0MDY2MzIgKzAwMDAgVVRDIG09KzAuMDY2NjA5NzUxIiwiZW5jIjoiQTI1NkdDTSIsInAyYyI6ODE5MiwicDJzIjoiVk1leDU4TEhGN3dDbVJMQyJ9.Kz4IkUEWfsCYc6C27WYHt-jCbiLad1gbPUQIihV8s5Gyl_WLFeWBug.swUUp4n6eURByky1.E3zebuaNE-1M6SNPe0ZDz_zaKpIlMIfU2OtAczY7HTrOh6G4D5cJmS6679ZnhctpmRKKgShFz_3ZxeCgrADEM_qgWw8uhnEsVCQDglw1W2s5NdF8-5E0F8I2Cp22SR9jpJ2Ef3XTt4jHZihW_cX8E3Ra4oSHov4De6v9GD-ThbgTuTSmpSHzeFHI7YKyKzzyqSXuxRyh5TeHg1vcuXjce3wjlwftg6w8E0l9HRSBiIGp6sE4Zi0uKOirkRP2E1_IREIab455zNp7ME0zn1JkzsRP-Y3Wpyyezoj5ua_mGcFKIHy6r9NYNfwjX4Bhd4vXZZB21sZQny1TllS8qAaxCj2pkGFts7aA1lIwzZNAQBdbfvWCFlFygisKkoz6DxG_shpO_oY6YpvMebKQvZsBKitcllz3nsQ7gleTjCNmyZfyP1bnnrTwPLKt8q0.IeMg-RFNJ13NHZIJSMuwxw \ No newline at end of file diff --git a/docker/keyring-test/node2.info b/docker/keyring-test/node2.info new file mode 100644 index 000000000..a92b571a3 --- /dev/null +++ b/docker/keyring-test/node2.info @@ -0,0 +1 @@ +eyJhbGciOiJQQkVTMi1IUzI1NitBMTI4S1ciLCJjcmVhdGVkIjoiMjAyMy0wOC0xOCAxNToxNDozOC4wNDQ2ODM3MTUgKzAwMDAgVVRDIG09KzAuMDY3NzI4NTg0IiwiZW5jIjoiQTI1NkdDTSIsInAyYyI6ODE5MiwicDJzIjoiS1JvakRIUW5YRmdfM19pQyJ9.dj-sZHrMz0a5H1mf8j_VDV1GRQXjOa_HTKlGnkuKrDB4SmS16DSxSw.ODdL_jac1zwFhV1G.fzzpZhyeDEJno_iupe5xOux8thDW_t_4VOKynsEMpgf6GZnQLFaykRtpoHyLv8wwBY4GBVYJKQa7JSm9BTihEMwjUcVhZEh0seRv9hhK6lKJJQ5VV5n4NvzXdgt_scou9hn99z34wZRlEGT0bM-vK577rhX-orr6sGZpFTB-uy2KBSIN60zDOJHBzSnMRj9Euyh_5cVzAqt-PL_OBSNlThZwypTOHqB_4tKcgYTRTVBgYc0XjeAGqmEeqQ6I4dhqyewGjKcsFxaQe7kz1UgQyK5t-b69JmY49wRKB_xYL7Ld8A5fRFRVqE_wKwZ9xwabLzqKaxobm8LZGum7VVdMoIZyJ7DnNVLVrNbwfS5Pt0Fy7d_rKEHH0pbjCz1TeY-BGP1OW1JwIYHh2iR8qr4kQTn0RYnoQSrM_sxNLZG6P8UnTK3LBZKwu0nD6ZY.ZRNFW5_65Qv8L-_dGHllmA \ No newline at end of file diff --git a/docker/keyring-test/validator.info 
b/docker/keyring-test/validator.info new file mode 100644 index 000000000..8490e5dbc --- /dev/null +++ b/docker/keyring-test/validator.info @@ -0,0 +1 @@ +eyJhbGciOiJQQkVTMi1IUzI1NitBMTI4S1ciLCJjcmVhdGVkIjoiMjAyMy0wNi0wNSAwOToxMTo1My40NTIyNjg3NTIgKzAwMDAgVVRDIG09KzEwLjk2MTM2ODUwNSIsImVuYyI6IkEyNTZHQ00iLCJwMmMiOjgxOTIsInAycyI6InpjZDlSZ1NvNHFWZWNoa0MifQ.qGafV2C7i9Zqa3TW_ti874R4FjevWRxqCspsRcOPyM3ZrUwOuSwX2w.6vFFL7LFJgP2LJIC.K9rAuw1qq9wU91AipIb_11LZr7RpQX2s3JJspKMC3hi5ou9aZqCdY2z9AOcSiA7Ynnc-bnoG1dBuysUILGZXas9OGiq7KUgAEEAGENeA3jF3kIQ7VCmrL-OgmsgBj17WT2f9Fr4WskX_5HoQQxxuCmHk70eGdBiZSFYAnII5XMG9skeGyn5I4cwV1NCoU3ZRN5EufuLFGsS4wSk2SgHp2nnegwG-EIDo1W5Ci-GFE2hoNW2vxGVyoc9OCPQllU1f_2WUe7MhW74TfHqqbE_xvvoIdl1vWZYEONmku46O2iu00GuWWLleJGy9XaBrvkOZ-gqB2fXWZAX7fqXwMiKiFidjI5RINeUGBaQjyi0LoGDHQsGXsIZ0xRF1G1Z-NrqwcSTZX63naRMpLq13K0NHA9DRYeFrP_Vi2qO47fNU9Vv-Cmpsr8FmET7FQEN8cq6XG07ZbtNYLlA.m4DGni1Ma4vlfuwXUvEnUw \ No newline at end of file diff --git a/docker/template.toml b/docker/template.toml new file mode 100644 index 000000000..57280b6a0 --- /dev/null +++ b/docker/template.toml @@ -0,0 +1,15 @@ +[da] +celestia_rpc_auth_token = "" +celestia_rpc_address = "http://127.0.0.1:26658" +max_celestia_response_body_size = 104_857_600 +celestia_rpc_timeout_seconds = 60 + +[storage] +path = "demo_data" + +[runner] +start_height = 1 + +[runner.rpc_config] +bind_host = "127.0.0.1" +bind_port = 12345 diff --git a/examples/const-rollup-config/src/lib.rs b/examples/const-rollup-config/src/lib.rs index 9644224c8..88febd471 100644 --- a/examples/const-rollup-config/src/lib.rs +++ b/examples/const-rollup-config/src/lib.rs @@ -1,3 +1,10 @@ -// The rollup stores its data in the namespace b"sov-test" on Celestia. +/// The namespace used by the rollup to store its data. This is a raw slice of 8 bytes. +/// The rollup stores its data in the namespace b"sov-test" on Celestia. Which in this case is encoded using the +/// ascii representation of each character. pub const ROLLUP_NAMESPACE_RAW: [u8; 8] = [115, 111, 118, 45, 116, 101, 115, 116]; -pub const SEQUENCER_DA_ADDRESS: [u8; 47] = *b"celestia1w7wcupk5gswj25c0khnkey5fwmlndx6t5aarmk"; + +/// The DA address of the sequencer (for now we use a centralized sequencer) in the tests. +/// Here this is the address of the sequencer on the celestia blockchain. +pub const SEQUENCER_DA_ADDRESS: &str = "celestia1w7wcupk5gswj25c0khnkey5fwmlndx6t5aarmk"; + +pub const SEQUENCER_AVAIL_DA_ADDRESS: &str = "b4dc7fc57630d2a7be7f358cbefc1e52bd6d0f250d19647cf264ecf2d8764d7b"; diff --git a/examples/demo-nft-module/Cargo.toml b/examples/demo-nft-module/Cargo.toml index 077ed8298..251b8a697 100644 --- a/examples/demo-nft-module/Cargo.toml +++ b/examples/demo-nft-module/Cargo.toml @@ -14,15 +14,20 @@ anyhow = { workspace = true } borsh = { workspace = true, features = ["rc"] } serde = { workspace = true, optional = true } -sov-modules-api = { path = "../../module-system/sov-modules-api", default-features = false, features = ["macros"] } -sov-state = { path = "../../module-system/sov-state", default-features = false } +sov-modules-api = { path = "../../module-system/sov-modules-api" } +sov-state = { path = "../../module-system/sov-state" } + +jsonrpsee = { workspace = true, features = ["macros", "client-core", "server"], optional = true } [dev-dependencies] sov-rollup-interface = { path = "../../rollup-interface" } +sov-data-generators = { path = "../../module-system/utils/sov-data-generators" } tempfile = { workspace = true } +demo-nft-module = { version = "*", features = ["native"], path = "." 
} [features] -default = ["native"] +default = [] serde = ["dep:serde"] -native = ["serde", "sov-state/native", "sov-modules-api/native"] +native = ["serde", "sov-state/native", "sov-modules-api/native", "jsonrpsee"] +test = ["native"] diff --git a/examples/demo-nft-module/README.md b/examples/demo-nft-module/README.md index 32503522f..d9dc312fc 100644 --- a/examples/demo-nft-module/README.md +++ b/examples/demo-nft-module/README.md @@ -24,7 +24,7 @@ simplicity, each token represents only an ID and won't hold any metadata. The Sovereign SDK provides a [module-template](../../module-system/module-implementations/module-template/README.md), which is boilerplate that can be customized to easily build modules. -``` +```ignore ├── Cargo.toml ├── README.md @@ -38,7 +38,7 @@ which is boilerplate that can be customized to easily build modules. Here are defining basic dependencies in `Cargo.toml` that module needs to get started: -```toml +```toml, ignore [dependencies] anyhow = { anyhow = "1.0.62" } sov-modules-api = { git = "https://github.com/Sovereign-Labs/sovereign-sdk.git", branch = "stable", features = ["macros"] } @@ -54,22 +54,20 @@ has private state, which it updates in response to input messages. NFT module is defined as the following: ```rust -use sov_modules_api::{Context, ModuleInfo}; - -#[derive(ModuleInfo, Clone)] -pub struct NonFungibleToken { +#[derive(sov_modules_api::ModuleInfo, Clone)] +pub struct NonFungibleToken { #[address] - pub address: C::Address, + address: C::Address, #[state] - pub(crate) admin: sov_state::StateValue, + admin: sov_state::StateValue, #[state] - pub(crate) owners: sov_state::StateMap, + owners: sov_state::StateMap, // If the module needs to refer to another module // #[module] - // pub(crate) bank: sov_bank::Bank, + // bank: sov_bank::Bank, } ``` @@ -112,7 +110,7 @@ Before we start implementing the `Module` trait, there are several preparatory s 1. Define `native` feature in `Cargo.toml` and add additional dependencies: - ```toml + ```toml, ignore [dependencies] anyhow = "1.0.62" borsh = { version = "0.10.3", features = ["bytes"] } @@ -137,11 +135,9 @@ Before we start implementing the `Module` trait, there are several preparatory s ```rust // in call.rs - use sov_modules_api::Context; - #[cfg_attr(feature = "native", derive(serde::Serialize), derive(serde::Deserialize))] #[derive(borsh::BorshDeserialize, borsh::BorshSerialize, Debug, PartialEq, Clone)] - pub enum CallMessage { + pub enum CallMessage { Mint { /// The id of new token. 
Caller is an owner id: u64 @@ -168,7 +164,7 @@ Before we start implementing the `Module` trait, there are several preparatory s ```rust // in lib.rs - pub struct NonFungibleTokenConfig { + pub struct NonFungibleTokenConfig { pub admin: C::Address, pub owners: Vec<(u64, C::Address)>, } @@ -178,19 +174,17 @@ Before we start implementing the `Module` trait, there are several preparatory s Plugging together all types and features, we get this `Module` trait implementation in `lib.rs`: -```rust -impl Module for NonFungibleToken { +```rust, ignore +impl Module for NonFungibleToken { type Context = C; - type Config = NonFungibleTokenConfig; - - type CallMessage = call::CallMessage; + type CallMessage = CallMessage; fn genesis( &self, _config: &Self::Config, _working_set: &mut WorkingSet, - ) -> Result<(), Error> { + ) -> anyhow::Result<(), Error> { Ok(()) } @@ -199,8 +193,8 @@ impl Module for NonFungibleToken { _msg: Self::CallMessage, _context: &Self::Context, _working_set: &mut WorkingSet, - ) -> Result { - Ok(CallResponse::default()) + ) -> anyhow::Result { + Ok(sov_modules_api::CallResponse::default()) } } ``` @@ -214,11 +208,15 @@ which takes a config argument specifying the initial state to configure. Since it modifies state, `genesis` also takes a working set as an argument. `Genesis` is called only once, during the rollup deployment. -```rust +```rust, ignore +use sov_state::WorkingSet; // in lib.rs -impl Module for NonFungibleToken { - // ... +impl sov_modules_api::Module for NonFungibleToken { + type Context = C; + type Config = NonFungibleTokenConfig; + type CallMessage = CallMessage; + fn genesis( &self, config: &Self::Config, @@ -229,16 +227,16 @@ impl Module for NonFungibleToken { } // in genesis.rs -impl NonFungibleToken { +impl NonFungibleToken { pub(crate) fn init_module( &self, config: &::Config, working_set: &mut WorkingSet, - ) -> Result<()> { + ) -> anyhow::Result<()> { self.admin.set(&config.admin, working_set); for (id, owner) in config.owners.iter() { if self.owners.get(id, working_set).is_some() { - bail!("Token id {} already exists", id); + anyhow::bail!("Token id {} already exists", id); } self.owners.set(id, owner, working_set); } @@ -252,15 +250,16 @@ impl NonFungibleToken { First, we need to implement actual logic of handling different cases. 
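(A hedged aside before the call handlers, not part of the patch: the duplicate-id check in `init_module` above can be exercised directly. The sketch below reuses the test setup shown further down — `DefaultContext`, `ProverStorage`, `tempfile` — and assumes the `native` feature is enabled, that `NonFungibleTokenConfig` takes the context as a type parameter, and that `generate_address` is generic over the context as suggested by the `gen_addr_generic` alias in `nft_test.rs`.)

```rust
use demo_nft_module::{NonFungibleToken, NonFungibleTokenConfig};
use sov_modules_api::default_context::DefaultContext;
use sov_modules_api::utils::generate_address as gen_addr_generic;
use sov_modules_api::Module;
use sov_state::{ProverStorage, WorkingSet};

type C = DefaultContext;

#[test]
fn genesis_rejects_duplicate_token_ids() {
    let admin = gen_addr_generic::<C>("admin");
    let owner = gen_addr_generic::<C>("owner");

    // Token id 0 appears twice, so the second loop iteration in `init_module`
    // finds the id already present and bails out.
    let config: NonFungibleTokenConfig<C> = NonFungibleTokenConfig {
        admin,
        owners: vec![(0, owner), (0, admin)],
    };

    let tmpdir = tempfile::tempdir().unwrap();
    let mut working_set = WorkingSet::new(ProverStorage::with_path(tmpdir.path()).unwrap());
    let nft = NonFungibleToken::<C>::default();

    assert!(nft.genesis(&config, &mut working_set).is_err());
}
```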
Let's add `mint`, `transfer` and `burn` methods: -```rust +```rust, ignore +use sov_state::WorkingSet; -impl NonFungibleToken { +impl NonFungibleToken { pub(crate) fn mint( &self, id: u64, context: &C, working_set: &mut WorkingSet, - ) -> Result { + ) -> anyhow::Result { if self.owners.get(&id, working_set).is_some() { bail!("Token with id {} already exists", id); } @@ -268,7 +267,7 @@ impl NonFungibleToken { self.owners.set(&id, context.sender(), working_set); working_set.add_event("NFT mint", &format!("A token with id {id} was minted")); - Ok(CallResponse::default()) + Ok(sov_modules_api::CallResponse::default()) } pub(crate) fn transfer( @@ -277,22 +276,22 @@ impl NonFungibleToken { to: C::Address, context: &C, working_set: &mut WorkingSet, - ) -> Result { + ) -> anyhow::Result { let token_owner = match self.owners.get(&id, working_set) { None => { - bail!("Token with id {} does not exist", id); + anyhow::bail!("Token with id {} does not exist", id); } Some(owner) => owner, }; if &token_owner != context.sender() { - bail!("Only token owner can transfer token"); + anyhow::bail!("Only token owner can transfer token"); } self.owners.set(&id, &to, working_set); working_set.add_event( "NFT transfer", &format!("A token with id {id} was transferred"), ); - Ok(CallResponse::default()) + Ok(sov_modules_api::CallResponse::default()) } pub(crate) fn burn( @@ -300,40 +299,41 @@ impl NonFungibleToken { id: u64, context: &C, working_set: &mut WorkingSet, - ) -> Result { + ) -> anyhow::Result { let token_owner = match self.owners.get(&id, working_set) { None => { - bail!("Token with id {} does not exist", id); + anyhow::bail!("Token with id {} does not exist", id); } Some(owner) => owner, }; if &token_owner != context.sender() { - bail!("Only token owner can burn token"); + anyhow::bail!("Only token owner can burn token"); } self.owners.remove(&id, working_set); working_set.add_event("NFT burn", &format!("A token with id {id} was burned")); - Ok(CallResponse::default()) + Ok(sov_modules_api::CallResponse::default()) } } ``` And then make them accessible to users via the `call` function: -```rust -impl Module for NonFungibleToken { - // ... +```rust, ignore +impl sov_modules_api::Module for NonFungibleToken { + type Context = C; + type Config = NonFungibleTokenConfig; fn call( &self, msg: Self::CallMessage, context: &Self::Context, working_set: &mut WorkingSet, - ) -> Result { + ) -> Result { let call_result = match msg { - call::CallMessage::Mint { id } => self.mint(id, context, working_set), - call::CallMessage::Transfer { to, id } => self.transfer(id, to, context, working_set), - call::CallMessage::Burn { id } => self.burn(id, context, working_set), + CallMessage::Mint { id } => self.mint(id, context, working_set), + CallMessage::Transfer { to, id } => self.transfer(id, to, context, working_set), + CallMessage::Burn { id } => self.burn(id, context, working_set), }; Ok(call_result?) } @@ -345,16 +345,31 @@ impl Module for NonFungibleToken { We also want other modules to be able to query the owner of a token, so we add a public method for that. This method is only available to other modules: it is not currently exposed via RPC. 
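The hunk below turns `get_owner` into a JSON-RPC endpoint via `#[rpc_gen(client, server, namespace = "nft")]` and `#[rpc_method(name = "getOwner")]`. Assuming the macro follows the usual jsonrpsee `namespace_method` naming and that the node serves the `bind_host`/`bind_port` from `docker/template.toml` above, a client call could look roughly like this (the endpoint, the `tokio`/`anyhow` scaffolding, and the untyped response are assumptions, not part of this patch):

```rust
use jsonrpsee::core::client::ClientT;
use jsonrpsee::http_client::HttpClientBuilder;
use jsonrpsee::rpc_params;

#[tokio::main]
async fn main() -> anyhow::Result<()> {
    // RPC endpoint assumed from docker/template.toml (bind_host = "127.0.0.1", bind_port = 12345).
    let client = HttpClientBuilder::default().build("http://127.0.0.1:12345")?;

    // `namespace = "nft"` plus `name = "getOwner"` should surface as the JSON-RPC
    // method "nft_getOwner", taking the token id as its only parameter.
    let response: serde_json::Value = client.request("nft_getOwner", rpc_params![1u64]).await?;
    println!("owner of token 1: {response}");
    Ok(())
}
```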
-```rust -impl NonFungibleToken { +```rust, ignore +use jsonrpsee::core::RpcResult; +use sov_modules_api::macros::rpc_gen; +use sov_modules_api::Context; +use sov_state::WorkingSet; + + +#[derive(Clone, Debug, Eq, PartialEq, serde::Deserialize, serde::Serialize)] +/// Response for `getOwner` method +pub struct OwnerResponse { + /// Optional owner address + pub owner: Option, +} + +#[rpc_gen(client, server, namespace = "nft")] +impl NonFungibleToken { + #[rpc_method(name = "getOwner")] pub fn get_owner( &self, token_id: u64, working_set: &mut WorkingSet, - ) -> OwnerResponse { - OwnerResponse { + ) -> RpcResult> { + Ok(OwnerResponse { owner: self.owners.get(&token_id, working_set), - } + }) } } ``` @@ -366,7 +381,7 @@ that all public APIs function as intended. Temporary storage is needed for testing, so we enable the `temp` feature of `sov-state` as a `dev-dependency` -```toml +```toml, ignore [dev-dependencies] sov-state = { git = "https://github.com/Sovereign-Labs/sovereign-sdk.git", branch = "stable", features = ["temp"] } ``` @@ -374,12 +389,10 @@ sov-state = { git = "https://github.com/Sovereign-Labs/sovereign-sdk.git", branc Here is some boilerplate for NFT module integration tests: ```rust -use demo_nft_module::CallMessage; -use demo_nft_module::OwnerResponse; -use demo_nft_module::{NonFungibleToken, NonFungibleTokenConfig}; -use serde::de::DeserializeOwned; +use demo_nft_module::{CallMessage, NonFungibleToken, NonFungibleTokenConfig, OwnerResponse}; use sov_modules_api::default_context::DefaultContext; -use sov_modules_api::{Address, Context, Hasher, Module, ModuleInfo, Spec, test_utils::generate_address}; +use sov_modules_api::{Address, Context, Module}; +use sov_rollup_interface::stf::Event; use sov_state::{DefaultStorageSpec, ProverStorage, WorkingSet}; pub type C = DefaultContext; @@ -427,7 +440,7 @@ fn transfer() { let transfer_attempt = nft.call(transfer_message.clone(), &admin_context, &mut working_set); assert!(transfer_attempt.is_err()); - /// ... rest of the tests + // ... rest of the tests } ``` @@ -435,10 +448,12 @@ fn transfer() { Now this module can be added to rollup's `Runtime`: -```rust +```rust, ignore +use sov_modules_api::{DispatchCall, Genesis, MessageCodec}; + #[derive(Genesis, DispatchCall, MessageCodec)] #[serialization(borsh::BorshDeserialize, borsh::BorshSerialize)] -pub struct Runtime { +pub struct Runtime { #[allow(unused)] sequencer: sov_sequencer_registry::Sequencer, @@ -446,25 +461,6 @@ pub struct Runtime { bank: sov_bank::Bank, #[allow(unused)] - nft: nft::NonFungibleToken, + nft: demo_nft_module::NonFungibleToken, } -``` - -And then this `Runtime` can be used in the State Transition Function runner to execute transactions. -Here's an example of how to do it with `AppTemplate` from `sov-default-stf`: - -```rust - fn new(runtime_config: Self::RuntimeConfig) -> Self { - let runtime = Runtime::new(); - let storage = ZkStorage::with_config(runtime_config).expect("Failed to open zk storage"); - let app: AppTemplate< - ZkDefaultContext, - Runtime, - Vm, - > = AppTemplate::new(storage, runtime); - Self(app) -} -``` - -The `AppTemplate` uses `runtime` to dispatch calls during execution of the `apply_batch` method. -Detailed instructions on how to set up a rollup can be found in the [`demo-rollup` documentation](../demo-rollup/README.md). 
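As a compact recap (a sketch only, condensed from the tests above; it assumes the `native` feature, the `DefaultContext` alias `C`, and the same `tempfile`/`ProverStorage` setup), genesis, minting through `call`, and querying through `get_owner` compose like this:

```rust
use demo_nft_module::{CallMessage, NonFungibleToken, NonFungibleTokenConfig};
use sov_modules_api::default_context::DefaultContext;
use sov_modules_api::utils::generate_address as gen_addr_generic;
use sov_modules_api::{Context, Module};
use sov_state::{ProverStorage, WorkingSet};

type C = DefaultContext;

#[test]
fn mint_then_query() {
    let admin = gen_addr_generic::<C>("admin");
    let minter = gen_addr_generic::<C>("minter");

    let tmpdir = tempfile::tempdir().unwrap();
    let mut working_set = WorkingSet::new(ProverStorage::with_path(tmpdir.path()).unwrap());

    // Start from an empty collection: an admin but no pre-minted tokens.
    let nft = NonFungibleToken::<C>::default();
    let config: NonFungibleTokenConfig<C> = NonFungibleTokenConfig { admin, owners: vec![] };
    nft.genesis(&config, &mut working_set).unwrap();

    // Anybody can mint; the transaction sender becomes the owner.
    let ctx = C::new(minter);
    nft.call(CallMessage::Mint { id: 1 }, &ctx, &mut working_set).expect("minting failed");

    // The query now returns an `OwnerResponse` wrapped in an `RpcResult`.
    let response = nft.get_owner(1, &mut working_set).unwrap();
    assert_eq!(response.owner, Some(minter));
}
```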
+``` \ No newline at end of file diff --git a/examples/demo-nft-module/src/call.rs b/examples/demo-nft-module/src/call.rs index 7e9df6378..bb8fb8e81 100644 --- a/examples/demo-nft-module/src/call.rs +++ b/examples/demo-nft-module/src/call.rs @@ -10,18 +10,23 @@ use crate::NonFungibleToken; derive(serde::Deserialize) )] #[derive(borsh::BorshDeserialize, borsh::BorshSerialize, Debug, PartialEq, Clone)] +/// A transaction handled by the NFT module. Mints, Transfers, or Burns an NFT by id pub enum CallMessage { + /// Mint a new token Mint { /// The id of new token. Caller is an owner id: u64, }, + /// Transfer existing token to the new owner Transfer { /// The address to which the token will be transferred. to: C::Address, /// The token id to transfer id: u64, }, + /// Burn existing token Burn { + /// The token id to burn id: u64, }, } diff --git a/examples/demo-nft-module/src/lib.rs b/examples/demo-nft-module/src/lib.rs index 9b1f680ca..69ab3c5d4 100644 --- a/examples/demo-nft-module/src/lib.rs +++ b/examples/demo-nft-module/src/lib.rs @@ -1,24 +1,39 @@ -pub mod call; -pub mod genesis; -#[cfg(feature = "native")] -pub mod query; +#![deny(missing_docs)] +#![doc = include_str!("../README.md")] +mod call; +pub use call::CallMessage; +mod genesis; +#[cfg(feature = "native")] +mod query; +#[cfg(feature = "native")] +pub use query::{NonFungibleTokenRpcImpl, NonFungibleTokenRpcServer, OwnerResponse}; use sov_modules_api::{CallResponse, Context, Error, Module, ModuleInfo}; use sov_state::WorkingSet; #[derive(ModuleInfo, Clone)] +/// Module for non-fungible tokens (NFT). +/// Each token is represented by a unique ID. pub struct NonFungibleToken { #[address] - pub address: C::Address, + /// The address of the NonFungibleToken module. + address: C::Address, #[state] - pub(crate) admin: sov_state::StateValue, + /// Admin of the NonFungibleToken module. + admin: sov_state::StateValue, #[state] - pub(crate) owners: sov_state::StateMap, + /// Mapping of tokens to their owners + owners: sov_state::StateMap, } + +/// Config for the NonFungibleToken module. +/// Sets admin and existing owners. pub struct NonFungibleTokenConfig { + /// Admin of the NonFungibleToken module. pub admin: C::Address, + /// Existing owners of the NonFungibleToken module. pub owners: Vec<(u64, C::Address)>, } @@ -27,7 +42,7 @@ impl Module for NonFungibleToken { type Config = NonFungibleTokenConfig; - type CallMessage = call::CallMessage; + type CallMessage = CallMessage; fn genesis( &self, @@ -44,9 +59,9 @@ impl Module for NonFungibleToken { working_set: &mut WorkingSet, ) -> Result { let call_result = match msg { - call::CallMessage::Mint { id } => self.mint(id, context, working_set), - call::CallMessage::Transfer { to, id } => self.transfer(id, to, context, working_set), - call::CallMessage::Burn { id } => self.burn(id, context, working_set), + CallMessage::Mint { id } => self.mint(id, context, working_set), + CallMessage::Transfer { to, id } => self.transfer(id, to, context, working_set), + CallMessage::Burn { id } => self.burn(id, context, working_set), }; Ok(call_result?) 
} diff --git a/examples/demo-nft-module/src/query.rs b/examples/demo-nft-module/src/query.rs index 3f002dfc2..aaeb7806f 100644 --- a/examples/demo-nft-module/src/query.rs +++ b/examples/demo-nft-module/src/query.rs @@ -1,21 +1,28 @@ +use jsonrpsee::core::RpcResult; +use sov_modules_api::macros::rpc_gen; use sov_modules_api::Context; use sov_state::WorkingSet; use crate::NonFungibleToken; -#[derive(Debug, Eq, PartialEq, serde::Deserialize, serde::Serialize)] +#[derive(Clone, Debug, Eq, PartialEq, serde::Deserialize, serde::Serialize)] +/// Response for `getOwner` method pub struct OwnerResponse { + /// Optional owner address pub owner: Option, } +#[rpc_gen(client, server, namespace = "nft")] impl NonFungibleToken { + #[rpc_method(name = "getOwner")] + /// Get the owner of a token pub fn get_owner( &self, token_id: u64, working_set: &mut WorkingSet, - ) -> OwnerResponse { - OwnerResponse { + ) -> RpcResult> { + Ok(OwnerResponse { owner: self.owners.get(&token_id, working_set), - } + }) } } diff --git a/examples/demo-nft-module/tests/nft_test.rs b/examples/demo-nft-module/tests/nft_test.rs index d904e23f5..41e0ee68e 100644 --- a/examples/demo-nft-module/tests/nft_test.rs +++ b/examples/demo-nft-module/tests/nft_test.rs @@ -1,8 +1,6 @@ -use demo_nft_module::call::CallMessage; -use demo_nft_module::query::OwnerResponse; -use demo_nft_module::{NonFungibleToken, NonFungibleTokenConfig}; +use demo_nft_module::{CallMessage, NonFungibleToken, NonFungibleTokenConfig, OwnerResponse}; use sov_modules_api::default_context::DefaultContext; -use sov_modules_api::test_utils::generate_address as gen_addr_generic; +use sov_modules_api::utils::generate_address as gen_addr_generic; use sov_modules_api::{Address, Context, Module}; use sov_rollup_interface::stf::Event; use sov_state::{DefaultStorageSpec, ProverStorage, WorkingSet}; @@ -21,7 +19,7 @@ fn genesis_and_mint() { let owner2 = generate_address("owner2"); let config: NonFungibleTokenConfig = NonFungibleTokenConfig { admin, - owners: vec![(0, owner1.clone())], + owners: vec![(0, owner1)], }; let tmpdir = tempfile::tempdir().unwrap(); @@ -32,15 +30,15 @@ fn genesis_and_mint() { let genesis_result = nft.genesis(&config, &mut working_set); assert!(genesis_result.is_ok()); - let query1: OwnerResponse = nft.get_owner(0, &mut working_set); + let query1: OwnerResponse = nft.get_owner(0, &mut working_set).unwrap(); assert_eq!(query1.owner, Some(owner1)); - let query2: OwnerResponse = nft.get_owner(1, &mut working_set); + let query2: OwnerResponse = nft.get_owner(1, &mut working_set).unwrap(); assert!(query2.owner.is_none()); // Mint, anybody can mint let mint_message = CallMessage::Mint { id: 1 }; - let owner2_context = C::new(owner2.clone()); + let owner2_context = C::new(owner2); nft.call(mint_message.clone(), &owner2_context, &mut working_set) .expect("Minting failed"); @@ -48,7 +46,7 @@ fn genesis_and_mint() { working_set.events()[0], Event::new("NFT mint", "A token with id 1 was minted") ); - let query3: OwnerResponse = nft.get_owner(1, &mut working_set); + let query3: OwnerResponse = nft.get_owner(1, &mut working_set).unwrap(); assert_eq!(query3.owner, Some(owner2)); // Try to mint again same token, should fail @@ -63,23 +61,20 @@ fn genesis_and_mint() { fn transfer() { // Preparation let admin = generate_address("admin"); - let admin_context = C::new(admin.clone()); + let admin_context = C::new(admin); let owner1 = generate_address("owner2"); - let owner1_context = C::new(owner1.clone()); + let owner1_context = C::new(owner1); let owner2 = 
generate_address("owner2"); let config: NonFungibleTokenConfig = NonFungibleTokenConfig { - admin: admin.clone(), - owners: vec![(0, admin.clone()), (1, owner1.clone()), (2, owner2.clone())], + admin, + owners: vec![(0, admin), (1, owner1), (2, owner2)], }; let tmpdir = tempfile::tempdir().unwrap(); let mut working_set = WorkingSet::new(ProverStorage::with_path(tmpdir.path()).unwrap()); let nft = NonFungibleToken::default(); nft.genesis(&config, &mut working_set).unwrap(); - let transfer_message = CallMessage::Transfer { - id: 1, - to: owner2.clone(), - }; + let transfer_message = CallMessage::Transfer { id: 1, to: owner2 }; // admin cannot transfer token of the owner1 let transfer_attempt = nft.call(transfer_message.clone(), &admin_context, &mut working_set); @@ -90,7 +85,7 @@ fn transfer() { let query_token_owner = |token_id: u64, working_set: &mut WorkingSet| -> Option
{ - let query: OwnerResponse = nft.get_owner(token_id, working_set); + let query: OwnerResponse = nft.get_owner(token_id, working_set).unwrap(); query.owner }; @@ -122,9 +117,9 @@ fn transfer() { fn burn() { // Preparation let admin = generate_address("admin"); - let admin_context = C::new(admin.clone()); + let admin_context = C::new(admin); let owner1 = generate_address("owner2"); - let owner1_context = C::new(owner1.clone()); + let owner1_context = C::new(owner1); let config: NonFungibleTokenConfig = NonFungibleTokenConfig { admin, owners: vec![(0, owner1)], @@ -153,7 +148,7 @@ fn burn() { working_set.events()[0], Event::new("NFT burn", "A token with id 0 was burned") ); - let query: OwnerResponse = nft.get_owner(0, &mut working_set); + let query: OwnerResponse = nft.get_owner(0, &mut working_set).unwrap(); assert!(query.owner.is_none()); diff --git a/examples/demo-prover-avail/Cargo.lock b/examples/demo-prover-avail/Cargo.lock index a0a2f4c36..792283895 100644 --- a/examples/demo-prover-avail/Cargo.lock +++ b/examples/demo-prover-avail/Cargo.lock @@ -32,15 +32,6 @@ dependencies = [ "gimli 0.26.2", ] -[[package]] -name = "addr2line" -version = "0.19.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a76fd60b23679b7d19bd066031410fb7e458ccc5e958eb5c325888ce4baedc97" -dependencies = [ - "gimli 0.27.3", -] - [[package]] name = "addr2line" version = "0.20.0" @@ -173,9 +164,9 @@ dependencies = [ [[package]] name = "anstyle-wincon" -version = "1.0.1" +version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "180abfa45703aebe0093f79badacc01b8fd4ea2e35118747e5811127f926e188" +checksum = "c677ab05e09154296dd37acecd46420c17b9713e8366facafa8fc0885167cf4c" dependencies = [ "anstyle", "windows-sys 0.48.0", @@ -187,6 +178,12 @@ version = "1.0.71" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9c7d0618f0e0b7e8ff11427422b64564d5fb0be1940354bfe2e0529b18a9d9b8" +[[package]] +name = "arc-swap" +version = "1.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bddcadddf5e9015d310179a59bb28c4d4b9920ad0f11e8e14dbadf654890c9a6" + [[package]] name = "array-bytes" version = "4.2.0" @@ -274,7 +271,7 @@ dependencies = [ "schnorrkel", "serde", "serde-hex", - "sp-core 16.0.0", + "sp-core", "structopt", "subxt", "tokio", @@ -319,6 +316,16 @@ version = "1.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8c3c1a368f70d6cf7302d78f8f7093da241fb8e8807c05cc9e51a125895a6d5b" +[[package]] +name = "bcs" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4bd3ffe8b19a604421a5d461d4a70346223e535903fbc3067138bddbebddcf77" +dependencies = [ + "serde", + "thiserror", +] + [[package]] name = "bech32" version = "0.9.1" @@ -349,7 +356,7 @@ version = "0.65.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cfdf7b466f9a4903edc73f95d6d2bcd5baf8ae620638762244d3f60143643cc5" dependencies = [ - "bitflags", + "bitflags 1.3.2", "cexpr", "clang-sys", "lazy_static", @@ -370,6 +377,12 @@ version = "1.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" +[[package]] +name = "bitflags" +version = "2.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b4682ae6287fcf752ecaabbfcc7b6f9b72aa33933dc23a554d853aea8eea8635" + [[package]] name = "bitvec" version = "1.0.1" @@ -510,12 +523,6 @@ 
dependencies = [ "serde", ] -[[package]] -name = "bs58" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "771fe0050b883fcc3ea2359b1a96bcfbc090b7116eae7c3c512c7a083fdf23d3" - [[package]] name = "bstr" version = "1.5.0" @@ -642,6 +649,33 @@ dependencies = [ "libc", ] +[[package]] +name = "celestia" +version = "0.1.0" +dependencies = [ + "anyhow", + "async-trait", + "base64 0.21.2", + "bech32", + "borsh", + "hex", + "hex-literal", + "jsonrpsee 0.18.2", + "nmt-rs", + "prost", + "prost-build", + "prost-types", + "serde", + "serde_json", + "sha2 0.10.6", + "sov-rollup-interface", + "tendermint", + "tendermint-proto", + "thiserror", + "tokio", + "tracing", +] + [[package]] name = "cexpr" version = "0.6.0" @@ -665,7 +699,10 @@ checksum = "ec837a71355b28f6556dbd569b37b3f363091c0bd4b2e735674521b4c5fd9bc5" dependencies = [ "android-tzdata", "iana-time-zone", + "js-sys", "num-traits", + "time 0.1.45", + "wasm-bindgen", "winapi", ] @@ -698,7 +735,7 @@ checksum = "a0610544180c38b88101fecf2dd634b174a62eef6946f84dfc6a7127512b381c" dependencies = [ "ansi_term", "atty", - "bitflags", + "bitflags 1.3.2", "strsim 0.8.0", "textwrap", "unicode-width", @@ -707,9 +744,9 @@ dependencies = [ [[package]] name = "clap" -version = "4.3.0" +version = "4.3.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "93aae7a4192245f70fe75dd9157fc7b4a5bf53e88d30bd4396f7d8f9284d5acc" +checksum = "5fd304a20bff958a57f04c4e96a2e7594cc4490a0e809cbd48bb6437edaa452d" dependencies = [ "clap_builder", "clap_derive", @@ -718,22 +755,21 @@ dependencies = [ [[package]] name = "clap_builder" -version = "4.3.0" +version = "4.3.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4f423e341edefb78c9caba2d9c7f7687d0e72e89df3ce3394554754393ac3990" +checksum = "01c6a3f08f1fe5662a35cfe393aec09c4df95f60ee93b7556505260f75eee9e1" dependencies = [ "anstream", "anstyle", - "bitflags", "clap_lex", "strsim 0.10.0", ] [[package]] name = "clap_derive" -version = "4.3.0" +version = "4.3.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "191d9573962933b4027f932c600cd252ce27a8ad5979418fe78e43c07996f27b" +checksum = "54a9bb5758fc5dfe728d1019941681eccaf0cf8a4189b692a0ee2f2ecf90a050" dependencies = [ "heck 0.4.1", "proc-macro2", @@ -824,15 +860,6 @@ dependencies = [ "serde", ] -[[package]] -name = "cranelift-entity" -version = "0.95.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "40099d38061b37e505e63f89bab52199037a72b931ad4868d9089ff7268660b0" -dependencies = [ - "serde", -] - [[package]] name = "crc32fast" version = "1.3.2" @@ -931,6 +958,27 @@ dependencies = [ "subtle", ] +[[package]] +name = "csv" +version = "1.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "626ae34994d3d8d668f4269922248239db4ae42d538b14c398b74a52208e8086" +dependencies = [ + "csv-core", + "itoa", + "ryu", + "serde", +] + +[[package]] +name = "csv-core" +version = "0.1.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2b2466559f260f48ad25fe6317b3c8dac77b5bdb5763ac7d9d6103530663bc90" +dependencies = [ + "memchr", +] + [[package]] name = "curve25519-dalek" version = "2.1.3" @@ -957,6 +1005,34 @@ dependencies = [ "zeroize", ] +[[package]] +name = "curve25519-dalek" +version = "4.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f711ade317dd348950a9910f81c5947e3d8907ebd2b83f76203ff1807e6a2bc2" +dependencies = [ + "cfg-if", + 
"cpufeatures", + "curve25519-dalek-derive", + "digest 0.10.7", + "fiat-crypto", + "platforms", + "rustc_version", + "subtle", + "zeroize", +] + +[[package]] +name = "curve25519-dalek-derive" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "83fdaf97f4804dcebfa5862639bc9ce4121e82140bec2a987ac5140294865b5b" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.28", +] + [[package]] name = "curve25519-dalek-ng" version = "4.1.1" @@ -1011,7 +1087,7 @@ version = "0.1.0" dependencies = [ "anyhow", "borsh", - "clap 4.3.0", + "clap 4.3.19", "const-rollup-config", "hex", "jsonrpsee 0.18.2", @@ -1019,7 +1095,9 @@ dependencies = [ "serde_json", "sov-accounts", "sov-bank", - "sov-election", + "sov-blob-storage", + "sov-chain-state", + "sov-cli", "sov-modules-api", "sov-modules-stf-template", "sov-rollup-interface", @@ -1067,6 +1145,12 @@ dependencies = [ "syn 1.0.109", ] +[[package]] +name = "destructure_traitobject" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3c877555693c14d2f84191cfd3ad8582790fc52b5e2274b40b59cf5f5cea25c7" + [[package]] name = "digest" version = "0.8.1" @@ -1105,6 +1189,16 @@ dependencies = [ "dirs-sys", ] +[[package]] +name = "dirs-next" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b98cf8ebf19c3d1b223e151f99a4f9f0690dca41414773390fc824184ac833e1" +dependencies = [ + "cfg-if", + "dirs-sys-next", +] + [[package]] name = "dirs-sys" version = "0.4.1" @@ -1117,6 +1211,17 @@ dependencies = [ "windows-sys 0.48.0", ] +[[package]] +name = "dirs-sys-next" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4ebda144c4fe02d1f7ea1a7d9641b6fc6b580adcfa024ae48797ecdeb6825b4d" +dependencies = [ + "libc", + "redox_users", + "winapi", +] + [[package]] name = "downcast-rs" version = "1.2.0" @@ -1198,6 +1303,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5fb04eee5d9d907f29e80ee6b0e78f7e2c82342c63e3580d8c4f69d9d5aad963" dependencies = [ "pkcs8", + "serde", "signature 2.1.0", ] @@ -1222,12 +1328,24 @@ checksum = "c762bae6dcaf24c4c84667b8579785430908723d5c889f469d76a41d59cc7a9d" dependencies = [ "curve25519-dalek 3.2.0", "ed25519 1.5.3", - "rand 0.7.3", - "serde", "sha2 0.9.9", "zeroize", ] +[[package]] +name = "ed25519-dalek" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7277392b266383ef8396db7fdeb1e77b6c52fed775f5df15bb24f35b72156980" +dependencies = [ + "curve25519-dalek 4.0.0", + "ed25519 2.2.1", + "rand_core 0.6.4", + "serde", + "sha2 0.10.6", + "zeroize", +] + [[package]] name = "ed25519-zebra" version = "3.1.0" @@ -1254,6 +1372,12 @@ version = "0.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e2b183d6ce6ca4cf30e3db37abf5b52568b5f9015c97d9fbdd7026aa5dcdd758" +[[package]] +name = "encode_unicode" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "34aa73646ffb006b8f5147f3dc182bd4bcb190227ce861fc4a4844bf8e3cb2c0" + [[package]] name = "encoding_rs" version = "0.8.32" @@ -1263,6 +1387,19 @@ dependencies = [ "cfg-if", ] +[[package]] +name = "env_logger" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85cdab6a89accf66733ad5a1693a4dcced6aeff64602b634530dd73c1f3ee9f0" +dependencies = [ + "humantime", + "is-terminal", + "log", + "regex", + "termcolor", +] + [[package]] name = "environmental" 
version = "1.1.4" @@ -1329,12 +1466,9 @@ checksum = "4443176a9f2c162692bd3d352d745ef9413eec5782a80d8fd6f8a1ac692a07f7" [[package]] name = "fastrand" -version = "1.9.0" +version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e51093e27b0797c359783294ca4f0a911c270184cb10f85783b118614a1501be" -dependencies = [ - "instant", -] +checksum = "6999dc1837253364c2ebb0704ba97994bd874e8f195d665c50b7548f6ea92764" [[package]] name = "ff" @@ -1365,6 +1499,12 @@ dependencies = [ "syn 1.0.109", ] +[[package]] +name = "fiat-crypto" +version = "0.1.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e825f6987101665dea6ec934c09ec6d721de7bc1bf92248e1d5810c8cd636b77" + [[package]] name = "fixed-hash" version = "0.8.0" @@ -1609,11 +1749,6 @@ name = "gimli" version = "0.27.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b6c80984affa11d98d1b88b66ac8853f143217b399d3c74116778ff8fdb4ed2e" -dependencies = [ - "fallible-iterator", - "indexmap", - "stable_deref_trait", -] [[package]] name = "glob" @@ -1704,12 +1839,6 @@ version = "0.15.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d23bd4e7b5eda0d0f3a307e8b381fdc8ba9000f26fbe912250c0a4cc3956364a" -[[package]] -name = "hash-db" -version = "0.16.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8e7d7786361d7425ae2fe4f9e407eb0efaa0840f5212d109cc018c40c35c6ab4" - [[package]] name = "hash256-std-hasher" version = "0.15.2" @@ -1865,6 +1994,12 @@ version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c4a1e36c821dbe04574f602848a19f742f4fb3c98d40449f11bcad18d6b17421" +[[package]] +name = "humantime" +version = "2.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9a3a5bfb195931eeb336b2a7b4d761daec841b97f947d34394601737a7bba5e4" + [[package]] name = "hyper" version = "0.14.26" @@ -1902,7 +2037,7 @@ dependencies = [ "rustls-native-certs", "tokio", "tokio-rustls 0.23.4", - "webpki-roots", + "webpki-roots 0.22.6", ] [[package]] @@ -2115,12 +2250,13 @@ checksum = "453ad9f582a441959e5f0d088b02ce04cfe8d51a8eaf077f12ac6d3e94164ca6" [[package]] name = "jmt" -version = "0.6.0" +version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e1a302f0defd323b833c9848c20ab40c3156128f50d7bf8eebeed2ef58167258" +checksum = "9e49c5d2c13e15f77f22cee3df3dc822b46051b217112035d72687cb57a9cbde" dependencies = [ "anyhow", "borsh", + "digest 0.10.7", "hashbrown 0.13.2", "hex", "ics23", @@ -2158,14 +2294,14 @@ version = "0.16.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7d291e3a5818a2384645fd9756362e6d89cf0541b0b916fa7702ea4a9833608e" dependencies = [ - "jsonrpsee-client-transport", + "jsonrpsee-client-transport 0.16.2", "jsonrpsee-core 0.16.2", "jsonrpsee-http-client 0.16.2", "jsonrpsee-proc-macros 0.16.2", "jsonrpsee-server 0.16.2", "jsonrpsee-types 0.16.2", - "jsonrpsee-wasm-client", - "jsonrpsee-ws-client", + "jsonrpsee-wasm-client 0.16.2", + "jsonrpsee-ws-client 0.16.2", "tracing", ] @@ -2175,11 +2311,14 @@ version = "0.18.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1822d18e4384a5e79d94dc9e4d1239cfa9fad24e55b44d2efeff5b394c9fece4" dependencies = [ + "jsonrpsee-client-transport 0.18.2", "jsonrpsee-core 0.18.2", "jsonrpsee-http-client 0.18.2", "jsonrpsee-proc-macros 0.18.2", "jsonrpsee-server 0.18.2", "jsonrpsee-types 0.18.2", + "jsonrpsee-wasm-client 0.18.2", + 
"jsonrpsee-ws-client 0.18.2", "tracing", ] @@ -2205,7 +2344,29 @@ dependencies = [ "tokio-rustls 0.23.4", "tokio-util", "tracing", - "webpki-roots", + "webpki-roots 0.22.6", +] + +[[package]] +name = "jsonrpsee-client-transport" +version = "0.18.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "11aa5766d5c430b89cb26a99b88f3245eb91534be8126102cea9e45ee3891b22" +dependencies = [ + "futures-channel", + "futures-util", + "gloo-net", + "http", + "jsonrpsee-core 0.18.2", + "pin-project", + "rustls-native-certs", + "soketto", + "thiserror", + "tokio", + "tokio-rustls 0.24.1", + "tokio-util", + "tracing", + "webpki-roots 0.23.1", ] [[package]] @@ -2225,7 +2386,7 @@ dependencies = [ "globset", "hyper", "jsonrpsee-types 0.16.2", - "parking_lot", + "parking_lot 0.12.1", "rand 0.8.5", "rustc-hash", "serde", @@ -2244,13 +2405,15 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "64c6832a55f662b5a6ecc844db24b8b9c387453f923de863062c60ce33d62b81" dependencies = [ "anyhow", + "async-lock", "async-trait", "beef", + "futures-timer", "futures-util", "globset", "hyper", "jsonrpsee-types 0.18.2", - "parking_lot", + "parking_lot 0.12.1", "rand 0.8.5", "rustc-hash", "serde", @@ -2258,7 +2421,9 @@ dependencies = [ "soketto", "thiserror", "tokio", + "tokio-stream", "tracing", + "wasm-bindgen-futures", ] [[package]] @@ -2401,11 +2566,22 @@ version = "0.16.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a77310456f43c6c89bcba1f6b2fc2a28300da7c341f320f5128f8c83cc63232d" dependencies = [ - "jsonrpsee-client-transport", + "jsonrpsee-client-transport 0.16.2", "jsonrpsee-core 0.16.2", "jsonrpsee-types 0.16.2", ] +[[package]] +name = "jsonrpsee-wasm-client" +version = "0.18.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "34e6ea7c6d862e60f8baebd946c037b70c6808a4e4e31e792a4029184e3ce13a" +dependencies = [ + "jsonrpsee-client-transport 0.18.2", + "jsonrpsee-core 0.18.2", + "jsonrpsee-types 0.18.2", +] + [[package]] name = "jsonrpsee-ws-client" version = "0.16.2" @@ -2413,36 +2589,21 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0b83daeecfc6517cfe210df24e570fb06213533dfb990318fae781f4c7119dd9" dependencies = [ "http", - "jsonrpsee-client-transport", + "jsonrpsee-client-transport 0.16.2", "jsonrpsee-core 0.16.2", "jsonrpsee-types 0.16.2", ] [[package]] -name = "jupiter" -version = "0.1.0" +name = "jsonrpsee-ws-client" +version = "0.18.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a64b2589680ba1ad7863f279cd2d5083c1dc0a7c0ea959d22924553050f8ab9f" dependencies = [ - "anyhow", - "async-trait", - "base64 0.21.2", - "bech32", - "borsh", - "hex", - "hex-literal", - "jsonrpsee 0.16.2", - "nmt-rs", - "prost", - "prost-build", - "prost-types", - "serde", - "serde_json", - "sha2 0.10.6", - "sov-rollup-interface", - "tendermint", - "tendermint-proto", - "thiserror", - "tokio", - "tracing", + "http", + "jsonrpsee-client-transport 0.18.2", + "jsonrpsee-core 0.18.2", + "jsonrpsee-types 0.18.2", ] [[package]] @@ -2586,6 +2747,12 @@ dependencies = [ "vcpkg", ] +[[package]] +name = "linked-hash-map" +version = "0.5.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0717cef1bc8b636c6e1c1bbdefc09e6322da8a9321966e8928ef80d20f7f770f" + [[package]] name = "linux-raw-sys" version = "0.1.4" @@ -2598,6 +2765,12 @@ version = "0.3.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"ef53942eb7bf7ff43a617b3e2c1c4a5ecf5944a7c1bc12d7ee39bbb15e5c1519" +[[package]] +name = "linux-raw-sys" +version = "0.4.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "57bcfdad1b858c2db7c38303a6d2ad4dfaf5eb53dfeb0910128b2c26d6158503" + [[package]] name = "lock_api" version = "0.4.9" @@ -2615,26 +2788,59 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "abb12e687cfb44aa40f41fc3978ef76448f9b6038cad6aef4259d3c095a2382e" dependencies = [ "cfg-if", + "serde", ] [[package]] -name = "lz4-sys" -version = "1.9.4" +name = "log-mdc" +version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "57d27b317e207b10f69f5e75494119e391a96f48861ae870d1da6edac98ca900" -dependencies = [ - "cc", - "libc", -] +checksum = "a94d21414c1f4a51209ad204c1776a3d0765002c76c6abcb602a6f09f1e881c7" [[package]] -name = "mach" -version = "0.3.2" +name = "log4rs" +version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b823e83b2affd8f40a9ee8c29dbc56404c1e34cd2710921f2801e2cf29527afa" +checksum = "d36ca1786d9e79b8193a68d480a0907b612f109537115c6ff655a3a1967533fd" dependencies = [ - "libc", -] + "anyhow", + "arc-swap", + "chrono", + "derivative", + "fnv", + "humantime", + "libc", + "log", + "log-mdc", + "parking_lot 0.12.1", + "serde", + "serde-value", + "serde_json", + "serde_yaml", + "thiserror", + "thread-id", + "typemap-ors", + "winapi", +] + +[[package]] +name = "lz4-sys" +version = "1.9.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "57d27b317e207b10f69f5e75494119e391a96f48861ae870d1da6edac98ca900" +dependencies = [ + "cc", + "libc", +] + +[[package]] +name = "mach" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b823e83b2affd8f40a9ee8c29dbc56404c1e34cd2710921f2801e2cf29527afa" +dependencies = [ + "libc", +] [[package]] name = "matchers" @@ -2645,6 +2851,15 @@ dependencies = [ "regex-automata", ] +[[package]] +name = "matchers" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8263075bb86c5a1b1427b5ae862e8889656f126e9f77c484496e8b47cf5c5558" +dependencies = [ + "regex-automata", +] + [[package]] name = "matrixmultiply" version = "0.3.7" @@ -2700,7 +2915,7 @@ version = "0.31.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5e0c7cba9ce19ac7ffd2053ac9f49843bbd3f4318feedfd74e85c19d5fb0ba66" dependencies = [ - "hash-db 0.15.2", + "hash-db", "hashbrown 0.12.3", ] @@ -2986,18 +3201,6 @@ dependencies = [ "memchr", ] -[[package]] -name = "object" -version = "0.30.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "03b4680b86d9cfafba8fc491dc9b6df26b68cf40e9e6cd73909194759a63c385" -dependencies = [ - "crc32fast", - "hashbrown 0.13.2", - "indexmap", - "memchr", -] - [[package]] name = "object" version = "0.31.1" @@ -3031,7 +3234,7 @@ version = "0.10.52" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "01b8574602df80f7b85fdfc5392fa884a4e3b3f4f35402c070ab34c3d3f78d56" dependencies = [ - "bitflags", + "bitflags 1.3.2", "cfg-if", "foreign-types", "libc", @@ -3075,6 +3278,15 @@ version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "04744f49eae99ab78e0d5c0b603ab218f515ea8cfe5a456d7629ad883a3b6e7d" +[[package]] +name = "ordered-float" +version = "2.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"7940cf2ca942593318d07fcf2596cdca60a85c9e7fab408a5e21a4f9dcd40d87" +dependencies = [ + "num-traits", +] + [[package]] name = "overload" version = "0.1.1" @@ -3114,6 +3326,17 @@ version = "0.45.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e1ad0aff30c1da14b1254fcb2af73e1fa9a28670e584a626f53a369d0e157304" +[[package]] +name = "parking_lot" +version = "0.11.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7d17b78036a60663b797adeaee46f5c9dfebb86948d1255007a1d6be0271ff99" +dependencies = [ + "instant", + "lock_api", + "parking_lot_core 0.8.6", +] + [[package]] name = "parking_lot" version = "0.12.1" @@ -3121,7 +3344,21 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3742b2c103b9f06bc9fff0a37ff4912935851bee6d36f3c02bcc755bcfec228f" dependencies = [ "lock_api", - "parking_lot_core", + "parking_lot_core 0.9.7", +] + +[[package]] +name = "parking_lot_core" +version = "0.8.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "60a2cfe6f0ad2bfc16aefa463b497d5c7a5ecd44a23efa72aa342d90177356dc" +dependencies = [ + "cfg-if", + "instant", + "libc", + "redox_syscall 0.2.16", + "smallvec 1.10.0", + "winapi", ] [[package]] @@ -3255,6 +3492,12 @@ version = "0.3.27" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "26072860ba924cbfa98ea39c8c19b4dd6a4a25423dbdf219c1eca91aa0cf6964" +[[package]] +name = "platforms" +version = "3.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4503fa043bf02cee09a9582e9554b4c6403b2ef55e4612e96561d294419429f8" + [[package]] name = "ppv-lite86" version = "0.2.17" @@ -3268,6 +3511,7 @@ dependencies = [ "anyhow", "async-trait", "avail-subxt", + "borsh", "bytes", "parity-scale-codec", "primitive-types", @@ -3275,7 +3519,7 @@ dependencies = [ "serde", "serde_json", "sov-rollup-interface", - "sp-core 21.0.0", + "sp-core-hashing 10.0.0", "subxt", "thiserror", "tokio", @@ -3303,6 +3547,20 @@ dependencies = [ "syn 2.0.28", ] +[[package]] +name = "prettytable-rs" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eea25e07510aa6ab6547308ebe3c036016d162b8da920dbb079e3ba8acf3d95a" +dependencies = [ + "csv", + "encode_unicode", + "is-terminal", + "lazy_static", + "term", + "unicode-width", +] + [[package]] name = "primitive-types" version = "0.12.1" @@ -3378,7 +3636,7 @@ dependencies = [ "fnv", "lazy_static", "memchr", - "parking_lot", + "parking_lot 0.12.1", "thiserror", ] @@ -3565,7 +3823,7 @@ version = "0.2.16" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fb5a58c1855b4b6819d59012155603f0b22ad30cad752600aadfcb695265519a" dependencies = [ - "bitflags", + "bitflags 1.3.2", ] [[package]] @@ -3574,7 +3832,7 @@ version = "0.3.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "567664f262709473930a4bf9e51bf2ebf3348f2e748ccc50dea20646858f8f29" dependencies = [ - "bitflags", + "bitflags 1.3.2", ] [[package]] @@ -3707,11 +3965,14 @@ version = "0.1.0" dependencies = [ "anyhow", "bincode", + "bytemuck", "risc0-circuit-rv32im", "risc0-zkp", "risc0-zkvm", + "risc0-zkvm-platform", "serde", "sov-rollup-interface", + "zk-cycle-utils", ] [[package]] @@ -3921,7 +4182,7 @@ version = "0.36.14" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "14e4d67015953998ad0eb82887a0eb0129e18a7e2f3b7b0f6c422fddcd503d62" dependencies = [ - "bitflags", + "bitflags 1.3.2", "errno", "io-lifetimes", "libc", @@ 
-3935,7 +4196,7 @@ version = "0.37.19" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "acf8729d8542766f1b2cf77eb034d52f40d375bb8b615d0b147089946e16613d" dependencies = [ - "bitflags", + "bitflags 1.3.2", "errno", "io-lifetimes", "libc", @@ -3943,6 +4204,19 @@ dependencies = [ "windows-sys 0.48.0", ] +[[package]] +name = "rustix" +version = "0.38.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c0c3dde1fc030af041adc40e79c0e7fbcf431dd24870053d187d7c66e4b87453" +dependencies = [ + "bitflags 2.4.0", + "errno", + "libc", + "linux-raw-sys 0.4.5", + "windows-sys 0.48.0", +] + [[package]] name = "rustls" version = "0.20.8" @@ -3963,7 +4237,7 @@ checksum = "1d1feddffcfcc0b33f5c6ce9a29e341e4cd59c3f78e7ee45f4a40c038b1d6cbb" dependencies = [ "log", "ring", - "rustls-webpki", + "rustls-webpki 0.101.3", "sct", ] @@ -3988,6 +4262,16 @@ dependencies = [ "base64 0.21.2", ] +[[package]] +name = "rustls-webpki" +version = "0.100.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e98ff011474fa39949b7e5c0428f9b4937eda7da7848bbb947786b7be0b27dab" +dependencies = [ + "ring", + "untrusted", +] + [[package]] name = "rustls-webpki" version = "0.101.3" @@ -3998,6 +4282,12 @@ dependencies = [ "untrusted", ] +[[package]] +name = "rustversion" +version = "1.0.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7ffc183a10b4478d04cbbbfc96d0873219d962dd5accaff2ffbd4ceb7df837f4" + [[package]] name = "ryu" version = "1.0.13" @@ -4181,7 +4471,7 @@ version = "2.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1fc758eb7bffce5b308734e9b0c1468893cae9ff70ebf13e7090be8dcbcc83a8" dependencies = [ - "bitflags", + "bitflags 1.3.2", "core-foundation", "core-foundation-sys", "libc", @@ -4215,9 +4505,9 @@ checksum = "f638d531eccd6e23b980caf34876660d38e265409d8e99b397ab71eb3612fad0" [[package]] name = "serde" -version = "1.0.183" +version = "1.0.188" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "32ac8da02677876d532745a130fc9d8e6edfa81a269b107c5b00829b91d8eb3c" +checksum = "cf9e0fcba69a370eed61bcf2b728575f726b50b55cba78064753d708ddc7549e" dependencies = [ "serde_derive", ] @@ -4233,6 +4523,16 @@ dependencies = [ "smallvec 0.6.14", ] +[[package]] +name = "serde-value" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f3a1a3341211875ef120e117ea7fd5228530ae7e7036a779fdc9117be6b3282c" +dependencies = [ + "ordered-float", + "serde", +] + [[package]] name = "serde_bytes" version = "0.11.12" @@ -4244,9 +4544,9 @@ dependencies = [ [[package]] name = "serde_derive" -version = "1.0.183" +version = "1.0.188" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aafe972d60b0b9bee71a91b92fee2d4fb3c9d7e8f6b179aa99f27203d99a4816" +checksum = "4eca7ac642d82aa35b60049a6eccb4be6be75e599bd2e9adb5f875a737654af2" dependencies = [ "proc-macro2", "quote", @@ -4307,6 +4607,18 @@ dependencies = [ "serde", ] +[[package]] +name = "serde_yaml" +version = "0.8.26" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "578a7433b776b56a35785ed5ce9a7e777ac0598aac5a6dd1b4b18a307c7fc71b" +dependencies = [ + "indexmap", + "ryu", + "serde", + "yaml-rust", +] + [[package]] name = "sha-1" version = "0.9.8" @@ -4469,7 +4781,7 @@ version = "0.1.0" dependencies = [ "anyhow", "borsh", - "clap 4.3.0", + "clap 4.3.19", "jsonrpsee 0.18.2", "schemars", "serde", @@ -4485,7 +4797,7 @@ version = "0.1.0" 
dependencies = [ "anyhow", "borsh", - "clap 4.3.0", + "clap 4.3.19", "hex", "jsonrpsee 0.18.2", "schemars", @@ -4497,6 +4809,58 @@ dependencies = [ "thiserror", ] +[[package]] +name = "sov-blob-storage" +version = "0.1.0" +dependencies = [ + "anyhow", + "bincode", + "borsh", + "hex", + "jsonrpsee 0.18.2", + "schemars", + "serde", + "serde_json", + "sov-modules-api", + "sov-modules-macros", + "sov-rollup-interface", + "sov-sequencer-registry", + "sov-state", + "tracing", +] + +[[package]] +name = "sov-chain-state" +version = "0.1.0" +dependencies = [ + "anyhow", + "borsh", + "jsonrpsee 0.18.2", + "serde", + "serde_json", + "sov-modules-api", + "sov-modules-macros", + "sov-rollup-interface", + "sov-state", +] + +[[package]] +name = "sov-cli" +version = "0.1.0" +dependencies = [ + "anyhow", + "borsh", + "directories", + "hex", + "jsonrpsee 0.18.2", + "serde", + "serde_json", + "sov-accounts", + "sov-bank", + "sov-modules-api", + "tokio", +] + [[package]] name = "sov-db" version = "0.1.0" @@ -4510,6 +4874,7 @@ dependencies = [ "serde", "sov-rollup-interface", "sov-schema-db", + "tokio", ] [[package]] @@ -4518,40 +4883,65 @@ version = "0.1.0" dependencies = [ "anyhow", "borsh", + "const-rollup-config", "demo-stf", + "env_logger", "hex", "jsonrpsee 0.16.2", + "log", + "log4rs", "methods", + "once_cell", + "parking_lot 0.11.2", "presence", + "prettytable-rs", + "regex", "risc0-adapter", "risc0-zkvm", "serde", "serde_json", "sha2 0.10.6", + "sov-demo-rollup", "sov-modules-api", "sov-rollup-interface", "sov-state", "sov-stf-runner", + "tempfile", "tokio", "tracing", "tracing-subscriber 0.3.17", ] [[package]] -name = "sov-election" +name = "sov-demo-rollup" version = "0.1.0" dependencies = [ "anyhow", + "async-trait", "borsh", - "clap 4.3.0", + "bytes", + "celestia", + "const-rollup-config", + "demo-stf", + "futures", "hex", + "jmt", "jsonrpsee 0.18.2", - "schemars", + "presence", + "risc0-adapter", "serde", "serde_json", + "sov-cli", + "sov-db", "sov-modules-api", + "sov-modules-stf-template", "sov-rollup-interface", + "sov-sequencer", "sov-state", + "sov-stf-runner", + "tokio", + "tracing", + "tracing-subscriber 0.3.17", ] [[package]] @@ -4568,18 +4958,19 @@ dependencies = [ "anyhow", "bech32", "borsh", - "clap 4.3.0", + "clap 4.3.19", "derive_more", - "ed25519-dalek", + "ed25519-dalek 2.0.0", "hex", "jsonrpsee 0.18.2", - "rand 0.7.3", + "rand 0.8.5", "schemars", "serde", "serde_json", "sha2 0.10.6", "sov-modules-macros", "sov-rollup-interface", + "sov-sequencer", "sov-state", "thiserror", ] @@ -4609,6 +5000,7 @@ dependencies = [ "sov-modules-api", "sov-rollup-interface", "sov-state", + "thiserror", "tracing", ] @@ -4618,12 +5010,14 @@ version = "0.1.0" dependencies = [ "anyhow", "async-trait", + "bincode", "borsh", "bytes", "digest 0.10.7", "hex", "serde", "sha2 0.10.6", + "tokio", ] [[package]] @@ -4647,7 +5041,6 @@ dependencies = [ "hex", "jsonrpsee 0.18.2", "serde", - "sov-modules-api", "sov-rollup-interface", "tracing", ] @@ -4658,7 +5051,7 @@ version = "0.1.0" dependencies = [ "anyhow", "borsh", - "clap 4.3.0", + "clap 4.3.19", "jsonrpsee 0.18.2", "schemars", "serde", @@ -4674,6 +5067,7 @@ name = "sov-state" version = "0.1.0" dependencies = [ "anyhow", + "bcs", "borsh", "hex", "jmt", @@ -4691,10 +5085,10 @@ version = "0.1.0" dependencies = [ "anyhow", "borsh", + "celestia", "futures", "hex", "jsonrpsee 0.18.2", - "jupiter", "serde", "serde_json", "sov-db", @@ -4713,7 +5107,7 @@ version = "0.1.0" dependencies = [ "anyhow", "borsh", - "clap 4.3.0", + "clap 4.3.19", "jsonrpsee 0.18.2", 
"schemars", "serde", @@ -4733,9 +5127,9 @@ dependencies = [ "parity-scale-codec", "scale-info", "serde", - "sp-core 16.0.0", + "sp-core", "sp-io", - "sp-std 6.0.0", + "sp-std", ] [[package]] @@ -4749,7 +5143,7 @@ dependencies = [ "parity-scale-codec", "scale-info", "serde", - "sp-std 6.0.0", + "sp-std", "static_assertions", ] @@ -4761,13 +5155,13 @@ checksum = "9c96dc3debbe5c22ebf18f99e6a53199efe748e6e584a1902adb88cbad66ae7c" dependencies = [ "array-bytes", "base58", - "bitflags", + "bitflags 1.3.2", "blake2", "bounded-collections", "dyn-clonable", "ed25519-zebra", "futures", - "hash-db 0.15.2", + "hash-db", "hash256-std-hasher", "impl-serde", "lazy_static", @@ -4775,7 +5169,7 @@ dependencies = [ "log", "merlin", "parity-scale-codec", - "parking_lot", + "parking_lot 0.12.1", "primitive-types", "rand 0.8.5", "regex", @@ -4785,56 +5179,11 @@ dependencies = [ "secrecy", "serde", "sp-core-hashing 6.0.0", - "sp-debug-derive 6.0.0", - "sp-externalities 0.17.0", - "sp-runtime-interface 13.0.0", - "sp-std 6.0.0", - "sp-storage 11.0.0", - "ss58-registry", - "substrate-bip39", - "thiserror", - "tiny-bip39", - "zeroize", -] - -[[package]] -name = "sp-core" -version = "21.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f18d9e2f67d8661f9729f35347069ac29d92758b59135176799db966947a7336" -dependencies = [ - "array-bytes", - "bitflags", - "blake2", - "bounded-collections", - "bs58", - "dyn-clonable", - "ed25519-zebra", - "futures", - "hash-db 0.16.0", - "hash256-std-hasher", - "impl-serde", - "lazy_static", - "libsecp256k1", - "log", - "merlin", - "parity-scale-codec", - "parking_lot", - "paste", - "primitive-types", - "rand 0.8.5", - "regex", - "scale-info", - "schnorrkel", - "secp256k1", - "secrecy", - "serde", - "sp-core-hashing 9.0.0", - "sp-debug-derive 8.0.0", - "sp-externalities 0.19.0", - "sp-runtime-interface 17.0.0", - "sp-std 8.0.0", - "sp-storage 13.0.0", + "sp-debug-derive", + "sp-externalities", + "sp-runtime-interface", + "sp-std", + "sp-storage", "ss58-registry", "substrate-bip39", "thiserror", @@ -4853,22 +5202,21 @@ dependencies = [ "digest 0.10.7", "sha2 0.10.6", "sha3", - "sp-std 6.0.0", + "sp-std", "twox-hash", ] [[package]] name = "sp-core-hashing" -version = "9.0.0" +version = "10.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2ee599a8399448e65197f9a6cee338ad192e9023e35e31f22382964c3c174c68" +checksum = "e360755a2706a76886d58776665cad0db793dece3c7d390455b28e8a1efd6285" dependencies = [ "blake2b_simd", "byteorder", "digest 0.10.7", "sha2 0.10.6", "sha3", - "sp-std 8.0.0", "twox-hash", ] @@ -4883,17 +5231,6 @@ dependencies = [ "syn 1.0.109", ] -[[package]] -name = "sp-debug-derive" -version = "8.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c7f531814d2f16995144c74428830ccf7d94ff4a7749632b83ad8199b181140c" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.28", -] - [[package]] name = "sp-externalities" version = "0.17.0" @@ -4902,20 +5239,8 @@ checksum = "57052935c9c9b070ea6b339ef0da3bf241b7e065fc37f9c551669ee83ecfc3c1" dependencies = [ "environmental", "parity-scale-codec", - "sp-std 6.0.0", - "sp-storage 11.0.0", -] - -[[package]] -name = "sp-externalities" -version = "0.19.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a0f71c671e01a8ca60da925d43a1b351b69626e268b8837f8371e320cf1dd100" -dependencies = [ - "environmental", - "parity-scale-codec", - "sp-std 8.0.0", - "sp-storage 13.0.0", + "sp-std", + "sp-storage", ] [[package]] @@ 
-4926,19 +5251,19 @@ checksum = "578959f9a7e44fd2dd96e8b8bc893cea04fcd7c00a4ffbb0b91c5013899dd02b" dependencies = [ "bytes", "ed25519 1.5.3", - "ed25519-dalek", + "ed25519-dalek 1.0.1", "futures", "libsecp256k1", "log", "parity-scale-codec", "secp256k1", - "sp-core 16.0.0", - "sp-externalities 0.17.0", + "sp-core", + "sp-externalities", "sp-keystore", - "sp-runtime-interface 13.0.0", + "sp-runtime-interface", "sp-state-machine", - "sp-std 6.0.0", - "sp-tracing 8.0.0", + "sp-std", + "sp-tracing", "sp-trie", "tracing", "tracing-core", @@ -4954,10 +5279,10 @@ dependencies = [ "futures", "merlin", "parity-scale-codec", - "parking_lot", + "parking_lot 0.12.1", "schnorrkel", - "sp-core 16.0.0", - "sp-externalities 0.17.0", + "sp-core", + "sp-externalities", "thiserror", ] @@ -4989,9 +5314,9 @@ dependencies = [ "serde", "sp-application-crypto", "sp-arithmetic", - "sp-core 16.0.0", + "sp-core", "sp-io", - "sp-std 6.0.0", + "sp-std", "sp-weights", ] @@ -5005,31 +5330,12 @@ dependencies = [ "impl-trait-for-tuples", "parity-scale-codec", "primitive-types", - "sp-externalities 0.17.0", - "sp-runtime-interface-proc-macro 9.0.0", - "sp-std 6.0.0", - "sp-storage 11.0.0", - "sp-tracing 8.0.0", - "sp-wasm-interface 10.0.0", - "static_assertions", -] - -[[package]] -name = "sp-runtime-interface" -version = "17.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6e676128182f90015e916f806cba635c8141e341e7abbc45d25525472e1bbce8" -dependencies = [ - "bytes", - "impl-trait-for-tuples", - "parity-scale-codec", - "primitive-types", - "sp-externalities 0.19.0", - "sp-runtime-interface-proc-macro 11.0.0", - "sp-std 8.0.0", - "sp-storage 13.0.0", - "sp-tracing 10.0.0", - "sp-wasm-interface 14.0.0", + "sp-externalities", + "sp-runtime-interface-proc-macro", + "sp-std", + "sp-storage", + "sp-tracing", + "sp-wasm-interface", "static_assertions", ] @@ -5046,35 +5352,22 @@ dependencies = [ "syn 1.0.109", ] -[[package]] -name = "sp-runtime-interface-proc-macro" -version = "11.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a5d5bd5566fe5633ec48dfa35ab152fd29f8a577c21971e1c6db9f28afb9bbb9" -dependencies = [ - "Inflector", - "proc-macro-crate 1.3.1", - "proc-macro2", - "quote", - "syn 2.0.28", -] - [[package]] name = "sp-state-machine" version = "0.22.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4c957b8b4c252507c12674948db427c5e34fd1760ce256922f1ec5f89f781a4f" dependencies = [ - "hash-db 0.15.2", + "hash-db", "log", "parity-scale-codec", - "parking_lot", + "parking_lot 0.12.1", "rand 0.8.5", "smallvec 1.10.0", - "sp-core 16.0.0", - "sp-externalities 0.17.0", + "sp-core", + "sp-externalities", "sp-panic-handler", - "sp-std 6.0.0", + "sp-std", "sp-trie", "thiserror", "tracing", @@ -5086,12 +5379,6 @@ version = "6.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "af0ee286f98455272f64ac5bb1384ff21ac029fbb669afbaf48477faff12760e" -[[package]] -name = "sp-std" -version = "8.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "53458e3c57df53698b3401ec0934bea8e8cfce034816873c0b0abbd83d7bac0d" - [[package]] name = "sp-storage" version = "11.0.0" @@ -5102,22 +5389,8 @@ dependencies = [ "parity-scale-codec", "ref-cast", "serde", - "sp-debug-derive 6.0.0", - "sp-std 6.0.0", -] - -[[package]] -name = "sp-storage" -version = "13.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "94294be83f11d4958cfea89ed5798f0b6605f5defc3a996948848458abbcc18e" 
-dependencies = [ - "impl-serde", - "parity-scale-codec", - "ref-cast", - "serde", - "sp-debug-derive 8.0.0", - "sp-std 8.0.0", + "sp-debug-derive", + "sp-std", ] [[package]] @@ -5127,20 +5400,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e46bd547da89a9cda69b4ce4c91a5b7e1f86915190d83cd407b715d0c6bac042" dependencies = [ "parity-scale-codec", - "sp-std 6.0.0", - "tracing", - "tracing-core", - "tracing-subscriber 0.2.25", -] - -[[package]] -name = "sp-tracing" -version = "10.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "357f7591980dd58305956d32f8f6646d0a8ea9ea0e7e868e46f53b68ddf00cec" -dependencies = [ - "parity-scale-codec", - "sp-std 8.0.0", + "sp-std", "tracing", "tracing-core", "tracing-subscriber 0.2.25", @@ -5153,17 +5413,17 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8efbe5b6d29a18fea7c2f52e0098135f2f864b31d335d5105b40a349866ba874" dependencies = [ "ahash 0.8.3", - "hash-db 0.15.2", + "hash-db", "hashbrown 0.12.3", "lazy_static", "memory-db", "nohash-hasher", "parity-scale-codec", - "parking_lot", + "parking_lot 0.12.1", "scale-info", "schnellru", - "sp-core 16.0.0", - "sp-std 6.0.0", + "sp-core", + "sp-std", "thiserror", "tracing", "trie-db", @@ -5180,23 +5440,9 @@ dependencies = [ "impl-trait-for-tuples", "log", "parity-scale-codec", - "sp-std 6.0.0", + "sp-std", "wasmi", - "wasmtime 5.0.1", -] - -[[package]] -name = "sp-wasm-interface" -version = "14.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a19c122609ca5d8246be6386888596320d03c7bc880959eaa2c36bcd5acd6846" -dependencies = [ - "anyhow", - "impl-trait-for-tuples", - "log", - "parity-scale-codec", - "sp-std 8.0.0", - "wasmtime 8.0.1", + "wasmtime", ] [[package]] @@ -5210,9 +5456,9 @@ dependencies = [ "serde", "smallvec 1.10.0", "sp-arithmetic", - "sp-core 16.0.0", - "sp-debug-derive 6.0.0", - "sp-std 6.0.0", + "sp-core", + "sp-debug-derive", + "sp-std", ] [[package]] @@ -5344,7 +5590,7 @@ dependencies = [ "impl-serde", "jsonrpsee 0.16.2", "parity-scale-codec", - "parking_lot", + "parking_lot 0.12.1", "primitive-types", "scale-bits", "scale-decode", @@ -5352,7 +5598,7 @@ dependencies = [ "scale-value", "serde", "serde_json", - "sp-core 16.0.0", + "sp-core", "sp-core-hashing 6.0.0", "sp-runtime", "subxt-macro", @@ -5442,15 +5688,15 @@ checksum = "1b1c7f239eb94671427157bd93b3694320f3668d4e1eff08c7285366fd777fac" [[package]] name = "tempfile" -version = "3.5.0" +version = "3.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b9fbec84f381d5795b08656e4912bec604d162bff9291d6189a78f4c8ab87998" +checksum = "cb94d2f3cc536af71caac6b6fcebf65860b347e7ce0cc9ebe8f70d3e521054ef" dependencies = [ "cfg-if", "fastrand", "redox_syscall 0.3.5", - "rustix 0.37.19", - "windows-sys 0.45.0", + "rustix 0.38.11", + "windows-sys 0.48.0", ] [[package]] @@ -5478,7 +5724,7 @@ dependencies = [ "subtle", "subtle-encoding", "tendermint-proto", - "time", + "time 0.3.21", "zeroize", ] @@ -5497,7 +5743,27 @@ dependencies = [ "serde", "serde_bytes", "subtle-encoding", - "time", + "time 0.3.21", +] + +[[package]] +name = "term" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c59df8ac95d96ff9bede18eb7300b0fda5e5d8d90960e76f8e14ae765eedbf1f" +dependencies = [ + "dirs-next", + "rustversion", + "winapi", +] + +[[package]] +name = "termcolor" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"be55cf8942feac5c765c2c993422806843c9a9a45d4d5c407ad6dd2ea95eb9b6" +dependencies = [ + "winapi-util", ] [[package]] @@ -5529,6 +5795,17 @@ dependencies = [ "syn 2.0.28", ] +[[package]] +name = "thread-id" +version = "4.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "79474f573561cdc4871a0de34a51c92f7f5a56039113fbb5b9c9f96bdb756669" +dependencies = [ + "libc", + "redox_syscall 0.2.16", + "winapi", +] + [[package]] name = "thread_local" version = "1.1.7" @@ -5539,6 +5816,17 @@ dependencies = [ "once_cell", ] +[[package]] +name = "time" +version = "0.1.45" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1b797afad3f312d1c66a56d11d0316f916356d11bd158fbc6ca6389ff6bf805a" +dependencies = [ + "libc", + "wasi 0.10.0+wasi-snapshot-preview1", + "winapi", +] + [[package]] name = "time" version = "0.3.21" @@ -5610,7 +5898,7 @@ dependencies = [ "libc", "mio", "num_cpus", - "parking_lot", + "parking_lot 0.12.1", "pin-project-lite", "signal-hook-registry", "socket2", @@ -5820,7 +6108,7 @@ dependencies = [ "ansi_term", "chrono", "lazy_static", - "matchers", + "matchers 0.0.1", "regex", "serde", "serde_json", @@ -5839,10 +6127,14 @@ version = "0.3.17" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "30a651bc37f915e81f087d86e62a18eec5f79550c7faff886f7090b4ea757c77" dependencies = [ + "matchers 0.1.0", "nu-ansi-term", + "once_cell", + "regex", "sharded-slab", "smallvec 1.10.0", "thread_local", + "tracing", "tracing-core", "tracing-log", ] @@ -5853,7 +6145,7 @@ version = "0.24.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "004e1e8f92535694b4cb1444dc5a8073ecf0815e3357f729638b9f8fc4062908" dependencies = [ - "hash-db 0.15.2", + "hash-db", "hashbrown 0.12.3", "log", "rustc-hex", @@ -5866,7 +6158,7 @@ version = "0.17.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9a36c5ca3911ed3c9a5416ee6c679042064b93fc637ded67e25f92e68d783891" dependencies = [ - "hash-db 0.15.2", + "hash-db", ] [[package]] @@ -5887,6 +6179,15 @@ dependencies = [ "static_assertions", ] +[[package]] +name = "typemap-ors" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a68c24b707f02dd18f1e4ccceb9d49f2058c2fb86384ef9972592904d7a28867" +dependencies = [ + "unsafe-any-ors", +] + [[package]] name = "typenum" version = "1.16.0" @@ -5968,6 +6269,15 @@ version = "0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f962df74c8c05a667b5ee8bcf162993134c104e96440b663c8daa176dc772d8c" +[[package]] +name = "unsafe-any-ors" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e0a303d30665362d9680d7d91d78b23f5f899504d4f08b3c4cf08d055d87c0ad" +dependencies = [ + "destructure_traitobject", +] + [[package]] name = "untrusted" version = "0.7.1" @@ -6031,6 +6341,12 @@ version = "0.9.0+wasi-snapshot-preview1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cccddf32554fecc6acb585f82a32a72e28b48f8c4c1883ddfeeeaa96f7d8e519" +[[package]] +name = "wasi" +version = "0.10.0+wasi-snapshot-preview1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1a143597ca7c7793eff794def352d41792a93c481eb1042423ff7ff72ba2c31f" + [[package]] name = "wasi" version = "0.11.0+wasi-snapshot-preview1" @@ -6146,16 +6462,6 @@ dependencies = [ "url", ] -[[package]] -name = "wasmparser" -version = "0.102.0" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "48134de3d7598219ab9eaf6b91b15d8e50d31da76b8519fe4ecfcec2cf35104b" -dependencies = [ - "indexmap", - "url", -] - [[package]] name = "wasmtime" version = "5.0.1" @@ -6174,38 +6480,13 @@ dependencies = [ "psm", "serde", "target-lexicon", - "wasmparser 0.96.0", - "wasmtime-environ 5.0.1", - "wasmtime-jit 5.0.1", - "wasmtime-runtime 5.0.1", + "wasmparser", + "wasmtime-environ", + "wasmtime-jit", + "wasmtime-runtime", "windows-sys 0.42.0", ] -[[package]] -name = "wasmtime" -version = "8.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f907fdead3153cb9bfb7a93bbd5b62629472dc06dee83605358c64c52ed3dda9" -dependencies = [ - "anyhow", - "bincode", - "cfg-if", - "indexmap", - "libc", - "log", - "object 0.30.4", - "once_cell", - "paste", - "psm", - "serde", - "target-lexicon", - "wasmparser 0.102.0", - "wasmtime-environ 8.0.1", - "wasmtime-jit 8.0.1", - "wasmtime-runtime 8.0.1", - "windows-sys 0.45.0", -] - [[package]] name = "wasmtime-asm-macros" version = "5.0.1" @@ -6215,15 +6496,6 @@ dependencies = [ "cfg-if", ] -[[package]] -name = "wasmtime-asm-macros" -version = "8.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d3b9daa7c14cd4fa3edbf69de994408d5f4b7b0959ac13fa69d465f6597f810d" -dependencies = [ - "cfg-if", -] - [[package]] name = "wasmtime-environ" version = "5.0.1" @@ -6231,7 +6503,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9350c919553cddf14f78f9452119c8004d7ef6bfebb79a41a21819ed0c5604d8" dependencies = [ "anyhow", - "cranelift-entity 0.92.1", + "cranelift-entity", "gimli 0.26.2", "indexmap", "log", @@ -6239,27 +6511,8 @@ dependencies = [ "serde", "target-lexicon", "thiserror", - "wasmparser 0.96.0", - "wasmtime-types 5.0.1", -] - -[[package]] -name = "wasmtime-environ" -version = "8.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a990198cee4197423045235bf89d3359e69bd2ea031005f4c2d901125955c949" -dependencies = [ - "anyhow", - "cranelift-entity 0.95.1", - "gimli 0.27.3", - "indexmap", - "log", - "object 0.30.4", - "serde", - "target-lexicon", - "thiserror", - "wasmparser 0.102.0", - "wasmtime-types 8.0.1", + "wasmparser", + "wasmtime-types", ] [[package]] @@ -6279,35 +6532,12 @@ dependencies = [ "rustc-demangle", "serde", "target-lexicon", - "wasmtime-environ 5.0.1", - "wasmtime-jit-icache-coherence 5.0.1", - "wasmtime-runtime 5.0.1", + "wasmtime-environ", + "wasmtime-jit-icache-coherence", + "wasmtime-runtime", "windows-sys 0.42.0", ] -[[package]] -name = "wasmtime-jit" -version = "8.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0de48df552cfca1c9b750002d3e07b45772dd033b0b206d5c0968496abf31244" -dependencies = [ - "addr2line 0.19.0", - "anyhow", - "bincode", - "cfg-if", - "cpp_demangle", - "gimli 0.27.3", - "log", - "object 0.30.4", - "rustc-demangle", - "serde", - "target-lexicon", - "wasmtime-environ 8.0.1", - "wasmtime-jit-icache-coherence 8.0.1", - "wasmtime-runtime 8.0.1", - "windows-sys 0.45.0", -] - [[package]] name = "wasmtime-jit-debug" version = "5.0.1" @@ -6317,15 +6547,6 @@ dependencies = [ "once_cell", ] -[[package]] -name = "wasmtime-jit-debug" -version = "8.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6e0554b84c15a27d76281d06838aed94e13a77d7bf604bbbaf548aa20eb93846" -dependencies = [ - "once_cell", -] - [[package]] name = "wasmtime-jit-icache-coherence" version = "5.0.1" @@ -6337,17 +6558,6 @@ 
dependencies = [ "windows-sys 0.42.0", ] -[[package]] -name = "wasmtime-jit-icache-coherence" -version = "8.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aecae978b13f7f67efb23bd827373ace4578f2137ec110bbf6a4a7cde4121bbd" -dependencies = [ - "cfg-if", - "libc", - "windows-sys 0.45.0", -] - [[package]] name = "wasmtime-runtime" version = "5.0.1" @@ -6366,58 +6576,22 @@ dependencies = [ "paste", "rand 0.8.5", "rustix 0.36.14", - "wasmtime-asm-macros 5.0.1", - "wasmtime-environ 5.0.1", - "wasmtime-jit-debug 5.0.1", + "wasmtime-asm-macros", + "wasmtime-environ", + "wasmtime-jit-debug", "windows-sys 0.42.0", ] -[[package]] -name = "wasmtime-runtime" -version = "8.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "658cf6f325232b6760e202e5255d823da5e348fdea827eff0a2a22319000b441" -dependencies = [ - "anyhow", - "cc", - "cfg-if", - "indexmap", - "libc", - "log", - "mach", - "memfd", - "memoffset 0.8.0", - "paste", - "rand 0.8.5", - "rustix 0.36.14", - "wasmtime-asm-macros 8.0.1", - "wasmtime-environ 8.0.1", - "wasmtime-jit-debug 8.0.1", - "windows-sys 0.45.0", -] - [[package]] name = "wasmtime-types" version = "5.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "86e1e4f66a2b9a114f9def450ab9971828c968db6ea6fccd613724b771fa4913" dependencies = [ - "cranelift-entity 0.92.1", - "serde", - "thiserror", - "wasmparser 0.96.0", -] - -[[package]] -name = "wasmtime-types" -version = "8.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a4f6fffd2a1011887d57f07654dd112791e872e3ff4a2e626aee8059ee17f06f" -dependencies = [ - "cranelift-entity 0.95.1", + "cranelift-entity", "serde", "thiserror", - "wasmparser 0.102.0", + "wasmparser", ] [[package]] @@ -6449,6 +6623,15 @@ dependencies = [ "webpki", ] +[[package]] +name = "webpki-roots" +version = "0.23.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b03058f88386e5ff5310d9111d53f48b17d732b401aeb83a8d5190f2ac459338" +dependencies = [ + "rustls-webpki 0.100.2", +] + [[package]] name = "which" version = "4.4.0" @@ -6476,6 +6659,15 @@ version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" +[[package]] +name = "winapi-util" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "70ec6ce85bb158151cae5e5c87f95a8e97d2c0c4b001223f33a334e3ce5de178" +dependencies = [ + "winapi", +] + [[package]] name = "winapi-x86_64-pc-windows-gnu" version = "0.4.0" @@ -6665,6 +6857,15 @@ dependencies = [ "tap", ] +[[package]] +name = "yaml-rust" +version = "0.4.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "56c1936c4cc7a1c9ab21a1ebb602eb942ba868cbd44a99cb7cdc5892335e1c85" +dependencies = [ + "linked-hash-map", +] + [[package]] name = "yap" version = "0.7.2" @@ -6707,10 +6908,18 @@ dependencies = [ "hmac 0.12.1", "pbkdf2 0.11.0", "sha1", - "time", + "time 0.3.21", "zstd", ] +[[package]] +name = "zk-cycle-utils" +version = "0.1.0" +dependencies = [ + "risc0-zkvm", + "risc0-zkvm-platform", +] + [[package]] name = "zstd" version = "0.11.2+zstd.1.5.2" diff --git a/examples/demo-prover-avail/Cargo.toml b/examples/demo-prover-avail/Cargo.toml index bd4544d1c..a9998ca01 100644 --- a/examples/demo-prover-avail/Cargo.toml +++ b/examples/demo-prover-avail/Cargo.toml @@ -8,6 +8,7 @@ resolver = "2" [workspace.dependencies] anyhow = "1.0.68" borsh = { version 
= "0.10.3", features = ["rc", "bytes"] } +bincode = "1.3.3" hex = "0.4.3" jsonrpsee = "0.16.2" serde = { version = "1.0.137", features = ["derive", "rc"] } @@ -16,6 +17,7 @@ sha2 = "0.10.6" risc0-zkvm = { version = "0.16" } risc0-build = { version = "0.16" } tokio = { version = "1", features = ["full"] } +tempfile = "3.6.0" # Always optimize; building and running the guest takes much longer without optimization. [profile.dev] diff --git a/examples/demo-prover-avail/host/Cargo.toml b/examples/demo-prover-avail/host/Cargo.toml index 4ba2b270d..0bffe56af 100644 --- a/examples/demo-prover-avail/host/Cargo.toml +++ b/examples/demo-prover-avail/host/Cargo.toml @@ -14,16 +14,28 @@ risc0-zkvm = { workspace = true } serde = { workspace = true, features = ["derive"] } serde_json = { workspace = true } sha2 = { workspace = true } -tokio = { version = "1", features = ["full"] } +tokio = { workspace = true } tracing = "0.1.37" tracing-subscriber = "0.3.16" presence = { path = "../../../adapters/avail" } -demo-stf = { path = "../../demo-stf" } +demo-stf = { path = "../../demo-stf", features = ["native"] } sov-rollup-interface = { path = "../../../rollup-interface" } -risc0-adapter = { path = "../../../adapters/risc0" } +risc0-adapter = { path = "../../../adapters/risc0", features = ["native"] } +const-rollup-config = { path = "../../const-rollup-config" } sov-modules-api = { path = "../../../module-system/sov-modules-api", features = ["native"] } sov-state = { path = "../../../module-system/sov-state", features = ["native"] } sov-stf-runner = { path = "../../../full-node/sov-stf-runner" } methods = { path = "../methods" } + +[dev-dependencies] +sov-demo-rollup = { path = "../../demo-rollup" } +tempfile = { workspace = true } +once_cell = "1.7.2" +parking_lot = "0.11.1" +prettytable-rs = "^0.10" +env_logger = "0.10.0" +log = "0.4" +log4rs = "1.0" +regex = "1.5" diff --git a/examples/demo-prover-avail/host/Dockerfile b/examples/demo-prover-avail/host/Dockerfile index 60f72b766..ce046f61f 100644 --- a/examples/demo-prover-avail/host/Dockerfile +++ b/examples/demo-prover-avail/host/Dockerfile @@ -1,31 +1,36 @@ FROM ubuntu:jammy -RUN apt-get update && \ - apt-get install -y curl libssl3 ca-certificates && \ - rm -rf /var/lib/apt/lists/* +RUN apt-get update --fix-missing || \ + (sleep 5 && apt-get update --fix-missing) || \ + (sleep 10 && apt-get update --fix-missing) && \ + apt-get install -y curl ca-certificates || \ + (sleep 5 && apt-get install -y curl ca-certificates) || \ + (sleep 10 && apt-get install -y curl ca-certificates) && \ + rm -rf /var/lib/apt/lists/* RUN mkdir -p /da/ && \ groupadd -r avail && \ useradd --no-log-init -r -g avail avail && \ chown -R avail:avail /da +COPY entrypoint.sh /da/entrypoint.sh +RUN chmod +x /da/entrypoint.sh + USER avail:avail WORKDIR /da ARG NODE_CLIENT_URL ARG APP_ID -RUN curl -L https://availproject.github.io/configs/kate/avail-light-1.4.3/config.yaml --output config.yaml && \ - curl -L https://github.com/availproject/avail-light/releases/download/v1.4.4/avail-light-linux-amd64.tar.gz --output avail-light-linux-amd64.tar.gz && \ +RUN curl -L https://raw.githubusercontent.com/availproject/availproject.github.io/c804aa520b66838209bb1bafbf7ffefdb249a2ac/static/kate/avail-light-1.4.3/config.yaml --output config.yaml && \ + curl -L https://github.com/availproject/avail-light/releases/download/v1.6.0-rc1/avail-light-linux-amd64.tar.gz --output avail-light-linux-amd64.tar.gz && \ tar -xf avail-light-linux-amd64.tar.gz && \ - echo "app_id = ${APP_ID}" >> config.yaml && \ - 
sed -i "s#full_node_ws = .*#full_node_ws = ['$NODE_CLIENT_URL']#" config.yaml && \ - sed -i "s#http_server_host = .*#http_server_host = '0.0.0.0'#" config.yaml + echo "app_id = ${APP_ID}" >> config.yaml && \ + sed -i "s#full_node_ws = .*#full_node_ws = ['$NODE_CLIENT_URL']#" config.yaml && \ + sed -i "s#http_server_host = .*#http_server_host = '0.0.0.0'#" config.yaml -ENV \ - APP_ID=0 +ENV APP_ID=0 -# Opencontainers annotations LABEL \ org.opencontainers.image.authors="The Avail Project Team" \ org.opencontainers.image.url="https://www.availproject.org/" \ @@ -37,4 +42,4 @@ LABEL \ org.opencontainers.image.title="Avail Light Client" \ org.opencontainers.image.description="Data Availability Docker Node" -CMD ls; cat config.yaml; ./avail-light-linux-amd64 +CMD ["/bin/sh", "entrypoint.sh"] diff --git a/examples/demo-prover-avail/host/Makefile b/examples/demo-prover-avail/host/Makefile index 096219ff1..68b6690ea 100644 --- a/examples/demo-prover-avail/host/Makefile +++ b/examples/demo-prover-avail/host/Makefile @@ -19,20 +19,13 @@ SEED_PHRASE := $(shell cat seed-phrase.json | grep -o '"secretPhrase": *"[^"]*"' endif key-exists: - @test -s seed-phrase.json || make create-new-key && make update-da-address - @make update-da-address + @test -s seed-phrase.json || echo "Error: Call make create-new-key and replace const SEQUENCER_AVAIL_DA_ADDRESS in const-rollup-config with publicKey from the created seed-phrase.json" + create-new-key: check-container-running @echo "Creating new key..." @docker run -it --pull=always docker.io/parity/subkey:latest generate --output-type json > seed-phrase.json -update-da-address: -ifeq ($(shell uname -s),Darwin) - @sed -i '' 's/^\(sequencer_da_address = \)"[^"]*"/\1"$(shell cat seed-phrase.json | grep -o '"publicKey": *"[^"]*"' | cut -d '"' -f 4 | cut -c3-)"/' rollup_config.toml -else - @sed -i 's/^\(sequencer_da_address = \)"[^"]*"/\1"$(shell cat seed-phrase.json | grep -o '"publicKey": *"[^"]*"' | cut -d '"' -f 4 | cut -c3-)"/' rollup_config.toml -endif - create-new-app-key: @cd ../../avail-helper/ && cargo run --bin create_app_id -- --ws_uri wss://kate.avail.tools:443/ws --seed "$(SEED_PHRASE)" @@ -89,7 +82,7 @@ else endif build-sov-cli: - cd ../../demo-stf && cargo build --bin sov-cli + cd ../../demo-rollup && cargo build --bin sov-cli test-serialize-create-token: check-container-running build-sov-cli $(SOV_CLI_REL_PATH) serialize-call ../../demo-stf/src/sov-cli/test_data/token_deployer_private_key.json Bank ../../demo-stf/src/sov-cli/test_data/create_token.json 0 diff --git a/examples/demo-prover-avail/host/entrypoint.sh b/examples/demo-prover-avail/host/entrypoint.sh new file mode 100644 index 000000000..4f10df0ce --- /dev/null +++ b/examples/demo-prover-avail/host/entrypoint.sh @@ -0,0 +1,4 @@ +#!/bin/sh +ls +cat config.yaml +exec ./avail-light-linux-amd64 diff --git a/examples/demo-prover-avail/host/rollup_config.toml b/examples/demo-prover-avail/host/rollup_config.toml index 7b9a0ac29..cb90ec930 100644 --- a/examples/demo-prover-avail/host/rollup_config.toml +++ b/examples/demo-prover-avail/host/rollup_config.toml @@ -1,19 +1,18 @@ -# We define the rollup's genesis to occur at Avail block number `start_height`. 
The rollup will ignore -# any Avail blocks before this height -sequencer_da_address = "b4dc7fc57630d2a7be7f358cbefc1e52bd6d0f250d19647cf264ecf2d8764d7b" - -[rollup_config] -start_height = 2 - [da] light_client_url = "http://127.0.0.1:8000" node_client_url = "wss://kate.avail.tools:443/ws" +seed = "secret_seed" -[rollup_config.runner.storage] +[storage] # The path to the rollup's data directory. Paths that do not begin with `/` are interpreted as relative paths. path = "demo_data" -[rollup_config.rpc_config] +# We define the rollup's genesis to occur at block number `start_height`. The rollup will ignore +# any blocks before this height +[runner] +start_height = 1 + +[runner.rpc_config] # the host and port to bind the rpc server for bind_host = "127.0.0.1" bind_port = 12345 diff --git a/examples/demo-prover-avail/host/src/config.rs b/examples/demo-prover-avail/host/src/config.rs deleted file mode 100644 index 03a26f20e..000000000 --- a/examples/demo-prover-avail/host/src/config.rs +++ /dev/null @@ -1,16 +0,0 @@ -use sov_stf_runner::RollupConfig; -use serde::Deserialize; - -//TODO - replace with runtime config. -#[derive(Debug, Clone, PartialEq, Deserialize)] -pub struct DaServiceConfig { - pub light_client_url: String, - pub node_client_url: String, -} - -#[derive(Debug, Clone, PartialEq, Deserialize)] -pub struct Config { - pub rollup_config: RollupConfig, - pub sequencer_da_address: String, - pub da: DaServiceConfig, -} diff --git a/examples/demo-prover-avail/host/src/main.rs b/examples/demo-prover-avail/host/src/main.rs index d7d92b001..34f1c4fc0 100644 --- a/examples/demo-prover-avail/host/src/main.rs +++ b/examples/demo-prover-avail/host/src/main.rs @@ -1,39 +1,46 @@ -mod config; - use std::env; +use std::str::FromStr; use anyhow::Context; -use demo_stf::app::{App, DefaultPrivateKey, DefaultContext}; +use const_rollup_config::SEQUENCER_AVAIL_DA_ADDRESS; +use demo_stf::app::{App, DefaultContext, DefaultPrivateKey}; use demo_stf::genesis_config::create_demo_genesis_config; use demo_stf::runtime::GenesisConfig; use methods::{ROLLUP_ELF, ROLLUP_ID}; +use presence::service::{DaProvider, DaServiceConfig}; +use presence::spec::transaction::AvailBlobTransaction; +use presence::spec::address::AvailAddress; +use presence::spec::DaLayerSpec; use risc0_adapter::host::{Risc0Host, Risc0Verifier}; - use sov_modules_api::PrivateKey; -use sov_rollup_interface::services::da::DaService; +use sov_rollup_interface::services::da::{DaService, SlotData}; use sov_rollup_interface::stf::StateTransitionFunction; -use sov_rollup_interface::services::da::SlotData; use sov_rollup_interface::zk::ZkvmHost; use sov_state::Storage; -use sov_stf_runner::{from_toml_path}; +use sov_stf_runner::{from_toml_path, RollupConfig}; use tracing::{info, Level}; -use presence::service::DaProvider as AvailDaProvider; -use presence::spec::transaction::AvailBlobTransaction; -use crate::config::Config; -pub fn get_genesis_config(sequencer_da_address: &str) -> GenesisConfig { +pub fn get_genesis_config( + sequencer_da_address: &AvailAddress, +) -> GenesisConfig { let sequencer_private_key = DefaultPrivateKey::generate(); + create_demo_genesis_config( 100000000, sequencer_private_key.default_address(), - hex::decode(sequencer_da_address).unwrap(), - &sequencer_private_key, + sequencer_da_address.as_ref().to_vec(), &sequencer_private_key, ) } #[tokio::main] async fn main() -> Result<(), anyhow::Error> { + // If SKIP_PROVER is set, this means that we still compile and generate the riscV ELF + // We execute the code inside the riscV but we 
don't prove it. This saves a significant amount of time. + // The primary benefit of doing this is to make sure we produce valid code that can run inside the + // riscV virtual machine. Since proving is something we offload entirely to risc0, ensuring that + // we produce valid riscV code and that it can execute is very useful. + let skip_prover = env::var("SKIP_PROVER").is_ok(); // Initializing logging let subscriber = tracing_subscriber::fmt() .with_max_level(Level::INFO) @@ -45,42 +52,28 @@ async fn main() -> Result<(), anyhow::Error> { let rollup_config_path = env::args() .nth(1) .unwrap_or_else(|| "rollup_config.toml".to_string()); - let config: Config = - from_toml_path(&rollup_config_path).context("Failed to read rollup configuration")?; - - let node_client = presence::build_client(config.da.node_client_url.to_string(), false) - .await - .unwrap(); - let light_client_url = config.da.light_client_url.to_string(); - // Initialize the Avail service using the DaService interface - let da_service = AvailDaProvider { - node_client, - light_client_url, - }; - - let app: App = - App::new(config.rollup_config.runner.storage.clone()); + let rollup_config: RollupConfig = + from_toml_path(rollup_config_path).context("Failed to read rollup configuration")?; - let is_storage_empty = app.get_storage().is_empty(); - let mut demo = app.stf; - - let mut prev_state_root = { - // Check if the rollup has previously been initialized - if is_storage_empty { - info!("No history detected. Initializing chain..."); - demo.init_chain(get_genesis_config(&config.sequencer_da_address)); - info!("Chain initialization is done."); - } else { - info!("Chain is already initialized. Skipping initialization"); - } + let da_service = DaProvider::new(rollup_config.da.clone()).await; - let res = demo.apply_slot(Default::default(), []); - res.state_root.0 - }; + let mut app: App = App::new(rollup_config.storage); - //TODO: Start from slot processed before shut down. + let is_storage_empty = app.get_storage().is_empty(); + + let sequencer_da_address = AvailAddress::from_str(SEQUENCER_AVAIL_DA_ADDRESS)?; + if is_storage_empty { + info!("Starting from empty storage, initializing the chain"); + app.stf + .init_chain(get_genesis_config(&sequencer_da_address)); + } - for height in config.rollup_config.start_height..=config.rollup_config.start_height + 30 { + let mut prev_state_root = app + .get_storage() + .get_state_root(&Default::default()) + .expect("The storage needs to have a state root"); + + for height in rollup_config.runner.start_height..
{ let mut host = Risc0Host::new(ROLLUP_ELF); host.write_to_guest(prev_state_root); @@ -92,8 +85,9 @@ async fn main() -> Result<(), anyhow::Error> { let filtered_block = da_service.get_finalized_at(height).await?; let header_hash = hex::encode(filtered_block.hash()); host.write_to_guest(&filtered_block.header); - let (mut blob_txs, inclusion_proof, completeness_proof) = - da_service.extract_relevant_txs_with_proof(&filtered_block).await; + let (mut blob_txs, inclusion_proof, completeness_proof) = da_service + .extract_relevant_txs_with_proof(&filtered_block) + .await; info!( "Extracted {} relevant blobs at height {} header 0x{}", @@ -106,14 +100,23 @@ async fn main() -> Result<(), anyhow::Error> { host.write_to_guest(&completeness_proof); host.write_to_guest(&blob_txs); - let result = demo.apply_slot(Default::default(), &mut blob_txs); + let result = app.stf.apply_slot(Default::default(), &filtered_block, &mut blob_txs); host.write_to_guest(&result.witness); - info!("Starting proving..."); - let receipt = host.run().expect("Prover should run successfully"); - info!("Start verifying.."); - receipt.verify(ROLLUP_ID).expect("Receipt should be valid"); + // Run the actual prover to generate a receipt that can then be verified + if !skip_prover { + info!("Starting proving..."); + let receipt = host.run().expect("Prover should run successfully"); + info!("Start verifying.."); + receipt.verify(ROLLUP_ID).expect("Receipt should be valid"); + } else { + // This runs the riscV code inside the VM without actually generating the proofs + // This is useful for testing if rollup code actually executes properly + let _receipt = host + .run_without_proving() + .expect("Prover should run successfully"); + } prev_state_root = result.state_root.0; info!("Completed proving and verifying block {height}"); diff --git a/examples/demo-prover-avail/methods/guest/Cargo.lock b/examples/demo-prover-avail/methods/guest/Cargo.lock index 3050feea7..c8faab593 100644 --- a/examples/demo-prover-avail/methods/guest/Cargo.lock +++ b/examples/demo-prover-avail/methods/guest/Cargo.lock @@ -37,6 +37,12 @@ version = "1.0.71" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9c7d0618f0e0b7e8ff11427422b64564d5fb0be1940354bfe2e0529b18a9d9b8" +[[package]] +name = "arrayref" +version = "0.3.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6b4930d2cb77ce62f89ee5d5289b4ac049559b1c45539271f5ed4fdc7db34545" + [[package]] name = "arrayvec" version = "0.7.2" @@ -78,6 +84,16 @@ version = "1.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8c3c1a368f70d6cf7302d78f8f7093da241fb8e8807c05cc9e51a125895a6d5b" +[[package]] +name = "bcs" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4bd3ffe8b19a604421a5d461d4a70346223e535903fbc3067138bddbebddcf77" +dependencies = [ + "serde", + "thiserror", +] + [[package]] name = "bech32" version = "0.9.1" @@ -117,16 +133,18 @@ version = "0.10.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "46502ad458c9a52b69d4d4d32775c788b7a1b85e8bc9d482d92250fc0e3f8efe" dependencies = [ - "digest 0.10.6", + "digest", ] [[package]] -name = "block-buffer" -version = "0.9.0" +name = "blake2b_simd" +version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4152116fd6e9dadb291ae18fc1ec3575ed6d84c29642d97890f4b4a3417297e4" +checksum = "3c2f0dc9a68c6317d884f97cc36cf5a3d20ba14ce404227df55e1af708ab04bc" dependencies = [ - 
"generic-array", + "arrayref", + "arrayvec", + "constant_time_eq 0.2.6", ] [[package]] @@ -287,6 +305,12 @@ version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "245097e9a4535ee1e3e3931fcfcd55a796a44c643e8596ff6566d68f09b87bbc" +[[package]] +name = "constant_time_eq" +version = "0.2.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "21a53c0a4d288377e7415b53dcfc3c04da5cdc2cc95c8d5ac178b58f0b861ad6" + [[package]] name = "convert_case" version = "0.4.0" @@ -354,15 +378,29 @@ dependencies = [ [[package]] name = "curve25519-dalek" -version = "3.2.0" +version = "4.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0b9fdf9972b2bd6af2d913799d9ebc165ea4d2e65878e329d9c6b372c4491b61" +checksum = "f711ade317dd348950a9910f81c5947e3d8907ebd2b83f76203ff1807e6a2bc2" dependencies = [ - "byteorder", - "digest 0.9.0", - "rand_core 0.5.1", + "cfg-if", + "cpufeatures", + "curve25519-dalek-derive", + "digest", + "fiat-crypto", + "platforms", + "rustc_version", "subtle", - "zeroize", +] + +[[package]] +name = "curve25519-dalek-derive" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "83fdaf97f4804dcebfa5862639bc9ce4121e82140bec2a987ac5140294865b5b" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.28", ] [[package]] @@ -376,7 +414,8 @@ dependencies = [ "serde", "sov-accounts", "sov-bank", - "sov-election", + "sov-blob-storage", + "sov-chain-state", "sov-modules-api", "sov-modules-stf-template", "sov-rollup-interface", @@ -399,22 +438,13 @@ dependencies = [ "syn 1.0.109", ] -[[package]] -name = "digest" -version = "0.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d3dd60d1080a57a05ab032377049e0591415d2b31afd7028356dbf3cc6dcb066" -dependencies = [ - "generic-array", -] - [[package]] name = "digest" version = "0.10.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8168378f4e5023e7218c89c891c0fd8ecdb5e5e4f18cb78f38cf245dd021e76f" dependencies = [ - "block-buffer 0.10.4", + "block-buffer", "crypto-common", "subtle", ] @@ -447,7 +477,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d05213e96f184578b5f70105d4d0a644a168e99e12d7bea0b200c15d67b5c182" dependencies = [ "futures", - "rand 0.8.5", + "rand", "reqwest", "thiserror", "tokio", @@ -480,24 +510,22 @@ dependencies = [ [[package]] name = "ed25519" -version = "1.5.3" +version = "2.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "91cff35c70bba8a626e3185d8cd48cc11b5437e1a5bcd15b9b5fa3c64b6dfee7" +checksum = "60f6d271ca33075c88028be6f04d502853d63a5ece419d269c15315d4fc1cf1d" dependencies = [ "signature", ] [[package]] name = "ed25519-dalek" -version = "1.0.1" +version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c762bae6dcaf24c4c84667b8579785430908723d5c889f469d76a41d59cc7a9d" +checksum = "7277392b266383ef8396db7fdeb1e77b6c52fed775f5df15bb24f35b72156980" dependencies = [ "curve25519-dalek", "ed25519", - "rand 0.7.3", - "sha2 0.9.9", - "zeroize", + "sha2", ] [[package]] @@ -560,6 +588,12 @@ dependencies = [ "instant", ] +[[package]] +name = "fiat-crypto" +version = "0.1.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e825f6987101665dea6ec934c09ec6d721de7bc1bf92248e1d5810c8cd636b77" + [[package]] name = "fixed-hash" version = "0.8.0" @@ -567,7 +601,7 @@ source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "835c052cb0c08c1acf6ffd71c022172e18723949c8282f2b9f27efbc51e64534" dependencies = [ "byteorder", - "rand 0.8.5", + "rand", "rustc-hex", "static_assertions", ] @@ -798,7 +832,7 @@ version = "0.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6c49c37c09c17a53d937dfbb742eb3a961d65a994e6bcdcf37e7399d0cc8ab5e" dependencies = [ - "digest 0.10.6", + "digest", ] [[package]] @@ -885,7 +919,7 @@ dependencies = [ "prost", "ripemd", "serde", - "sha2 0.10.6", + "sha2", "sha3", ] @@ -997,12 +1031,13 @@ checksum = "453ad9f582a441959e5f0d088b02ce04cfe8d51a8eaf077f12ac6d3e94164ca6" [[package]] name = "jmt" -version = "0.6.0" +version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e1a302f0defd323b833c9848c20ab40c3156128f50d7bf8eebeed2ef58167258" +checksum = "9e49c5d2c13e15f77f22cee3df3dc822b46051b217112035d72687cb57a9cbde" dependencies = [ "anyhow", "borsh", + "digest", "hashbrown 0.13.2", "hex", "ics23", @@ -1011,7 +1046,7 @@ dependencies = [ "num-derive 0.3.3", "num-traits", "serde", - "sha2 0.10.6", + "sha2", "thiserror", "tracing", ] @@ -1285,7 +1320,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7676374caaee8a325c9e7a2ae557f216c5563a171d6997b0ef8a65af35147700" dependencies = [ "base64ct", - "rand_core 0.6.4", + "rand_core", "subtle", ] @@ -1311,10 +1346,10 @@ version = "0.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "83a0692ec44e4cf1ef28ca317f14f8f07da2d95ec3fa01f86e4467b725e60917" dependencies = [ - "digest 0.10.6", + "digest", "hmac", "password-hash", - "sha2 0.10.6", + "sha2", ] [[package]] @@ -1341,6 +1376,12 @@ version = "0.3.26" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6ac9a59f73473f1b8d852421e59e64809f025994837ef743615c6d0c5b305160" +[[package]] +name = "platforms" +version = "3.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4503fa043bf02cee09a9582e9554b4c6403b2ef55e4612e96561d294419429f8" + [[package]] name = "ppv-lite86" version = "0.2.17" @@ -1353,11 +1394,13 @@ version = "0.1.0" dependencies = [ "anyhow", "async-trait", + "borsh", "bytes", "primitive-types", "serde", "serde_json", "sov-rollup-interface", + "sp-core-hashing", "thiserror", "tracing", "tracing-subscriber", @@ -1441,17 +1484,6 @@ version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "dc33ff2d4973d518d823d61aa239014831e521c75da58e3df4840d3f47749d09" -[[package]] -name = "rand" -version = "0.7.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6a6b1679d49b24bbfe0c803429aa1874472f50d9b363131f0e89fc356b544d03" -dependencies = [ - "rand_chacha 0.2.2", - "rand_core 0.5.1", - "rand_hc", -] - [[package]] name = "rand" version = "0.8.5" @@ -1459,18 +1491,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" dependencies = [ "libc", - "rand_chacha 0.3.1", - "rand_core 0.6.4", -] - -[[package]] -name = "rand_chacha" -version = "0.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f4c8ed856279c9737206bf725bf36935d8666ead7aa69b52be55af369d193402" -dependencies = [ - "ppv-lite86", - "rand_core 0.5.1", + "rand_chacha", + "rand_core", ] [[package]] @@ -1480,15 +1502,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" dependencies = [ "ppv-lite86", - "rand_core 0.6.4", + "rand_core", ] -[[package]] -name = "rand_core" -version = "0.5.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "90bde5296fc891b0cef12a6d03ddccc162ce7b2aff54160af9338f8d40df6d19" - [[package]] name = "rand_core" version = "0.6.4" @@ -1498,15 +1514,6 @@ dependencies = [ "getrandom", ] -[[package]] -name = "rand_hc" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ca3129af7b92a17112d59ad498c6f81eaf463253766b90396d39ea7a39d6613c" -dependencies = [ - "rand_core 0.5.1", -] - [[package]] name = "redox_syscall" version = "0.2.16" @@ -1579,7 +1586,7 @@ version = "0.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bd124222d17ad93a644ed9d011a40f4fb64aa54275c08cc216524a9ea82fb09f" dependencies = [ - "digest 0.10.6", + "digest", ] [[package]] @@ -1588,9 +1595,12 @@ version = "0.1.0" dependencies = [ "anyhow", "bincode", + "bytemuck", "risc0-zkvm", + "risc0-zkvm-platform", "serde", "sov-rollup-interface", + "zk-cycle-utils", ] [[package]] @@ -1614,7 +1624,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2ea0e9d6d5845f11157728c494541c42559357fee35afce767b3d3610ef7494b" dependencies = [ "bytemuck", - "rand_core 0.6.4", + "rand_core", ] [[package]] @@ -1626,15 +1636,15 @@ dependencies = [ "anyhow", "blake2", "bytemuck", - "digest 0.10.6", + "digest", "hex", "log", "paste", - "rand_core 0.6.4", + "rand_core", "risc0-core", "risc0-zkvm-platform", "serde", - "sha2 0.10.6", + "sha2", "thiserror", "tracing", ] @@ -1770,18 +1780,18 @@ checksum = "bebd363326d05ec3e2f532ab7660680f3b02130d780c299bca73469d521bc0ed" [[package]] name = "serde" -version = "1.0.164" +version = "1.0.188" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9e8c8cf938e98f769bc164923b06dce91cea1751522f46f8466461af04c9027d" +checksum = "cf9e0fcba69a370eed61bcf2b728575f726b50b55cba78064753d708ddc7549e" dependencies = [ "serde_derive", ] [[package]] name = "serde_derive" -version = "1.0.164" +version = "1.0.188" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d9735b638ccc51c28bf6914d90a2e9725b377144fc612c49a611fddd1b631d68" +checksum = "4eca7ac642d82aa35b60049a6eccb4be6be75e599bd2e9adb5f875a737654af2" dependencies = [ "proc-macro2", "quote", @@ -1830,20 +1840,7 @@ checksum = "f04293dc80c3993519f2d7f6f511707ee7094fe0c6d3406feb330cdb3540eba3" dependencies = [ "cfg-if", "cpufeatures", - "digest 0.10.6", -] - -[[package]] -name = "sha2" -version = "0.9.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4d58a1e1bf39749807d89cf2d98ac2dfa0ff1cb3faa38fbb64dd88ac8013d800" -dependencies = [ - "block-buffer 0.9.0", - "cfg-if", - "cpufeatures", - "digest 0.9.0", - "opaque-debug", + "digest", ] [[package]] @@ -1854,7 +1851,7 @@ checksum = "82e6b795fe2e3b1e845bafcb27aa35405c4d47cdfc92af5fc8d3002f76cebdc0" dependencies = [ "cfg-if", "cpufeatures", - "digest 0.10.6", + "digest", ] [[package]] @@ -1863,7 +1860,7 @@ version = "0.10.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "75872d278a8f37ef87fa0ddbda7802605cb18344497949862c0d4dcb291eba60" dependencies = [ - "digest 0.10.6", + "digest", "keccak", ] @@ -1878,9 +1875,9 @@ dependencies = [ [[package]] name = "signature" -version = "1.6.4" +version = "2.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "74233d3b3b2f6d4b006dc19dee745e73e2a6bfb6f93607cd3b02bd5b00797d7c" +checksum = "5e1788eed21689f9cf370582dfc467ef36ed9c707f073528ddafa8d83e3b8500" [[package]] name = "slab" @@ -1931,12 +1928,41 @@ dependencies = [ "thiserror", ] +[[package]] +name = "sov-blob-storage" +version = "0.1.0" +dependencies = [ + "anyhow", + "bincode", + "borsh", + "hex", + "sov-modules-api", + "sov-modules-macros", + "sov-rollup-interface", + "sov-sequencer-registry", + "sov-state", + "tracing", +] + +[[package]] +name = "sov-chain-state" +version = "0.1.0" +dependencies = [ + "anyhow", + "borsh", + "sov-modules-api", + "sov-modules-macros", + "sov-rollup-interface", + "sov-state", +] + [[package]] name = "sov-demo-prover-guest" version = "0.1.0" dependencies = [ "anyhow", "borsh", + "const-rollup-config", "demo-stf", "directories", "downloader", @@ -1946,25 +1972,13 @@ dependencies = [ "risc0-zkvm", "serde", "serde_json", - "sha2 0.10.6", + "sha2", "sov-rollup-interface", "tempfile", "tracing", "zip", ] -[[package]] -name = "sov-election" -version = "0.1.0" -dependencies = [ - "anyhow", - "borsh", - "hex", - "sov-modules-api", - "sov-rollup-interface", - "sov-state", -] - [[package]] name = "sov-first-read-last-write-cache" version = "0.1.0" @@ -1982,8 +1996,7 @@ dependencies = [ "derive_more", "ed25519-dalek", "serde", - "serde_json", - "sha2 0.10.6", + "sha2", "sov-modules-macros", "sov-rollup-interface", "sov-state", @@ -2014,6 +2027,7 @@ dependencies = [ "sov-modules-api", "sov-rollup-interface", "sov-state", + "thiserror", "tracing", ] @@ -2023,9 +2037,10 @@ version = "0.1.0" dependencies = [ "anyhow", "async-trait", + "bincode", "borsh", "bytes", - "digest 0.10.6", + "digest", "hex", "serde", ] @@ -2047,11 +2062,12 @@ name = "sov-state" version = "0.1.0" dependencies = [ "anyhow", + "bcs", "borsh", "hex", "jmt", "serde", - "sha2 0.10.6", + "sha2", "sov-first-read-last-write-cache", "sov-rollup-interface", "thiserror", @@ -2069,6 +2085,20 @@ dependencies = [ "thiserror", ] +[[package]] +name = "sp-core-hashing" +version = "10.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e360755a2706a76886d58776665cad0db793dece3c7d390455b28e8a1efd6285" +dependencies = [ + "blake2b_simd", + "byteorder", + "digest", + "sha2", + "sha3", + "twox-hash", +] + [[package]] name = "static_assertions" version = "1.1.0" @@ -2319,6 +2349,18 @@ version = "0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3528ecfd12c466c6f163363caf2d02a71161dd5e1cc6ae7b34207ea2d42d81ed" +[[package]] +name = "twox-hash" +version = "1.6.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "97fee6b57c6a41524a810daee9286c02d7752c4253064d0b05472833a438f675" +dependencies = [ + "cfg-if", + "digest", + "rand", + "static_assertions", +] + [[package]] name = "typenum" version = "1.16.0" @@ -2699,26 +2741,6 @@ dependencies = [ "tap", ] -[[package]] -name = "zeroize" -version = "1.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2a0956f1ba7c7909bfb66c2e9e4124ab6f6482560f6628b5aaeba39207c9aad9" -dependencies = [ - "zeroize_derive", -] - -[[package]] -name = "zeroize_derive" -version = "1.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ce36e65b0d2999d2aafac989fb249189a141aee1f53c612c1f37d72631959f69" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.28", -] - [[package]] name = "zip" version = "0.6.4" @@ -2728,7 +2750,7 @@ dependencies = [ "aes", "byteorder", "bzip2", - 
"constant_time_eq", + "constant_time_eq 0.1.5", "crc32fast", "crossbeam-utils", "flate2", @@ -2739,6 +2761,14 @@ dependencies = [ "zstd", ] +[[package]] +name = "zk-cycle-utils" +version = "0.1.0" +dependencies = [ + "risc0-zkvm", + "risc0-zkvm-platform", +] + [[package]] name = "zstd" version = "0.11.2+zstd.1.5.2" diff --git a/examples/demo-prover-avail/methods/guest/Cargo.toml b/examples/demo-prover-avail/methods/guest/Cargo.toml index a030ecb93..300f90b7e 100644 --- a/examples/demo-prover-avail/methods/guest/Cargo.toml +++ b/examples/demo-prover-avail/methods/guest/Cargo.toml @@ -10,7 +10,8 @@ anyhow = "1.0.68" presence = { path = "../../../../adapters/avail", default-features = false, features = ["verifier"] } risc0-zkvm = { version = "0.16", default-features = false, features = ["std"] } borsh = { version = "0.10.3", features = ["bytes"] } -demo-stf = { path = "../../../demo-stf", default-features = false } +demo-stf = { path = "../../../demo-stf" } +const-rollup-config = { path = "../../../const-rollup-config" } sov-rollup-interface = { path = "../../../../rollup-interface" } risc0-adapter = { path = "../../../../adapters/risc0", default-features = false } diff --git a/examples/demo-prover-avail/methods/guest/src/bin/rollup.rs b/examples/demo-prover-avail/methods/guest/src/bin/rollup.rs index 97393152c..3980b7ce5 100644 --- a/examples/demo-prover-avail/methods/guest/src/bin/rollup.rs +++ b/examples/demo-prover-avail/methods/guest/src/bin/rollup.rs @@ -1,6 +1,9 @@ // TODO: Rename this file to change the name of this method from METHOD_NAME #![no_main] + +use std::str::FromStr; + use demo_stf::app::create_zk_app_template; use demo_stf::ArrayWitness; use risc0_adapter::guest::Risc0Guest; @@ -9,8 +12,12 @@ use sov_rollup_interface::crypto::NoOpHasher; use sov_rollup_interface::da::{DaSpec, DaVerifier}; use sov_rollup_interface::stf::StateTransitionFunction; use sov_rollup_interface::zk::{StateTransition, ZkvmGuest}; +use sov_rollup_interface::da::BlockHeaderTrait; +use const_rollup_config::{SEQUENCER_AVAIL_DA_ADDRESS}; use presence::spec::{DaLayerSpec}; use presence::spec::header::AvailHeader; +use presence::spec::address::AvailAddress; +use presence::spec::block::AvailBlock; use presence::verifier::Verifier; use presence::spec::transaction::AvailBlobTransaction; @@ -40,14 +47,19 @@ pub fn main() { let mut blobs: Vec = guest.read_from_host(); env::write(&"blobs have been read\n"); - // Step 2: Verify tx list - let mut app = create_zk_app_template::(prev_state_root_hash); + let block: AvailBlock = AvailBlock { + header: header.clone(), + transactions: blobs.clone() + }; + + // Step 2: Apply blobs + let mut app = create_zk_app_template::(prev_state_root_hash); let witness: ArrayWitness = guest.read_from_host(); env::write(&"Witness have been read\n"); env::write(&"Applying slot...\n"); - let result = app.apply_slot(witness, &mut blobs); + let result = app.apply_slot(witness, &block, &mut blobs); env::write(&"Slot has been applied\n"); @@ -58,10 +70,13 @@ pub fn main() { .expect("Transaction list must be correct"); env::write(&"Relevant txs verified\n"); + let rewarded_address = AvailAddress::from_str(SEQUENCER_AVAIL_DA_ADDRESS).unwrap(); let output = StateTransition { initial_state_root: prev_state_root_hash, final_state_root: result.state_root.0, validity_condition, + rewarded_address: rewarded_address.as_ref().to_vec(), + slot_hash: header.hash().inner().clone(), }; env::commit(&output); env::write(&"new state root committed\n"); diff --git a/examples/demo-prover/Cargo.lock 
b/examples/demo-prover/Cargo.lock index 1e3db0d2d..4e6df4c88 100644 --- a/examples/demo-prover/Cargo.lock +++ b/examples/demo-prover/Cargo.lock @@ -2,24 +2,52 @@ # It is not intended for manual editing. version = 3 +[[package]] +name = "Inflector" +version = "0.11.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fe438c63458706e03479442743baae6c88256498e6431708f6dfc520a26515d3" +dependencies = [ + "lazy_static", + "regex", +] + [[package]] name = "addchain" version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3b2e69442aa5628ea6951fa33e24efe8313f4321a91bd729fc2f75bdfc858570" dependencies = [ - "num-bigint", + "num-bigint 0.3.3", "num-integer", "num-traits", ] +[[package]] +name = "addr2line" +version = "0.17.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b9ecd88a8c8378ca913a680cd98f0f13ac67383d35993f86c90a70e3f137816b" +dependencies = [ + "gimli 0.26.2", +] + +[[package]] +name = "addr2line" +version = "0.19.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a76fd60b23679b7d19bd066031410fb7e458ccc5e958eb5c325888ce4baedc97" +dependencies = [ + "gimli 0.27.3", +] + [[package]] name = "addr2line" version = "0.20.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f4fa78e18c64fce05e902adecd7a5eed15a5e0a3439f7b0e169f0252214865e3" dependencies = [ - "gimli", + "gimli 0.27.3", ] [[package]] @@ -39,6 +67,17 @@ dependencies = [ "cpufeatures", ] +[[package]] +name = "ahash" +version = "0.7.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fcb51a0695d8f838b1ee009b3fbf66bda078cd64590202a864a8f3e8c4315c47" +dependencies = [ + "getrandom 0.2.10", + "once_cell", + "version_check", +] + [[package]] name = "ahash" version = "0.8.3" @@ -46,6 +85,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2c99f64d1e06488f620f932677e24bc6e2897582980441ae90a671415bd7ec2f" dependencies = [ "cfg-if", + "getrandom 0.2.10", "once_cell", "version_check", ] @@ -59,6 +99,30 @@ dependencies = [ "memchr", ] +[[package]] +name = "android-tzdata" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e999941b234f3131b00bc13c22d06e8c5ff726d1b6318ac7eb276997bbb4fef0" + +[[package]] +name = "android_system_properties" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "819e7219dbd41043ac279b19830f2efc897156490d7fd6ea916720117ee66311" +dependencies = [ + "libc", +] + +[[package]] +name = "ansi_term" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d52a9bb7ec0cf484c551830a7ce27bd20d67eac647e1befb56b0be4ee39a55d2" +dependencies = [ + "winapi", +] + [[package]] name = "anstream" version = "0.3.2" @@ -95,7 +159,7 @@ version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5ca11d4be1bab0c8bc8734a9aa7bf4ee8316d462a08c6ac5052f888fef5b494b" dependencies = [ - "windows-sys", + "windows-sys 0.48.0", ] [[package]] @@ -105,7 +169,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "180abfa45703aebe0093f79badacc01b8fd4ea2e35118747e5811127f926e188" dependencies = [ "anstyle", - "windows-sys", + "windows-sys 0.48.0", ] [[package]] @@ -114,12 +178,54 @@ version = "1.0.72" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3b13c32d80ecc7ab747b80c3784bce54ee8a7a0cc4fbda9bf4cda2cf6fe90854" 
+[[package]] +name = "arc-swap" +version = "1.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bddcadddf5e9015d310179a59bb28c4d4b9920ad0f11e8e14dbadf654890c9a6" + +[[package]] +name = "array-bytes" +version = "4.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f52f63c5c1316a16a4b35eaac8b76a98248961a533f061684cb2a7cb0eafb6c6" + +[[package]] +name = "array-init" +version = "0.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "23589ecb866b460d3a0f1278834750268c607e8e28a1b982c907219f3178cd72" +dependencies = [ + "nodrop", +] + +[[package]] +name = "arrayref" +version = "0.3.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6b4930d2cb77ce62f89ee5d5289b4ac049559b1c45539271f5ed4fdc7db34545" + +[[package]] +name = "arrayvec" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "23b62fc65de8e4e7f52534fb52b0f3ed04746ae267519eef2a83941e8085068b" + [[package]] name = "arrayvec" version = "0.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "96d30a06541fbafbc7f82ed10c06164cfbd2c401138f6addd8404629c4b16711" +[[package]] +name = "async-lock" +version = "2.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "287272293e9d8c41773cec55e365490fe034813a2f172f502d6ddcf75b2f582b" +dependencies = [ + "event-listener", +] + [[package]] name = "async-trait" version = "0.1.71" @@ -128,7 +234,18 @@ checksum = "a564d521dd56509c4c47480d00b80ee55f7e385ae48db5744c67ad50c92d2ebf" dependencies = [ "proc-macro2", "quote", - "syn 2.0.26", + "syn 2.0.29", +] + +[[package]] +name = "atty" +version = "0.2.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d9b39be18770d11421cdb1b9947a45dd3f37e93092cbf377614828a319d5fee8" +dependencies = [ + "hermit-abi 0.1.19", + "libc", + "winapi", ] [[package]] @@ -137,21 +254,50 @@ version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa" +[[package]] +name = "avail-subxt" +version = "0.2.1" +source = "git+https://github.com/availproject/avail.git?tag=v1.6.0#99b85257d6b5bb3fa3051b1c05d30d42f7471a7e" +dependencies = [ + "anyhow", + "curve25519-dalek 2.1.3", + "derive_more", + "futures", + "hex", + "jsonrpsee 0.16.2", + "num_enum", + "parity-scale-codec", + "scale-info", + "schnorrkel", + "serde", + "serde-hex", + "sp-core 16.0.0", + "structopt", + "subxt", + "tokio", +] + [[package]] name = "backtrace" version = "0.3.68" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4319208da049c43661739c5fade2ba182f09d1dc2299b32298d3a31692b17e12" dependencies = [ - "addr2line", + "addr2line 0.20.0", "cc", "cfg-if", "libc", "miniz_oxide", - "object", + "object 0.31.1", "rustc-demangle", ] +[[package]] +name = "base58" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6107fe1be6682a68940da878d9e9f5e90ca5745b3dec9fd1bb393c8777d4f581" + [[package]] name = "base64" version = "0.13.1" @@ -170,6 +316,16 @@ version = "1.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8c3c1a368f70d6cf7302d78f8f7093da241fb8e8807c05cc9e51a125895a6d5b" +[[package]] +name = "bcs" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4bd3ffe8b19a604421a5d461d4a70346223e535903fbc3067138bddbebddcf77" +dependencies = [ 
+ "serde", + "thiserror", +] + [[package]] name = "bech32" version = "0.9.1" @@ -212,7 +368,7 @@ dependencies = [ "regex", "rustc-hash", "shlex", - "syn 2.0.26", + "syn 2.0.29", ] [[package]] @@ -248,13 +404,36 @@ dependencies = [ "digest 0.10.7", ] +[[package]] +name = "blake2b_simd" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3c2f0dc9a68c6317d884f97cc36cf5a3d20ba14ce404227df55e1af708ab04bc" +dependencies = [ + "arrayref", + "arrayvec 0.7.4", + "constant_time_eq 0.2.6", +] + +[[package]] +name = "block-buffer" +version = "0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c0940dc441f31689269e10ac70eb1002a3a1d3ad1390e030043662eb7fe4688b" +dependencies = [ + "block-padding", + "byte-tools", + "byteorder", + "generic-array 0.12.4", +] + [[package]] name = "block-buffer" version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4152116fd6e9dadb291ae18fc1ec3575ed6d84c29642d97890f4b4a3417297e4" dependencies = [ - "generic-array", + "generic-array 0.14.7", ] [[package]] @@ -263,7 +442,16 @@ version = "0.10.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3078c7629b62d3f0439517fa394996acacc5cbc91c5a20d8c658e77abd503a71" dependencies = [ - "generic-array", + "generic-array 0.14.7", +] + +[[package]] +name = "block-padding" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fa79dedbb091f449f1f39e53edf88d5dbe95f895dae6135a8d7b881fb5af73f5" +dependencies = [ + "byte-tools", ] [[package]] @@ -323,6 +511,24 @@ dependencies = [ "syn 1.0.109", ] +[[package]] +name = "bounded-collections" +version = "0.1.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eb5b05133427c07c4776906f673ccf36c21b102c9829c641a5b56bd151d44fd6" +dependencies = [ + "log", + "parity-scale-codec", + "scale-info", + "serde", +] + +[[package]] +name = "bs58" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "771fe0050b883fcc3ea2359b1a96bcfbc090b7116eae7c3c512c7a083fdf23d3" + [[package]] name = "bstr" version = "1.6.0" @@ -339,6 +545,18 @@ version = "3.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a3e2c3daef883ecc1b5d58c15adae93470a91d425f3532ba1695849656af3fc1" +[[package]] +name = "byte-slice-cast" +version = "1.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c3ac9f8b63eca6fd385229b3675f6cc0dc5c8a5c8a54a59d4f52ffd670d87b0c" + +[[package]] +name = "byte-tools" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3b5ca7a04898ad4bcd41c90c5285445ff5b791899bb1b0abdd2a2aa791211d7" + [[package]] name = "bytemuck" version = "1.13.1" @@ -356,7 +574,7 @@ checksum = "fdde5c9cd29ebd706ce1b35600920a33550e402fc998a2e53ad3b42c3c47a192" dependencies = [ "proc-macro2", "quote", - "syn 2.0.26", + "syn 2.0.29", ] [[package]] @@ -429,11 +647,42 @@ dependencies = [ [[package]] name = "cc" -version = "1.0.79" +version = "1.0.82" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "50d30906286121d95be3d479533b458f87493b30a4b5f79a607db8f5d11aa91f" +checksum = "305fe645edc1442a0fa8b6726ba61d422798d37a52e12eaecf4b022ebbb88f01" dependencies = [ "jobserver", + "libc", +] + +[[package]] +name = "celestia" +version = "0.1.0" +dependencies = [ + "anyhow", + "async-trait", + "base64 0.21.2", + "bech32", + "borsh", + "hex", + "hex-literal", + 
"jsonrpsee 0.18.2", + "nmt-rs", + "prost", + "prost-build", + "prost-types", + "risc0-zkvm", + "risc0-zkvm-platform", + "serde", + "serde_json", + "sha2 0.10.7", + "sov-rollup-interface", + "tendermint", + "tendermint-proto", + "thiserror", + "tokio", + "tracing", + "zk-cycle-macros", ] [[package]] @@ -451,6 +700,21 @@ version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" +[[package]] +name = "chrono" +version = "0.4.26" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec837a71355b28f6556dbd569b37b3f363091c0bd4b2e735674521b4c5fd9bc5" +dependencies = [ + "android-tzdata", + "iana-time-zone", + "js-sys", + "num-traits", + "time 0.1.45", + "wasm-bindgen", + "winapi", +] + [[package]] name = "cipher" version = "0.4.4" @@ -472,6 +736,21 @@ dependencies = [ "libloading", ] +[[package]] +name = "clap" +version = "2.34.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a0610544180c38b88101fecf2dd634b174a62eef6946f84dfc6a7127512b381c" +dependencies = [ + "ansi_term", + "atty", + "bitflags 1.3.2", + "strsim 0.8.0", + "textwrap", + "unicode-width", + "vec_map", +] + [[package]] name = "clap" version = "4.3.17" @@ -492,7 +771,7 @@ dependencies = [ "anstream", "anstyle", "clap_lex", - "strsim", + "strsim 0.10.0", ] [[package]] @@ -501,10 +780,10 @@ version = "4.3.12" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "54a9bb5758fc5dfe728d1019941681eccaf0cf8a4189b692a0ee2f2ecf90a050" dependencies = [ - "heck", + "heck 0.4.1", "proc-macro2", "quote", - "syn 2.0.26", + "syn 2.0.29", ] [[package]] @@ -535,6 +814,12 @@ version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "245097e9a4535ee1e3e3931fcfcd55a796a44c643e8596ff6566d68f09b87bbc" +[[package]] +name = "constant_time_eq" +version = "0.2.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "21a53c0a4d288377e7415b53dcfc3c04da5cdc2cc95c8d5ac178b58f0b861ad6" + [[package]] name = "convert_case" version = "0.4.0" @@ -557,6 +842,15 @@ version = "0.8.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e496a50fda8aacccc86d7529e2c1e0892dbd0f898a6b5645b5561b89c3210efa" +[[package]] +name = "cpp_demangle" +version = "0.3.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eeaa953eaad386a53111e47172c2fedba671e5684c8dd601a5f474f4f118710f" +dependencies = [ + "cfg-if", +] + [[package]] name = "cpufeatures" version = "0.2.9" @@ -566,6 +860,24 @@ dependencies = [ "libc", ] +[[package]] +name = "cranelift-entity" +version = "0.92.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5a9e39cfc857e7e539aa623e03bb6bec11f54aef3dfdef41adcfa7b594af3b54" +dependencies = [ + "serde", +] + +[[package]] +name = "cranelift-entity" +version = "0.95.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "40099d38061b37e505e63f89bab52199037a72b931ad4868d9089ff7268660b0" +dependencies = [ + "serde", +] + [[package]] name = "crc32fast" version = "1.3.2" @@ -605,7 +917,7 @@ dependencies = [ "autocfg", "cfg-if", "crossbeam-utils", - "memoffset", + "memoffset 0.9.0", "scopeguard", ] @@ -618,6 +930,12 @@ dependencies = [ "cfg-if", ] +[[package]] +name = "crunchy" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"7a81dae078cea95a014a339291cec439d2f232ebe854a9d672b796c6afafa9b7" + [[package]] name = "crypto-bigint" version = "0.5.2" @@ -634,43 +952,160 @@ version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3" dependencies = [ - "generic-array", + "generic-array 0.14.7", "typenum", ] +[[package]] +name = "crypto-mac" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b584a330336237c1eecd3e94266efb216c56ed91225d634cb2991c5f3fd1aeab" +dependencies = [ + "generic-array 0.14.7", + "subtle", +] + +[[package]] +name = "crypto-mac" +version = "0.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b1d1a86f49236c215f271d40892d5fc950490551400b02ef360692c29815c714" +dependencies = [ + "generic-array 0.14.7", + "subtle", +] + +[[package]] +name = "csv" +version = "1.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "626ae34994d3d8d668f4269922248239db4ae42d538b14c398b74a52208e8086" +dependencies = [ + "csv-core", + "itoa", + "ryu", + "serde", +] + +[[package]] +name = "csv-core" +version = "0.1.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2b2466559f260f48ad25fe6317b3c8dac77b5bdb5763ac7d9d6103530663bc90" +dependencies = [ + "memchr", +] + [[package]] name = "curve25519-dalek" -version = "3.2.0" +version = "2.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0b9fdf9972b2bd6af2d913799d9ebc165ea4d2e65878e329d9c6b372c4491b61" +checksum = "4a9b85542f99a2dfa2a1b8e192662741c9859a846b296bef1c92ef9b58b5a216" dependencies = [ "byteorder", - "digest 0.9.0", + "digest 0.8.1", "rand_core 0.5.1", "subtle", "zeroize", ] [[package]] -name = "curve25519-dalek-ng" -version = "4.1.1" +name = "curve25519-dalek" +version = "3.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1c359b7249347e46fb28804470d071c921156ad62b3eef5d34e2ba867533dec8" +checksum = "0b9fdf9972b2bd6af2d913799d9ebc165ea4d2e65878e329d9c6b372c4491b61" dependencies = [ "byteorder", "digest 0.9.0", - "rand_core 0.6.4", - "subtle-ng", + "rand_core 0.5.1", + "subtle", "zeroize", ] [[package]] -name = "demo-stf" +name = "curve25519-dalek" +version = "4.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f711ade317dd348950a9910f81c5947e3d8907ebd2b83f76203ff1807e6a2bc2" +dependencies = [ + "cfg-if", + "cpufeatures", + "curve25519-dalek-derive", + "digest 0.10.7", + "fiat-crypto", + "platforms", + "rustc_version", + "subtle", + "zeroize", +] + +[[package]] +name = "curve25519-dalek-derive" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "83fdaf97f4804dcebfa5862639bc9ce4121e82140bec2a987ac5140294865b5b" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.29", +] + +[[package]] +name = "curve25519-dalek-ng" +version = "4.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1c359b7249347e46fb28804470d071c921156ad62b3eef5d34e2ba867533dec8" +dependencies = [ + "byteorder", + "digest 0.9.0", + "rand_core 0.6.4", + "subtle-ng", + "zeroize", +] + +[[package]] +name = "darling" +version = "0.14.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7b750cb3417fd1b327431a470f388520309479ab0bf5e323505daf0290cd3850" +dependencies = [ + "darling_core", + "darling_macro", +] + +[[package]] +name = "darling_core" +version = "0.14.4" 
+source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "109c1ca6e6b7f82cc233a97004ea8ed7ca123a9af07a8230878fcfda9b158bf0" +dependencies = [ + "fnv", + "ident_case", + "proc-macro2", + "quote", + "strsim 0.10.0", + "syn 1.0.109", +] + +[[package]] +name = "darling_macro" +version = "0.14.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a4aab4dbc9f7611d8b55048a3a16d2d010c2c8334e46304b40ac1cc14bf3b48e" +dependencies = [ + "darling_core", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "demo-stf" version = "0.1.0" dependencies = [ "anyhow", "borsh", - "clap", + "clap 4.3.17", "const-rollup-config", "hex", "jsonrpsee 0.18.2", @@ -678,7 +1113,9 @@ dependencies = [ "serde_json", "sov-accounts", "sov-bank", - "sov-election", + "sov-blob-storage", + "sov-chain-state", + "sov-cli", "sov-modules-api", "sov-modules-stf-template", "sov-rollup-interface", @@ -702,6 +1139,17 @@ dependencies = [ "zeroize", ] +[[package]] +name = "derivative" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fcc3dd5e9e9c0b295d6e1e4d811fb6f157d5ffd784b8d202fc62eac8035a770b" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", +] + [[package]] name = "derive_more" version = "0.99.17" @@ -715,13 +1163,28 @@ dependencies = [ "syn 1.0.109", ] +[[package]] +name = "destructure_traitobject" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3c877555693c14d2f84191cfd3ad8582790fc52b5e2274b40b59cf5f5cea25c7" + +[[package]] +name = "digest" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f3d0c8c8752312f9713efd397ff63acb9f85585afbf179282e720e7704954dd5" +dependencies = [ + "generic-array 0.12.4", +] + [[package]] name = "digest" version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d3dd60d1080a57a05ab032377049e0591415d2b31afd7028356dbf3cc6dcb066" dependencies = [ - "generic-array", + "generic-array 0.14.7", ] [[package]] @@ -744,6 +1207,16 @@ dependencies = [ "dirs-sys", ] +[[package]] +name = "dirs-next" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b98cf8ebf19c3d1b223e151f99a4f9f0690dca41414773390fc824184ac833e1" +dependencies = [ + "cfg-if", + "dirs-sys-next", +] + [[package]] name = "dirs-sys" version = "0.4.1" @@ -753,7 +1226,18 @@ dependencies = [ "libc", "option-ext", "redox_users", - "windows-sys", + "windows-sys 0.48.0", +] + +[[package]] +name = "dirs-sys-next" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4ebda144c4fe02d1f7ea1a7d9641b6fc6b580adcfa024ae48797ecdeb6825b4d" +dependencies = [ + "libc", + "redox_users", + "winapi", ] [[package]] @@ -775,6 +1259,27 @@ dependencies = [ "tokio", ] +[[package]] +name = "dyn-clonable" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4e9232f0e607a262ceb9bd5141a3dfb3e4db6994b31989bbfd845878cba59fd4" +dependencies = [ + "dyn-clonable-impl", + "dyn-clone", +] + +[[package]] +name = "dyn-clonable-impl" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "558e40ea573c374cf53507fd240b7ee2f5477df7cfebdb97323ec61c719399c5" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", +] + [[package]] name = "dyn-clone" version = "1.0.12" @@ -816,6 +1321,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"5fb04eee5d9d907f29e80ee6b0e78f7e2c82342c63e3580d8c4f69d9d5aad963" dependencies = [ "pkcs8", + "serde", "signature 2.1.0", ] @@ -838,10 +1344,36 @@ version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c762bae6dcaf24c4c84667b8579785430908723d5c889f469d76a41d59cc7a9d" dependencies = [ - "curve25519-dalek", + "curve25519-dalek 3.2.0", "ed25519 1.5.3", - "rand 0.7.3", + "sha2 0.9.9", + "zeroize", +] + +[[package]] +name = "ed25519-dalek" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7277392b266383ef8396db7fdeb1e77b6c52fed775f5df15bb24f35b72156980" +dependencies = [ + "curve25519-dalek 4.0.0", + "ed25519 2.2.1", + "rand_core 0.6.4", "serde", + "sha2 0.10.7", + "zeroize", +] + +[[package]] +name = "ed25519-zebra" +version = "3.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7c24f403d068ad0b359e577a77f92392118be3f3c927538f2bb544a5ecd828c6" +dependencies = [ + "curve25519-dalek 3.2.0", + "hashbrown 0.12.3", + "hex", + "rand_core 0.6.4", "sha2 0.9.9", "zeroize", ] @@ -858,6 +1390,12 @@ version = "0.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e2b183d6ce6ca4cf30e3db37abf5b52568b5f9015c97d9fbdd7026aa5dcdd758" +[[package]] +name = "encode_unicode" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "34aa73646ffb006b8f5147f3dc182bd4bcb190227ce861fc4a4844bf8e3cb2c0" + [[package]] name = "encoding_rs" version = "0.8.32" @@ -867,6 +1405,25 @@ dependencies = [ "cfg-if", ] +[[package]] +name = "env_logger" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85cdab6a89accf66733ad5a1693a4dcced6aeff64602b634530dd73c1f3ee9f0" +dependencies = [ + "humantime", + "is-terminal", + "log", + "regex", + "termcolor", +] + +[[package]] +name = "environmental" +version = "1.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e48c92028aaa870e83d51c64e5d4e0b6981b360c522198c23959f219a4e1b15b" + [[package]] name = "equivalent" version = "1.0.1" @@ -890,7 +1447,7 @@ checksum = "4bcfec3a70f97c962c307b2d2c56e358cf1d00b558d74262b5f929ee8cc7e73a" dependencies = [ "errno-dragonfly", "libc", - "windows-sys", + "windows-sys 0.48.0", ] [[package]] @@ -903,6 +1460,12 @@ dependencies = [ "libc", ] +[[package]] +name = "event-listener" +version = "2.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0206175f82b8d6bf6652ff7d71a1e27fd2e4efde587fd368662814d6ec1d9ce0" + [[package]] name = "eyre" version = "0.6.8" @@ -913,6 +1476,18 @@ dependencies = [ "once_cell", ] +[[package]] +name = "fake-simd" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e88a8acf291dafb59c2d96e8f59828f3838bb1a70398823ade51a84de6a6deed" + +[[package]] +name = "fallible-iterator" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4443176a9f2c162692bd3d352d745ef9413eec5782a80d8fd6f8a1ac692a07f7" + [[package]] name = "fastrand" version = "1.9.0" @@ -943,7 +1518,7 @@ checksum = "e9f54704be45ed286151c5e11531316eaef5b8f5af7d597b806fdb8af108d84a" dependencies = [ "addchain", "cfg-if", - "num-bigint", + "num-bigint 0.3.3", "num-integer", "num-traits", "proc-macro2", @@ -951,6 +1526,24 @@ dependencies = [ "syn 1.0.109", ] +[[package]] +name = "fiat-crypto" +version = "0.1.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"e825f6987101665dea6ec934c09ec6d721de7bc1bf92248e1d5810c8cd636b77" + +[[package]] +name = "fixed-hash" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "835c052cb0c08c1acf6ffd71c022172e18723949c8282f2b9f27efbc51e64534" +dependencies = [ + "byteorder", + "rand 0.8.5", + "rustc-hex", + "static_assertions", +] + [[package]] name = "fixedbitset" version = "0.4.2" @@ -1007,6 +1600,18 @@ dependencies = [ "percent-encoding", ] +[[package]] +name = "frame-metadata" +version = "15.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "878babb0b136e731cc77ec2fd883ff02745ff21e6fb662729953d44923df009c" +dependencies = [ + "cfg-if", + "parity-scale-codec", + "scale-info", + "serde", +] + [[package]] name = "funty" version = "2.0.0" @@ -1053,6 +1658,7 @@ dependencies = [ "futures-core", "futures-task", "futures-util", + "num_cpus", ] [[package]] @@ -1069,7 +1675,7 @@ checksum = "89ca545a94061b6365f2c7355b4b32bd20df3ff95f02da9329b34ccc3bd6ee72" dependencies = [ "proc-macro2", "quote", - "syn 2.0.26", + "syn 2.0.29", ] [[package]] @@ -1084,6 +1690,16 @@ version = "0.3.28" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "76d3d132be6c0e6aa1534069c705a74a5997a356c0dc2f86a47765e5617c5b65" +[[package]] +name = "futures-timer" +version = "3.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e64b03909df88034c26dc1547e8970b91f98bdb65165d6a4e9110d94263dbb2c" +dependencies = [ + "gloo-timers", + "send_wrapper", +] + [[package]] name = "futures-util" version = "0.3.28" @@ -1102,6 +1718,15 @@ dependencies = [ "slab", ] +[[package]] +name = "generic-array" +version = "0.12.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ffdf9f34f1447443d37393cc6c2b8313aebddcd96906caf34e54c68d8e57d7bd" +dependencies = [ + "typenum", +] + [[package]] name = "generic-array" version = "0.14.7" @@ -1130,8 +1755,20 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "be4136b2a15dd319360be1c07d9933517ccf0be8f16bf62a3bee4f0d618df427" dependencies = [ "cfg-if", + "js-sys", "libc", "wasi 0.11.0+wasi-snapshot-preview1", + "wasm-bindgen", +] + +[[package]] +name = "gimli" +version = "0.26.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "22030e2c5a68ec659fde1e949a745124b48e6fa8b045b7ed5bd1fe4ccc5c4e5d" +dependencies = [ + "fallible-iterator", + "stable_deref_trait", ] [[package]] @@ -1139,6 +1776,11 @@ name = "gimli" version = "0.27.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b6c80984affa11d98d1b88b66ac8853f143217b399d3c74116778ff8fdb4ed2e" +dependencies = [ + "fallible-iterator", + "indexmap 1.9.3", + "stable_deref_trait", +] [[package]] name = "glob" @@ -1159,6 +1801,51 @@ dependencies = [ "regex", ] +[[package]] +name = "gloo-net" +version = "0.2.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9902a044653b26b99f7e3693a42f171312d9be8b26b5697bd1e43ad1f8a35e10" +dependencies = [ + "futures-channel", + "futures-core", + "futures-sink", + "gloo-utils", + "js-sys", + "pin-project", + "serde", + "serde_json", + "thiserror", + "wasm-bindgen", + "wasm-bindgen-futures", + "web-sys", +] + +[[package]] +name = "gloo-timers" +version = "0.2.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9b995a66bb87bebce9a0f4a95aed01daca4872c050bfcb21653361c03bc35e5c" +dependencies = [ + "futures-channel", + "futures-core", + "js-sys", + 
"wasm-bindgen", +] + +[[package]] +name = "gloo-utils" +version = "0.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "037fcb07216cb3a30f7292bd0176b050b7b9a052ba830ef7d5d65f6dc64ba58e" +dependencies = [ + "js-sys", + "serde", + "serde_json", + "wasm-bindgen", + "web-sys", +] + [[package]] name = "h2" version = "0.3.20" @@ -1178,11 +1865,35 @@ dependencies = [ "tracing", ] +[[package]] +name = "hash-db" +version = "0.15.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d23bd4e7b5eda0d0f3a307e8b381fdc8ba9000f26fbe912250c0a4cc3956364a" + +[[package]] +name = "hash-db" +version = "0.16.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e7d7786361d7425ae2fe4f9e407eb0efaa0840f5212d109cc018c40c35c6ab4" + +[[package]] +name = "hash256-std-hasher" +version = "0.15.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "92c171d55b98633f4ed3860808f004099b36c1cc29c42cfc53aa8591b21efcf2" +dependencies = [ + "crunchy", +] + [[package]] name = "hashbrown" version = "0.12.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888" +dependencies = [ + "ahash 0.7.6", +] [[package]] name = "hashbrown" @@ -1190,7 +1901,7 @@ version = "0.13.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "43a3c133739dddd0d2990f9a4bdf8eb4b21ef50e4851ca85ab661199821d510e" dependencies = [ - "ahash", + "ahash 0.8.3", ] [[package]] @@ -1199,6 +1910,15 @@ version = "0.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2c6201b9ff9fd90a5a3bac2e56a830d0caa509576f0e503818ee82c181b3437a" +[[package]] +name = "heck" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6d621efb26863f0e9924c6ac577e8275e5e6b77455db64ffa6c65c904e9e132c" +dependencies = [ + "unicode-segmentation", +] + [[package]] name = "heck" version = "0.4.1" @@ -1207,9 +1927,18 @@ checksum = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8" [[package]] name = "hermit-abi" -version = "0.3.2" +version = "0.1.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "443144c8cdadd93ebf52ddb4056d257f5b52c04d3c804e657d19eb73fc33668b" +checksum = "62b467343b94ba476dcb2500d242dadbb39557df889310ac77c5d99100aaac33" +dependencies = [ + "libc", +] + +[[package]] +name = "hermit-abi" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "443144c8cdadd93ebf52ddb4056d257f5b52c04d3c804e657d19eb73fc33668b" [[package]] name = "hex" @@ -1226,6 +1955,26 @@ version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6fe2267d4ed49bc07b63801559be28c718ea06c4738b7a03c94df7386d2cde46" +[[package]] +name = "hmac" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "126888268dcc288495a26bf004b38c5fdbb31682f992c84ceb046a1f0fe38840" +dependencies = [ + "crypto-mac 0.8.0", + "digest 0.9.0", +] + +[[package]] +name = "hmac" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2a2a2320eb7ec0ebe8da8f744d7812d9fc4cb4d09344ac01898dbcb6a20ae69b" +dependencies = [ + "crypto-mac 0.11.1", + "digest 0.9.0", +] + [[package]] name = "hmac" version = "0.12.1" @@ -1235,6 +1984,17 @@ dependencies = [ "digest 0.10.7", ] +[[package]] +name = "hmac-drbg" +version = "0.3.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "17ea0a1394df5b6574da6e0c1ade9e78868c9fb0a4e5ef4428e32da4676b85b1" +dependencies = [ + "digest 0.9.0", + "generic-array 0.14.7", + "hmac 0.8.1", +] + [[package]] name = "http" version = "0.2.9" @@ -1269,6 +2029,12 @@ version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c4a1e36c821dbe04574f602848a19f742f4fb3c98d40449f11bcad18d6b17421" +[[package]] +name = "humantime" +version = "2.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9a3a5bfb195931eeb336b2a7b4d761daec841b97f947d34394601737a7bba5e4" + [[package]] name = "hyper" version = "0.14.27" @@ -1306,7 +2072,7 @@ dependencies = [ "rustls-native-certs", "tokio", "tokio-rustls 0.23.4", - "webpki-roots", + "webpki-roots 0.22.6", ] [[package]] @@ -1338,6 +2104,29 @@ dependencies = [ "tokio-native-tls", ] +[[package]] +name = "iana-time-zone" +version = "0.1.57" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2fad5b825842d2b38bd206f3e81d6957625fd7f0a361e345c30e01a0ae2dd613" +dependencies = [ + "android_system_properties", + "core-foundation-sys", + "iana-time-zone-haiku", + "js-sys", + "wasm-bindgen", + "windows", +] + +[[package]] +name = "iana-time-zone-haiku" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f31827a206f56af32e590ba56d5d2d085f558508192593743f16b2306495269f" +dependencies = [ + "cc", +] + [[package]] name = "ics23" version = "0.10.1" @@ -1355,6 +2144,12 @@ dependencies = [ "sha3", ] +[[package]] +name = "ident_case" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39" + [[package]] name = "idna" version = "0.4.0" @@ -1365,6 +2160,35 @@ dependencies = [ "unicode-normalization", ] +[[package]] +name = "impl-codec" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ba6a270039626615617f3f36d15fc827041df3b78c439da2cadfa47455a77f2f" +dependencies = [ + "parity-scale-codec", +] + +[[package]] +name = "impl-serde" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ebc88fc67028ae3db0c853baa36269d398d5f45b6982f95549ff5def78c935cd" +dependencies = [ + "serde", +] + +[[package]] +name = "impl-trait-for-tuples" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "11d7a9f6330b71fea57921c9b61c47ee6e84f72d394754eff6163ae67e7395eb" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", +] + [[package]] name = "indenter" version = "0.3.3" @@ -1379,6 +2203,7 @@ checksum = "bd070e393353796e801d209ad339e89596eb4c8d430d18ede6a1cced8fafbd99" dependencies = [ "autocfg", "hashbrown 0.12.3", + "serde", ] [[package]] @@ -1397,7 +2222,7 @@ version = "0.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a0c10553d664a4d0bcff9f4215d0aac67a639cc68ef660840afe309b807bc9f5" dependencies = [ - "generic-array", + "generic-array 0.14.7", ] [[package]] @@ -1409,6 +2234,15 @@ dependencies = [ "cfg-if", ] +[[package]] +name = "integer-sqrt" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "276ec31bcb4a9ee45f58bec6f9ec700ae4cf4f4f8f2fa7e06cb406bd5ffdd770" +dependencies = [ + "num-traits", +] + [[package]] name = "inventory" version = "0.3.9" @@ -1421,9 +2255,9 @@ version = "1.0.11" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "eae7b9aee968036d54dce06cebaefd919e4472e753296daccd6d344e3e2df0c2" dependencies = [ - "hermit-abi", + "hermit-abi 0.3.2", "libc", - "windows-sys", + "windows-sys 0.48.0", ] [[package]] @@ -1438,9 +2272,9 @@ version = "0.4.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cb0889898416213fab133e1d33a0e5858a48177452750691bde3666d0fdbaf8b" dependencies = [ - "hermit-abi", + "hermit-abi 0.3.2", "rustix 0.38.4", - "windows-sys", + "windows-sys 0.48.0", ] [[package]] @@ -1460,12 +2294,13 @@ checksum = "af150ab688ff2122fcef229be89cb50dd66af9e01a4ff320cc137eecc9bacc38" [[package]] name = "jmt" -version = "0.6.0" +version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e1a302f0defd323b833c9848c20ab40c3156128f50d7bf8eebeed2ef58167258" +checksum = "9e49c5d2c13e15f77f22cee3df3dc822b46051b217112035d72687cb57a9cbde" dependencies = [ "anyhow", "borsh", + "digest 0.10.7", "hashbrown 0.13.2", "hex", "ics23", @@ -1503,10 +2338,15 @@ version = "0.16.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7d291e3a5818a2384645fd9756362e6d89cf0541b0b916fa7702ea4a9833608e" dependencies = [ + "jsonrpsee-client-transport 0.16.2", "jsonrpsee-core 0.16.2", "jsonrpsee-http-client 0.16.2", + "jsonrpsee-proc-macros 0.16.3", "jsonrpsee-server 0.16.2", "jsonrpsee-types 0.16.2", + "jsonrpsee-wasm-client 0.16.2", + "jsonrpsee-ws-client 0.16.2", + "tracing", ] [[package]] @@ -1515,12 +2355,62 @@ version = "0.18.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1822d18e4384a5e79d94dc9e4d1239cfa9fad24e55b44d2efeff5b394c9fece4" dependencies = [ + "jsonrpsee-client-transport 0.18.2", "jsonrpsee-core 0.18.2", "jsonrpsee-http-client 0.18.2", - "jsonrpsee-proc-macros", + "jsonrpsee-proc-macros 0.18.2", "jsonrpsee-server 0.18.2", "jsonrpsee-types 0.18.2", + "jsonrpsee-wasm-client 0.18.2", + "jsonrpsee-ws-client 0.18.2", + "tracing", +] + +[[package]] +name = "jsonrpsee-client-transport" +version = "0.16.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "965de52763f2004bc91ac5bcec504192440f0b568a5d621c59d9dbd6f886c3fb" +dependencies = [ + "anyhow", + "futures-channel", + "futures-timer", + "futures-util", + "gloo-net", + "http", + "jsonrpsee-core 0.16.2", + "jsonrpsee-types 0.16.2", + "pin-project", + "rustls-native-certs", + "soketto", + "thiserror", + "tokio", + "tokio-rustls 0.23.4", + "tokio-util", + "tracing", + "webpki-roots 0.22.6", +] + +[[package]] +name = "jsonrpsee-client-transport" +version = "0.18.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "11aa5766d5c430b89cb26a99b88f3245eb91534be8126102cea9e45ee3891b22" +dependencies = [ + "futures-channel", + "futures-util", + "gloo-net", + "http", + "jsonrpsee-core 0.18.2", + "pin-project", + "rustls-native-certs", + "soketto", + "thiserror", + "tokio", + "tokio-rustls 0.24.1", + "tokio-util", "tracing", + "webpki-roots 0.23.1", ] [[package]] @@ -1530,15 +2420,17 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a4e70b4439a751a5de7dd5ed55eacff78ebf4ffe0fc009cb1ebb11417f5b536b" dependencies = [ "anyhow", - "arrayvec", + "arrayvec 0.7.4", + "async-lock", "async-trait", "beef", "futures-channel", + "futures-timer", "futures-util", "globset", "hyper", "jsonrpsee-types 0.16.2", - "parking_lot", + "parking_lot 0.12.1", "rand 0.8.5", "rustc-hash", "serde", @@ -1547,6 +2439,7 @@ dependencies = [ 
"thiserror", "tokio", "tracing", + "wasm-bindgen-futures", ] [[package]] @@ -1556,13 +2449,15 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "64c6832a55f662b5a6ecc844db24b8b9c387453f923de863062c60ce33d62b81" dependencies = [ "anyhow", + "async-lock", "async-trait", "beef", + "futures-timer", "futures-util", "globset", "hyper", "jsonrpsee-types 0.18.2", - "parking_lot", + "parking_lot 0.12.1", "rand 0.8.5", "rustc-hash", "serde", @@ -1570,7 +2465,9 @@ dependencies = [ "soketto", "thiserror", "tokio", + "tokio-stream", "tracing", + "wasm-bindgen-futures", ] [[package]] @@ -1611,13 +2508,26 @@ dependencies = [ "tracing", ] +[[package]] +name = "jsonrpsee-proc-macros" +version = "0.16.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "44e8ab85614a08792b9bff6c8feee23be78c98d0182d4c622c05256ab553892a" +dependencies = [ + "heck 0.4.1", + "proc-macro-crate 1.3.1", + "proc-macro2", + "quote", + "syn 1.0.109", +] + [[package]] name = "jsonrpsee-proc-macros" version = "0.18.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c6027ac0b197ce9543097d02a290f550ce1d9432bf301524b013053c0b75cc94" dependencies = [ - "heck", + "heck 0.4.1", "proc-macro-crate 1.3.1", "proc-macro2", "quote", @@ -1695,30 +2605,49 @@ dependencies = [ ] [[package]] -name = "jupiter" -version = "0.1.0" +name = "jsonrpsee-wasm-client" +version = "0.16.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a77310456f43c6c89bcba1f6b2fc2a28300da7c341f320f5128f8c83cc63232d" dependencies = [ - "anyhow", - "async-trait", - "base64 0.21.2", - "bech32", - "borsh", - "hex", - "hex-literal", - "jsonrpsee 0.16.2", - "nmt-rs", - "prost", - "prost-build", - "prost-types", - "serde", - "serde_json", - "sha2 0.10.7", - "sov-rollup-interface", - "tendermint", - "tendermint-proto", - "thiserror", - "tokio", - "tracing", + "jsonrpsee-client-transport 0.16.2", + "jsonrpsee-core 0.16.2", + "jsonrpsee-types 0.16.2", +] + +[[package]] +name = "jsonrpsee-wasm-client" +version = "0.18.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "34e6ea7c6d862e60f8baebd946c037b70c6808a4e4e31e792a4029184e3ce13a" +dependencies = [ + "jsonrpsee-client-transport 0.18.2", + "jsonrpsee-core 0.18.2", + "jsonrpsee-types 0.18.2", +] + +[[package]] +name = "jsonrpsee-ws-client" +version = "0.16.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b83daeecfc6517cfe210df24e570fb06213533dfb990318fae781f4c7119dd9" +dependencies = [ + "http", + "jsonrpsee-client-transport 0.16.2", + "jsonrpsee-core 0.16.2", + "jsonrpsee-types 0.16.2", +] + +[[package]] +name = "jsonrpsee-ws-client" +version = "0.18.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a64b2589680ba1ad7863f279cd2d5083c1dc0a7c0ea959d22924553050f8ab9f" +dependencies = [ + "http", + "jsonrpsee-client-transport 0.18.2", + "jsonrpsee-core 0.18.2", + "jsonrpsee-types 0.18.2", ] [[package]] @@ -1803,6 +2732,54 @@ dependencies = [ "zstd-sys", ] +[[package]] +name = "libsecp256k1" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "95b09eff1b35ed3b33b877ced3a691fc7a481919c7e29c53c906226fcf55e2a1" +dependencies = [ + "arrayref", + "base64 0.13.1", + "digest 0.9.0", + "hmac-drbg", + "libsecp256k1-core", + "libsecp256k1-gen-ecmult", + "libsecp256k1-gen-genmult", + "rand 0.8.5", + "serde", + "sha2 0.9.9", + "typenum", +] + +[[package]] +name = "libsecp256k1-core" +version = "0.3.0" 
+source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5be9b9bb642d8522a44d533eab56c16c738301965504753b03ad1de3425d5451" +dependencies = [ + "crunchy", + "digest 0.9.0", + "subtle", +] + +[[package]] +name = "libsecp256k1-gen-ecmult" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3038c808c55c87e8a172643a7d87187fc6c4174468159cb3090659d55bcb4809" +dependencies = [ + "libsecp256k1-core", +] + +[[package]] +name = "libsecp256k1-gen-genmult" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3db8d6ba2cec9eacc40e6e8ccc98931840301f1006e95647ceb2dd5c3aa06f7c" +dependencies = [ + "libsecp256k1-core", +] + [[package]] name = "libz-sys" version = "1.1.9" @@ -1814,6 +2791,18 @@ dependencies = [ "vcpkg", ] +[[package]] +name = "linked-hash-map" +version = "0.5.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0717cef1bc8b636c6e1c1bbdefc09e6322da8a9321966e8928ef80d20f7f770f" + +[[package]] +name = "linux-raw-sys" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f051f77a7c8e6957c0696eac88f26b0117e54f52d3fc682ab19397a8812846a4" + [[package]] name = "linux-raw-sys" version = "0.3.8" @@ -1841,6 +2830,41 @@ name = "log" version = "0.4.19" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b06a4cde4c0f271a446782e3eff8de789548ce57dbc8eca9292c27f4a42004b4" +dependencies = [ + "serde", +] + +[[package]] +name = "log-mdc" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a94d21414c1f4a51209ad204c1776a3d0765002c76c6abcb602a6f09f1e881c7" + +[[package]] +name = "log4rs" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d36ca1786d9e79b8193a68d480a0907b612f109537115c6ff655a3a1967533fd" +dependencies = [ + "anyhow", + "arc-swap", + "chrono", + "derivative", + "fnv", + "humantime", + "libc", + "log", + "log-mdc", + "parking_lot 0.12.1", + "serde", + "serde-value", + "serde_json", + "serde_yaml", + "thiserror", + "thread-id", + "typemap-ors", + "winapi", +] [[package]] name = "lz4-sys" @@ -1853,21 +2877,81 @@ dependencies = [ ] [[package]] -name = "matrixmultiply" -version = "0.3.7" +name = "mach" +version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "090126dc04f95dc0d1c1c91f61bdd474b3930ca064c1edc8a849da2c6cbe1e77" +checksum = "b823e83b2affd8f40a9ee8c29dbc56404c1e34cd2710921f2801e2cf29527afa" dependencies = [ - "autocfg", - "rawpointer", + "libc", ] +[[package]] +name = "matchers" +version = "0.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f099785f7595cc4b4553a174ce30dd7589ef93391ff414dbb67f62392b9e0ce1" +dependencies = [ + "regex-automata 0.1.10", +] + +[[package]] +name = "matchers" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8263075bb86c5a1b1427b5ae862e8889656f126e9f77c484496e8b47cf5c5558" +dependencies = [ + "regex-automata 0.1.10", +] + +[[package]] +name = "matrixmultiply" +version = "0.3.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "090126dc04f95dc0d1c1c91f61bdd474b3930ca064c1edc8a849da2c6cbe1e77" +dependencies = [ + "autocfg", + "rawpointer", +] + +[[package]] +name = "maybe-uninit" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"60302e4db3a61da70c0cb7991976248362f30319e88850c487b9b95bbf059e00" + [[package]] name = "memchr" version = "2.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2dffe52ecf27772e601905b7522cb4ef790d2cc203488bbd0e2fe85fcb74566d" +[[package]] +name = "memfd" +version = "0.6.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ffc89ccdc6e10d6907450f753537ebc5c5d3460d2e4e62ea74bd571db62c0f9e" +dependencies = [ + "rustix 0.37.23", +] + +[[package]] +name = "memoffset" +version = "0.6.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5aa361d4faea93603064a027415f07bd8e1d5c88c9fbf68bf56a285428fd79ce" +dependencies = [ + "autocfg", +] + +[[package]] +name = "memoffset" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d61c719bcfbcf5d62b3a09efa6088de8c54bc0bfcd3ea7ae39fcc186108b8de1" +dependencies = [ + "autocfg", +] + [[package]] name = "memoffset" version = "0.9.0" @@ -1877,6 +2961,34 @@ dependencies = [ "autocfg", ] +[[package]] +name = "memory-db" +version = "0.31.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5e0c7cba9ce19ac7ffd2053ac9f49843bbd3f4318feedfd74e85c19d5fb0ba66" +dependencies = [ + "hash-db 0.15.2", + "hashbrown 0.12.3", +] + +[[package]] +name = "memory_units" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8452105ba047068f40ff7093dd1d9da90898e63dd61736462e9cdda6a90ad3c3" + +[[package]] +name = "merlin" +version = "2.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4e261cf0f8b3c42ded9f7d2bb59dea03aa52bc8a1cbc7482f9fc3fd1229d3b42" +dependencies = [ + "byteorder", + "keccak", + "rand_core 0.5.1", + "zeroize", +] + [[package]] name = "methods" version = "0.1.0" @@ -1913,7 +3025,7 @@ checksum = "927a765cd3fc26206e66b296465fa9d3e5ab003e651c1b3c060e7956d96b19d2" dependencies = [ "libc", "wasi 0.11.0+wasi-snapshot-preview1", - "windows-sys", + "windows-sys 0.48.0", ] [[package]] @@ -1971,6 +3083,18 @@ dependencies = [ "sha2 0.10.7", ] +[[package]] +name = "nodrop" +version = "0.1.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72ef4a56884ca558e5ddb05a1d1e7e1bfd9a68d9ed024c21704cc98872dae1bb" + +[[package]] +name = "nohash-hasher" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2bf50223579dc7cdcfb3bfcacf7069ff68243f8c363f62ffa99cf000a6b9c451" + [[package]] name = "nom" version = "7.1.3" @@ -2002,6 +3126,17 @@ dependencies = [ "num-traits", ] +[[package]] +name = "num-bigint" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f93ab6289c7b344a8a9f60f88d80aa20032336fe78da341afc91c8a2341fc75f" +dependencies = [ + "autocfg", + "num-integer", + "num-traits", +] + [[package]] name = "num-complex" version = "0.4.3" @@ -2030,7 +3165,17 @@ checksum = "9e6a0fd4f737c707bd9086cc16c925f294943eb62eb71499e9fd4cf71f8b9f4e" dependencies = [ "proc-macro2", "quote", - "syn 2.0.26", + "syn 2.0.29", +] + +[[package]] +name = "num-format" +version = "0.4.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a652d9771a63711fd3c3deb670acfbe5c30a4072e664d7a3bf5a9e1056ac72c3" +dependencies = [ + "arrayvec 0.7.4", + "itoa", ] [[package]] @@ -2043,6 +3188,18 @@ dependencies = [ "num-traits", ] +[[package]] +name = "num-rational" +version = "0.4.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "0638a1c9d0a3c0914158145bc76cff373a75a627e6ecbfb71cbe6f453a5a19b0" +dependencies = [ + "autocfg", + "num-bigint 0.4.3", + "num-integer", + "num-traits", +] + [[package]] name = "num-traits" version = "0.2.15" @@ -2058,10 +3215,55 @@ version = "1.16.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4161fcb6d602d4d2081af7c3a45852d875a03dd337a6bfdd6e06407b61342a43" dependencies = [ - "hermit-abi", + "hermit-abi 0.3.2", "libc", ] +[[package]] +name = "num_enum" +version = "0.5.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1f646caf906c20226733ed5b1374287eb97e3c2a5c227ce668c1f2ce20ae57c9" +dependencies = [ + "num_enum_derive", +] + +[[package]] +name = "num_enum_derive" +version = "0.5.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dcbff9bc912032c62bf65ef1d5aea88983b420f4f839db1e9b0c281a25c9c799" +dependencies = [ + "proc-macro-crate 1.3.1", + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "object" +version = "0.29.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "21158b2c33aa6d4561f1c0a6ea283ca92bc54802a93b263e910746d679a7eb53" +dependencies = [ + "crc32fast", + "hashbrown 0.12.3", + "indexmap 1.9.3", + "memchr", +] + +[[package]] +name = "object" +version = "0.30.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "03b4680b86d9cfafba8fc491dc9b6df26b68cf40e9e6cd73909194759a63c385" +dependencies = [ + "crc32fast", + "hashbrown 0.13.2", + "indexmap 1.9.3", + "memchr", +] + [[package]] name = "object" version = "0.31.1" @@ -2077,6 +3279,12 @@ version = "1.18.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "dd8b5dd2ae5ed71462c540258bedcb51965123ad7e7ccf4b9a8cafaa4a63576d" +[[package]] +name = "opaque-debug" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2839e79665f131bdb5782e51f2c6c9599c133c6098982a54c794358bf432529c" + [[package]] name = "opaque-debug" version = "0.3.0" @@ -2106,7 +3314,7 @@ checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c" dependencies = [ "proc-macro2", "quote", - "syn 2.0.26", + "syn 2.0.29", ] [[package]] @@ -2133,12 +3341,65 @@ version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "04744f49eae99ab78e0d5c0b603ab218f515ea8cfe5a456d7629ad883a3b6e7d" +[[package]] +name = "ordered-float" +version = "2.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7940cf2ca942593318d07fcf2596cdca60a85c9e7fab408a5e21a4f9dcd40d87" +dependencies = [ + "num-traits", +] + [[package]] name = "overload" version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39" +[[package]] +name = "parity-scale-codec" +version = "3.6.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dd8e946cc0cc711189c0b0249fb8b599cbeeab9784d83c415719368bb8d4ac64" +dependencies = [ + "arrayvec 0.7.4", + "bitvec", + "byte-slice-cast", + "bytes", + "impl-trait-for-tuples", + "parity-scale-codec-derive", + "serde", +] + +[[package]] +name = "parity-scale-codec-derive" +version = "3.6.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2a296c3079b5fefbc499e1de58dc26c09b1b9a5952d26694ee89f04a43ebbb3e" +dependencies = [ + "proc-macro-crate 1.3.1", 
+ "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "parity-wasm" +version = "0.45.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e1ad0aff30c1da14b1254fcb2af73e1fa9a28670e584a626f53a369d0e157304" + +[[package]] +name = "parking_lot" +version = "0.11.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7d17b78036a60663b797adeaee46f5c9dfebb86948d1255007a1d6be0271ff99" +dependencies = [ + "instant", + "lock_api", + "parking_lot_core 0.8.6", +] + [[package]] name = "parking_lot" version = "0.12.1" @@ -2146,7 +3407,21 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3742b2c103b9f06bc9fff0a37ff4912935851bee6d36f3c02bcc755bcfec228f" dependencies = [ "lock_api", - "parking_lot_core", + "parking_lot_core 0.9.8", +] + +[[package]] +name = "parking_lot_core" +version = "0.8.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "60a2cfe6f0ad2bfc16aefa463b497d5c7a5ecd44a23efa72aa342d90177356dc" +dependencies = [ + "cfg-if", + "instant", + "libc", + "redox_syscall 0.2.16", + "smallvec 1.11.0", + "winapi", ] [[package]] @@ -2158,8 +3433,8 @@ dependencies = [ "cfg-if", "libc", "redox_syscall 0.3.5", - "smallvec", - "windows-targets", + "smallvec 1.11.0", + "windows-targets 0.48.1", ] [[package]] @@ -2189,6 +3464,15 @@ dependencies = [ "serde", ] +[[package]] +name = "pbkdf2" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d95f5254224e617595d2cc3cc73ff0a5eaf2637519e25f03388154e9378b6ffa" +dependencies = [ + "crypto-mac 0.11.1", +] + [[package]] name = "pbkdf2" version = "0.11.0" @@ -2196,7 +3480,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "83a0692ec44e4cf1ef28ca317f14f8f07da2d95ec3fa01f86e4467b725e60917" dependencies = [ "digest 0.10.7", - "hmac", + "hmac 0.12.1", "password-hash", "sha2 0.10.7", ] @@ -2240,7 +3524,7 @@ checksum = "ec2e072ecce94ec471b13398d5402c188e76ac03cf74dd1a975161b23a3f6d9c" dependencies = [ "proc-macro2", "quote", - "syn 2.0.26", + "syn 2.0.29", ] [[package]] @@ -2271,12 +3555,41 @@ version = "0.3.27" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "26072860ba924cbfa98ea39c8c19b4dd6a4a25423dbdf219c1eca91aa0cf6964" +[[package]] +name = "platforms" +version = "3.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3d7ddaed09e0eb771a79ab0fd64609ba0afb0a8366421957936ad14cbd13630" + [[package]] name = "ppv-lite86" version = "0.2.17" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5b40af805b3121feab8a3c29f04d8ad262fa8e0561883e7653e024ae4479e6de" +[[package]] +name = "presence" +version = "0.1.0" +dependencies = [ + "anyhow", + "async-trait", + "avail-subxt", + "borsh", + "bytes", + "parity-scale-codec", + "primitive-types", + "reqwest", + "serde", + "serde_json", + "sov-rollup-interface", + "sp-core 21.0.0", + "subxt", + "thiserror", + "tokio", + "tracing", + "tracing-subscriber 0.3.17", +] + [[package]] name = "prettyplease" version = "0.1.25" @@ -2294,7 +3607,34 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "92139198957b410250d43fad93e630d956499a625c527eda65175c8680f83387" dependencies = [ "proc-macro2", - "syn 2.0.26", + "syn 2.0.29", +] + +[[package]] +name = "prettytable-rs" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eea25e07510aa6ab6547308ebe3c036016d162b8da920dbb079e3ba8acf3d95a" 
+dependencies = [ + "csv", + "encode_unicode", + "is-terminal", + "lazy_static", + "term", + "unicode-width", +] + +[[package]] +name = "primitive-types" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9f3486ccba82358b11a77516035647c34ba167dfa53312630de83b12bd4f3d66" +dependencies = [ + "fixed-hash", + "impl-codec", + "impl-serde", + "scale-info", + "uint", ] [[package]] @@ -2316,6 +3656,30 @@ dependencies = [ "toml_edit", ] +[[package]] +name = "proc-macro-error" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "da25490ff9892aab3fcf7c36f08cfb902dd3e71ca0f9f9517bea02a73a5ce38c" +dependencies = [ + "proc-macro-error-attr", + "proc-macro2", + "quote", + "syn 1.0.109", + "version_check", +] + +[[package]] +name = "proc-macro-error-attr" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a1be40180e52ecc98ad80b184934baf3d0d29f979574e439af5a55274b35f869" +dependencies = [ + "proc-macro2", + "quote", + "version_check", +] + [[package]] name = "proc-macro2" version = "1.0.66" @@ -2335,7 +3699,7 @@ dependencies = [ "fnv", "lazy_static", "memchr", - "parking_lot", + "parking_lot 0.12.1", "thiserror", ] @@ -2356,7 +3720,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "119533552c9a7ffacc21e099c24a0ac8bb19c2a2a3f363de84cd9b844feab270" dependencies = [ "bytes", - "heck", + "heck 0.4.1", "itertools", "lazy_static", "log", @@ -2393,6 +3757,15 @@ dependencies = [ "prost", ] +[[package]] +name = "psm" +version = "0.1.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5787f7cda34e3033a72192c018bc5883100330f362ef279a8cbccfce8bb4e874" +dependencies = [ + "cc", +] + [[package]] name = "quote" version = "1.0.31" @@ -2537,28 +3910,63 @@ dependencies = [ ] [[package]] -name = "regex" -version = "1.9.1" +name = "ref-cast" +version = "1.0.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b2eae68fc220f7cf2532e4494aded17545fce192d59cd996e0fe7887f4ceb575" +checksum = "acde58d073e9c79da00f2b5b84eed919c8326832648a5b109b3fce1bb1175280" dependencies = [ - "aho-corasick", - "memchr", - "regex-automata", - "regex-syntax", + "ref-cast-impl", ] [[package]] -name = "regex-automata" -version = "0.3.3" +name = "ref-cast-impl" +version = "1.0.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "39354c10dd07468c2e73926b23bb9c2caca74c5501e38a35da70406f1d923310" +checksum = "7f7473c2cfcf90008193dd0e3e16599455cb601a9fce322b5bb55de799664925" dependencies = [ - "aho-corasick", - "memchr", - "regex-syntax", + "proc-macro2", + "quote", + "syn 2.0.29", +] + +[[package]] +name = "regex" +version = "1.9.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b2eae68fc220f7cf2532e4494aded17545fce192d59cd996e0fe7887f4ceb575" +dependencies = [ + "aho-corasick", + "memchr", + "regex-automata 0.3.3", + "regex-syntax 0.7.4", +] + +[[package]] +name = "regex-automata" +version = "0.1.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6c230d73fb8d8c1b9c0b3135c5142a8acee3a0558fb8db5cf1cb65f8d7862132" +dependencies = [ + "regex-syntax 0.6.29", +] + +[[package]] +name = "regex-automata" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "39354c10dd07468c2e73926b23bb9c2caca74c5501e38a35da70406f1d923310" +dependencies = [ + "aho-corasick", + "memchr", + "regex-syntax 0.7.4", ] 
+[[package]] +name = "regex-syntax" +version = "0.6.29" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f162c6dd7b008981e4d40210aca20b4bd0f9b60ca9271061b07f78537722f2e1" + [[package]] name = "regex-syntax" version = "0.7.4" @@ -2632,11 +4040,16 @@ version = "0.1.0" dependencies = [ "anyhow", "bincode", + "bytemuck", + "once_cell", + "parking_lot 0.12.1", "risc0-circuit-rv32im", "risc0-zkp", "risc0-zkvm", + "risc0-zkvm-platform", "serde", "sov-rollup-interface", + "zk-cycle-utils", ] [[package]] @@ -2765,7 +4178,7 @@ dependencies = [ "crypto-bigint", "dyn_partial_eq", "elf", - "generic-array", + "generic-array 0.14.7", "getrandom 0.2.10", "hex", "lazy-regex", @@ -2825,6 +4238,12 @@ version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2" +[[package]] +name = "rustc-hex" +version = "2.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3e75f6a532d0fd9f7f13144f392b6ad56a32696bfcd9c78f797f16bbb6f072d6" + [[package]] name = "rustc_version" version = "0.4.0" @@ -2834,6 +4253,20 @@ dependencies = [ "semver", ] +[[package]] +name = "rustix" +version = "0.36.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c37f1bd5ef1b5422177b7646cba67430579cfe2ace80f284fee876bca52ad941" +dependencies = [ + "bitflags 1.3.2", + "errno", + "io-lifetimes", + "libc", + "linux-raw-sys 0.1.4", + "windows-sys 0.45.0", +] + [[package]] name = "rustix" version = "0.37.23" @@ -2845,7 +4278,7 @@ dependencies = [ "io-lifetimes", "libc", "linux-raw-sys 0.3.8", - "windows-sys", + "windows-sys 0.48.0", ] [[package]] @@ -2858,7 +4291,7 @@ dependencies = [ "errno", "libc", "linux-raw-sys 0.4.3", - "windows-sys", + "windows-sys 0.48.0", ] [[package]] @@ -2881,7 +4314,7 @@ checksum = "79ea77c539259495ce8ca47f53e66ae0330a8819f67e23ac96ca02f50e7b7d36" dependencies = [ "log", "ring", - "rustls-webpki", + "rustls-webpki 0.101.2", "sct", ] @@ -2906,6 +4339,16 @@ dependencies = [ "base64 0.21.2", ] +[[package]] +name = "rustls-webpki" +version = "0.100.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e98ff011474fa39949b7e5c0428f9b4937eda7da7848bbb947786b7be0b27dab" +dependencies = [ + "ring", + "untrusted", +] + [[package]] name = "rustls-webpki" version = "0.101.2" @@ -2916,19 +4359,91 @@ dependencies = [ "untrusted", ] +[[package]] +name = "rustversion" +version = "1.0.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7ffc183a10b4478d04cbbbfc96d0873219d962dd5accaff2ffbd4ceb7df837f4" + [[package]] name = "ryu" version = "1.0.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1ad4cc8da4ef723ed60bced201181d83791ad433213d8c24efffda1eec85d741" +[[package]] +name = "scale-bits" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8dd7aca73785181cc41f0bbe017263e682b585ca660540ba569133901d013ecf" +dependencies = [ + "parity-scale-codec", + "scale-info", + "serde", +] + +[[package]] +name = "scale-decode" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d823d4be477fc33321f93d08fb6c2698273d044f01362dc27573a750deb7c233" +dependencies = [ + "parity-scale-codec", + "scale-bits", + "scale-info", + "thiserror", +] + +[[package]] +name = "scale-info" +version = "2.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"35c0a159d0c45c12b20c5a844feb1fe4bea86e28f17b92a5f0c42193634d3782" +dependencies = [ + "bitvec", + "cfg-if", + "derive_more", + "parity-scale-codec", + "scale-info-derive", + "serde", +] + +[[package]] +name = "scale-info-derive" +version = "2.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "912e55f6d20e0e80d63733872b40e1227c0bce1e1ab81ba67d696339bfd7fd29" +dependencies = [ + "proc-macro-crate 1.3.1", + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "scale-value" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "16a5e7810815bd295da73e4216d1dfbced3c7c7c7054d70fa5f6e4c58123fff4" +dependencies = [ + "either", + "frame-metadata", + "parity-scale-codec", + "scale-bits", + "scale-decode", + "scale-info", + "serde", + "thiserror", + "yap", +] + [[package]] name = "schannel" version = "0.1.22" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0c3733bf4cf7ea0880754e19cb5a462007c4a8c1914bff372ccc95b464f1df88" dependencies = [ - "windows-sys", + "windows-sys 0.48.0", ] [[package]] @@ -2955,6 +4470,35 @@ dependencies = [ "syn 1.0.109", ] +[[package]] +name = "schnellru" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "772575a524feeb803e5b0fcbc6dd9f367e579488197c94c6e4023aad2305774d" +dependencies = [ + "ahash 0.8.3", + "cfg-if", + "hashbrown 0.13.2", +] + +[[package]] +name = "schnorrkel" +version = "0.9.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "021b403afe70d81eea68f6ea12f6b3c9588e5d536a94c3bf80f15e7faa267862" +dependencies = [ + "arrayref", + "arrayvec 0.5.2", + "curve25519-dalek 2.1.3", + "getrandom 0.1.16", + "merlin", + "rand 0.7.3", + "rand_core 0.5.1", + "sha2 0.8.2", + "subtle", + "zeroize", +] + [[package]] name = "scopeguard" version = "1.2.0" @@ -2971,6 +4515,33 @@ dependencies = [ "untrusted", ] +[[package]] +name = "secp256k1" +version = "0.24.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6b1629c9c557ef9b293568b338dddfc8208c98a18c59d722a9d53f859d9c9b62" +dependencies = [ + "secp256k1-sys", +] + +[[package]] +name = "secp256k1-sys" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "83080e2c2fc1006e625be82e5d1eb6a43b7fd9578b617fcc55814daf286bba4b" +dependencies = [ + "cc", +] + +[[package]] +name = "secrecy" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9bd1c54ea06cfd2f6b63219704de0b9b4f72dcc2b8fdef820be6cd799780e91e" +dependencies = [ + "zeroize", +] + [[package]] name = "security-framework" version = "2.9.1" @@ -3003,15 +4574,42 @@ dependencies = [ "serde", ] +[[package]] +name = "send_wrapper" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f638d531eccd6e23b980caf34876660d38e265409d8e99b397ab71eb3612fad0" + [[package]] name = "serde" -version = "1.0.173" +version = "1.0.188" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e91f70896d6720bc714a4a57d22fc91f1db634680e65c8efe13323f1fa38d53f" +checksum = "cf9e0fcba69a370eed61bcf2b728575f726b50b55cba78064753d708ddc7549e" dependencies = [ "serde_derive", ] +[[package]] +name = "serde-hex" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ca37e3e4d1b39afd7ff11ee4e947efae85adfddf4841787bfa47c470e96dc26d" +dependencies = [ + "array-init", + "serde", + "smallvec 0.6.14", +] + 
+[[package]] +name = "serde-value" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f3a1a3341211875ef120e117ea7fd5228530ae7e7036a779fdc9117be6b3282c" +dependencies = [ + "ordered-float", + "serde", +] + [[package]] name = "serde_bytes" version = "0.11.12" @@ -3023,13 +4621,13 @@ dependencies = [ [[package]] name = "serde_derive" -version = "1.0.173" +version = "1.0.188" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a6250dde8342e0232232be9ca3db7aa40aceb5a3e5dd9bddbc00d99a007cde49" +checksum = "4eca7ac642d82aa35b60049a6eccb4be6be75e599bd2e9adb5f875a737654af2" dependencies = [ "proc-macro2", "quote", - "syn 2.0.26", + "syn 2.0.29", ] [[package]] @@ -3062,7 +4660,7 @@ checksum = "1d89a8107374290037607734c0b73a85db7ed80cae314b3c5791f192a496e731" dependencies = [ "proc-macro2", "quote", - "syn 2.0.26", + "syn 2.0.29", ] [[package]] @@ -3086,6 +4684,18 @@ dependencies = [ "serde", ] +[[package]] +name = "serde_yaml" +version = "0.8.26" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "578a7433b776b56a35785ed5ce9a7e777ac0598aac5a6dd1b4b18a307c7fc71b" +dependencies = [ + "indexmap 1.9.3", + "ryu", + "serde", + "yaml-rust", +] + [[package]] name = "sha-1" version = "0.9.8" @@ -3096,7 +4706,7 @@ dependencies = [ "cfg-if", "cpufeatures", "digest 0.9.0", - "opaque-debug", + "opaque-debug 0.3.0", ] [[package]] @@ -3110,6 +4720,18 @@ dependencies = [ "digest 0.10.7", ] +[[package]] +name = "sha2" +version = "0.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a256f46ea78a0c0d9ff00077504903ac881a1dafdc20da66545699e7776b3e69" +dependencies = [ + "block-buffer 0.7.3", + "digest 0.8.1", + "fake-simd", + "opaque-debug 0.2.3", +] + [[package]] name = "sha2" version = "0.9.9" @@ -3120,7 +4742,7 @@ dependencies = [ "cfg-if", "cpufeatures", "digest 0.9.0", - "opaque-debug", + "opaque-debug 0.3.0", ] [[package]] @@ -3189,6 +4811,15 @@ dependencies = [ "autocfg", ] +[[package]] +name = "smallvec" +version = "0.6.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b97fcaeba89edba30f044a10c6a3cc39df9c3f17d7cd829dd1446cab35f890e0" +dependencies = [ + "maybe-uninit", +] + [[package]] name = "smallvec" version = "1.11.0" @@ -3227,7 +4858,7 @@ version = "0.1.0" dependencies = [ "anyhow", "borsh", - "clap", + "clap 4.3.17", "jsonrpsee 0.18.2", "schemars", "serde", @@ -3243,7 +4874,7 @@ version = "0.1.0" dependencies = [ "anyhow", "borsh", - "clap", + "clap 4.3.17", "hex", "jsonrpsee 0.18.2", "schemars", @@ -3255,6 +4886,58 @@ dependencies = [ "thiserror", ] +[[package]] +name = "sov-blob-storage" +version = "0.1.0" +dependencies = [ + "anyhow", + "bincode", + "borsh", + "hex", + "jsonrpsee 0.18.2", + "schemars", + "serde", + "serde_json", + "sov-modules-api", + "sov-modules-macros", + "sov-rollup-interface", + "sov-sequencer-registry", + "sov-state", + "tracing", +] + +[[package]] +name = "sov-chain-state" +version = "0.1.0" +dependencies = [ + "anyhow", + "borsh", + "jsonrpsee 0.18.2", + "serde", + "serde_json", + "sov-modules-api", + "sov-modules-macros", + "sov-rollup-interface", + "sov-state", +] + +[[package]] +name = "sov-cli" +version = "0.1.0" +dependencies = [ + "anyhow", + "borsh", + "directories", + "hex", + "jsonrpsee 0.18.2", + "serde", + "serde_json", + "sov-accounts", + "sov-bank", + "sov-modules-api", + "tokio", +] + [[package]] name = "sov-db" version = "0.1.0" @@ -3268,6 +4951,7 @@ dependencies = [ "serde", "sov-rollup-interface", 
"sov-schema-db", + "tokio", ] [[package]] @@ -3275,42 +4959,68 @@ name = "sov-demo-prover-host" version = "0.1.0" dependencies = [ "anyhow", + "bincode", "borsh", + "celestia", "const-rollup-config", "demo-stf", + "env_logger", "hex", "jsonrpsee 0.16.2", - "jupiter", + "log", + "log4rs", "methods", + "once_cell", + "parking_lot 0.11.2", + "prettytable-rs", + "regex", "risc0-adapter", "risc0-zkvm", "serde", "serde_json", "sha2 0.10.7", + "sov-demo-rollup", "sov-modules-api", "sov-rollup-interface", "sov-state", "sov-stf-runner", + "tempfile", "tokio", "tracing", - "tracing-subscriber", + "tracing-subscriber 0.3.17", + "zk-cycle-macros", ] [[package]] -name = "sov-election" +name = "sov-demo-rollup" version = "0.1.0" dependencies = [ "anyhow", + "async-trait", "borsh", - "clap", + "bytes", + "celestia", + "const-rollup-config", + "demo-stf", + "futures", "hex", + "jmt", "jsonrpsee 0.18.2", - "schemars", + "presence", + "risc0-adapter", "serde", "serde_json", + "sov-cli", + "sov-db", "sov-modules-api", + "sov-modules-stf-template", "sov-rollup-interface", + "sov-sequencer", "sov-state", + "sov-stf-runner", + "tokio", + "tracing", + "tracing-subscriber 0.3.17", ] [[package]] @@ -3327,18 +5037,19 @@ dependencies = [ "anyhow", "bech32", "borsh", - "clap", + "clap 4.3.17", "derive_more", - "ed25519-dalek", + "ed25519-dalek 2.0.0", "hex", "jsonrpsee 0.18.2", - "rand 0.7.3", + "rand 0.8.5", "schemars", "serde", "serde_json", "sha2 0.10.7", "sov-modules-macros", "sov-rollup-interface", + "sov-sequencer", "sov-state", "thiserror", ] @@ -3368,6 +5079,7 @@ dependencies = [ "sov-modules-api", "sov-rollup-interface", "sov-state", + "thiserror", "tracing", ] @@ -3377,12 +5089,14 @@ version = "0.1.0" dependencies = [ "anyhow", "async-trait", + "bincode", "borsh", "bytes", "digest 0.10.7", "hex", "serde", "sha2 0.10.7", + "tokio", ] [[package]] @@ -3406,7 +5120,6 @@ dependencies = [ "hex", "jsonrpsee 0.18.2", "serde", - "sov-modules-api", "sov-rollup-interface", "tracing", ] @@ -3417,7 +5130,7 @@ version = "0.1.0" dependencies = [ "anyhow", "borsh", - "clap", + "clap 4.3.17", "jsonrpsee 0.18.2", "schemars", "serde", @@ -3433,6 +5146,7 @@ name = "sov-state" version = "0.1.0" dependencies = [ "anyhow", + "bcs", "borsh", "hex", "jmt", @@ -3450,10 +5164,10 @@ version = "0.1.0" dependencies = [ "anyhow", "borsh", + "celestia", "futures", "hex", "jsonrpsee 0.18.2", - "jupiter", "serde", "serde_json", "sov-db", @@ -3463,7 +5177,7 @@ dependencies = [ "tokio", "toml 0.7.6", "tracing", - "tracing-subscriber", + "tracing-subscriber 0.3.17", ] [[package]] @@ -3472,7 +5186,7 @@ version = "0.1.0" dependencies = [ "anyhow", "borsh", - "clap", + "clap 4.3.17", "jsonrpsee 0.18.2", "schemars", "serde", @@ -3484,8 +5198,499 @@ dependencies = [ ] [[package]] -name = "spin" -version = "0.5.2" +name = "sp-application-crypto" +version = "17.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2f08604ba4bd856311946722958711a08bded5c929e1227f7a697c58deb09468" +dependencies = [ + "parity-scale-codec", + "scale-info", + "serde", + "sp-core 16.0.0", + "sp-io", + "sp-std 6.0.0", +] + +[[package]] +name = "sp-arithmetic" +version = "12.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7796939f2e3b68a3b9410ea17a2063b78038cd366f57fa772dd3be0798bd3412" +dependencies = [ + "integer-sqrt", + "num-traits", + "parity-scale-codec", + "scale-info", + "serde", + "sp-std 6.0.0", + "static_assertions", +] + +[[package]] +name = "sp-core" +version = "16.0.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c96dc3debbe5c22ebf18f99e6a53199efe748e6e584a1902adb88cbad66ae7c" +dependencies = [ + "array-bytes", + "base58", + "bitflags 1.3.2", + "blake2", + "bounded-collections", + "dyn-clonable", + "ed25519-zebra", + "futures", + "hash-db 0.15.2", + "hash256-std-hasher", + "impl-serde", + "lazy_static", + "libsecp256k1", + "log", + "merlin", + "parity-scale-codec", + "parking_lot 0.12.1", + "primitive-types", + "rand 0.8.5", + "regex", + "scale-info", + "schnorrkel", + "secp256k1", + "secrecy", + "serde", + "sp-core-hashing 6.0.0", + "sp-debug-derive 6.0.0", + "sp-externalities 0.17.0", + "sp-runtime-interface 13.0.0", + "sp-std 6.0.0", + "sp-storage 11.0.0", + "ss58-registry", + "substrate-bip39", + "thiserror", + "tiny-bip39", + "zeroize", +] + +[[package]] +name = "sp-core" +version = "21.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f18d9e2f67d8661f9729f35347069ac29d92758b59135176799db966947a7336" +dependencies = [ + "array-bytes", + "bitflags 1.3.2", + "blake2", + "bounded-collections", + "bs58", + "dyn-clonable", + "ed25519-zebra", + "futures", + "hash-db 0.16.0", + "hash256-std-hasher", + "impl-serde", + "lazy_static", + "libsecp256k1", + "log", + "merlin", + "parity-scale-codec", + "parking_lot 0.12.1", + "paste", + "primitive-types", + "rand 0.8.5", + "regex", + "scale-info", + "schnorrkel", + "secp256k1", + "secrecy", + "serde", + "sp-core-hashing 9.0.0", + "sp-debug-derive 8.0.0", + "sp-externalities 0.19.0", + "sp-runtime-interface 17.0.0", + "sp-std 8.0.0", + "sp-storage 13.0.0", + "ss58-registry", + "substrate-bip39", + "thiserror", + "tiny-bip39", + "zeroize", +] + +[[package]] +name = "sp-core-hashing" +version = "6.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cbc2d1947252b7a4e403b0a260f596920443742791765ec111daa2bbf98eff25" +dependencies = [ + "blake2", + "byteorder", + "digest 0.10.7", + "sha2 0.10.7", + "sha3", + "sp-std 6.0.0", + "twox-hash", +] + +[[package]] +name = "sp-core-hashing" +version = "9.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2ee599a8399448e65197f9a6cee338ad192e9023e35e31f22382964c3c174c68" +dependencies = [ + "blake2b_simd", + "byteorder", + "digest 0.10.7", + "sha2 0.10.7", + "sha3", + "sp-std 8.0.0", + "twox-hash", +] + +[[package]] +name = "sp-debug-derive" +version = "6.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "66fb9dc63d54de7d7bed62a505b6e0bd66c122525ea1abb348f6564717c3df2d" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "sp-debug-derive" +version = "8.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c7f531814d2f16995144c74428830ccf7d94ff4a7749632b83ad8199b181140c" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.29", +] + +[[package]] +name = "sp-externalities" +version = "0.17.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "57052935c9c9b070ea6b339ef0da3bf241b7e065fc37f9c551669ee83ecfc3c1" +dependencies = [ + "environmental", + "parity-scale-codec", + "sp-std 6.0.0", + "sp-storage 11.0.0", +] + +[[package]] +name = "sp-externalities" +version = "0.19.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a0f71c671e01a8ca60da925d43a1b351b69626e268b8837f8371e320cf1dd100" +dependencies = [ + "environmental", + "parity-scale-codec", + "sp-std 8.0.0", + "sp-storage 13.0.0", +] + +[[package]] 
+name = "sp-io" +version = "17.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "578959f9a7e44fd2dd96e8b8bc893cea04fcd7c00a4ffbb0b91c5013899dd02b" +dependencies = [ + "bytes", + "ed25519 1.5.3", + "ed25519-dalek 1.0.1", + "futures", + "libsecp256k1", + "log", + "parity-scale-codec", + "secp256k1", + "sp-core 16.0.0", + "sp-externalities 0.17.0", + "sp-keystore", + "sp-runtime-interface 13.0.0", + "sp-state-machine", + "sp-std 6.0.0", + "sp-tracing 8.0.0", + "sp-trie", + "tracing", + "tracing-core", +] + +[[package]] +name = "sp-keystore" +version = "0.22.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "480dbd54b281c638209fbcfce69902b82a0a1af0e22219d46825eadced3136b6" +dependencies = [ + "async-trait", + "futures", + "merlin", + "parity-scale-codec", + "parking_lot 0.12.1", + "schnorrkel", + "sp-core 16.0.0", + "sp-externalities 0.17.0", + "thiserror", +] + +[[package]] +name = "sp-panic-handler" +version = "6.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4abed79c3d5b3622f65ab065676addd9923b9b122cd257df23e2757ce487c6d2" +dependencies = [ + "backtrace", + "lazy_static", + "regex", +] + +[[package]] +name = "sp-runtime" +version = "18.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b8ab2fd44668d3e8674e2253a43852857a47d49be7db737e98bf157e4bcebefd" +dependencies = [ + "either", + "hash256-std-hasher", + "impl-trait-for-tuples", + "log", + "parity-scale-codec", + "paste", + "rand 0.8.5", + "scale-info", + "serde", + "sp-application-crypto", + "sp-arithmetic", + "sp-core 16.0.0", + "sp-io", + "sp-std 6.0.0", + "sp-weights", +] + +[[package]] +name = "sp-runtime-interface" +version = "13.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cdb7707246cee4967a8cc71e3ef0e82f562e8b1020606447a6a12b99c7c1b443" +dependencies = [ + "bytes", + "impl-trait-for-tuples", + "parity-scale-codec", + "primitive-types", + "sp-externalities 0.17.0", + "sp-runtime-interface-proc-macro 9.0.0", + "sp-std 6.0.0", + "sp-storage 11.0.0", + "sp-tracing 8.0.0", + "sp-wasm-interface 10.0.0", + "static_assertions", +] + +[[package]] +name = "sp-runtime-interface" +version = "17.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6e676128182f90015e916f806cba635c8141e341e7abbc45d25525472e1bbce8" +dependencies = [ + "bytes", + "impl-trait-for-tuples", + "parity-scale-codec", + "primitive-types", + "sp-externalities 0.19.0", + "sp-runtime-interface-proc-macro 11.0.0", + "sp-std 8.0.0", + "sp-storage 13.0.0", + "sp-tracing 10.0.0", + "sp-wasm-interface 14.0.0", + "static_assertions", +] + +[[package]] +name = "sp-runtime-interface-proc-macro" +version = "9.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2773c90e5765847c5e8b4a24b553d38a9ca52ded47c142cfcfb7948f42827af9" +dependencies = [ + "Inflector", + "proc-macro-crate 1.3.1", + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "sp-runtime-interface-proc-macro" +version = "11.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a5d5bd5566fe5633ec48dfa35ab152fd29f8a577c21971e1c6db9f28afb9bbb9" +dependencies = [ + "Inflector", + "proc-macro-crate 1.3.1", + "proc-macro2", + "quote", + "syn 2.0.29", +] + +[[package]] +name = "sp-state-machine" +version = "0.22.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"4c957b8b4c252507c12674948db427c5e34fd1760ce256922f1ec5f89f781a4f" +dependencies = [ + "hash-db 0.15.2", + "log", + "parity-scale-codec", + "parking_lot 0.12.1", + "rand 0.8.5", + "smallvec 1.11.0", + "sp-core 16.0.0", + "sp-externalities 0.17.0", + "sp-panic-handler", + "sp-std 6.0.0", + "sp-trie", + "thiserror", + "tracing", +] + +[[package]] +name = "sp-std" +version = "6.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "af0ee286f98455272f64ac5bb1384ff21ac029fbb669afbaf48477faff12760e" + +[[package]] +name = "sp-std" +version = "8.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "53458e3c57df53698b3401ec0934bea8e8cfce034816873c0b0abbd83d7bac0d" + +[[package]] +name = "sp-storage" +version = "11.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3c20cb0c562d1a159ecb2c7ca786828c81e432c535474967d2df3a484977cea4" +dependencies = [ + "impl-serde", + "parity-scale-codec", + "ref-cast", + "serde", + "sp-debug-derive 6.0.0", + "sp-std 6.0.0", +] + +[[package]] +name = "sp-storage" +version = "13.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "94294be83f11d4958cfea89ed5798f0b6605f5defc3a996948848458abbcc18e" +dependencies = [ + "impl-serde", + "parity-scale-codec", + "ref-cast", + "serde", + "sp-debug-derive 8.0.0", + "sp-std 8.0.0", +] + +[[package]] +name = "sp-tracing" +version = "8.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e46bd547da89a9cda69b4ce4c91a5b7e1f86915190d83cd407b715d0c6bac042" +dependencies = [ + "parity-scale-codec", + "sp-std 6.0.0", + "tracing", + "tracing-core", + "tracing-subscriber 0.2.25", +] + +[[package]] +name = "sp-tracing" +version = "10.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "357f7591980dd58305956d32f8f6646d0a8ea9ea0e7e868e46f53b68ddf00cec" +dependencies = [ + "parity-scale-codec", + "sp-std 8.0.0", + "tracing", + "tracing-core", + "tracing-subscriber 0.2.25", +] + +[[package]] +name = "sp-trie" +version = "16.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8efbe5b6d29a18fea7c2f52e0098135f2f864b31d335d5105b40a349866ba874" +dependencies = [ + "ahash 0.8.3", + "hash-db 0.15.2", + "hashbrown 0.12.3", + "lazy_static", + "memory-db", + "nohash-hasher", + "parity-scale-codec", + "parking_lot 0.12.1", + "scale-info", + "schnellru", + "sp-core 16.0.0", + "sp-std 6.0.0", + "thiserror", + "tracing", + "trie-db", + "trie-root", +] + +[[package]] +name = "sp-wasm-interface" +version = "10.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbbc05650b6338808892a7b04f0c56bb1f7f928bfa9ac58e0af2c1e5bef33229" +dependencies = [ + "anyhow", + "impl-trait-for-tuples", + "log", + "parity-scale-codec", + "sp-std 6.0.0", + "wasmi", + "wasmtime 5.0.1", +] + +[[package]] +name = "sp-wasm-interface" +version = "14.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a19c122609ca5d8246be6386888596320d03c7bc880959eaa2c36bcd5acd6846" +dependencies = [ + "anyhow", + "impl-trait-for-tuples", + "log", + "parity-scale-codec", + "sp-std 8.0.0", + "wasmtime 8.0.1", +] + +[[package]] +name = "sp-weights" +version = "14.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9ebab7696f915aa548494aef3ca8d15217baf10458fe6edb87e60587a47de358" +dependencies = [ + "parity-scale-codec", + "scale-info", + "serde", + "smallvec 1.11.0", + "sp-arithmetic", + "sp-core 
16.0.0", + "sp-debug-derive 6.0.0", + "sp-std 6.0.0", +] + +[[package]] +name = "spin" +version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6e63cff320ae2c57904679ba7cb63280a3dc4613885beafb148ee7bf9aa9042d" @@ -3499,17 +5704,87 @@ dependencies = [ "der", ] +[[package]] +name = "ss58-registry" +version = "1.43.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5e6915280e2d0db8911e5032a5c275571af6bdded2916abd691a659be25d3439" +dependencies = [ + "Inflector", + "num-format", + "proc-macro2", + "quote", + "serde", + "serde_json", + "unicode-xid", +] + +[[package]] +name = "stable_deref_trait" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3" + +[[package]] +name = "static_assertions" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f" + +[[package]] +name = "strsim" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8ea5119cdb4c55b55d432abb513a0429384878c15dde60cc77b1c99de1a95a6a" + [[package]] name = "strsim" version = "0.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "73473c0e59e6d5812c5dfe2a064a6444949f089e20eec9a2e5506596494e4623" +[[package]] +name = "structopt" +version = "0.3.26" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0c6b5c64445ba8094a6ab0c3cd2ad323e07171012d9c98b0b15651daf1787a10" +dependencies = [ + "clap 2.34.0", + "lazy_static", + "structopt-derive", +] + +[[package]] +name = "structopt-derive" +version = "0.4.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dcb5ae327f9cc13b68763b5749770cb9e048a99bd9dfdfa58d0cf05d5f64afe0" +dependencies = [ + "heck 0.3.3", + "proc-macro-error", + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "substrate-bip39" +version = "0.4.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "49eee6965196b32f882dd2ee85a92b1dbead41b04e53907f269de3b0dc04733c" +dependencies = [ + "hmac 0.11.0", + "pbkdf2 0.8.0", + "schnorrkel", + "sha2 0.9.9", + "zeroize", +] + [[package]] name = "subtle" -version = "2.5.0" +version = "2.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "81cdd64d312baedb58e21336b31bc043b77e01cc99033ce76ef539f78e965ebc" +checksum = "6bdef32e8150c2a081110b42772ffe7d7c9032b606bc226c8260fd97e0976601" [[package]] name = "subtle-encoding" @@ -3526,6 +5801,84 @@ version = "2.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "734676eb262c623cec13c3155096e08d1f8f29adce39ba17948b18dad1e54142" +[[package]] +name = "subxt" +version = "0.27.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "54639dba6a113584083968b6a8f457dedae612abe1bd214762101ca29f12e332" +dependencies = [ + "base58", + "blake2", + "derivative", + "frame-metadata", + "futures", + "getrandom 0.2.10", + "hex", + "impl-serde", + "jsonrpsee 0.16.2", + "parity-scale-codec", + "parking_lot 0.12.1", + "primitive-types", + "scale-bits", + "scale-decode", + "scale-info", + "scale-value", + "serde", + "serde_json", + "sp-core 16.0.0", + "sp-core-hashing 6.0.0", + "sp-runtime", + "subxt-macro", + "subxt-metadata", + "thiserror", + "tracing", +] + +[[package]] +name = "subxt-codegen" +version = "0.27.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "b8e86cb719003f1cedf2710a6e55ca4c37aba4c989bbd3b81dd1c52af9e4827e" +dependencies = [ + "darling", + "frame-metadata", + "heck 0.4.1", + "hex", + "jsonrpsee 0.16.2", + "parity-scale-codec", + "proc-macro-error", + "proc-macro2", + "quote", + "scale-info", + "subxt-metadata", + "syn 1.0.109", + "tokio", +] + +[[package]] +name = "subxt-macro" +version = "0.27.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "74c08de402a78c4c06c3ee3702c80e519efdcb65911348e018b6998d04404916" +dependencies = [ + "darling", + "proc-macro-error", + "subxt-codegen", + "syn 1.0.109", +] + +[[package]] +name = "subxt-metadata" +version = "0.27.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2593ab5f53435e6352675af4f9851342607f37785d84c7a3fb3139550d3c35f0" +dependencies = [ + "frame-metadata", + "parity-scale-codec", + "scale-info", + "sp-core-hashing 6.0.0", +] + [[package]] name = "syn" version = "1.0.109" @@ -3539,9 +5892,9 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.26" +version = "2.0.29" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "45c3457aacde3c65315de5031ec191ce46604304d2446e803d71ade03308d970" +checksum = "c324c494eba9d92503e6f1ef2e6df781e78f6a7705a0202d9801b198807d518a" dependencies = [ "proc-macro2", "quote", @@ -3554,6 +5907,12 @@ version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369" +[[package]] +name = "target-lexicon" +version = "0.12.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9d0e916b1148c8e263850e1ebcbd046f333e0683c724876bb0da63ea4373dc8a" + [[package]] name = "tempfile" version = "3.6.0" @@ -3565,7 +5924,7 @@ dependencies = [ "fastrand", "redox_syscall 0.3.5", "rustix 0.37.23", - "windows-sys", + "windows-sys 0.48.0", ] [[package]] @@ -3593,7 +5952,7 @@ dependencies = [ "subtle", "subtle-encoding", "tendermint-proto", - "time", + "time 0.3.23", "zeroize", ] @@ -3612,7 +5971,36 @@ dependencies = [ "serde", "serde_bytes", "subtle-encoding", - "time", + "time 0.3.23", +] + +[[package]] +name = "term" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c59df8ac95d96ff9bede18eb7300b0fda5e5d8d90960e76f8e14ae765eedbf1f" +dependencies = [ + "dirs-next", + "rustversion", + "winapi", +] + +[[package]] +name = "termcolor" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "be55cf8942feac5c765c2c993422806843c9a9a45d4d5c407ad6dd2ea95eb9b6" +dependencies = [ + "winapi-util", +] + +[[package]] +name = "textwrap" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d326610f408c7a4eb6f51c37c330e496b08506c9457c9d34287ecc38809fb060" +dependencies = [ + "unicode-width", ] [[package]] @@ -3632,7 +6020,18 @@ checksum = "463fe12d7993d3b327787537ce8dd4dfa058de32fc2b195ef3cde03dc4771e8f" dependencies = [ "proc-macro2", "quote", - "syn 2.0.26", + "syn 2.0.29", +] + +[[package]] +name = "thread-id" +version = "4.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3ee93aa2b8331c0fec9091548843f2c90019571814057da3b783f9de09349d73" +dependencies = [ + "libc", + "redox_syscall 0.2.16", + "winapi", ] [[package]] @@ -3645,6 +6044,17 @@ dependencies = [ "once_cell", ] +[[package]] +name = "time" +version = "0.1.45" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "1b797afad3f312d1c66a56d11d0316f916356d11bd158fbc6ca6389ff6bf805a" +dependencies = [ + "libc", + "wasi 0.10.0+wasi-snapshot-preview1", + "winapi", +] + [[package]] name = "time" version = "0.3.23" @@ -3671,6 +6081,25 @@ dependencies = [ "time-core", ] +[[package]] +name = "tiny-bip39" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "62cc94d358b5a1e84a5cb9109f559aa3c4d634d2b1b4de3d0fa4adc7c78e2861" +dependencies = [ + "anyhow", + "hmac 0.12.1", + "once_cell", + "pbkdf2 0.11.0", + "rand 0.8.5", + "rustc-hash", + "sha2 0.10.7", + "thiserror", + "unicode-normalization", + "wasm-bindgen", + "zeroize", +] + [[package]] name = "tinyvec" version = "1.6.0" @@ -3698,12 +6127,12 @@ dependencies = [ "libc", "mio", "num_cpus", - "parking_lot", + "parking_lot 0.12.1", "pin-project-lite", "signal-hook-registry", "socket2", "tokio-macros", - "windows-sys", + "windows-sys 0.48.0", ] [[package]] @@ -3714,7 +6143,7 @@ checksum = "630bdcf245f78637c13ec01ffae6187cca34625e8c63150d424b59e55af2675e" dependencies = [ "proc-macro2", "quote", - "syn 2.0.26", + "syn 2.0.29", ] [[package]] @@ -3865,7 +6294,7 @@ checksum = "5f4f31f56159e98206da9efd823404b79b6ef3143b4a7ab76e67b1751b25a4ab" dependencies = [ "proc-macro2", "quote", - "syn 2.0.26", + "syn 2.0.29", ] [[package]] @@ -3889,26 +6318,105 @@ dependencies = [ "tracing-core", ] +[[package]] +name = "tracing-serde" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bc6b213177105856957181934e4920de57730fc69bf42c37ee5bb664d406d9e1" +dependencies = [ + "serde", + "tracing-core", +] + +[[package]] +name = "tracing-subscriber" +version = "0.2.25" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0e0d2eaa99c3c2e41547cfa109e910a68ea03823cccad4a0525dcbc9b01e8c71" +dependencies = [ + "ansi_term", + "chrono", + "lazy_static", + "matchers 0.0.1", + "regex", + "serde", + "serde_json", + "sharded-slab", + "smallvec 1.11.0", + "thread_local", + "tracing", + "tracing-core", + "tracing-log", + "tracing-serde", +] + [[package]] name = "tracing-subscriber" version = "0.3.17" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "30a651bc37f915e81f087d86e62a18eec5f79550c7faff886f7090b4ea757c77" dependencies = [ + "matchers 0.1.0", "nu-ansi-term", + "once_cell", + "regex", "sharded-slab", - "smallvec", + "smallvec 1.11.0", "thread_local", + "tracing", "tracing-core", "tracing-log", ] +[[package]] +name = "trie-db" +version = "0.24.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "004e1e8f92535694b4cb1444dc5a8073ecf0815e3357f729638b9f8fc4062908" +dependencies = [ + "hash-db 0.15.2", + "hashbrown 0.12.3", + "log", + "rustc-hex", + "smallvec 1.11.0", +] + +[[package]] +name = "trie-root" +version = "0.17.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9a36c5ca3911ed3c9a5416ee6c679042064b93fc637ded67e25f92e68d783891" +dependencies = [ + "hash-db 0.15.2", +] + [[package]] name = "try-lock" version = "0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3528ecfd12c466c6f163363caf2d02a71161dd5e1cc6ae7b34207ea2d42d81ed" +[[package]] +name = "twox-hash" +version = "1.6.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "97fee6b57c6a41524a810daee9286c02d7752c4253064d0b05472833a438f675" +dependencies = [ + "cfg-if", + "digest 0.10.7", + "rand 0.8.5", + 
"static_assertions", +] + +[[package]] +name = "typemap-ors" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a68c24b707f02dd18f1e4ccceb9d49f2058c2fb86384ef9972592904d7a28867" +dependencies = [ + "unsafe-any-ors", +] + [[package]] name = "typenum" version = "1.16.0" @@ -3936,7 +6444,19 @@ checksum = "d836cd032f71d90cbaa3c1f85ce84266af23659766d8c0b1c4c6524a0fb4c36f" dependencies = [ "proc-macro2", "quote", - "syn 2.0.26", + "syn 2.0.29", +] + +[[package]] +name = "uint" +version = "0.9.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "76f64bba2c53b04fcab63c01a7d7427eadc821e3bc48c34dc9ba29c501164b52" +dependencies = [ + "byteorder", + "crunchy", + "hex", + "static_assertions", ] [[package]] @@ -3949,144 +6469,500 @@ checksum = "92888ba5573ff080736b3648696b70cafad7d250551175acbaa4e0385b3e1460" name = "unicode-ident" version = "1.0.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "301abaae475aa91687eb82514b328ab47a211a533026cb25fc3e519b86adfc3c" +checksum = "301abaae475aa91687eb82514b328ab47a211a533026cb25fc3e519b86adfc3c" + +[[package]] +name = "unicode-normalization" +version = "0.1.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c5713f0fc4b5db668a2ac63cdb7bb4469d8c9fed047b1d0292cc7b0ce2ba921" +dependencies = [ + "tinyvec", +] + +[[package]] +name = "unicode-segmentation" +version = "1.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1dd624098567895118886609431a7c3b8f516e41d30e0643f03d94592a147e36" + +[[package]] +name = "unicode-width" +version = "0.1.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c0edd1e5b14653f783770bce4a4dabb4a5108a5370a5f5d8cfe8710c361f6c8b" + +[[package]] +name = "unicode-xid" +version = "0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f962df74c8c05a667b5ee8bcf162993134c104e96440b663c8daa176dc772d8c" + +[[package]] +name = "unsafe-any-ors" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e0a303d30665362d9680d7d91d78b23f5f899504d4f08b3c4cf08d055d87c0ad" +dependencies = [ + "destructure_traitobject", +] + +[[package]] +name = "untrusted" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a156c684c91ea7d62626509bce3cb4e1d9ed5c4d978f7b4352658f96a4c26b4a" + +[[package]] +name = "url" +version = "2.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "50bff7831e19200a85b17131d085c25d7811bc4e186efdaf54bbd132994a88cb" +dependencies = [ + "form_urlencoded", + "idna", + "percent-encoding", +] + +[[package]] +name = "utf8parse" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "711b9620af191e0cdc7468a8d14e709c3dcdb115b36f838e601583af800a370a" + +[[package]] +name = "valuable" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "830b7e5d4d90034032940e4ace0d9a9a057e7a45cd94e6c007832e39edb82f6d" + +[[package]] +name = "vcpkg" +version = "0.2.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426" + +[[package]] +name = "vec_map" +version = "0.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f1bddf1187be692e79c5ffeab891132dfb0f236ed36a43c7ed39f1165ee20191" + +[[package]] +name = 
"version_check" +version = "0.9.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f" + +[[package]] +name = "want" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bfa7760aed19e106de2c7c0b581b509f2f25d3dacaf737cb82ac61bc6d760b0e" +dependencies = [ + "try-lock", +] + +[[package]] +name = "wasi" +version = "0.9.0+wasi-snapshot-preview1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cccddf32554fecc6acb585f82a32a72e28b48f8c4c1883ddfeeeaa96f7d8e519" + +[[package]] +name = "wasi" +version = "0.10.0+wasi-snapshot-preview1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1a143597ca7c7793eff794def352d41792a93c481eb1042423ff7ff72ba2c31f" + +[[package]] +name = "wasi" +version = "0.11.0+wasi-snapshot-preview1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" + +[[package]] +name = "wasm-bindgen" +version = "0.2.87" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7706a72ab36d8cb1f80ffbf0e071533974a60d0a308d01a5d0375bf60499a342" +dependencies = [ + "cfg-if", + "wasm-bindgen-macro", +] + +[[package]] +name = "wasm-bindgen-backend" +version = "0.2.87" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5ef2b6d3c510e9625e5fe6f509ab07d66a760f0885d858736483c32ed7809abd" +dependencies = [ + "bumpalo", + "log", + "once_cell", + "proc-macro2", + "quote", + "syn 2.0.29", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-futures" +version = "0.4.37" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c02dbc21516f9f1f04f187958890d7e6026df8d16540b7ad9492bc34a67cea03" +dependencies = [ + "cfg-if", + "js-sys", + "wasm-bindgen", + "web-sys", +] + +[[package]] +name = "wasm-bindgen-macro" +version = "0.2.87" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dee495e55982a3bd48105a7b947fd2a9b4a8ae3010041b9e0faab3f9cd028f1d" +dependencies = [ + "quote", + "wasm-bindgen-macro-support", +] + +[[package]] +name = "wasm-bindgen-macro-support" +version = "0.2.87" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "54681b18a46765f095758388f2d0cf16eb8d4169b639ab575a8f5693af210c7b" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.29", + "wasm-bindgen-backend", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-shared" +version = "0.2.87" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ca6ad05a4870b2bf5fe995117d3728437bd27d7cd5f06f13c17443ef369775a1" + +[[package]] +name = "wasmi" +version = "0.13.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "06c326c93fbf86419608361a2c925a31754cf109da1b8b55737070b4d6669422" +dependencies = [ + "parity-wasm", + "wasmi-validation", + "wasmi_core", +] + +[[package]] +name = "wasmi-validation" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "91ff416ad1ff0c42e5a926ed5d5fab74c0f098749aa0ad8b2a34b982ce0e867b" +dependencies = [ + "parity-wasm", +] + +[[package]] +name = "wasmi_core" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "57d20cb3c59b788653d99541c646c561c9dd26506f25c0cebfe810659c54c6d7" +dependencies = [ + "downcast-rs", + "libm", + "memory_units", + 
"num-rational", + "num-traits", +] + +[[package]] +name = "wasmparser" +version = "0.96.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "adde01ade41ab9a5d10ec8ed0bb954238cf8625b5cd5a13093d6de2ad9c2be1a" +dependencies = [ + "indexmap 1.9.3", + "url", +] + +[[package]] +name = "wasmparser" +version = "0.102.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "48134de3d7598219ab9eaf6b91b15d8e50d31da76b8519fe4ecfcec2cf35104b" +dependencies = [ + "indexmap 1.9.3", + "url", +] [[package]] -name = "unicode-normalization" -version = "0.1.22" +name = "wasmtime" +version = "5.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5c5713f0fc4b5db668a2ac63cdb7bb4469d8c9fed047b1d0292cc7b0ce2ba921" +checksum = "49ffcc607adc9da024e87ca814592d4bc67f5c5b58e488f5608d5734a1ebc23e" dependencies = [ - "tinyvec", + "anyhow", + "bincode", + "cfg-if", + "indexmap 1.9.3", + "libc", + "log", + "object 0.29.0", + "once_cell", + "paste", + "psm", + "serde", + "target-lexicon", + "wasmparser 0.96.0", + "wasmtime-environ 5.0.1", + "wasmtime-jit 5.0.1", + "wasmtime-runtime 5.0.1", + "windows-sys 0.42.0", ] [[package]] -name = "untrusted" -version = "0.7.1" +name = "wasmtime" +version = "8.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a156c684c91ea7d62626509bce3cb4e1d9ed5c4d978f7b4352658f96a4c26b4a" +checksum = "f907fdead3153cb9bfb7a93bbd5b62629472dc06dee83605358c64c52ed3dda9" +dependencies = [ + "anyhow", + "bincode", + "cfg-if", + "indexmap 1.9.3", + "libc", + "log", + "object 0.30.4", + "once_cell", + "paste", + "psm", + "serde", + "target-lexicon", + "wasmparser 0.102.0", + "wasmtime-environ 8.0.1", + "wasmtime-jit 8.0.1", + "wasmtime-runtime 8.0.1", + "windows-sys 0.45.0", +] [[package]] -name = "url" -version = "2.4.0" +name = "wasmtime-asm-macros" +version = "5.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "50bff7831e19200a85b17131d085c25d7811bc4e186efdaf54bbd132994a88cb" +checksum = "12cb5dc4d79cd7b2453c395f64e9013d2ad90bd083be556d5565cb224ebe8d57" dependencies = [ - "form_urlencoded", - "idna", - "percent-encoding", + "cfg-if", ] [[package]] -name = "utf8parse" -version = "0.2.1" +name = "wasmtime-asm-macros" +version = "8.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "711b9620af191e0cdc7468a8d14e709c3dcdb115b36f838e601583af800a370a" +checksum = "d3b9daa7c14cd4fa3edbf69de994408d5f4b7b0959ac13fa69d465f6597f810d" +dependencies = [ + "cfg-if", +] [[package]] -name = "valuable" -version = "0.1.0" +name = "wasmtime-environ" +version = "5.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "830b7e5d4d90034032940e4ace0d9a9a057e7a45cd94e6c007832e39edb82f6d" +checksum = "9350c919553cddf14f78f9452119c8004d7ef6bfebb79a41a21819ed0c5604d8" +dependencies = [ + "anyhow", + "cranelift-entity 0.92.1", + "gimli 0.26.2", + "indexmap 1.9.3", + "log", + "object 0.29.0", + "serde", + "target-lexicon", + "thiserror", + "wasmparser 0.96.0", + "wasmtime-types 5.0.1", +] [[package]] -name = "vcpkg" -version = "0.2.15" +name = "wasmtime-environ" +version = "8.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426" +checksum = "a990198cee4197423045235bf89d3359e69bd2ea031005f4c2d901125955c949" +dependencies = [ + "anyhow", + "cranelift-entity 0.95.1", + "gimli 0.27.3", + "indexmap 1.9.3", + "log", + "object 0.30.4", + 
"serde", + "target-lexicon", + "thiserror", + "wasmparser 0.102.0", + "wasmtime-types 8.0.1", +] [[package]] -name = "version_check" -version = "0.9.4" +name = "wasmtime-jit" +version = "5.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f" +checksum = "90ba5779ea786386432b94c9fc9ad5597346c319e8239db0d98d5be5cc109a7e" +dependencies = [ + "addr2line 0.17.0", + "anyhow", + "bincode", + "cfg-if", + "cpp_demangle", + "gimli 0.26.2", + "log", + "object 0.29.0", + "rustc-demangle", + "serde", + "target-lexicon", + "wasmtime-environ 5.0.1", + "wasmtime-jit-icache-coherence 5.0.1", + "wasmtime-runtime 5.0.1", + "windows-sys 0.42.0", +] [[package]] -name = "want" -version = "0.3.1" +name = "wasmtime-jit" +version = "8.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bfa7760aed19e106de2c7c0b581b509f2f25d3dacaf737cb82ac61bc6d760b0e" +checksum = "0de48df552cfca1c9b750002d3e07b45772dd033b0b206d5c0968496abf31244" dependencies = [ - "try-lock", + "addr2line 0.19.0", + "anyhow", + "bincode", + "cfg-if", + "cpp_demangle", + "gimli 0.27.3", + "log", + "object 0.30.4", + "rustc-demangle", + "serde", + "target-lexicon", + "wasmtime-environ 8.0.1", + "wasmtime-jit-icache-coherence 8.0.1", + "wasmtime-runtime 8.0.1", + "windows-sys 0.45.0", ] [[package]] -name = "wasi" -version = "0.9.0+wasi-snapshot-preview1" +name = "wasmtime-jit-debug" +version = "5.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cccddf32554fecc6acb585f82a32a72e28b48f8c4c1883ddfeeeaa96f7d8e519" +checksum = "f9841a44c82c74101c10ad4f215392761a2523b3c6c838597962bdb6de75fdb3" +dependencies = [ + "once_cell", +] [[package]] -name = "wasi" -version = "0.11.0+wasi-snapshot-preview1" +name = "wasmtime-jit-debug" +version = "8.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" +checksum = "6e0554b84c15a27d76281d06838aed94e13a77d7bf604bbbaf548aa20eb93846" +dependencies = [ + "once_cell", +] [[package]] -name = "wasm-bindgen" -version = "0.2.87" +name = "wasmtime-jit-icache-coherence" +version = "5.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7706a72ab36d8cb1f80ffbf0e071533974a60d0a308d01a5d0375bf60499a342" +checksum = "fd4356c2493002da3b111d470c2ecea65a3017009afce8adc46eaa5758739891" dependencies = [ "cfg-if", - "wasm-bindgen-macro", + "libc", + "windows-sys 0.42.0", ] [[package]] -name = "wasm-bindgen-backend" -version = "0.2.87" +name = "wasmtime-jit-icache-coherence" +version = "8.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5ef2b6d3c510e9625e5fe6f509ab07d66a760f0885d858736483c32ed7809abd" +checksum = "aecae978b13f7f67efb23bd827373ace4578f2137ec110bbf6a4a7cde4121bbd" dependencies = [ - "bumpalo", - "log", - "once_cell", - "proc-macro2", - "quote", - "syn 2.0.26", - "wasm-bindgen-shared", + "cfg-if", + "libc", + "windows-sys 0.45.0", ] [[package]] -name = "wasm-bindgen-futures" -version = "0.4.37" +name = "wasmtime-runtime" +version = "5.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c02dbc21516f9f1f04f187958890d7e6026df8d16540b7ad9492bc34a67cea03" +checksum = "dd26efea7a790fcf430e663ba2519f0ab6eb8980adf8b0c58c62b727da77c2ec" dependencies = [ + "anyhow", + "cc", "cfg-if", - "js-sys", - "wasm-bindgen", - "web-sys", + "indexmap 1.9.3", + "libc", + "log", + "mach", + "memfd", + 
"memoffset 0.6.5", + "paste", + "rand 0.8.5", + "rustix 0.36.15", + "wasmtime-asm-macros 5.0.1", + "wasmtime-environ 5.0.1", + "wasmtime-jit-debug 5.0.1", + "windows-sys 0.42.0", ] [[package]] -name = "wasm-bindgen-macro" -version = "0.2.87" +name = "wasmtime-runtime" +version = "8.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dee495e55982a3bd48105a7b947fd2a9b4a8ae3010041b9e0faab3f9cd028f1d" +checksum = "658cf6f325232b6760e202e5255d823da5e348fdea827eff0a2a22319000b441" dependencies = [ - "quote", - "wasm-bindgen-macro-support", + "anyhow", + "cc", + "cfg-if", + "indexmap 1.9.3", + "libc", + "log", + "mach", + "memfd", + "memoffset 0.8.0", + "paste", + "rand 0.8.5", + "rustix 0.36.15", + "wasmtime-asm-macros 8.0.1", + "wasmtime-environ 8.0.1", + "wasmtime-jit-debug 8.0.1", + "windows-sys 0.45.0", ] [[package]] -name = "wasm-bindgen-macro-support" -version = "0.2.87" +name = "wasmtime-types" +version = "5.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "54681b18a46765f095758388f2d0cf16eb8d4169b639ab575a8f5693af210c7b" +checksum = "86e1e4f66a2b9a114f9def450ab9971828c968db6ea6fccd613724b771fa4913" dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.26", - "wasm-bindgen-backend", - "wasm-bindgen-shared", + "cranelift-entity 0.92.1", + "serde", + "thiserror", + "wasmparser 0.96.0", ] [[package]] -name = "wasm-bindgen-shared" -version = "0.2.87" +name = "wasmtime-types" +version = "8.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ca6ad05a4870b2bf5fe995117d3728437bd27d7cd5f06f13c17443ef369775a1" +checksum = "a4f6fffd2a1011887d57f07654dd112791e872e3ff4a2e626aee8059ee17f06f" +dependencies = [ + "cranelift-entity 0.95.1", + "serde", + "thiserror", + "wasmparser 0.102.0", +] [[package]] name = "web-sys" @@ -4117,6 +6993,15 @@ dependencies = [ "webpki", ] +[[package]] +name = "webpki-roots" +version = "0.23.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b03058f88386e5ff5310d9111d53f48b17d732b401aeb83a8d5190f2ac459338" +dependencies = [ + "rustls-webpki 0.100.2", +] + [[package]] name = "which" version = "4.4.0" @@ -4144,19 +7029,76 @@ version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" +[[package]] +name = "winapi-util" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "70ec6ce85bb158151cae5e5c87f95a8e97d2c0c4b001223f33a334e3ce5de178" +dependencies = [ + "winapi", +] + [[package]] name = "winapi-x86_64-pc-windows-gnu" version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" +[[package]] +name = "windows" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e686886bc078bc1b0b600cac0147aadb815089b6e4da64016cbd754b6342700f" +dependencies = [ + "windows-targets 0.48.1", +] + +[[package]] +name = "windows-sys" +version = "0.42.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5a3e1820f08b8513f676f7ab6c1f99ff312fb97b553d30ff4dd86f9f15728aa7" +dependencies = [ + "windows_aarch64_gnullvm 0.42.2", + "windows_aarch64_msvc 0.42.2", + "windows_i686_gnu 0.42.2", + "windows_i686_msvc 0.42.2", + "windows_x86_64_gnu 0.42.2", + "windows_x86_64_gnullvm 0.42.2", + "windows_x86_64_msvc 0.42.2", +] + +[[package]] +name = "windows-sys" +version = 
"0.45.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "75283be5efb2831d37ea142365f009c02ec203cd29a3ebecbc093d52315b66d0" +dependencies = [ + "windows-targets 0.42.2", +] + [[package]] name = "windows-sys" version = "0.48.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9" dependencies = [ - "windows-targets", + "windows-targets 0.48.1", +] + +[[package]] +name = "windows-targets" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e5180c00cd44c9b1c88adb3693291f1cd93605ded80c250a75d472756b4d071" +dependencies = [ + "windows_aarch64_gnullvm 0.42.2", + "windows_aarch64_msvc 0.42.2", + "windows_i686_gnu 0.42.2", + "windows_i686_msvc 0.42.2", + "windows_x86_64_gnu 0.42.2", + "windows_x86_64_gnullvm 0.42.2", + "windows_x86_64_msvc 0.42.2", ] [[package]] @@ -4165,51 +7107,93 @@ version = "0.48.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "05d4b17490f70499f20b9e791dcf6a299785ce8af4d709018206dc5b4953e95f" dependencies = [ - "windows_aarch64_gnullvm", - "windows_aarch64_msvc", - "windows_i686_gnu", - "windows_i686_msvc", - "windows_x86_64_gnu", - "windows_x86_64_gnullvm", - "windows_x86_64_msvc", + "windows_aarch64_gnullvm 0.48.0", + "windows_aarch64_msvc 0.48.0", + "windows_i686_gnu 0.48.0", + "windows_i686_msvc 0.48.0", + "windows_x86_64_gnu 0.48.0", + "windows_x86_64_gnullvm 0.48.0", + "windows_x86_64_msvc 0.48.0", ] +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "597a5118570b68bc08d8d59125332c54f1ba9d9adeedeef5b99b02ba2b0698f8" + [[package]] name = "windows_aarch64_gnullvm" version = "0.48.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "91ae572e1b79dba883e0d315474df7305d12f569b400fcf90581b06062f7e1bc" +[[package]] +name = "windows_aarch64_msvc" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e08e8864a60f06ef0d0ff4ba04124db8b0fb3be5776a5cd47641e942e58c4d43" + [[package]] name = "windows_aarch64_msvc" version = "0.48.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b2ef27e0d7bdfcfc7b868b317c1d32c641a6fe4629c171b8928c7b08d98d7cf3" +[[package]] +name = "windows_i686_gnu" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c61d927d8da41da96a81f029489353e68739737d3beca43145c8afec9a31a84f" + [[package]] name = "windows_i686_gnu" version = "0.48.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "622a1962a7db830d6fd0a69683c80a18fda201879f0f447f065a3b7467daa241" +[[package]] +name = "windows_i686_msvc" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "44d840b6ec649f480a41c8d80f9c65108b92d89345dd94027bfe06ac444d1060" + [[package]] name = "windows_i686_msvc" version = "0.48.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4542c6e364ce21bf45d69fdd2a8e455fa38d316158cfd43b3ac1c5b1b19f8e00" +[[package]] +name = "windows_x86_64_gnu" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8de912b8b8feb55c064867cf047dda097f92d51efad5b491dfb98f6bbb70cb36" + [[package]] name = "windows_x86_64_gnu" version = "0.48.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"ca2b8a661f7628cbd23440e50b05d705db3686f894fc9580820623656af974b1" +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "26d41b46a36d453748aedef1486d5c7a85db22e56aff34643984ea85514e94a3" + [[package]] name = "windows_x86_64_gnullvm" version = "0.48.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7896dbc1f41e08872e9d5e8f8baa8fdd2677f29468c4e156210174edc7f7b953" +[[package]] +name = "windows_x86_64_msvc" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9aec5da331524158c6d1a4ac0ab1541149c0b9505fde06423b02f5ef0106b9f0" + [[package]] name = "windows_x86_64_msvc" version = "0.48.0" @@ -4243,6 +7227,21 @@ dependencies = [ "tap", ] +[[package]] +name = "yaml-rust" +version = "0.4.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "56c1936c4cc7a1c9ab21a1ebb602eb942ba868cbd44a99cb7cdc5892335e1c85" +dependencies = [ + "linked-hash-map", +] + +[[package]] +name = "yap" +version = "0.7.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5fc77f52dc9e9b10d55d3f4462c3b7fc393c4f17975d641542833ab2d3bc26ef" + [[package]] name = "zeroize" version = "1.6.0" @@ -4260,7 +7259,7 @@ checksum = "ce36e65b0d2999d2aafac989fb249189a141aee1f53c612c1f37d72631959f69" dependencies = [ "proc-macro2", "quote", - "syn 2.0.26", + "syn 2.0.29", ] [[package]] @@ -4272,17 +7271,36 @@ dependencies = [ "aes", "byteorder", "bzip2", - "constant_time_eq", + "constant_time_eq 0.1.5", "crc32fast", "crossbeam-utils", "flate2", - "hmac", - "pbkdf2", + "hmac 0.12.1", + "pbkdf2 0.11.0", "sha1", - "time", + "time 0.3.23", "zstd", ] +[[package]] +name = "zk-cycle-macros" +version = "0.1.0" +dependencies = [ + "anyhow", + "borsh", + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "zk-cycle-utils" +version = "0.1.0" +dependencies = [ + "risc0-zkvm", + "risc0-zkvm-platform", +] + [[package]] name = "zstd" version = "0.11.2+zstd.1.5.2" diff --git a/examples/demo-prover/Cargo.toml b/examples/demo-prover/Cargo.toml index bd4544d1c..a9998ca01 100644 --- a/examples/demo-prover/Cargo.toml +++ b/examples/demo-prover/Cargo.toml @@ -8,6 +8,7 @@ resolver = "2" [workspace.dependencies] anyhow = "1.0.68" borsh = { version = "0.10.3", features = ["rc", "bytes"] } +bincode = "1.3.3" hex = "0.4.3" jsonrpsee = "0.16.2" serde = { version = "1.0.137", features = ["derive", "rc"] } @@ -16,6 +17,7 @@ sha2 = "0.10.6" risc0-zkvm = { version = "0.16" } risc0-build = { version = "0.16" } tokio = { version = "1", features = ["full"] } +tempfile = "3.6.0" # Always optimize; building and running the guest takes much longer without optimization. [profile.dev] diff --git a/examples/demo-prover/Makefile b/examples/demo-prover/Makefile new file mode 100644 index 000000000..3c6fb360f --- /dev/null +++ b/examples/demo-prover/Makefile @@ -0,0 +1,19 @@ +# Directories and paths +TRACER_DIR = ../../utils/zk-cycle-utils/tracer +ELF_PATH_TRACER = ../../../examples/demo-prover/target/riscv-guest/riscv32im-risc0-zkvm-elf/release/rollup +TRACE_PATH_TRACER = ../../../examples/demo-prover/host/rollup.trace + +# This allows you to pass additional flags when you call `make run-tracer`. 
+# For example: `make run-tracer ADDITIONAL_FLAGS="--some-flag"` +ADDITIONAL_FLAGS ?= + +.PHONY: generate-files run-tracer + +all: generate-files run-tracer + +generate-files: + ROLLUP_TRACE=rollup.trace cargo bench --bench prover_bench --features bench + +run-tracer: + @cd $(TRACER_DIR) && \ + cargo run --release -- --no-raw-counts --rollup-elf $(ELF_PATH_TRACER) --rollup-trace $(TRACE_PATH_TRACER) $(ADDITIONAL_FLAGS) diff --git a/examples/demo-prover/README.md b/examples/demo-prover/README.md index 85bcb0854..213482bb6 100644 --- a/examples/demo-prover/README.md +++ b/examples/demo-prover/README.md @@ -16,7 +16,7 @@ harder to follow at first glance, so we recommend diving into the `demo-rollup` ## Prerequisites -Running this example require at least 96GB of RAM for x86 CPU architecture. +Running this example requires at least 96GB of RAM for x86 CPU architecture. ## Getting Started diff --git a/examples/demo-prover/host/Cargo.toml b/examples/demo-prover/host/Cargo.toml index 756610af4..79c821fc6 100644 --- a/examples/demo-prover/host/Cargo.toml +++ b/examples/demo-prover/host/Cargo.toml @@ -8,6 +8,7 @@ resolver = "2" [dependencies] anyhow = { workspace = true } borsh = { workspace = true } +bincode = { workspace = true } hex = { workspace = true } jsonrpsee = { workspace = true, features = ["http-client", "server"] } risc0-zkvm = { workspace = true } @@ -18,13 +19,34 @@ tokio = { workspace = true } tracing = "0.1.37" tracing-subscriber = "0.3.16" -jupiter = { path = "../../../adapters/celestia", features = ["native"] } -demo-stf = { path = "../../demo-stf" } +celestia = { path = "../../../adapters/celestia", features = ["native", "bench"] } +demo-stf = { path = "../../demo-stf", features = ["native"] } sov-rollup-interface = { path = "../../../rollup-interface" } -risc0-adapter = { path = "../../../adapters/risc0" } +risc0-adapter = { path = "../../../adapters/risc0", features = ["native"] } const-rollup-config = { path = "../../const-rollup-config" } sov-modules-api = { path = "../../../module-system/sov-modules-api", features = ["native"] } sov-state = { path = "../../../module-system/sov-state", features = ["native"] } +zk-cycle-macros = { path = "../../../utils/zk-cycle-macros", optional = true } sov-stf-runner = { path = "../../../full-node/sov-stf-runner" } methods = { path = "../methods" } + + +[dev-dependencies] +sov-demo-rollup = { path = "../../demo-rollup" } +tempfile = { workspace = true } +once_cell = "1.7.2" +parking_lot = "0.11.1" +prettytable-rs = "^0.10" +env_logger = "0.10.0" +log = "0.4" +log4rs = "1.0" +regex = "1.5" + +[[bench]] +name = "prover_bench" +harness = false + +[features] +bench = ["risc0-adapter/bench", "zk-cycle-macros/bench", "methods/bench"] + diff --git a/examples/demo-prover/host/benches/README.md b/examples/demo-prover/host/benches/README.md new file mode 100644 index 000000000..617438032 --- /dev/null +++ b/examples/demo-prover/host/benches/README.md @@ -0,0 +1,174 @@ +# Prover Benchmarks +* For benchmarking the prover, we measure the number of risc0 vm cycles for each of the major functions. 
+* We use cycle counts because proving throughput is assumed to scale with a cycles-per-second rate (kHz, MHz) determined by the hardware used
+
+## Running the bench
+* From the `sovereign-sdk` root:
+```
+$ cd examples/demo-prover/host/benches
+$ cargo bench --features bench --bench prover_bench
+```
+
+## Methodology
+* We define a `cycle_tracker` macro that can be used to annotate any function running inside the zkvm whose cycles we want to measure
+* The `cycle_tracker` macro is defined at `sovereign-sdk/zk-cycle-util`
+```rust
+    #[cfg_attr(all(target_os = "zkvm", feature="bench"), cycle_tracker)]
+    fn begin_slot(&mut self, witness: Self::Witness) {
+        self.checkpoint = Some(StateCheckpoint::with_witness(
+            self.current_storage.clone(),
+            witness,
+        ));
+    }
+```
+* We track metrics by registering an `io_callback` syscall handler when creating the risc0 host.
+```
+pub fn get_syscall_name_handler() -> (SyscallName, fn(&[u8]) -> Vec<u8>) {
+    let cycle_string = "cycle_metrics\0";
+    let bytes = cycle_string.as_bytes();
+    let metrics_syscall_name = unsafe {
+        SyscallName::from_bytes_with_nul(bytes.as_ptr())
+    };
+
+    let metrics_callback = |input: &[u8]| -> Vec<u8> {
+        {
+            let met_tuple = deserialize_custom(input);
+            add_value(met_tuple.0, met_tuple.1);
+        }
+        vec![]
+    };
+
+    (metrics_syscall_name, metrics_callback)
+}
+
+#[cfg(feature = "bench")]
+{
+    let (metrics_syscall_name, metrics_callback) = get_syscall_name_handler();
+    default_env.io_callback(metrics_syscall_name, metrics_callback);
+}
+```
+* The above allows us to use `risc0_zkvm::guest::env::send_recv_slice`, which lets the guest pass a slice of raw bytes to the host and get back a vector of bytes
+* We use it to pass cycle metrics to the host
+* Cycles are tracked by the macro, which reads the cycle count at the beginning and end of the function
+```rust
+let before = risc0_zkvm::guest::env::get_cycle_count();
+let result = (|| #block)();
+let after = risc0_zkvm::guest::env::get_cycle_count();
+```
+* We feature gate the application of the `cycle_tracker` macro on both `target_os = "zkvm"` and the `bench` feature flag
+* We need both so that conditional compilation works in all cases
+* For this profiling we run the prover without generating the proof
+
+## Input set
+* Unlike demo-rollup, it's harder to generate fake data here, since all the proofs and checks need to succeed.
+* This means the DA samples, hashes, signatures etc need to succeed
+* To make this easier we use a static input set consisting of 3 blocks
+  * we avoid using empty blocks because they skew average metrics
+  * we have 3 blocks
+  * block 1 -> 1 blob containing 1 create token transaction
+  * block 2 -> 1 blob containing 1 transfer transaction
+  * block 3 -> 1 blob containing 2 transfer transactions
+* This dataset is stored at `demo-prover/benches/blocks.hex`
+* The dataset can be substituted with another valid dataset as well from Celestia (TBD: automate parametrized generation of blocks.hex)
+* We can run this on different kinds of workloads to gauge the efficiency of different parts of the code
+
+## Result
+* Standard hash function patched with risc0/rust_crypto
+* Signature verification currently NOT patched (TBD)
+* Signature verification takes about 60% of the total cycles
+```
+Block stats
+
++------------------------------------------+---+
+| Total blocks                             | 3 |
++------------------------------------------+---+
+| Blocks with transactions                 | 3 |
++------------------------------------------+---+
+| Number of blobs                          | 3 |
++------------------------------------------+---+
+| Total number of transactions             | 4 |
++------------------------------------------+---+
+| Average number of transactions per block | 1 |
++------------------------------------------+---+
+
+Cycle Metrics
+
++-------------------------+----------------+-----------+
+| Function                | Average Cycles | Num Calls |
++-------------------------+----------------+-----------+
+| Cycles per block        | 6935250        | 3         |
++-------------------------+----------------+-----------+
+| apply_slot              | 6433166        | 3         |
++-------------------------+----------------+-----------+
+| verify                  | 3965858        | 4         |
++-------------------------+----------------+-----------+
+| end_slot                | 514929         | 3         |
++-------------------------+----------------+-----------+
+| validate_and_commit     | 496189         | 3         |
++-------------------------+----------------+-----------+
+| verify_relevant_tx_list | 277438         | 3         |
++-------------------------+----------------+-----------+
+| begin_slot              | 4683           | 3         |
++-------------------------+----------------+-----------+
+```
+
+## Custom annotations
+* We can also get finer grained information by annotating low level functions, but the process for this isn't straightforward.
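+* As a rough sketch of what annotating a low-level helper can look like (the helper and the exact macro import path below are assumptions made for illustration, mirroring the `begin_slot` example above; the bullets that follow spell out when this is possible)
+```rust
+// Gate the macro import the same way as the annotation itself, so builds
+// without the `bench` feature (or outside the zkvm) still compile.
+// The `zk_cycle_macros::cycle_tracker` path is an assumption for this sketch.
+#[cfg(all(target_os = "zkvm", feature = "bench"))]
+use zk_cycle_macros::cycle_tracker;
+
+use sha2::{Digest, Sha256};
+
+// Hypothetical low-level helper: when built for the zkvm with `bench`
+// enabled, its cycle count is reported per call via the macro.
+#[cfg_attr(all(target_os = "zkvm", feature = "bench"), cycle_tracker)]
+fn hash_leaf(bytes: &[u8]) -> [u8; 32] {
+    Sha256::digest(bytes).into()
+}
+```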
+## Result
+* The standard hash function is patched with the risc0 fork of RustCrypto (`risc0/RustCrypto-hashes`)
+* Signature verification is currently NOT patched (TBD)
+* Signature verification takes about 60% of the total cycles
+```
+Block stats
+
++------------------------------------------+---+
+| Total blocks                             | 3 |
++------------------------------------------+---+
+| Blocks with transactions                 | 3 |
++------------------------------------------+---+
+| Number of blobs                          | 3 |
++------------------------------------------+---+
+| Total number of transactions             | 4 |
++------------------------------------------+---+
+| Average number of transactions per block | 1 |
++------------------------------------------+---+
+
+Cycle Metrics
+
++-------------------------+----------------+-----------+
+| Function                | Average Cycles | Num Calls |
++-------------------------+----------------+-----------+
+| Cycles per block        | 6935250        | 3         |
++-------------------------+----------------+-----------+
+| apply_slot              | 6433166        | 3         |
++-------------------------+----------------+-----------+
+| verify                  | 3965858        | 4         |
++-------------------------+----------------+-----------+
+| end_slot                | 514929         | 3         |
++-------------------------+----------------+-----------+
+| validate_and_commit     | 496189         | 3         |
++-------------------------+----------------+-----------+
+| verify_relevant_tx_list | 277438         | 3         |
++-------------------------+----------------+-----------+
+| begin_slot              | 4683           | 3         |
++-------------------------+----------------+-----------+
+```
+
+## Custom annotations
+* We can also get finer-grained information by annotating low-level functions, but the process isn't as straightforward
+* For code that we control, it's as simple as adding the `cycle_tracker` annotation to the function and feature-gating it (omitting the feature gate causes compilation errors)
+* For external dependencies, we need to fork the crate, annotate it, and include it locally as a path dependency
+* We did this for the `jmt` Jellyfish Merkle tree library to measure the cycle gains from using the risc0-accelerated sha function vs. without it
+* We apply the risc0 patch as follows in `demo-prover/methods/guest/Cargo.toml`
+```toml
+[patch.crates-io]
+sha2 = { git = "https://github.com/risc0/RustCrypto-hashes", tag = "sha2/v0.10.6-risc0" }
+```
+* Note that the specific tag must be used, since master and other branches don't contain the acceleration
+
+## Accelerated vs Non-accelerated libs
+* Accelerated, risc0-optimized crypto libraries give a significant cycle gain (roughly 8x per hash in the numbers below)
+* With sha2 acceleration
+```
+=====> hash: 1781
+=====> hash: 1781
+=====> hash: 1781
+=====> hash: 1781
+=====> hash: 1781
+```
+* Without sha2 acceleration
+```
+=====> hash: 13901
+=====> hash: 13901
+=====> hash: 13901
+=====> hash: 13901
+=====> hash: 13901
+```
+* Overall performance difference when using sha acceleration vs. without, for the same dataset (3 blocks, 4 transactions) described above
+* With sha acceleration
+```
++-------------------------+----------------+-----------+
+| Function                | Average Cycles | Num Calls |
++-------------------------+----------------+-----------+
+| Cycles per block        | 6944938        | 3         |
++-------------------------+----------------+-----------+
+| validate_and_commit     | 503468         | 3         |
++-------------------------+----------------+-----------+
+| verify_relevant_tx_list | 277092         | 3         |
++-------------------------+----------------+-----------+
+Total cycles consumed for test: 20834815
+```
+* Without sha acceleration
+```
++-------------------------+----------------+-----------+
+| Function                | Average Cycles | Num Calls |
++-------------------------+----------------+-----------+
+| Cycles per block        | 8717567        | 3         |
++-------------------------+----------------+-----------+
+| validate_and_commit     | 1432461        | 3         |
++-------------------------+----------------+-----------+
+| verify_relevant_tx_list | 966893         | 3         |
++-------------------------+----------------+-----------+
+Total cycles consumed for test: 26152702
+```
+* Overall, sha acceleration saves roughly 5.3 million cycles across the 3 blocks (26,152,702 vs. 20,834,815 total)
+* Keep in mind that the above table shows average number of cycles per call, so they give an efficiency per call, but the "Total cycles consumed for test" metric at the bottom shows total for 3 blocks \ No newline at end of file diff --git a/examples/demo-prover/host/benches/blocks.hex b/examples/demo-prover/host/benches/blocks.hex new file mode 100644 index 000000000..37f044279 --- /dev/null +++ b/examples/demo-prover/host/benches/blocks.hex @@ -0,0 +1,5 @@ +0200000000000000fffffffffffffffefffffffffffffffea97efd516627c3d613d05c8eb4c32eb63f87355e19dcc50a97c48625c35d99adffffffffffffffffffffffffffffffff31de1a752d3c99fc4323e978f498978a9156b714bc6d9fc29b72a00f393328a40200000000000000fffffffffffffffefffffffffffffffea97efd516627c3d613d05c8eb4c32eb63f87355e19dcc50a97c48625c35d99adffffffffffffffffffffffffffffffff31de1a752d3c99fc4323e978f498978a9156b714bc6d9fc29b72a00f393328a40200000000000000080b06000000000000000a0474657374020000000000000008010b0000000000000008d6ccf6a30610a1b88b550200000000000000120022000000000000000a20e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b85501257760461993f8f197b421ec7435f3c36c3734923e3da9a42dc73b05f07b3d0822000000000000000a20313b46aa6296ad96730c290aa9bf4b7f5366cec34ac6d50386b2466ff9d70d6e22000000000000000a20313b46aa6296ad96730c290aa9bf4b7f5366cec34ac6d50386b2466ff9d70d6e22000000000000000a20048091bc7ddc283f77bfbf91d73c44da58c3df8a9cbc867405d8b7f3daada22f22000000000000000a20e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b85522000000000000000a20e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b85522000000000000000a20e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b85516000000000000000a1449e43321ff82ae3aea23f2443100e84a93ad7922010000000000000000000000000000000000000000000000000000000000000000000000 +0200000000000000fffffffffffffffefffffffffffffffea97efd516627c3d613d05c8eb4c32eb63f87355e19dcc50a97c48625c35d99adffffffffffffffffffffffffffffffff31de1a752d3c99fc4323e978f498978a9156b714bc6d9fc29b72a00f393328a40200000000000000fffffffffffffffefffffffffffffffea97efd516627c3d613d05c8eb4c32eb63f87355e19dcc50a97c48625c35d99adffffffffffffffffffffffffffffffff31de1a752d3c99fc4323e978f498978a9156b714bc6d9fc29b72a00f393328a40200000000000000080b06000000000000000a0474657374020000000000000008020b0000000000000008f5c4e8a60610c4e5f11448000000000000000a202430df06922a2f18631d4dc84644ea4b24abc6d903825be10ab278fc0cd8bb27122408011220d728ed4a03505212b5b16cfba570a184b5e5f463719f3fe548bd430691d5ad7622000000000000000a2047837b9a731aca002a6e1ed9b74641e98f3f441cc2e1c69d03ff7431ba0e70c901257760461993f8f197b421ec7435f3c36c3734923e3da9a42dc73b05f07b3d0822000000000000000a20313b46aa6296ad96730c290aa9bf4b7f5366cec34ac6d50386b2466ff9d70d6e22000000000000000a20313b46aa6296ad96730c290aa9bf4b7f5366cec34ac6d50386b2466ff9d70d6e22000000000000000a20048091bc7ddc283f77bfbf91d73c44da58c3df8a9cbc867405d8b7f3daada22f22000000000000000a20a577a3590441b53e05dbbbefb913b5edd5124648d341a04b718713329d85b32e22000000000000000a20e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b85522000000000000000a20e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b85516000000000000000a1449e43321ff82ae3aea23f2443100e84a93ad7922010000000000000000000000000000000000000000000000000000000000000000000000 
+04000000000000000000000000000004736f762d74657374490362a480c46eb54dd64154699cbb6c283fafdae0b9f4b4ad4c4369a4100ba9fffffffffffffffefffffffffffffffe2d8bdc0adb36c2b1355ecf54794ad62ee314a19aa1ceb2c6d041d712a192fe35fffffffffffffffffffffffffffffffff45d40d82fc4d79fafef36758409f80eeda1437590bf48f89679e99930058f0bffffffffffffffffffffffffffffffff2a320b741a128353cbb2def387a080acee5b87ed9918944396683b595fa6c6c904000000000000000000000000000004fffffffffffffffe60a51b35f75755b7a9aff56f2cd7dd7f2cfad901addffc4100b4258a213854fc736f762d74657374fffffffffffffffe3c63f2b45c89baec6a2f14654c5788d90c86bcbad20feb539d16561835129ba7ffffffffffffffffffffffffffffffffe8016ad36f4a7b3bc3b8da4601928f5c8f0976735624ac43938b70abbaabd561ffffffffffffffffffffffffffffffffb3207581c5bcef70e70e29bbb7f8a957bd6b98af7c1d3d7339c4d69360a670c00200000000000000080b06000000000000000a0474657374020000000000000008030b0000000000000008ffc4e8a60610c0a1dd3348000000000000000a204af71f283e5f977b94e5c8af31576ae80742408b232ce731c51022113bc8d998122408011220ce1c73e3825809c57be5ef90bf3ba4ac376467c74a4441d94749e1b0fcf5386022000000000000000a20b342c7d34aae5de87f96dffe7c7bd639827c8ecc353d24c13152222d883c49c9010140036afedcdd88646ba9fee3e5bd28a47a9774bf6aa0701e9186cdd11bd01822000000000000000a20313b46aa6296ad96730c290aa9bf4b7f5366cec34ac6d50386b2466ff9d70d6e22000000000000000a20313b46aa6296ad96730c290aa9bf4b7f5366cec34ac6d50386b2466ff9d70d6e22000000000000000a20048091bc7ddc283f77bfbf91d73c44da58c3df8a9cbc867405d8b7f3daada22f22000000000000000a20879128e81db8a6e35aed8d1501f2d319f03b6060e751c6df2a3f04cfa6b49ff722000000000000000a20e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b85522000000000000000a20e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b85516000000000000000a1449e43321ff82ae3aea23f2443100e84a93ad79220100000001000000000000000002000000000000736f762d7465737401000000fc01000000f4000000ee49083c046aa5f4bb54f088f08497a77fb21398ee3185464e33ee99b21a1ff1e5bed3b2f7a14ccba47c8b51661d75f2c287d5fd47f190c6f989859eff68ee0ff8ad2437a279e1c8932c07358c91dc4fe34864a98c6c25f298e2a0199c1509ff8800000000000b000000000000000e000000736f762d746573742d746f6b656ee803000000000000a3201954f70ad62230dc3d840a5bf767702c04869e85ab3eee0b962857ba759802000000fea6ac5b8751120fb62fff67b54d2eac66aef307c7dde1d394dea1e09e43dd44a3201954f70ad62230dc3d840a5bf767702c04869e85ab3eee0b962857ba75980000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000100000000000000200000000000000026a92c27d3e9ea75532dbb8b5093508eb1c67b4d4fc4208be305bacdb57f0dae2f0000000000000063656c6573746961317737776375706b356773776a323563306b686e6b65793566776d6c6e64783674356161726d6b01000000000000000800000000000000736f762d746573740100000000000000fc0000000100000000000000200000000000000026a92c27d3e9ea75532dbb8b5093508eb1c67b4d4fc4208be305bacdb57f0dae0100000000000000000000000000000000000000010000000000000000000000000000000100000000000000040000000000000000020000000000000000000000000004010000014500000011c3020ab7020a8b010a88010a202f63656c65737469612e626c6f622e76312e4d7367506179466f72426c6f627312640a2f63656c6573746961317737776375706b356773776a323563306b686e6b65793566776d6c6e6478367435
6161726d6b1208736f762d746573741a02fc01222026a92c27d3e9ea75532dbb8b5093508eb1c67b4d4fc4208be305bacdb57f0dae42010012650a500a460a1f2f636f736d6f732e63727970746f2e736563703235366b312e5075624b657912230a21027edd627fe580221f17003db181040d019ed834c911e1daa32c922870f7ebc3b812040a020801180112110a0b0a0475746961120333303010d0e80c1a4058fe4cf99f9b8384cd489608c5f06a9e26a37eff7e46ac57000569151050a4c11ff1c798e8792a657466cb2fba1bc95a5320b7432e189c582bc9e4b7e88e49961201011a04494e445800000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000002000000000000736f762d7465737401000000fc01000000f4000000ee49083c046aa5f4bb54f088f08497a77fb21398ee3185464e33ee99b21a1ff1e5bed3b2f7a14ccba47c8b51661d75f2c287d5fd47f190c6f989859eff68ee0ff8ad2437a279e1c8932c07358c91dc4fe34864a98c6c25f298e2a0199c1509ff8800000000000b000000000000000e000000736f762d746573742d746f6b656ee803000000000000a3201954f70ad62230dc3d840a5bf767702c04869e85ab3eee0b962857ba759802000000fea6ac5b8751120fb62fff67b54d2eac66aef307c7dde1d394dea1e09e43dd44a3201954f70ad62230dc3d840a5bf767702c04869e85ab3eee0b962857ba75980000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000002000000000000b79abf31bc94b7b2010000038502000039d601047348a35b1a0ec6ee69d795992d902a34264a33350bbee924bf6532571408d4fe9a852ca70b85105a882b8a2f8488d14f938b1e93985c483152a22012399ab2b9c2a01811361897b48d0cc8fad6f50a9e5abb536429f7531f9b098911dec727cb94151cc8f1c4c832019c0315141b431b1881ab1e516f2dc4c265d5e3e7e6265578a1b61456af0d7bb196fb2dc7850d5a0cef50c9dfe98a5f8a48c6ce2c0a1df7e48ecc34f17cdf7a07fdc62f234217cf90084bd4e937f25a81c3ba312e1e1ca9ab23437c64f1719b1ff1a155c362149ab1c2891d287445dd40a43516acb8370bddfce6ea217008f53d870a760cff3b022c2e2e3a83a80b3ee8d79de391626c535fb1ed6405b497f8611b47cc9ecce74ede000dfa373ad24bba3394b56ea8c010f1c8f3be1d7b3dbfd6d01473ea1e3f60d713bfa373a85bee643b03033e0eeee2e6d700000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000002000000000000c4f5c91cc8f1c4c2010000023c03000028e1030ec4a4e0d82700244a974cee0ac00ccbd0f55ce008f13cb7c3d75d3097c0c9a9b10a19d41bcb30dd7921fce4c0dfc1af5f0d2cea320c7a88d6a32ccae2d70eef30be2fdf0734d9de3e2df18f989a4cfd3578347564f84d4b0f08f019dcac493ab104eb31bc94b7bc2303600237343dea393f5242874b4a2d0b2c46329e1d388b4b5b067134dd4c08c17c80a4393d30068c3cba86a451a5dbb7c25b85b9c7fbeb72b912bed2ad444b7cfa9cb61d168ada52e5df6fa4ade4a4b7c57cd61dfca5d0254964bc82a065928b7e6bef891919004af4d4596826d6b1903345ad59d9cc110d53637b400d8f09681fd4c577fd9e29011f1e1e2a53400724a88f63af68fdf7d0db7ca5f20d716792ff3de4b261b2a1e289000893222a82ef7b2c6572f640b93a94bc957532c126768c8334c4a93027fc8f3
87647c440d5a7f2290202240aa7aca28f00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000004736f762d74657374490362a480c46eb54dd64154699cbb6c283fafdae0b9f4b4ad4c4369a4100ba90100000000000000040000000000000000020000000000000000000000000004010000014500000011c3020ab7020a8b010a88010a202f63656c65737469612e626c6f622e76312e4d7367506179466f72426c6f627312640a2f63656c6573746961317737776375706b356773776a323563306b686e6b65793566776d6c6e64783674356161726d6b1208736f762d746573741a02fc01222026a92c27d3e9ea75532dbb8b5093508eb1c67b4d4fc4208be305bacdb57f0dae42010012650a500a460a1f2f636f736d6f732e63727970746f2e736563703235366b312e5075624b657912230a21027edd627fe580221f17003db181040d019ed834c911e1daa32c922870f7ebc3b812040a020801180112110a0b0a0475746961120333303010d0e80c1a4058fe4cf99f9b8384cd489608c5f06a9e26a37eff7e46ac57000569151050a4c11ff1c798e8792a657466cb2fba1bc95a5320b7432e189c582bc9e4b7e88e49961201011a04494e445800000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000002000000000000736f762d7465737401000000fc01000000f4000000ee49083c046aa5f4bb54f088f08497a77fb21398ee3185464e33ee99b21a1ff1e5bed3b2f7a14ccba47c8b51661d75f2c287d5fd47f190c6f989859eff68ee0ff8ad2437a279e1c8932c07358c91dc4fe34864a98c6c25f298e2a0199c1509ff8800000000000b000000000000000e000000736f762d746573742d746f6b656ee803000000000000a3201954f70ad62230dc3d840a5bf767702c04869e85ab3eee0b962857ba759802000000fea6ac5b8751120fb62fff67b54d2eac66aef307c7dde1d394dea1e09e43dd44a3201954f70ad62230dc3d840a5bf767702c04869e85ab3eee0b962857ba75980000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000002000000000000b79abf31bc94b7b2010000038502000039d601047348a35b1a0ec6ee69d795992d902a34264a33350bbee924bf6532571408d4fe9a852ca70b85105a882b8a2f8488d14f938b1e93985c483152a22012399ab2b9c2a01811361897b48d0cc8fad6f50a9e5abb536429f7531f9b098911dec727cb94151cc8f1c4c832019c0315141b431b1881ab1e516f2dc4c265d5e3e7e6265578a1b61456af0d7bb196fb2dc7850d5a0cef50c9dfe98a5f8a48c6ce2c0a1df7e48ecc34f17cdf7a07fdc62f234217cf90084bd4e937f25a81c3ba312e1e1ca9ab23437c64f1719b1ff1a155c362149ab1c2891d287445dd40a43516acb8370bddfce6ea217008f53d870a760cff3b022c2e2e3a83a80b3ee8d79de391626c535fb1ed6405b497f8611b47cc9ecce74ede000dfa373ad24bba3394b56ea8c010f1c8f3be1d7b3dbfd6d01473ea1e3f60d713bfa373a85bee643b03033e0eeee2e6d70000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000000002000000000000c4f5c91cc8f1c4c2010000023c03000028e1030ec4a4e0d82700244a974cee0ac00ccbd0f55ce008f13cb7c3d75d3097c0c9a9b10a19d41bcb30dd7921fce4c0dfc1af5f0d2cea320c7a88d6a32ccae2d70eef30be2fdf0734d9de3e2df18f989a4cfd3578347564f84d4b0f08f019dcac493ab104eb31bc94b7bc2303600237343dea393f5242874b4a2d0b2c46329e1d388b4b5b067134dd4c08c17c80a4393d30068c3cba86a451a5dbb7c25b85b9c7fbeb72b912bed2ad444b7cfa9cb61d168ada52e5df6fa4ade4a4b7c57cd61dfca5d0254964bc82a065928b7e6bef891919004af4d4596826d6b1903345ad59d9cc110d53637b400d8f09681fd4c577fd9e29011f1e1e2a53400724a88f63af68fdf7d0db7ca5f20d716792ff3de4b261b2a1e289000893222a82ef7b2c6572f640b93a94bc957532c126768c8334c4a93027fc8f387647c440d5a7f2290202240aa7aca28f00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000004736f762d74657374490362a480c46eb54dd64154699cbb6c283fafdae0b9f4b4ad4c4369a4100ba9 +04000000000000000000000000000004736f762d74657374ac50b9d0223ddbb3be915d3e137e398954d178c230872c2129ac2b47e102b2cdfffffffffffffffefffffffffffffffe2d8bdc0adb36c2b1355ecf54794ad62ee314a19aa1ceb2c6d041d712a192fe35fffffffffffffffffffffffffffffffff71c654cd09a61036e01ed33ae918d18b0e5ba0794fbe47a2c741d20f2063910ffffffffffffffffffffffffffffffff4fa49d42a64106676832e1d9de1a11cdfa534202413d54f651474c42dfc17e6204000000000000000000000000000004fffffffffffffffebfd734204f10a51792e6702d6cf01aa26ff3fd7c9fd509165398ac17d86c4bdf736f762d74657374fffffffffffffffe23a22c3f7104f73c472215a5b3521b19e10794db1eb67eb7ca35b0d8852f0dfeffffffffffffffffffffffffffffffff64c37eed9469411345b6b40ba3364add2d173da14a55704d2cac9a041fcdd7b5ffffffffffffffffffffffffffffffffc57a236559368b2ce48ab92e3a182b7d64e4bfc3e167f7f7662f763c915cbc750200000000000000080b06000000000000000a0474657374020000000000000008040b000000000000000889c5e8a606108889d84948000000000000000a20b42b38b26398e4103db7a728dc19f01ae6ab2b3099fd1060baac22c6d3908c1e122408011220feb6598f634155981d4fad9e1c320eada5617d06952c2c7334f5c4cf7c5e584a22000000000000000a208b25e01aa7e4cdb22dad91f964ecb6f00327ff5b01820a806c3adacb2bc2749701f1556b39c2b6938f77830a6edad386a91ce74cf2cda7bb20fb3d59ec0607c23322000000000000000a20313b46aa6296ad96730c290aa9bf4b7f5366cec34ac6d50386b2466ff9d70d6e22000000000000000a20313b46aa6296ad96730c290aa9bf4b7f5366cec34ac6d50386b2466ff9d70d6e22000000000000000a20048091bc7ddc283f77bfbf91d73c44da58c3df8a9cbc867405d8b7f3daada22f22000000000000000a20cc1af9ec6ee03cdb31089286da64e71428a0af62c0b8e47b4e0c1d972642b25a22000000000000000a20ebb716b3f3d18b55ea0b304786d0ece8b4d3b51539364c7fd80e39cc90bc170322000000000000000a20e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b85516000000000000000a1449e43321ff82ae3aea23f2443100e84a93ad79220100000001000000000000000002000000000000736f762d7465737401000000be01000000b60000002adbd76606f2bd4125080e6f44df7ba2d728409955c80b8438eb1828ddf23e3c12188eeac7ecf6323be0ed5668e21cc354fca90d8bca513d6c0a240c26afa7007b758bf2e7670fafaf6bf0015ce0ff5aa802306fc7e3f45762853ffc37180fe64a0000000001fea6ac5b8751120fb62fff67b54d2eac66aef307c7dde1d394dea1e09e43dd44c800000000000000135d23aee8cb15c890831ff36db170157acaac31df9bba6cd40e7329e608eabd000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000100000000000000200000000000000011a9b433a5ad7385e47bb072888d971f3776f1ccfc2bf1904435f66f245127572f0000000000000063656c6573746961317737776375706b356773776a323563306b686e6b65793566776d6c6e64783674356161726d6b01000000000000000800000000000000736f762d746573740100000000000000be0000000100000000000000200000000000000011a9b433a5ad7385e47bb072888d971f3776f1ccfc2bf1904435f66f245127570100000000000000000000000000000000000000010000000000000000000000000000000100000000000000040000000000000000020000000000000000000000000004010000014500000011c3020ab7020a8b010a88010a202f63656c65737469612e626c6f622e76312e4d7367506179466f72426c6f627312640a2f63656c6573746961317737776375706b356773776a323563306b686e6b65793566776d6c6e64783674356161726d6b1208736f762d746573741a02be01222011a9b433a5ad7385e47bb072888d971f3776f1ccfc2bf1904435f66f2451275742010012650a500a460a1f2f636f736d6f732e63727970746f2e736563703235366b312e5075624b657912230a21027edd627fe580221f17003db181040d019ed834c911e1daa32c922870f7ebc3b812040a020801180212110a0b0a0475746961120333303010d0e80c1a4016bcae791b5af02b9907a7c315c1d4fa6bf1b2e7e3dc017c1cbe02057e5025d0428ec7b5d3a713dfa95a6d3efd0b4e3ce80d7ed46d732284244c2594f4f9d6e01201011a04494e445800000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000002000000000000736f762d7465737401000000be01000000b60000002adbd76606f2bd4125080e6f44df7ba2d728409955c80b8438eb1828ddf23e3c12188eeac7ecf6323be0ed5668e21cc354fca90d8bca513d6c0a240c26afa7007b758bf2e7670fafaf6bf0015ce0ff5aa802306fc7e3f45762853ffc37180fe64a0000000001fea6ac5b8751120fb62fff67b54d2eac66aef307c7dde1d394dea1e09e43dd44c800000000000000135d23aee8cb15c890831ff36db170157acaac31df9bba6cd40e7329e608eabd00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000002000000000000b79abf31bc94b7b2010000032e020000397d0104733b5b0e960d30c9ae2810fa6b55a47e2ea2c6b6097d81f1c5da6c39d99294c8e6e9c020849bbd9be0d676ba8f8cb0dc8f79a422ff225a4a3951f0f5c1c6c01ec47046f247b8b9fd1a156b96c2a6beab781ff5ec5953844e76601eec9b2e22c2b16f1cc8f1c4ca5ded978dcf941045b71e29da03f19323ea26325215656d9b65e57b5e6bc2b081ecf51ad118dee9032bbcc4e59271c570c8cdd2935d3943e6a68925dc933f34859df549fd6b2e62312d1ed2cbfdeff1c03b16041701cc8bfdcfa05115333600267c520e0803618620bf39ae87471f6a11c69baabb7a3b0e0401
05033f013b390407040ecbc8faff3b022c2e2e3a83a80b3ee835744fc03dd697136d0c49bb37ba8d93fb947ea1af8803cd3175010dccd21983e95bb5728149388a43d6f4249f07e225a808cc8df4c4155f1ae3196599918cad3b03033e0eeee2e6d700000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000002000000000000c4f5c91cc8f1c4c201000002d50300002808030ec4138a52f1014a75e52d3797617d1e76f81c8fd8f24426986f94b60fbc3c01a6bb829ec11c1e3d02b09e8433d3cb31a520482bff9bc8a16c334a99a4bd8b5adeb77c594b803cee99ddd46b03bacf38234fdb99b84ea2138f4063e9be0c0b32be385331bc94b7bfb94985d76ae513e3b5027310c53959f43cfa49d845a7bb3e00f72695041129f0d29a3e803f89ab0238f382412ab096b24761ae0f43e55c80f220fb943cd0e1c73a9e59a4bf13bee71c3082be9fa494b929350e3603b2569fb045d1372c21001bcdd30a0502ff5e1476284f5de433f839b66c4178c2290a0e030d02270329280e0c0e0abebc939e29011f1e1e2a53400724a823c8e1b9268c6738f40bee78227b59699065cc464c5402b12dcb0308b2823c53abd572c752ee2b55ea8c991a620cac194005b25999b737db3eaf3cf16d685a4d290202240aa7aca28f00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000004736f762d74657374ac50b9d0223ddbb3be915d3e137e398954d178c230872c2129ac2b47e102b2cd0100000000000000040000000000000000020000000000000000000000000004010000014500000011c3020ab7020a8b010a88010a202f63656c65737469612e626c6f622e76312e4d7367506179466f72426c6f627312640a2f63656c6573746961317737776375706b356773776a323563306b686e6b65793566776d6c6e64783674356161726d6b1208736f762d746573741a02be01222011a9b433a5ad7385e47bb072888d971f3776f1ccfc2bf1904435f66f2451275742010012650a500a460a1f2f636f736d6f732e63727970746f2e736563703235366b312e5075624b657912230a21027edd627fe580221f17003db181040d019ed834c911e1daa32c922870f7ebc3b812040a020801180212110a0b0a0475746961120333303010d0e80c1a4016bcae791b5af02b9907a7c315c1d4fa6bf1b2e7e3dc017c1cbe02057e5025d0428ec7b5d3a713dfa95a6d3efd0b4e3ce80d7ed46d732284244c2594f4f9d6e01201011a04494e445800000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000002000000000000736f762d7465737401000000be01000000b60000002adbd76606f2bd4125080e6f44df7ba2d728409955c80b8438eb1828ddf23e3c12188eeac7ecf6323be0ed5668e21cc354fca90d8bca513d6c0a240c26afa7007b758bf2e7670fafaf6bf0015ce0ff5aa802306fc7e3f45762853ffc37180fe64a0000000001fea6ac5b8751120fb62fff67b54d2eac66aef307c7dde1d394dea1e09e43dd44c800000000000000135d23aee8cb15c890831ff36db170157acaac31df9bba6cd40e7329e608eabd000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000002000000000000b79abf31bc94b7b2010000032e020000397d0104733b5b0e960d30c9ae2810fa6b55a47e2ea2c6b6097d81f1c5da6c39d99294c8e6e9c020849bbd9be0d676ba8f8cb0dc8f79a422ff225a4a3951f0f5c1c6c01ec47046f247b8b9fd1a156b96c2a6beab781ff5ec5953844e76601eec9b2e22c2b16f1cc8f1c4ca5ded978dcf941045b71e29da03f19323ea26325215656d9b65e57b5e6bc2b081ecf51ad118dee9032bbcc4e59271c570c8cdd2935d3943e6a68925dc933f34859df549fd6b2e62312d1ed2cbfdeff1c03b16041701cc8bfdcfa05115333600267c520e0803618620bf39ae87471f6a11c69baabb7a3b0e040105033f013b390407040ecbc8faff3b022c2e2e3a83a80b3ee835744fc03dd697136d0c49bb37ba8d93fb947ea1af8803cd3175010dccd21983e95bb5728149388a43d6f4249f07e225a808cc8df4c4155f1ae3196599918cad3b03033e0eeee2e6d700000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000002000000000000c4f5c91cc8f1c4c201000002d50300002808030ec4138a52f1014a75e52d3797617d1e76f81c8fd8f24426986f94b60fbc3c01a6bb829ec11c1e3d02b09e8433d3cb31a520482bff9bc8a16c334a99a4bd8b5adeb77c594b803cee99ddd46b03bacf38234fdb99b84ea2138f4063e9be0c0b32be385331bc94b7bfb94985d76ae513e3b5027310c53959f43cfa49d845a7bb3e00f72695041129f0d29a3e803f89ab0238f382412ab096b24761ae0f43e55c80f220fb943cd0e1c73a9e59a4bf13bee71c3082be9fa494b929350e3603b2569fb045d1372c21001bcdd30a0502ff5e1476284f5de433f839b66c4178c2290a0e030d02270329280e0c0e0abebc939e29011f1e1e2a53400724a823c8e1b9268c6738f40bee78227b59699065cc464c5402b12dcb0308b2823c53abd572c752ee2b55ea8c991a620cac194005b25999b737db3eaf3cf16d685a4d290202240aa7aca28f00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000004736f762d74657374ac50b9d0223ddbb3be915d3e137e398954d178c230872c2129ac2b47e102b2cd 
+04000000000000000000000000000004736f762d74657374f0e19f70a9b8230eb4b6c2763def30872bcc268ecb782a709f63dc3b4064d290fffffffffffffffefffffffffffffffe2d8bdc0adb36c2b1355ecf54794ad62ee314a19aa1ceb2c6d041d712a192fe35ffffffffffffffffffffffffffffffff85332fe79fddb4e46da26cfda92a16a4847904073c8d7b9569939693ce2500c7fffffffffffffffffffffffffffffffffabf7945694d0c30d100b7f2cd9bdffc637de9037c20172723476d7ee5d37fd804000000000000000000000000000004fffffffffffffffece46ab5e2f606bb91bf550801dd58eb7c4e0af35930260438e8283b5baeb2dde736f762d74657374fffffffffffffffe2ec8952e1551fa2146d6a9929ceffb75f2b0beb1ce21b64e172233c058092a41ffffffffffffffffffffffffffffffff5430d61857c9817fe0488a8d45d035ccf2eded4cd99ec7b71c2dc2f1b5af93e6ffffffffffffffffffffffffffffffff3c9c890a387f94b1fcfe6692363fc7e1d39b29b56444adf950aa89d5d143d2180200000000000000080b06000000000000000a0474657374020000000000000008050b000000000000000893c5e8a60610b587895f48000000000000000a2002d54033f21bd236ca93380a387d63196d40b4e5f90da1e83a93ebecfe832ce9122408011220b4c8233a71d14f37d63a512dc7ed646d0b1dac7296869e5edbd50b0b76a8c6a722000000000000000a20677d12b53874733888e0b57b2100907f98c76716cfe54ebac333815963dc8e1201c282ddeb5eb9167f5d46ca71af830dd3d3b4d3cf041a3f68c7208d0813a8ba5022000000000000000a20313b46aa6296ad96730c290aa9bf4b7f5366cec34ac6d50386b2466ff9d70d6e22000000000000000a20313b46aa6296ad96730c290aa9bf4b7f5366cec34ac6d50386b2466ff9d70d6e22000000000000000a20048091bc7ddc283f77bfbf91d73c44da58c3df8a9cbc867405d8b7f3daada22f22000000000000000a205e5e89f09e0aacf2d4708e8fad2e4befcc3689122ac6ef9c44d6c25f523385e722000000000000000a20ebb716b3f3d18b55ea0b304786d0ece8b4d3b51539364c7fd80e39cc90bc170322000000000000000a20e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b85516000000000000000a1449e43321ff82ae3aea23f2443100e84a93ad79220100000001000000000000000002000000000000736f762d74657374010000017802000000b600000009c241181dbfc2cf20083e3651da58c7cf6bf8b70182b70062d4ae713a5161ca0c87fde160667857e83ecfcbfe2b9c87d93f1295527561e466a5ef70c80661007b758bf2e7670fafaf6bf0015ce0ff5aa802306fc7e3f45762853ffc37180fe64a0000000001e39febaf77ef923eb6b4c86d7e0c11a528e303a46b9e8f1c382493e799b4dc27c700000000000000135d23aee8cb15c890831ff36db170157acaac31df9bba6cd40e7329e608eabd0100000000000000b600000017896f9627db1799c59c6006bdfb5297e67bc4ea6e931a9a91946726295bd880e6bce7758326c33c89c4623da515654d8ce52d61a47619bb297ca524c6f74e017b758bf2e7670fafaf6bf0015ce0ff5aa802306fc7e3f45762853ffc37180fe64a00000000012e727adc7e69ba5ff2a216385651b4fff4059a25880214d947abbed193d8b6b8c600000000000000135d23aee8cb15c890831ff36db170157acaac31df9bba6cd40e7329e608eabd0200000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000010000000000000020000000000000003193d8b98c9a83cb2314953cc18d41d044028e23576bfed024252562e6894a362f0000000000000063656c6573746961317737776375706b356773776a323563306b686e6b65793566776d6c6e64783674356161726d6b01000000000000000800000000000000736f762d74657374010000000000000078010000010000000000000020000000000000003193d8b98c9a83cb2314953cc18d41d044028e23576bfed024252562e6894a360100000000000000000000000000000000000000010000000000000000000000000000000100000000000000040000000000000000020000000000000000000000000004010000014500000011c3020ab7020a8b010a88010a202f63656c65737469612e626c6f622e76312e4d7367506179466f72426c6f627312640a2f63656c6573746961317737776375706b356773776a323563306b686e6b65793566776d6c6e6478367435
6161726d6b1208736f762d746573741a02f80222203193d8b98c9a83cb2314953cc18d41d044028e23576bfed024252562e6894a3642010012650a500a460a1f2f636f736d6f732e63727970746f2e736563703235366b312e5075624b657912230a21027edd627fe580221f17003db181040d019ed834c911e1daa32c922870f7ebc3b812040a020801180312110a0b0a0475746961120333303010d0e80c1a40d6f5c12c2d16c327304b590b2fca5afe4aae7c292e396aed2a273205d4ce5fa746c265f9d3b54006198c4979a82a942f9df2b5940ae0aa110df14393089a69ba1201011a04494e445800000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000002000000000000736f762d74657374010000017802000000b600000009c241181dbfc2cf20083e3651da58c7cf6bf8b70182b70062d4ae713a5161ca0c87fde160667857e83ecfcbfe2b9c87d93f1295527561e466a5ef70c80661007b758bf2e7670fafaf6bf0015ce0ff5aa802306fc7e3f45762853ffc37180fe64a0000000001e39febaf77ef923eb6b4c86d7e0c11a528e303a46b9e8f1c382493e799b4dc27c700000000000000135d23aee8cb15c890831ff36db170157acaac31df9bba6cd40e7329e608eabd0100000000000000b600000017896f9627db1799c59c6006bdfb5297e67bc4ea6e931a9a91946726295bd880e6bce7758326c33c89c4623da515654d8ce52d61a47619bb297ca524c6f74e017b758bf2e7670fafaf6bf0015ce0ff5aa802306fc7e3f45762853ffc37180fe64a00000000012e727adc7e69ba5ff2a216385651b4fff4059a25880214d947abbed193d8b6b8c600000000000000135d23aee8cb15c890831ff36db170157acaac31df9bba6cd40e7329e608eabd02000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000002000000000000b79abf31bc94b7b2010000015e030000397d0104730e7efc242b9d797b2010e4e677ac4bba856f7439f527391e5674ff54d4704c88c73d978875624e7484218d717ec60d2daddce1097e9354655f1380708f2a6ec47046f247b8b9fd1a156b96c2a6beab781ff5ec5953844e76601eec9b2e22c2b16f1cc8f1c4ca72fcd3e2a85fd57340bc2ef6e1b93ed95f69bbb67b7c312c482824bd69464e266bfda257ec21e9032bbcc4e59271c570c8cdd2935d3943e6a68925dc933f34859df549fd6b2e62312f1ed2cbfdeff1c0fd1604172012110ff0ff70e142ca9c2fb6398ff847dbf56327c38a72beeefd2cfd15f46a3ef348bad1387b26dc48a61fea269a661ebfcea5ed930bed013245354e84209800a1c151fd8f280c37977abf53d08dc3251100b28ae97d91b0d495d3ce6ee1fc57918172e80d0cef5694f283a499077051768f2d8458659c1f55d96cda2798eac86ff4b02424a7000000000000002b8735e140752274fbd02c6699cdb622b877e21c55f7c19b5904b73b440d43ca03000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000002000000000000c4f5c91cc8f1c4c201000001630100002808030ec405b6363d3caababe2037b9b54a1360092365a2ec98caec3042917f689d467d23b2fc051b576859411f0d26b0af8ef4c6119087f54dd742b64ed51a702c1968b77c594b803cee99ddd46b03bacf38234fdb99b84ea2138f4063e9be0c0b32be385331bc94b7bf8b61c04ffd9076de2eb16a011c7e0c68e2b6799f5132fa32fe2fe0310e22b624899f44dea617ab0238f382412ab096b24761ae0f43e55c80f220fb943cd0e1c73a9e59a4bf13bee71f3082be9fa494b959350e3635e5a3fba8c1e75a9841fc14ba43d962a03ee9bd804171328c560822a4b9c729605dabc55016b902569ed5284439fb5ee63382d64ad62266389e081d9233b86cbaf867b5d9cf17e8c1f7a57455ce3837f13a1ee427e7a3e1e0547ee1a6d1a8d8786852c7a80a3b11653124c08ae96820f2dad39c9ab69
c68c6e359f4a744cd4077a46321dcd8390000000000000038da164fa8be37bc6b533395f47cc637c2bd4e2d8a6c7bf78d0ac412a205a97701000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000004736f762d74657374f0e19f70a9b8230eb4b6c2763def30872bcc268ecb782a709f63dc3b4064d2900100000000000000040000000000000000020000000000000000000000000004010000014500000011c3020ab7020a8b010a88010a202f63656c65737469612e626c6f622e76312e4d7367506179466f72426c6f627312640a2f63656c6573746961317737776375706b356773776a323563306b686e6b65793566776d6c6e64783674356161726d6b1208736f762d746573741a02f80222203193d8b98c9a83cb2314953cc18d41d044028e23576bfed024252562e6894a3642010012650a500a460a1f2f636f736d6f732e63727970746f2e736563703235366b312e5075624b657912230a21027edd627fe580221f17003db181040d019ed834c911e1daa32c922870f7ebc3b812040a020801180312110a0b0a0475746961120333303010d0e80c1a40d6f5c12c2d16c327304b590b2fca5afe4aae7c292e396aed2a273205d4ce5fa746c265f9d3b54006198c4979a82a942f9df2b5940ae0aa110df14393089a69ba1201011a04494e445800000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000002000000000000736f762d74657374010000017802000000b600000009c241181dbfc2cf20083e3651da58c7cf6bf8b70182b70062d4ae713a5161ca0c87fde160667857e83ecfcbfe2b9c87d93f1295527561e466a5ef70c80661007b758bf2e7670fafaf6bf0015ce0ff5aa802306fc7e3f45762853ffc37180fe64a0000000001e39febaf77ef923eb6b4c86d7e0c11a528e303a46b9e8f1c382493e799b4dc27c700000000000000135d23aee8cb15c890831ff36db170157acaac31df9bba6cd40e7329e608eabd0100000000000000b600000017896f9627db1799c59c6006bdfb5297e67bc4ea6e931a9a91946726295bd880e6bce7758326c33c89c4623da515654d8ce52d61a47619bb297ca524c6f74e017b758bf2e7670fafaf6bf0015ce0ff5aa802306fc7e3f45762853ffc37180fe64a00000000012e727adc7e69ba5ff2a216385651b4fff4059a25880214d947abbed193d8b6b8c600000000000000135d23aee8cb15c890831ff36db170157acaac31df9bba6cd40e7329e608eabd02000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000002000000000000b79abf31bc94b7b2010000015e030000397d0104730e7efc242b9d797b2010e4e677ac4bba856f7439f527391e5674ff54d4704c88c73d978875624e7484218d717ec60d2daddce1097e9354655f1380708f2a6ec47046f247b8b9fd1a156b96c2a6beab781ff5ec5953844e76601eec9b2e22c2b16f1cc8f1c4ca72fcd3e2a85fd57340bc2ef6e1b93ed95f69bbb67b7c312c482824bd69464e266bfda257ec21e9032bbcc4e59271c570c8cdd2935d3943e6a68925dc933f34859df549fd6b2e62312f1ed2cbfdeff1c0fd1604172012110ff0ff70e142ca9c2fb6398ff847dbf56327c38a72beeefd2cfd15f46a3ef348bad1387b26dc48a61fea269a661ebfcea5ed930bed013245354e84209800a1c151fd8f280c37977abf53d08dc3251100b28ae97d91b0d495d3ce6ee1fc57918172e80d0cef5694f283a499077051768f2d8458659c1f55d96cda2798eac86ff4b02424a7000000000000002b8735e140752274fbd02c6699cdb622b877e21c55f7c19b5904b73b440d43ca0300000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000000002000000000000c4f5c91cc8f1c4c201000001630100002808030ec405b6363d3caababe2037b9b54a1360092365a2ec98caec3042917f689d467d23b2fc051b576859411f0d26b0af8ef4c6119087f54dd742b64ed51a702c1968b77c594b803cee99ddd46b03bacf38234fdb99b84ea2138f4063e9be0c0b32be385331bc94b7bf8b61c04ffd9076de2eb16a011c7e0c68e2b6799f5132fa32fe2fe0310e22b624899f44dea617ab0238f382412ab096b24761ae0f43e55c80f220fb943cd0e1c73a9e59a4bf13bee71f3082be9fa494b959350e3635e5a3fba8c1e75a9841fc14ba43d962a03ee9bd804171328c560822a4b9c729605dabc55016b902569ed5284439fb5ee63382d64ad62266389e081d9233b86cbaf867b5d9cf17e8c1f7a57455ce3837f13a1ee427e7a3e1e0547ee1a6d1a8d8786852c7a80a3b11653124c08ae96820f2dad39c9ab69c68c6e359f4a744cd4077a46321dcd8390000000000000038da164fa8be37bc6b533395f47cc637c2bd4e2d8a6c7bf78d0ac412a205a97701000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000004736f762d74657374f0e19f70a9b8230eb4b6c2763def30872bcc268ecb782a709f63dc3b4064d290 \ No newline at end of file diff --git a/examples/demo-prover/host/benches/prover_bench.rs b/examples/demo-prover/host/benches/prover_bench.rs new file mode 100644 index 000000000..fdbdc5db5 --- /dev/null +++ b/examples/demo-prover/host/benches/prover_bench.rs @@ -0,0 +1,263 @@ +use std::collections::HashMap; +use std::env; +use std::fs::{read_to_string, remove_file, File, OpenOptions}; +use std::io::Write; +use std::path::{Path, PathBuf}; +use std::str::FromStr; +use std::sync::{Arc, Mutex}; + +use anyhow::Context; +use celestia::types::{FilteredCelestiaBlock, NamespaceId}; +use celestia::verifier::address::CelestiaAddress; +use celestia::verifier::{CelestiaSpec, RollupParams}; +use celestia::CelestiaService; +use const_rollup_config::{ROLLUP_NAMESPACE_RAW, SEQUENCER_DA_ADDRESS}; +use demo_stf::app::{App, DefaultPrivateKey}; +use demo_stf::genesis_config::create_demo_genesis_config; +use log4rs::config::{Appender, Config, Root}; +use methods::ROLLUP_ELF; +use regex::Regex; +use risc0_adapter::host::Risc0Host; +use sov_modules_api::PrivateKey; +use sov_rollup_interface::services::da::DaService; +use sov_rollup_interface::stf::StateTransitionFunction; +use sov_rollup_interface::zk::ZkvmHost; +use sov_state::storage::Storage; +use sov_stf_runner::{from_toml_path, RollupConfig}; +use tempfile::TempDir; + +#[derive(Debug)] +struct RegexAppender { + regex: Regex, + file: Arc>, +} + +impl RegexAppender { + fn new(pattern: &str, file_path: &str) -> Self { + if Path::new(file_path).exists() { + remove_file(file_path).expect("Failed to remove existing file"); + } + let file = Arc::new(Mutex::new( + OpenOptions::new() + .create(true) + .append(true) + .open(file_path) + .unwrap(), + )); + let regex = Regex::new(pattern).unwrap(); + RegexAppender { regex, file } + } +} + +impl log::Log for RegexAppender { + fn log(&self, record: &log::Record) { + if let Some(captures) = self.regex.captures(record.args().to_string().as_str()) { + let mut file_guard = self.file.lock().unwrap(); + if let Some(matched_pc) = captures.get(1) { + let pc_value_num = u64::from_str_radix(&matched_pc.as_str()[2..], 16).unwrap(); + let pc_value = format!("{}\t", pc_value_num); + file_guard.write_all(pc_value.as_bytes()).unwrap(); + } + if let Some(matched_iname) = captures.get(2) { + let iname = matched_iname.as_str().to_uppercase(); + let 
iname_value = format!("{}\n", iname); + file_guard.write_all(iname_value.as_bytes()).unwrap(); + } + } + } + + fn enabled(&self, _metadata: &log::Metadata) -> bool { + true + } + + fn flush(&self) {} +} + +fn get_config(rollup_trace: &str) -> Config { + // [942786] pc: 0x0008e564, insn: 0xffc67613 => andi x12, x12, -4 + let regex_pattern = r".*?pc: (0x[0-9a-fA-F]+), insn: .*?=> ([a-z]*?) "; + + let custom_appender = RegexAppender::new(regex_pattern, rollup_trace); + + Config::builder() + .appender(Appender::builder().build("custom_appender", Box::new(custom_appender))) + .build( + Root::builder() + .appender("custom_appender") + .build(log::LevelFilter::Trace), + ) + .unwrap() +} + +#[cfg(feature = "bench")] +use risc0_adapter::metrics::GLOBAL_HASHMAP; + +// The rollup stores its data in the namespace b"sov-test" on Celestia +const ROLLUP_NAMESPACE: NamespaceId = NamespaceId(ROLLUP_NAMESPACE_RAW); + +#[macro_use] +extern crate prettytable; + +use prettytable::Table; + +fn print_cycle_averages(metric_map: HashMap) { + let mut metrics_vec: Vec<(String, (u64, u64))> = metric_map + .iter() + .map(|(k, (sum, count))| { + ( + k.clone(), + ( + ((*sum as f64) / (*count as f64)).round() as u64, + count.clone(), + ), + ) + }) + .collect(); + + metrics_vec.sort_by(|a, b| b.1.cmp(&a.1)); + + let mut table = Table::new(); + table.add_row(row!["Function", "Average Cycles", "Num Calls"]); + for (k, (avg, count)) in metrics_vec { + table.add_row(row![k, format!("{}", avg), format!("{}", count)]); + } + table.printstd(); +} + +fn chain_stats(num_blocks: usize, num_blocks_with_txns: usize, num_txns: usize, num_blobs: usize) { + let mut table = Table::new(); + table.add_row(row!["Total blocks", num_blocks]); + table.add_row(row!["Blocks with transactions", num_blocks_with_txns]); + table.add_row(row!["Number of blobs", num_blobs]); + table.add_row(row!["Total number of transactions", num_txns]); + table.add_row(row![ + "Average number of transactions per block", + ((num_txns as f64) / (num_blocks_with_txns as f64)) as u64 + ]); + table.printstd(); +} + +#[tokio::main] +async fn main() -> Result<(), anyhow::Error> { + if let Some(rollup_trace) = env::var("ROLLUP_TRACE").ok() { + if let Err(e) = log4rs::init_config(get_config(&rollup_trace)) { + eprintln!("Error initializing logger: {:?}", e); + } + } + + let rollup_config_path = "benches/rollup_config.toml".to_string(); + let mut rollup_config: RollupConfig = + from_toml_path(&rollup_config_path) + .context("Failed to read rollup configuration") + .unwrap(); + + let mut num_blocks = 0; + let mut num_blobs = 0; + let mut num_blocks_with_txns = 0; + let mut num_total_transactions = 0; + + let temp_dir = TempDir::new().expect("Unable to create temporary directory"); + rollup_config.storage.path = PathBuf::from(temp_dir.path()); + + let da_service = CelestiaService::new( + rollup_config.da.clone(), + RollupParams { + namespace: ROLLUP_NAMESPACE, + }, + ) + .await; + + let sequencer_private_key = DefaultPrivateKey::generate(); + + let mut app: App = App::new(rollup_config.storage.clone()); + + let sequencer_da_address = CelestiaAddress::from_str(SEQUENCER_DA_ADDRESS).unwrap(); + + let genesis_config = create_demo_genesis_config( + 100000000, + sequencer_private_key.default_address(), + sequencer_da_address.as_ref().to_vec(), + &sequencer_private_key, + ); + println!("Starting from empty storage, initialization chain"); + app.stf.init_chain(genesis_config); + + let mut prev_state_root = app + .get_storage() + .get_state_root(&Default::default()) + .expect("The 
storage needs to have a state root"); + + let mut demo = app.stf; + + let hex_data = read_to_string("benches/blocks.hex").expect("Failed to read data"); + let bincoded_blocks: Vec = hex_data + .lines() + .map(|line| { + let bytes = hex::decode(line).expect("Failed to decode hex data"); + bincode::deserialize(&bytes).expect("Failed to deserialize data") + }) + .collect(); + + for height in 2..(bincoded_blocks.len() as u64) { + num_blocks += 1; + let mut host = Risc0Host::new(ROLLUP_ELF); + host.write_to_guest(prev_state_root); + println!( + "Requesting data for height {} and prev_state_root 0x{}", + height, + hex::encode(prev_state_root) + ); + let filtered_block = &bincoded_blocks[height as usize]; + let _header_hash = hex::encode(filtered_block.header.header.hash()); + host.write_to_guest(&filtered_block.header); + let (mut blob_txs, inclusion_proof, completeness_proof) = da_service + .extract_relevant_txs_with_proof(&filtered_block) + .await; + + host.write_to_guest(&inclusion_proof); + host.write_to_guest(&completeness_proof); + host.write_to_guest(&blob_txs); + + if !blob_txs.is_empty() { + num_blobs += blob_txs.len(); + } + + let result = demo.apply_slot(Default::default(), filtered_block, &mut blob_txs); + for r in result.batch_receipts { + let num_tx = r.tx_receipts.len(); + num_total_transactions += num_tx; + if num_tx > 0 { + num_blocks_with_txns += 1; + } + } + // println!("{:?}",result.batch_receipts); + + host.write_to_guest(&result.witness); + + println!("Skipping prover at block {height} to capture cycle counts\n"); + let _receipt = host + .run_without_proving() + .expect("Prover should run successfully"); + println!("==================================================\n"); + prev_state_root = result.state_root.0; + } + + #[cfg(feature = "bench")] + { + let hashmap_guard = GLOBAL_HASHMAP.lock(); + let metric_map = hashmap_guard.clone(); + let total_cycles = metric_map.get("Cycles per block").unwrap().0; + println!("\nBlock stats\n"); + chain_stats( + num_blocks, + num_blocks_with_txns, + num_total_transactions, + num_blobs, + ); + println!("\nCycle Metrics\n"); + print_cycle_averages(metric_map); + println!("\nTotal cycles consumed for test: {}\n", total_cycles); + } + + Ok(()) +} diff --git a/examples/demo-prover/host/benches/rollup_config.toml b/examples/demo-prover/host/benches/rollup_config.toml new file mode 100644 index 000000000..22c57fc90 --- /dev/null +++ b/examples/demo-prover/host/benches/rollup_config.toml @@ -0,0 +1,21 @@ +[da] +# The JWT used to authenticate with the celestia light client. Instructions for generating this token can be found in the README +celestia_rpc_auth_token = "MY.SECRET.TOKEN" +# The address of the *trusted* Celestia light client to interact with +celestia_rpc_address = "http://localhost:11111/" +# The largest response the rollup will accept from the Celestia node. Defaults to 100 MB +max_celestia_response_body_size = 104_857_600 + +[storage] +# The path to the rollup's data directory. Paths that do not begin with `/` are interpreted as relative paths. +path = "benches/demo_data" + +[runner] +# We define the rollup's genesis to occur at block number `start_height`. 
The rollup will ignore +# any blocks before this height +start_height = 1 + +[runner.rpc_config] +# the host and port to bind the rpc server for +bind_host = "127.0.0.1" +bind_port = 12345 diff --git a/examples/demo-prover/host/rollup_config.toml b/examples/demo-prover/host/rollup_config.toml index e1e3aa3f7..92fc60ee6 100644 --- a/examples/demo-prover/host/rollup_config.toml +++ b/examples/demo-prover/host/rollup_config.toml @@ -1,9 +1,24 @@ -[rollup_config] -start_height = 671431 [da] -celestia_rpc_auth_token = "SUPER_SECRET_TOKEN" -celestia_rpc_address = "http://localhost:11111/" -# 100 MB +# The JWT used to authenticate with the celestia light client. Instructions for generating this token can be found in the README +celestia_rpc_auth_token = "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJBbGxvdyI6WyJwdWJsaWMiLCJyZWFkIiwid3JpdGUiLCJhZG1pbiJdfQ.Y4MAobLjkH5Rp9EdcFWmL4V8TVUabsrAocR0Xf5tjEo" +# The address of the *trusted* Celestia light client to interact with +celestia_rpc_address = "http://127.0.0.1:26658" +# The largest response the rollup will accept from the Celestia node. Defaults to 100 MB max_celestia_response_body_size = 104_857_600 -[rollup_config.runner.storage] +# The maximum time to wait for a response to an RPC query against Celestia node. Defaults to 60 seconds. +celestia_rpc_timeout_seconds = 60 + +[storage] +# The path to the rollup's data directory. Paths that do not begin with `/` are interpreted as relative paths. path = "demo_data" + +[runner.storage] +path = "demo_data" + +[runner] +start_height = 1 + +[runner.rpc_config] +# the host and port to bind the rpc server for +bind_host = "127.0.0.1" +bind_port = 12345 diff --git a/examples/demo-prover/host/src/main.rs b/examples/demo-prover/host/src/main.rs index 1adff2942..3ccbfb775 100644 --- a/examples/demo-prover/host/src/main.rs +++ b/examples/demo-prover/host/src/main.rs @@ -1,35 +1,35 @@ use std::env; +use std::str::FromStr; use anyhow::Context; +use celestia::types::NamespaceId; +use celestia::verifier::address::CelestiaAddress; +use celestia::verifier::{CelestiaSpec, RollupParams}; +use celestia::{CelestiaService, DaServiceConfig}; use const_rollup_config::{ROLLUP_NAMESPACE_RAW, SEQUENCER_DA_ADDRESS}; use demo_stf::app::{App, DefaultPrivateKey}; use demo_stf::genesis_config::create_demo_genesis_config; -use jupiter::da_service::{CelestiaService, DaServiceConfig}; -use jupiter::types::NamespaceId; -use jupiter::verifier::RollupParams; use methods::{ROLLUP_ELF, ROLLUP_ID}; use risc0_adapter::host::{Risc0Host, Risc0Verifier}; -use serde::Deserialize; use sov_modules_api::PrivateKey; use sov_rollup_interface::services::da::DaService; use sov_rollup_interface::stf::StateTransitionFunction; use sov_rollup_interface::zk::ZkvmHost; use sov_state::Storage; -use sov_stf_runner::{from_toml_path, Config as RunnerConfig}; +use sov_stf_runner::{from_toml_path, RollupConfig}; use tracing::{info, Level}; -use sov_stf_runner::RollupConfig; - -#[derive(Debug, Clone, PartialEq, Deserialize)] -pub struct Config { - pub rollup_config: RollupConfig, - pub da: DaServiceConfig, -} // The rollup stores its data in the namespace b"sov-test" on Celestia const ROLLUP_NAMESPACE: NamespaceId = NamespaceId(ROLLUP_NAMESPACE_RAW); #[tokio::main] async fn main() -> Result<(), anyhow::Error> { + // If SKIP_PROVER is set, this means that we still compile and generate the riscV ELF + // We execute the code inside the riscV but we don't prove it. 
This saves a significant amount of time + // The primary benefit of doing this is to make sure we produce valid code that can run inside the + // riscV virtual machine. Since proving is something we offload entirely to risc0, ensuring that + // we produce valid riscV code and that it can execute is very useful. + let skip_prover = env::var("SKIP_PROVER").is_ok(); // Initializing logging let subscriber = tracing_subscriber::fmt() .with_max_level(Level::INFO) @@ -38,47 +38,60 @@ async fn main() -> Result<(), anyhow::Error> { .map_err(|_err| eprintln!("Unable to set global default subscriber")) .expect("Cannot fail to set subscriber"); + // Same rollup_config.toml as used for the demo_rollup + // When running from the demo-prover folder, the first argument can be pointed to ../demo-rollup/rollup_config.toml let rollup_config_path = env::args() .nth(1) .unwrap_or_else(|| "rollup_config.toml".to_string()); - let config: Config = + let rollup_config: RollupConfig = from_toml_path(&rollup_config_path).context("Failed to read rollup configuration")?; + // New Celestia DA service to fetch blocks from the DA node (light client / docker / mock DA) let da_service = CelestiaService::new( - config.da.clone(), + rollup_config.da.clone(), RollupParams { namespace: ROLLUP_NAMESPACE, }, ) .await; + // This is the private key of the sequencer on the rollup itself (NOT the DA layer. The DA layer address would be SEQUENCER_DA_ADDRESS) let sequencer_private_key = DefaultPrivateKey::generate(); - let app: App = - App::new(config.rollup_config.runner.storage.clone()); + let mut app: App = App::new(rollup_config.storage.clone()); let is_storage_empty = app.get_storage().is_empty(); - let mut demo = app.stf; + // If storage is empty, we're starting from scratch, so we need to initialize if is_storage_empty { + let sequencer_da_address = CelestiaAddress::from_str(SEQUENCER_DA_ADDRESS).unwrap(); let genesis_config = create_demo_genesis_config( 100000000, sequencer_private_key.default_address(), - SEQUENCER_DA_ADDRESS.to_vec(), - &sequencer_private_key, + sequencer_da_address.as_ref().to_vec(), &sequencer_private_key, ); info!("Starting from empty storage, initialization chain"); - demo.init_chain(genesis_config); + app.stf.init_chain(genesis_config); } - let mut prev_state_root = { - let res = demo.apply_slot(Default::default(), []); - res.state_root.0 - }; - - for height in config.rollup_config.start_height.. { + let mut prev_state_root = app + .get_storage() + .get_state_root(&Default::default()) + .expect("The storage needs to have a state root"); + + // We start from the height in rollup_config. When running with docker, this is usually height 1 + for height in rollup_config.runner.start_height.. { + // We initialize a new VM with the rollup ELF. + // ROLLUP_ELF points to the riscV ELF code generated by the risc0 infrastructure + // Risc0Host::new carries out the process of compiling the code in methods/guest/src/bin/rollup.rs + // and generating the ELF file. 
(The risc0 code builds a new toolchain to enable compiling to a riscV llvm backend) let mut host = Risc0Host::new(ROLLUP_ELF); + // This function is used to communicate to the rollup.rs code running inside the VM + // The reads need to be in order of the writes + // prev_state_root is the root after applying the block at height-1 + // This is necessary since we're proving that the current state root for the current height is + // result of applying the block against state with root prev_state_root host.write_to_guest(prev_state_root); info!( "Requesting data for height {} and prev_state_root 0x{}", @@ -88,6 +101,8 @@ async fn main() -> Result<(), anyhow::Error> { let filtered_block = da_service.get_finalized_at(height).await?; let header_hash = hex::encode(filtered_block.header.header.hash()); host.write_to_guest(&filtered_block.header); + // When we get a block from DA, we also need to provide proofs of completeness and correctness + // https://github.com/Sovereign-Labs/sovereign-sdk/blob/nightly/rollup-interface/specs/interfaces/da.md#type-inclusionmultiproof let (mut blobs, inclusion_proof, completeness_proof) = da_service .extract_relevant_txs_with_proof(&filtered_block) .await; @@ -99,19 +114,35 @@ async fn main() -> Result<(), anyhow::Error> { header_hash, ); + // The above proofs of correctness and completeness need to passed to the prover host.write_to_guest(&inclusion_proof); host.write_to_guest(&completeness_proof); + // The extracted blobs need to be passed to the prover host.write_to_guest(&blobs); - let result = demo.apply_slot(Default::default(), &mut blobs); + let result = app + .stf + .apply_slot(Default::default(), &filtered_block, &mut blobs); + // Witness contains the merkle paths to the state root so that the code inside the VM + // can access state values (Witness can also contain other hints and proofs) host.write_to_guest(&result.witness); - info!("Starting proving..."); - let receipt = host.run().expect("Prover should run successfully"); - info!("Start verifying.."); - receipt.verify(ROLLUP_ID).expect("Receipt should be valid"); - + // Run the actual prover to generate a receipt that can then be verified + if !skip_prover { + info!("Starting proving..."); + let receipt = host.run().expect("Prover should run successfully"); + info!("Start verifying.."); + receipt.verify(ROLLUP_ID).expect("Receipt should be valid"); + } else { + // This runs the riscV code inside the VM without actually generating the proofs + // This is useful for testing if rollup code actually executes properly + let _receipt = host + .run_without_proving() + .expect("Prover should run successfully"); + } + + // Set the value of prev_state_root to the current one in preparation for the next block prev_state_root = result.state_root.0; info!("Completed proving and verifying block {height}"); } diff --git a/examples/demo-prover/methods/Cargo.toml b/examples/demo-prover/methods/Cargo.toml index 348782fbb..8147581d2 100644 --- a/examples/demo-prover/methods/Cargo.toml +++ b/examples/demo-prover/methods/Cargo.toml @@ -9,3 +9,6 @@ risc0-build = { workspace = true } [package.metadata.risc0] methods = ["guest"] + +[features] +bench = [] diff --git a/examples/demo-prover/methods/build.rs b/examples/demo-prover/methods/build.rs index 08a8a4eb7..b4b99cdf8 100644 --- a/examples/demo-prover/methods/build.rs +++ b/examples/demo-prover/methods/build.rs @@ -1,3 +1,24 @@ +use std::collections::HashMap; + fn main() { - risc0_build::embed_methods(); + let guest_pkg_to_options = get_guest_options(); + 
risc0_build::embed_methods_with_options(guest_pkg_to_options); +} + +#[cfg(not(feature = "bench"))] +fn get_guest_options() -> HashMap<&'static str, risc0_build::GuestOptions> { + HashMap::new() +} + +#[cfg(feature = "bench")] +fn get_guest_options() -> HashMap<&'static str, risc0_build::GuestOptions> { + let mut guest_pkg_to_options = HashMap::new(); + guest_pkg_to_options.insert( + "sov-demo-prover-guest", + risc0_build::GuestOptions { + features: vec!["bench".to_string()], + std: true, + }, + ); + guest_pkg_to_options } diff --git a/examples/demo-prover/methods/guest/Cargo.lock b/examples/demo-prover/methods/guest/Cargo.lock index 154f49725..8430f2b93 100644 --- a/examples/demo-prover/methods/guest/Cargo.lock +++ b/examples/demo-prover/methods/guest/Cargo.lock @@ -41,9 +41,9 @@ dependencies = [ [[package]] name = "aho-corasick" -version = "1.0.2" +version = "1.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "43f6cb1bf222025340178f382c426f13757b2960e89779dfcb319c32542a5a41" +checksum = "86b8f9420f797f2d9e935edf629310eb938a0d839f984e25327f3c7eed22300c" dependencies = [ "memchr", ] @@ -56,13 +56,13 @@ checksum = "3b13c32d80ecc7ab747b80c3784bce54ee8a7a0cc4fbda9bf4cda2cf6fe90854" [[package]] name = "async-trait" -version = "0.1.71" +version = "0.1.72" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a564d521dd56509c4c47480d00b80ee55f7e385ae48db5744c67ad50c92d2ebf" +checksum = "cc6dde6e4ed435a4c1ee4e73592f5ba9da2151af10076cc04858746af9352d09" dependencies = [ "proc-macro2", "quote", - "syn 2.0.26", + "syn 2.0.28", ] [[package]] @@ -104,6 +104,16 @@ version = "1.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8c3c1a368f70d6cf7302d78f8f7093da241fb8e8807c05cc9e51a125895a6d5b" +[[package]] +name = "bcs" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4bd3ffe8b19a604421a5d461d4a70346223e535903fbc3067138bddbebddcf77" +dependencies = [ + "serde", + "thiserror", +] + [[package]] name = "bech32" version = "0.9.1" @@ -125,6 +135,12 @@ version = "1.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" +[[package]] +name = "bitflags" +version = "2.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "630be753d4e58660abd17930c71b647fe46c27ea6b63cc59e1e3851406972e42" + [[package]] name = "blake2" version = "0.10.6" @@ -221,7 +237,7 @@ checksum = "fdde5c9cd29ebd706ce1b35600920a33550e402fc998a2e53ad3b42c3c47a192" dependencies = [ "proc-macro2", "quote", - "syn 2.0.26", + "syn 2.0.28", ] [[package]] @@ -268,6 +284,33 @@ dependencies = [ "jobserver", ] +[[package]] +name = "celestia" +version = "0.1.0" +dependencies = [ + "anyhow", + "async-trait", + "base64 0.21.2", + "bech32", + "borsh", + "hex", + "hex-literal", + "nmt-rs", + "prost", + "prost-build", + "prost-types", + "risc0-zkvm", + "risc0-zkvm-platform", + "serde", + "sha2 0.10.6", + "sov-rollup-interface", + "tendermint", + "tendermint-proto", + "thiserror", + "tracing", + "zk-cycle-macros", +] + [[package]] name = "cfg-if" version = "1.0.0" @@ -286,9 +329,9 @@ dependencies = [ [[package]] name = "const-oid" -version = "0.9.4" +version = "0.9.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "795bc6e66a8e340f075fcf6227e417a2dc976b92b91f3cdc778bb858778b6747" +checksum = "28c122c3980598d243d63d9a704629a2d748d101f278052ff068be5a4423ab6f" [[package]] name = 
"const-rollup-config" @@ -361,15 +404,29 @@ dependencies = [ [[package]] name = "curve25519-dalek" -version = "3.2.0" +version = "4.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0b9fdf9972b2bd6af2d913799d9ebc165ea4d2e65878e329d9c6b372c4491b61" +checksum = "f711ade317dd348950a9910f81c5947e3d8907ebd2b83f76203ff1807e6a2bc2" dependencies = [ - "byteorder", - "digest 0.9.0", - "rand_core 0.5.1", + "cfg-if", + "cpufeatures", + "curve25519-dalek-derive", + "digest 0.10.7", + "fiat-crypto", + "platforms", + "rustc_version", "subtle", - "zeroize", +] + +[[package]] +name = "curve25519-dalek-derive" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "83fdaf97f4804dcebfa5862639bc9ce4121e82140bec2a987ac5140294865b5b" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.28", ] [[package]] @@ -380,7 +437,7 @@ checksum = "1c359b7249347e46fb28804470d071c921156ad62b3eef5d34e2ba867533dec8" dependencies = [ "byteorder", "digest 0.9.0", - "rand_core 0.6.4", + "rand_core", "subtle-ng", "zeroize", ] @@ -396,7 +453,8 @@ dependencies = [ "serde", "sov-accounts", "sov-bank", - "sov-election", + "sov-blob-storage", + "sov-chain-state", "sov-modules-api", "sov-modules-stf-template", "sov-rollup-interface", @@ -408,14 +466,20 @@ dependencies = [ [[package]] name = "der" -version = "0.7.7" +version = "0.7.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0c7ed52955ce76b1554f509074bb357d3fb8ac9b51288a65a3fd480d1dfba946" +checksum = "fffa369a668c8af7dbf8b5e56c9f744fbd399949ed171606040001947de40b1c" dependencies = [ "const-oid", "zeroize", ] +[[package]] +name = "deranged" +version = "0.3.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7684a49fb1af197853ef7b2ee694bc1f5b4179556f1e5710e1760c5db6f5e929" + [[package]] name = "derive_more" version = "0.99.17" @@ -477,7 +541,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d05213e96f184578b5f70105d4d0a644a168e99e12d7bea0b200c15d67b5c182" dependencies = [ "futures", - "rand 0.8.5", + "rand", "reqwest", "thiserror", "tokio", @@ -508,15 +572,6 @@ dependencies = [ "syn 1.0.109", ] -[[package]] -name = "ed25519" -version = "1.5.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "91cff35c70bba8a626e3185d8cd48cc11b5437e1a5bcd15b9b5fa3c64b6dfee7" -dependencies = [ - "signature 1.6.4", -] - [[package]] name = "ed25519" version = "2.2.1" @@ -524,7 +579,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5fb04eee5d9d907f29e80ee6b0e78f7e2c82342c63e3580d8c4f69d9d5aad963" dependencies = [ "pkcs8", - "signature 2.1.0", + "signature", ] [[package]] @@ -535,29 +590,27 @@ checksum = "3c8465edc8ee7436ffea81d21a019b16676ee3db267aa8d5a8d729581ecf998b" dependencies = [ "curve25519-dalek-ng", "hex", - "rand_core 0.6.4", + "rand_core", "sha2 0.9.9", "zeroize", ] [[package]] name = "ed25519-dalek" -version = "1.0.1" +version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c762bae6dcaf24c4c84667b8579785430908723d5c889f469d76a41d59cc7a9d" +checksum = "7277392b266383ef8396db7fdeb1e77b6c52fed775f5df15bb24f35b72156980" dependencies = [ "curve25519-dalek", - "ed25519 1.5.3", - "rand 0.7.3", - "sha2 0.9.9", - "zeroize", + "ed25519", + "sha2 0.10.6", ] [[package]] name = "either" -version = "1.8.1" +version = "1.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"7fcaabb2fef8c910e7f4c7ce9f67a1283a1715879a7c230ca9d6d1ae31f16d91" +checksum = "a26ae43d7bcc3b814de94796a5e736d4029efb0ee900c12e2d54c993ad1a1e07" [[package]] name = "encoding_rs" @@ -579,9 +632,9 @@ dependencies = [ [[package]] name = "errno" -version = "0.3.1" +version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4bcfec3a70f97c962c307b2d2c56e358cf1d00b558d74262b5f929ee8cc7e73a" +checksum = "6b30f669a7961ef1631673d2766cc92f52d64f7ef354d4fe0ddfd30ed52f0f4f" dependencies = [ "errno-dragonfly", "libc", @@ -610,12 +663,15 @@ dependencies = [ [[package]] name = "fastrand" -version = "1.9.0" +version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e51093e27b0797c359783294ca4f0a911c270184cb10f85783b118614a1501be" -dependencies = [ - "instant", -] +checksum = "6999dc1837253364c2ebb0704ba97994bd874e8f195d665c50b7548f6ea92764" + +[[package]] +name = "fiat-crypto" +version = "0.1.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e825f6987101665dea6ec934c09ec6d721de7bc1bf92248e1d5810c8cd636b77" [[package]] name = "fixedbitset" @@ -729,7 +785,7 @@ checksum = "89ca545a94061b6365f2c7355b4b32bd20df3ff95f02da9329b34ccc3bd6ee72" dependencies = [ "proc-macro2", "quote", - "syn 2.0.26", + "syn 2.0.28", ] [[package]] @@ -910,7 +966,7 @@ dependencies = [ "httpdate", "itoa", "pin-project-lite", - "socket2", + "socket2 0.4.9", "tokio", "tower-service", "tracing", @@ -943,7 +999,7 @@ dependencies = [ "prost", "ripemd", "serde", - "sha2 0.10.7", + "sha2 0.10.6", "sha3", ] @@ -982,31 +1038,11 @@ dependencies = [ "generic-array", ] -[[package]] -name = "instant" -version = "0.1.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7a5bbe824c507c5da5956355e86a746d82e0e1464f65d862cc5e71da70e94b2c" -dependencies = [ - "cfg-if", -] - [[package]] name = "inventory" -version = "0.3.9" +version = "0.3.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "25b1d6b4b9fb75fc419bdef998b689df5080a32931cb3395b86202046b56a9ea" - -[[package]] -name = "io-lifetimes" -version = "1.0.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eae7b9aee968036d54dce06cebaefd919e4472e753296daccd6d344e3e2df0c2" -dependencies = [ - "hermit-abi", - "libc", - "windows-sys", -] +checksum = "a53088c87cf71c9d4f3372a2cb9eea1e7b8a0b1bf8b7f7d23fe5b76dbb07e63b" [[package]] name = "ipnet" @@ -1031,12 +1067,13 @@ checksum = "af150ab688ff2122fcef229be89cb50dd66af9e01a4ff320cc137eecc9bacc38" [[package]] name = "jmt" -version = "0.6.0" +version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e1a302f0defd323b833c9848c20ab40c3156128f50d7bf8eebeed2ef58167258" +checksum = "9e49c5d2c13e15f77f22cee3df3dc822b46051b217112035d72687cb57a9cbde" dependencies = [ "anyhow", "borsh", + "digest 0.10.7", "hashbrown 0.13.2", "hex", "ics23", @@ -1045,7 +1082,7 @@ dependencies = [ "num-derive 0.3.3", "num-traits", "serde", - "sha2 0.10.7", + "sha2 0.10.6", "thiserror", "tracing", ] @@ -1068,31 +1105,6 @@ dependencies = [ "wasm-bindgen", ] -[[package]] -name = "jupiter" -version = "0.1.0" -dependencies = [ - "anyhow", - "async-trait", - "base64 0.21.2", - "bech32", - "borsh", - "hex", - "hex-literal", - "nmt-rs", - "prost", - "prost-build", - "prost-types", - "serde", - "serde_json", - "sha2 0.10.7", - "sov-rollup-interface", - "tendermint", - "tendermint-proto", - "thiserror", - "tracing", -] - [[package]] name = "keccak" version = "0.1.4" @@ 
-1122,9 +1134,9 @@ checksum = "f7012b1bbb0719e1097c47611d3898568c546d597c2e74d66f6087edd5233ff4" [[package]] name = "linux-raw-sys" -version = "0.3.8" +version = "0.4.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ef53942eb7bf7ff43a617b3e2c1c4a5ecf5944a7c1bc12d7ee39bbb15e5c1519" +checksum = "57bcfdad1b858c2db7c38303a6d2ad4dfaf5eb53dfeb0910128b2c26d6158503" [[package]] name = "log" @@ -1202,7 +1214,7 @@ dependencies = [ "borsh", "bytes", "serde", - "sha2 0.10.7", + "sha2 0.10.6", ] [[package]] @@ -1224,14 +1236,14 @@ checksum = "9e6a0fd4f737c707bd9086cc16c925f294943eb62eb71499e9fd4cf71f8b9f4e" dependencies = [ "proc-macro2", "quote", - "syn 2.0.26", + "syn 2.0.28", ] [[package]] name = "num-traits" -version = "0.2.15" +version = "0.2.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "578ede34cf02f8924ab9447f50c28075b4d3e5b269972345e7e0372b38c6cdcd" +checksum = "f30b0abd723be7e2ffca1272140fac1a2f084c77ec3e123c192b66af1ee9e6c2" dependencies = [ "autocfg", ] @@ -1269,11 +1281,11 @@ checksum = "624a8340c38c1b80fd549087862da4ba43e08858af025b236e509b6649fc13d5" [[package]] name = "openssl" -version = "0.10.55" +version = "0.10.56" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "345df152bc43501c5eb9e4654ff05f794effb78d4efe3d53abc158baddc0703d" +checksum = "729b745ad4a5575dd06a3e1af1414bd330ee561c01b3899eb584baeaa8def17e" dependencies = [ - "bitflags", + "bitflags 1.3.2", "cfg-if", "foreign-types", "libc", @@ -1290,7 +1302,7 @@ checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c" dependencies = [ "proc-macro2", "quote", - "syn 2.0.26", + "syn 2.0.28", ] [[package]] @@ -1301,9 +1313,9 @@ checksum = "ff011a302c396a5197692431fc1948019154afc178baf7d8e37367442a4601cf" [[package]] name = "openssl-sys" -version = "0.9.90" +version = "0.9.91" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "374533b0e45f3a7ced10fcaeccca020e66656bc03dac384f852e4e5a7a8104a6" +checksum = "866b5f16f90776b9bb8dc1e1802ac6f0513de3a7a7465867bfbc563dc737faac" dependencies = [ "cc", "libc", @@ -1324,7 +1336,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7676374caaee8a325c9e7a2ae557f216c5563a171d6997b0ef8a65af35147700" dependencies = [ "base64ct", - "rand_core 0.6.4", + "rand_core", "subtle", ] @@ -1353,7 +1365,7 @@ dependencies = [ "digest 0.10.7", "hmac", "password-hash", - "sha2 0.10.7", + "sha2 0.10.6", ] [[package]] @@ -1374,9 +1386,9 @@ dependencies = [ [[package]] name = "pin-project-lite" -version = "0.2.10" +version = "0.2.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4c40d25201921e5ff0c862a505c6557ea88568a4e3ace775ab55e93f2f4f9d57" +checksum = "12cc1b0bf1727a77a54b6654e7b5f1af8604923edc8b81885f8ec92f9e3f0a05" [[package]] name = "pin-utils" @@ -1400,6 +1412,12 @@ version = "0.3.27" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "26072860ba924cbfa98ea39c8c19b4dd6a4a25423dbdf219c1eca91aa0cf6964" +[[package]] +name = "platforms" +version = "3.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3d7ddaed09e0eb771a79ab0fd64609ba0afb0a8366421957936ad14cbd13630" + [[package]] name = "ppv-lite86" version = "0.2.17" @@ -1490,24 +1508,13 @@ dependencies = [ [[package]] name = "quote" -version = "1.0.31" +version = "1.0.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"5fe8a65d69dd0808184ebb5f836ab526bb259db23c657efa38711b1072ee47f0" +checksum = "50f3b39ccfb720540debaa0164757101c08ecb8d326b15358ce76a62c7e85965" dependencies = [ "proc-macro2", ] -[[package]] -name = "rand" -version = "0.7.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6a6b1679d49b24bbfe0c803429aa1874472f50d9b363131f0e89fc356b544d03" -dependencies = [ - "rand_chacha 0.2.2", - "rand_core 0.5.1", - "rand_hc", -] - [[package]] name = "rand" version = "0.8.5" @@ -1515,18 +1522,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" dependencies = [ "libc", - "rand_chacha 0.3.1", - "rand_core 0.6.4", -] - -[[package]] -name = "rand_chacha" -version = "0.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f4c8ed856279c9737206bf725bf36935d8666ead7aa69b52be55af369d193402" -dependencies = [ - "ppv-lite86", - "rand_core 0.5.1", + "rand_chacha", + "rand_core", ] [[package]] @@ -1536,15 +1533,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" dependencies = [ "ppv-lite86", - "rand_core 0.6.4", + "rand_core", ] -[[package]] -name = "rand_core" -version = "0.5.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "90bde5296fc891b0cef12a6d03ddccc162ce7b2aff54160af9338f8d40df6d19" - [[package]] name = "rand_core" version = "0.6.4" @@ -1554,22 +1545,13 @@ dependencies = [ "getrandom", ] -[[package]] -name = "rand_hc" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ca3129af7b92a17112d59ad498c6f81eaf463253766b90396d39ea7a39d6613c" -dependencies = [ - "rand_core 0.5.1", -] - [[package]] name = "redox_syscall" version = "0.2.16" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fb5a58c1855b4b6819d59012155603f0b22ad30cad752600aadfcb695265519a" dependencies = [ - "bitflags", + "bitflags 1.3.2", ] [[package]] @@ -1578,7 +1560,7 @@ version = "0.3.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "567664f262709473930a4bf9e51bf2ebf3348f2e748ccc50dea20646858f8f29" dependencies = [ - "bitflags", + "bitflags 1.3.2", ] [[package]] @@ -1594,9 +1576,9 @@ dependencies = [ [[package]] name = "regex" -version = "1.9.1" +version = "1.9.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b2eae68fc220f7cf2532e4494aded17545fce192d59cd996e0fe7887f4ceb575" +checksum = "81bc1d4caf89fac26a70747fe603c130093b53c773888797a6329091246d651a" dependencies = [ "aho-corasick", "memchr", @@ -1606,9 +1588,9 @@ dependencies = [ [[package]] name = "regex-automata" -version = "0.3.3" +version = "0.3.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "39354c10dd07468c2e73926b23bb9c2caca74c5501e38a35da70406f1d923310" +checksum = "fed1ceff11a1dddaee50c9dc8e4938bd106e9d89ae372f192311e7da498e3b69" dependencies = [ "aho-corasick", "memchr", @@ -1673,9 +1655,12 @@ version = "0.1.0" dependencies = [ "anyhow", "bincode", + "bytemuck", "risc0-zkvm", + "risc0-zkvm-platform", "serde", "sov-rollup-interface", + "zk-cycle-utils", ] [[package]] @@ -1699,7 +1684,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2ea0e9d6d5845f11157728c494541c42559357fee35afce767b3d3610ef7494b" dependencies = [ "bytemuck", - "rand_core 0.6.4", + "rand_core", ] [[package]] @@ -1715,11 +1700,11 @@ dependencies = 
[ "hex", "log", "paste", - "rand_core 0.6.4", + "rand_core", "risc0-core", "risc0-zkvm-platform", "serde", - "sha2 0.10.7", + "sha2 0.10.6", "thiserror", "tracing", ] @@ -1773,13 +1758,12 @@ dependencies = [ [[package]] name = "rustix" -version = "0.37.23" +version = "0.38.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4d69718bf81c6127a49dc64e44a742e8bb9213c0ff8869a22c308f84c1d4ab06" +checksum = "19ed4fa021d81c8392ce04db050a3da9a60299050b7ae1cf482d862b54a7218f" dependencies = [ - "bitflags", + "bitflags 2.3.3", "errno", - "io-lifetimes", "libc", "linux-raw-sys", "windows-sys", @@ -1826,11 +1810,11 @@ dependencies = [ [[package]] name = "security-framework" -version = "2.9.1" +version = "2.9.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1fc758eb7bffce5b308734e9b0c1468893cae9ff70ebf13e7090be8dcbcc83a8" +checksum = "05b64fb303737d99b81884b2c63433e9ae28abebe5eb5045dcdd175dc2ecf4de" dependencies = [ - "bitflags", + "bitflags 1.3.2", "core-foundation", "core-foundation-sys", "libc", @@ -1839,9 +1823,9 @@ dependencies = [ [[package]] name = "security-framework-sys" -version = "2.9.0" +version = "2.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f51d0c0d83bec45f16480d0ce0058397a69e48fcdc52d1dc8855fb68acbd31a7" +checksum = "e932934257d3b408ed8f30db49d85ea163bfe74961f017f405b025af298f0c7a" dependencies = [ "core-foundation-sys", "libc", @@ -1855,9 +1839,9 @@ checksum = "b0293b4b29daaf487284529cc2f5675b8e57c61f70167ba415a463651fd6a918" [[package]] name = "serde" -version = "1.0.173" +version = "1.0.188" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e91f70896d6720bc714a4a57d22fc91f1db634680e65c8efe13323f1fa38d53f" +checksum = "cf9e0fcba69a370eed61bcf2b728575f726b50b55cba78064753d708ddc7549e" dependencies = [ "serde_derive", ] @@ -1873,13 +1857,13 @@ dependencies = [ [[package]] name = "serde_derive" -version = "1.0.173" +version = "1.0.188" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a6250dde8342e0232232be9ca3db7aa40aceb5a3e5dd9bddbc00d99a007cde49" +checksum = "4eca7ac642d82aa35b60049a6eccb4be6be75e599bd2e9adb5f875a737654af2" dependencies = [ "proc-macro2", "quote", - "syn 2.0.26", + "syn 2.0.28", ] [[package]] @@ -1895,9 +1879,9 @@ dependencies = [ [[package]] name = "serde_json" -version = "1.0.103" +version = "1.0.104" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d03b412469450d4404fe8499a268edd7f8b79fecb074b0d812ad64ca21f4031b" +checksum = "076066c5f1078eac5b722a31827a8832fe108bed65dfa75e233c89f8206e976c" dependencies = [ "itoa", "ryu", @@ -1906,13 +1890,13 @@ dependencies = [ [[package]] name = "serde_repr" -version = "0.1.14" +version = "0.1.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1d89a8107374290037607734c0b73a85db7ed80cae314b3c5791f192a496e731" +checksum = "8725e1dfadb3a50f7e5ce0b1a540466f6ed3fe7a0fca2ac2b8b831d31316bd00" dependencies = [ "proc-macro2", "quote", - "syn 2.0.26", + "syn 2.0.28", ] [[package]] @@ -1953,9 +1937,8 @@ dependencies = [ [[package]] name = "sha2" -version = "0.10.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "479fb9d862239e610720565ca91403019f2f00410f1864c5aa7479b950a76ed8" +version = "0.10.6" +source = "git+https://github.com/risc0/RustCrypto-hashes?tag=sha2/v0.10.6-risc0#e75cafd9f55da196061f6fadf8bc8a86778192b7" dependencies = [ "cfg-if", "cpufeatures", @@ -1972,12 +1955,6 @@ dependencies = [ 
"keccak", ] -[[package]] -name = "signature" -version = "1.6.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "74233d3b3b2f6d4b006dc19dee745e73e2a6bfb6f93607cd3b02bd5b00797d7c" - [[package]] name = "signature" version = "2.1.0" @@ -2003,6 +1980,16 @@ dependencies = [ "winapi", ] +[[package]] +name = "socket2" +version = "0.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2538b18701741680e0322a2302176d3253a35388e2e62f172f64f4f16605f877" +dependencies = [ + "libc", + "windows-sys", +] + [[package]] name = "sov-accounts" version = "0.1.0" @@ -2028,40 +2015,60 @@ dependencies = [ ] [[package]] -name = "sov-demo-prover-guest" +name = "sov-blob-storage" version = "0.1.0" dependencies = [ "anyhow", + "bincode", "borsh", - "const-rollup-config", - "demo-stf", - "directories", - "downloader", - "jupiter", - "log", - "risc0-adapter", - "risc0-zkvm", - "serde", - "serde_json", - "sha2 0.10.7", + "hex", + "sov-modules-api", + "sov-modules-macros", "sov-rollup-interface", - "tempfile", + "sov-sequencer-registry", + "sov-state", "tracing", - "zip", ] [[package]] -name = "sov-election" +name = "sov-chain-state" version = "0.1.0" dependencies = [ "anyhow", "borsh", - "hex", "sov-modules-api", + "sov-modules-macros", "sov-rollup-interface", "sov-state", ] +[[package]] +name = "sov-demo-prover-guest" +version = "0.1.0" +dependencies = [ + "anyhow", + "borsh", + "celestia", + "const-rollup-config", + "demo-stf", + "directories", + "downloader", + "risc0-adapter", + "risc0-zkvm", + "risc0-zkvm-platform", + "serde", + "serde_json", + "sha2 0.10.6", + "sha3", + "sov-modules-api", + "sov-modules-stf-template", + "sov-rollup-interface", + "sov-sequencer-registry", + "sov-state", + "tempfile", + "zip", +] + [[package]] name = "sov-first-read-last-write-cache" version = "0.1.0" @@ -2078,13 +2085,15 @@ dependencies = [ "borsh", "derive_more", "ed25519-dalek", + "risc0-zkvm", + "risc0-zkvm-platform", "serde", - "serde_json", - "sha2 0.10.7", + "sha2 0.10.6", "sov-modules-macros", "sov-rollup-interface", "sov-state", "thiserror", + "zk-cycle-macros", ] [[package]] @@ -2107,11 +2116,16 @@ dependencies = [ "borsh", "hex", "jmt", + "risc0-zkvm", + "risc0-zkvm-platform", "serde", "sov-modules-api", "sov-rollup-interface", "sov-state", + "thiserror", "tracing", + "zk-cycle-macros", + "zk-cycle-utils", ] [[package]] @@ -2120,6 +2134,7 @@ version = "0.1.0" dependencies = [ "anyhow", "async-trait", + "bincode", "borsh", "bytes", "digest 0.10.7", @@ -2133,10 +2148,14 @@ version = "0.1.0" dependencies = [ "anyhow", "borsh", + "risc0-zkvm", + "risc0-zkvm-platform", "sov-bank", "sov-modules-api", "sov-rollup-interface", "sov-state", + "zk-cycle-macros", + "zk-cycle-utils", ] [[package]] @@ -2144,14 +2163,18 @@ name = "sov-state" version = "0.1.0" dependencies = [ "anyhow", + "bcs", "borsh", "hex", "jmt", + "risc0-zkvm", + "risc0-zkvm-platform", "serde", - "sha2 0.10.7", + "sha2 0.10.6", "sov-first-read-last-write-cache", "sov-rollup-interface", "thiserror", + "zk-cycle-macros", ] [[package]] @@ -2210,9 +2233,9 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.26" +version = "2.0.28" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "45c3457aacde3c65315de5031ec191ce46604304d2446e803d71ade03308d970" +checksum = "04361975b3f5e348b2189d8dc55bc942f278b2d482a6a0365de5bdd62d351567" dependencies = [ "proc-macro2", "quote", @@ -2221,11 +2244,10 @@ dependencies = [ [[package]] name = "tempfile" -version = "3.6.0" +version = "3.7.1" 
source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "31c0432476357e58790aaa47a8efb0c5138f137343f3b5f23bd36a27e3b0a6d6" +checksum = "dc02fddf48964c42031a0b3fe0428320ecf3a73c401040fc0096f97794310651" dependencies = [ - "autocfg", "cfg-if", "fastrand", "redox_syscall 0.3.5", @@ -2241,7 +2263,7 @@ checksum = "3f0a7d05cf78524782337f8edd55cbc578d159a16ad4affe2135c92f7dbac7f0" dependencies = [ "bytes", "digest 0.10.7", - "ed25519 2.2.1", + "ed25519", "ed25519-consensus", "flex-error", "futures", @@ -2253,8 +2275,8 @@ dependencies = [ "serde_bytes", "serde_json", "serde_repr", - "sha2 0.10.7", - "signature 2.1.0", + "sha2 0.10.6", + "signature", "subtle", "subtle-encoding", "tendermint-proto", @@ -2282,30 +2304,31 @@ dependencies = [ [[package]] name = "thiserror" -version = "1.0.43" +version = "1.0.44" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a35fc5b8971143ca348fa6df4f024d4d55264f3468c71ad1c2f365b0a4d58c42" +checksum = "611040a08a0439f8248d1990b111c95baa9c704c805fa1f62104b39655fd7f90" dependencies = [ "thiserror-impl", ] [[package]] name = "thiserror-impl" -version = "1.0.43" +version = "1.0.44" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "463fe12d7993d3b327787537ce8dd4dfa058de32fc2b195ef3cde03dc4771e8f" +checksum = "090198534930841fab3a5d1bb637cde49e339654e606195f8d9c76eeb081dc96" dependencies = [ "proc-macro2", "quote", - "syn 2.0.26", + "syn 2.0.28", ] [[package]] name = "time" -version = "0.3.23" +version = "0.3.25" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "59e399c068f43a5d116fedaf73b203fa4f9c519f17e2b34f63221d3792f81446" +checksum = "b0fdd63d58b18d663fbdf70e049f00a22c8e42be082203be7f26589213cd75ea" dependencies = [ + "deranged", "serde", "time-core", "time-macros", @@ -2319,9 +2342,9 @@ checksum = "7300fbefb4dadc1af235a9cef3737cea692a9d97e1b9cbcd4ebdae6f8868e6fb" [[package]] name = "time-macros" -version = "0.2.10" +version = "0.2.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "96ba15a897f3c86766b757e5ac7221554c6750054d74d5b28844fce5fb36a6c4" +checksum = "eb71511c991639bb078fd5bf97757e03914361c48100d52878b8e52b46fb92cd" dependencies = [ "time-core", ] @@ -2343,18 +2366,17 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" [[package]] name = "tokio" -version = "1.29.1" +version = "1.30.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "532826ff75199d5833b9d2c5fe410f29235e25704ee5f0ef599fb51c21f4a4da" +checksum = "2d3ce25f50619af8b0aec2eb23deebe84249e19e2ddd393a6e16e3300a6dadfd" dependencies = [ - "autocfg", "backtrace", "bytes", "libc", "mio", "num_cpus", "pin-project-lite", - "socket2", + "socket2 0.5.3", "windows-sys", ] @@ -2417,7 +2439,7 @@ checksum = "5f4f31f56159e98206da9efd823404b79b6ef3143b4a7ab76e67b1751b25a4ab" dependencies = [ "proc-macro2", "quote", - "syn 2.0.26", + "syn 2.0.28", ] [[package]] @@ -2443,9 +2465,9 @@ checksum = "497961ef93d974e23eb6f433eb5fe1b7930b659f06d12dec6fc44a8f554c0bba" [[package]] name = "typetag" -version = "0.2.10" +version = "0.2.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7a66aafcfb982bf1f9a28755ac6bcbdcd4631ff516cb038fa61299201ebb4364" +checksum = "aec6850cc671cd0cfb3ab285465e48a3b927d9de155051c35797446b32f9169f" dependencies = [ "erased-serde", "inventory", @@ -2456,13 +2478,13 @@ dependencies = [ [[package]] name = "typetag-impl" -version = "0.2.10" +version = "0.2.12" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "d836cd032f71d90cbaa3c1f85ce84266af23659766d8c0b1c4c6524a0fb4c36f" +checksum = "30c49a6815b4f8379c36f06618bc1b80ca77aaf8a3fd4d8549dca6fdb016000f" dependencies = [ "proc-macro2", "quote", - "syn 2.0.26", + "syn 2.0.28", ] [[package]] @@ -2545,7 +2567,7 @@ dependencies = [ "once_cell", "proc-macro2", "quote", - "syn 2.0.26", + "syn 2.0.28", "wasm-bindgen-shared", ] @@ -2579,7 +2601,7 @@ checksum = "54681b18a46765f095758388f2d0cf16eb8d4169b639ab575a8f5693af210c7b" dependencies = [ "proc-macro2", "quote", - "syn 2.0.26", + "syn 2.0.28", "wasm-bindgen-backend", "wasm-bindgen-shared", ] @@ -2725,7 +2747,7 @@ checksum = "ce36e65b0d2999d2aafac989fb249189a141aee1f53c612c1f37d72631959f69" dependencies = [ "proc-macro2", "quote", - "syn 2.0.26", + "syn 2.0.28", ] [[package]] @@ -2748,6 +2770,25 @@ dependencies = [ "zstd", ] +[[package]] +name = "zk-cycle-macros" +version = "0.1.0" +dependencies = [ + "anyhow", + "borsh", + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "zk-cycle-utils" +version = "0.1.0" +dependencies = [ + "risc0-zkvm", + "risc0-zkvm-platform", +] + [[package]] name = "zstd" version = "0.11.2+zstd.1.5.2" diff --git a/examples/demo-prover/methods/guest/Cargo.toml b/examples/demo-prover/methods/guest/Cargo.toml index c67d73d1e..6ef430cb1 100644 --- a/examples/demo-prover/methods/guest/Cargo.toml +++ b/examples/demo-prover/methods/guest/Cargo.toml @@ -9,16 +9,17 @@ resolver = "2" [dependencies] anyhow = "1.0.68" risc0-zkvm = { version = "0.16", default-features = false, features = ["std"] } +risc0-zkvm-platform = "0.16" borsh = { version = "0.10.3", features = ["bytes"] } -jupiter = { path = "../../../../adapters/celestia", default-features = false } -demo-stf = { path = "../../../demo-stf", default-features = false } -sov-rollup-interface = { path = "../../../../rollup-interface" } -risc0-adapter = { path = "../../../../adapters/risc0", default-features = false } +celestia = { path = "../../../../adapters/celestia" } +demo-stf = { path = "../../../demo-stf" } +sov-rollup-interface = { path = "../../../../rollup-interface", default-features = false} +risc0-adapter = { path = "../../../../adapters/risc0" } const-rollup-config = { path = "../../../const-rollup-config" } - -# TODO: Just for test -tracing = "0.1.37" -log = "0.4.17" +sov-modules-stf-template = {path = "../../../../module-system/sov-modules-stf-template", optional=true} +sov-state = {path = "../../../../module-system/sov-state", default-features = false, optional=true} +sov-modules-api = {path = "../../../../module-system/sov-modules-api", default-features = false, optional=true} +sov-sequencer-registry = {path = "../../../../module-system/module-implementations/sov-sequencer-registry", default-features = false, optional=true} [build-dependencies] directories = "5.0" @@ -26,9 +27,13 @@ downloader = "0.2" serde = { version = "1.0", default-features = false, features = ["derive"] } serde_json = "1.0" sha2 = "0.10.6" +sha3 = "0.10.8" tempfile = "3.5" zip = "0.6" +[patch.crates-io] +sha2 = { git = "https://github.com/risc0/RustCrypto-hashes", tag = "sha2/v0.10.6-risc0" } + [profile.dev] opt-level = 3 @@ -41,3 +46,6 @@ lto = true [profile.release.build-override] opt-level = 3 + +[features] +bench=["celestia/bench", "sov-modules-stf-template/bench", "sov-state/bench", "sov-modules-api/bench", "sov-sequencer-registry/bench"] diff --git a/examples/demo-prover/methods/guest/src/bin/rollup.rs b/examples/demo-prover/methods/guest/src/bin/rollup.rs 
index 84bfd0902..0f1fe8278 100644 --- a/examples/demo-prover/methods/guest/src/bin/rollup.rs +++ b/examples/demo-prover/methods/guest/src/bin/rollup.rs @@ -2,16 +2,21 @@ #![no_main] -use const_rollup_config::ROLLUP_NAMESPACE_RAW; +use std::str::FromStr; + +use celestia::types::NamespaceId; +use celestia::verifier::address::CelestiaAddress; +use celestia::verifier::{CelestiaSpec, CelestiaVerifier}; +use celestia::{BlobWithSender, CelestiaHeader}; +use const_rollup_config::{ROLLUP_NAMESPACE_RAW, SEQUENCER_DA_ADDRESS}; use demo_stf::app::create_zk_app_template; use demo_stf::ArrayWitness; -use jupiter::types::NamespaceId; -use jupiter::verifier::{CelestiaSpec, CelestiaVerifier}; -use jupiter::{BlobWithSender, CelestiaHeader}; + use risc0_adapter::guest::Risc0Guest; use risc0_zkvm::guest::env; use sov_rollup_interface::crypto::NoOpHasher; use sov_rollup_interface::da::{DaSpec, DaVerifier}; +use sov_rollup_interface::services::da::SlotData; use sov_rollup_interface::stf::StateTransitionFunction; use sov_rollup_interface::zk::{StateTransition, ZkvmGuest}; @@ -31,6 +36,8 @@ pub fn main() { env::write(&"Start guest\n"); let guest = Risc0Guest; + #[cfg(feature = "bench")] + let start_cycles = env::get_cycle_count(); let prev_state_root_hash: [u8; 32] = guest.read_from_host(); env::write(&"Prev root hash read\n"); // Step 1: read tx list @@ -44,18 +51,18 @@ pub fn main() { env::write(&"blobs have been read\n"); // Step 2: Apply blobs - let mut app = create_zk_app_template::(prev_state_root_hash); + let mut app = create_zk_app_template::(prev_state_root_hash); let witness: ArrayWitness = guest.read_from_host(); env::write(&"Witness have been read\n"); env::write(&"Applying slot...\n"); - let result = app.apply_slot(witness, &mut blobs); + let result = app.apply_slot(witness, &header, &mut blobs); env::write(&"Slot has been applied\n"); // Step 3: Verify tx list - let verifier = CelestiaVerifier::new(jupiter::verifier::RollupParams { + let verifier = CelestiaVerifier::new(celestia::verifier::RollupParams { namespace: ROLLUP_NAMESPACE, }); @@ -64,11 +71,39 @@ pub fn main() { .expect("Transaction list must be correct"); env::write(&"Relevant txs verified\n"); + // TODO: https://github.com/Sovereign-Labs/sovereign-sdk/issues/647 + let rewarded_address = CelestiaAddress::from_str(SEQUENCER_DA_ADDRESS).unwrap(); let output = StateTransition { initial_state_root: prev_state_root_hash, final_state_root: result.state_root.0, validity_condition, + rewarded_address: rewarded_address.as_ref().to_vec(), + slot_hash: header.hash(), }; env::commit(&output); + env::write(&"new state root committed\n"); + + #[cfg(feature = "bench")] + let end_cycles = env::get_cycle_count(); + + #[cfg(feature = "bench")] + { + let tuple = ( + "Cycles per block".to_string(), + (end_cycles - start_cycles) as u64, + ); + let mut serialized = Vec::new(); + serialized.extend(tuple.0.as_bytes()); + serialized.push(0); + let size_bytes = tuple.1.to_ne_bytes(); + serialized.extend(&size_bytes); + + // calculate the syscall name. 
+ let cycle_string = String::from("cycle_metrics\0"); + let metrics_syscall_name = unsafe { + risc0_zkvm_platform::syscall::SyscallName::from_bytes_with_nul(cycle_string.as_ptr()) + }; + risc0_zkvm::guest::env::send_recv_slice::(metrics_syscall_name, &serialized); + } } diff --git a/examples/demo-rollup-avail/.gitignore b/examples/demo-rollup-avail/.gitignore index 0ab74a9f5..bbb9c77c2 100644 --- a/examples/demo-rollup-avail/.gitignore +++ b/examples/demo-rollup-avail/.gitignore @@ -1,4 +1,3 @@ /target /demo_data seed-phrase.json -da-config.json diff --git a/examples/demo-rollup-avail/Cargo.toml b/examples/demo-rollup-avail/Cargo.toml index 5eba5fbe4..d1de24625 100644 --- a/examples/demo-rollup-avail/Cargo.toml +++ b/examples/demo-rollup-avail/Cargo.toml @@ -1,49 +1,25 @@ [package] name = "sov-demo-rollup-avail" -version = "0.1.0" -edition = "2021" -resolver = "2" -authors = ["Sovereign Labs "] +version = { workspace = true } +edition = { workspace = true } +authors = { workspace = true } +license = { workspace = true } homepage = "sovereign.xyz" publish = false +resolver = "2" +default-run = "sov-demo-rollup-avail" # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] -anyhow = { workspace = true } -borsh = { workspace = true, features = ["bytes"] } -jsonrpsee = { workspace = true, features = ["http-client", "server"] } -serde = { workspace = true, features = ["derive"] } -serde_json = { workspace = true } -sha2 = { workspace = true } -tracing = { workspace = true } -hex = { workspace = true } -bytes = { workspace = true } -reqwest = "0.11" -futures = "0.3" - -# Crates which only this package depends on -tendermint = "0.32" -tokio = { version = "1", features = ["full"] } tracing-subscriber = "0.3.17" - +sov-demo-rollup = { path = "../demo-rollup" } presence = { path = "../../adapters/avail" } -demo-stf = { path = "../demo-stf", features = ["native"] } sov-rollup-interface = { path = "../../rollup-interface" } -sov-db = { path = "../../full-node/db/sov-db" } -sov-sequencer = { path = "../../full-node/sov-sequencer" } -sov-stf-runner = { path = "../../full-node/sov-stf-runner" } -risc0-adapter = { path = "../../adapters/risc0" } -sov-modules-stf-template = { path = "../../module-system/sov-modules-stf-template" } - -sov-bank = { path = "../../module-system/module-implementations/sov-bank", default-features = false } -sov-election = { path = "../../module-system/module-implementations/examples/sov-election", default-features = false } -sov-value-setter = { path = "../../module-system/module-implementations/examples/sov-value-setter", default-features = false } -sov-modules-api = { path = "../../module-system/sov-modules-api", features = ["native"] } -sov-state = { path = "../../module-system/sov-state", features = ["native"] } -const-rollup-config = { path = "../const-rollup-config" } +demo-stf = { path = "../demo-stf", features = ["native"] } +anyhow = { workspace = true } +tokio = { workspace = true } -[dev-dependencies] -tempfile = "3.5.0" -proptest = { workspace = true } -sov-rollup-interface = { path = "../../rollup-interface", features = ["fuzzing"] } +[[bin]] +name = "sov-cli" +path = "src/sov-cli/main.rs" diff --git a/examples/demo-rollup-avail/Dockerfile b/examples/demo-rollup-avail/Dockerfile index 794acdec9..ce046f61f 100644 --- a/examples/demo-rollup-avail/Dockerfile +++ b/examples/demo-rollup-avail/Dockerfile @@ -1,30 +1,36 @@ FROM ubuntu:jammy -RUN apt-get update && \ - apt-get install -y curl libssl3 ca-certificates && 
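For context on the bench-only metrics block in the guest above: the payload sent over the custom cycle_metrics syscall is the label's UTF-8 bytes, a NUL separator, and the cycle count as eight native-endian bytes. A minimal sketch of how a receiving side could decode such a payload; the function is illustrative, and the actual host-side handling is not shown in this diff:

/// Decode a guest metrics payload shaped as `<label bytes> 0x00 <u64 in native-endian>`.
fn decode_cycle_metric(payload: &[u8]) -> Option<(String, u64)> {
    // The label is terminated by the first NUL byte.
    let nul = payload.iter().position(|&b| b == 0)?;
    let label = String::from_utf8(payload[..nul].to_vec()).ok()?;
    // The next eight bytes are the cycle count written with `to_ne_bytes`.
    let count: [u8; 8] = payload.get(nul + 1..nul + 9)?.try_into().ok()?;
    Some((label, u64::from_ne_bytes(count)))
}

fn main() {
    // Build a payload the same way the guest does for "Cycles per block".
    let mut payload = b"Cycles per block".to_vec();
    payload.push(0);
    payload.extend_from_slice(&123_456u64.to_ne_bytes());
    assert_eq!(
        decode_cycle_metric(&payload),
        Some(("Cycles per block".to_string(), 123_456))
    );
}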
\ - rm -rf /var/lib/apt/lists/* +RUN apt-get update --fix-missing || \ + (sleep 5 && apt-get update --fix-missing) || \ + (sleep 10 && apt-get update --fix-missing) && \ + apt-get install -y curl ca-certificates || \ + (sleep 5 && apt-get install -y curl ca-certificates) || \ + (sleep 10 && apt-get install -y curl ca-certificates) && \ + rm -rf /var/lib/apt/lists/* RUN mkdir -p /da/ && \ groupadd -r avail && \ useradd --no-log-init -r -g avail avail && \ chown -R avail:avail /da +COPY entrypoint.sh /da/entrypoint.sh +RUN chmod +x /da/entrypoint.sh + USER avail:avail WORKDIR /da ARG NODE_CLIENT_URL ARG APP_ID -RUN curl -L https://availproject.github.io/configs/kate/avail-light-1.4.3/config.yaml --output config.yaml && \ - curl -L https://github.com/availproject/avail-light/releases/download/v1.4.4/avail-light-linux-aarch64.tar.gz --output avail-light-linux-aarch64.tar.gz && \ - tar -xf avail-light-linux-aarch64.tar.gz && \ - echo "app_id = ${APP_ID}" >> config.yaml && \ - sed -i "s#full_node_ws = .*#full_node_ws = ['$NODE_CLIENT_URL']#" config.yaml +RUN curl -L https://raw.githubusercontent.com/availproject/availproject.github.io/c804aa520b66838209bb1bafbf7ffefdb249a2ac/static/kate/avail-light-1.4.3/config.yaml --output config.yaml && \ + curl -L https://github.com/availproject/avail-light/releases/download/v1.6.0-rc1/avail-light-linux-amd64.tar.gz --output avail-light-linux-amd64.tar.gz && \ + tar -xf avail-light-linux-amd64.tar.gz && \ + echo "app_id = ${APP_ID}" >> config.yaml && \ + sed -i "s#full_node_ws = .*#full_node_ws = ['$NODE_CLIENT_URL']#" config.yaml && \ + sed -i "s#http_server_host = .*#http_server_host = '0.0.0.0'#" config.yaml -ENV \ - APP_ID=0 +ENV APP_ID=0 -# Opencontainers annotations LABEL \ org.opencontainers.image.authors="The Avail Project Team" \ org.opencontainers.image.url="https://www.availproject.org/" \ @@ -36,4 +42,4 @@ LABEL \ org.opencontainers.image.title="Avail Light Client" \ org.opencontainers.image.description="Data Availability Docker Node" -CMD ls; cat config.yaml; ./avail-light-linux-aarch64 +CMD ["/bin/sh", "entrypoint.sh"] diff --git a/examples/demo-rollup-avail/Makefile b/examples/demo-rollup-avail/Makefile index bf9c58ccd..9c43ac8ed 100644 --- a/examples/demo-rollup-avail/Makefile +++ b/examples/demo-rollup-avail/Makefile @@ -4,6 +4,7 @@ CONTAINER_NAME=avail-light IMAGE_NAME=avail-light TEST_PRIVATE_KEY_PATH=../test-data/keys/minter_private_key.json SOV_CLI_REL_PATH=../../target/debug/sov-cli +# TODO: create-new-app-key should update APP_ID. APP_ID=7 ifndef SERIALIZED_BLOB_PATH @@ -19,20 +20,12 @@ SEED_PHRASE := $(shell cat seed-phrase.json | grep -o '"secretPhrase": *"[^"]*"' endif key-exists: - @test -s seed-phrase.json || make create-new-key && make update-da-address - @make update-da-address + @test -s seed-phrase.json || echo "Error: Call make create-new-key and replace const SEQUENCER_AVAIL_DA_ADDRESS in const-rollup-config with publicKey from the created seed-phrase.json" create-new-key: check-container-running @echo "Creating new key..." 
@docker run -it --pull=always docker.io/parity/subkey:latest generate --output-type json > seed-phrase.json -update-da-address: -ifeq ($(shell uname -s),Darwin) - @sed -i '' 's/^\(sequencer_da_address = \)"[^"]*"/\1"$(shell cat seed-phrase.json | grep -o '"publicKey": *"[^"]*"' | cut -d '"' -f 4 | cut -c3-)"/' rollup_config.toml -else - @sed -i 's/^\(sequencer_da_address = \)"[^"]*"/\1"$(shell cat seed-phrase.json | grep -o '"publicKey": *"[^"]*"' | cut -d '"' -f 4 | cut -c3-)"/' rollup_config.toml -endif - create-new-app-key: @cd ../avail-helper/ && cargo run --bin create_app_id -- --ws_uri wss://kate.avail.tools:443/ws --seed "$(SEED_PHRASE)" @@ -79,7 +72,7 @@ clean: check-docker echo 2 $(MAKE) clean-rollup-db -submit-txn : check-container-running build-sov-cli +submit-txn: check-container-running build-sov-cli ifndef SERIALIZED_BLOB_PATH $(error SERIALIZED_BLOB_PATH is not defined) else ifeq ($(wildcard $(SERIALIZED_BLOB_PATH)),) @@ -89,16 +82,22 @@ else endif build-sov-cli: - cd ../demo-stf && cargo build --bin sov-cli + cargo build --bin sov-cli test-generate-create-token-tx: check-container-running build-sov-cli - $(SOV_CLI_REL_PATH) generate-transaction-from-json ../test-data/keys/token_deployer_private_key.json Bank ../test-data/requests/create_token.json 0 + $(SOV_CLI_REL_PATH) transactions import from-file bank --path ../test-data/requests/create_token.json + +set-rpc-url: build-sov-cli + $(SOV_CLI_REL_PATH) rpc set-url http://localhost:12345 + +import-keys: build-sov-cli + $(SOV_CLI_REL_PATH) keys import --nickname DANGER__DO_NOT_USE_WITH_REAL_MONEY --path ../test-data/keys/minter_private_key.json -test-build-blob-from-create-token: test-generate-create-token-tx - $(SOV_CLI_REL_PATH) make-batch ../test-data/requests/create_token.dat > ../test-data/requests/test_blob.dat +test-create-token: set-rpc-url test-generate-create-token-tx import-keys + $(SOV_CLI_REL_PATH) rpc submit-batch -test-create-token: test-build-blob-from-create-token - $(MAKE) submit-txn SERIALIZED_BLOB_PATH=../test-data/requests/test_blob.dat +remove-insecure-keys: build-sov-cli + $(SOV_CLI_REL_PATH) keys remove by-address sov15vspj48hpttzyvxu8kzq5klhvaczcpyxn6z6k0hwpwtzs4a6wkvqwr57gc clean-rollup-db: $(eval path := ./$(shell awk -F'=' '/^path/ {print $$2}' rollup_config.toml | tr -d '[:space:]"\n')) diff --git a/examples/demo-rollup-avail/entrypoint.sh b/examples/demo-rollup-avail/entrypoint.sh new file mode 100644 index 000000000..4f10df0ce --- /dev/null +++ b/examples/demo-rollup-avail/entrypoint.sh @@ -0,0 +1,4 @@ +#!/bin/sh +ls +cat config.yaml +exec ./avail-light-linux-amd64 diff --git a/examples/demo-rollup-avail/rollup_config.toml b/examples/demo-rollup-avail/rollup_config.toml index 7b9a0ac29..cb90ec930 100644 --- a/examples/demo-rollup-avail/rollup_config.toml +++ b/examples/demo-rollup-avail/rollup_config.toml @@ -1,19 +1,18 @@ -# We define the rollup's genesis to occur at Avail block number `start_height`. The rollup will ignore -# any Avail blocks before this height -sequencer_da_address = "b4dc7fc57630d2a7be7f358cbefc1e52bd6d0f250d19647cf264ecf2d8764d7b" - -[rollup_config] -start_height = 2 - [da] light_client_url = "http://127.0.0.1:8000" node_client_url = "wss://kate.avail.tools:443/ws" +seed = "secret_seed" -[rollup_config.runner.storage] +[storage] # The path to the rollup's data directory. Paths that do not begin with `/` are interpreted as relative paths. path = "demo_data" -[rollup_config.rpc_config] +# We define the rollup's genesis to occur at block number `start_height`. 
The rollup will ignore +# any blocks before this height +[runner] +start_height = 1 + +[runner.rpc_config] # the host and port to bind the rpc server for bind_host = "127.0.0.1" bind_port = 12345 diff --git a/examples/demo-rollup-avail/src/config.rs b/examples/demo-rollup-avail/src/config.rs deleted file mode 100644 index 5e691e800..000000000 --- a/examples/demo-rollup-avail/src/config.rs +++ /dev/null @@ -1,76 +0,0 @@ -use serde::Deserialize; -use sov_stf_runner::RollupConfig; - -//TODO - replace with runtime config. -#[derive(Debug, Clone, PartialEq, Deserialize)] -pub struct DaServiceConfig { - pub light_client_url: String, - pub node_client_url: String, -} - -#[derive(Debug, Clone, PartialEq, Deserialize)] -pub struct Config { - pub rollup_config: RollupConfig, - pub sequencer_da_address: String, - pub da: DaServiceConfig, -} - -#[cfg(test)] -mod tests { - use std::io::Write; - use std::path::PathBuf; - - use sov_stf_runner::{from_toml_path, RpcConfig, StorageConfig, Config as RunnerConfig}; - use tempfile::NamedTempFile; - - use super::*; - - fn create_config_from(content: &str) -> NamedTempFile { - let mut config_file = NamedTempFile::new().unwrap(); - config_file.write_all(content.as_bytes()).unwrap(); - config_file - } - - #[test] - fn test_correct_config() { - let config = r#" - sequencer_da_address = "b4dc7fc57630d2a7be7f358cbefc1e52bd6d0f250d19647cf264ecf2d8764d7b" - [rollup_config] - start_height = 2 - [da] - light_client_url = "http://127.0.0.1:8000" - node_client_url = "wss://kate.avail.tools:443/ws" - [rollup_config.runner.storage] - path = "demo_data" - [rollup_config.rpc_config] - bind_host = "127.0.0.1" - bind_port = 12345 - "#; - - let config_file = create_config_from(config); - - let config: Config = from_toml_path(config_file.path()).unwrap(); - let expected = Config { - sequencer_da_address: String::from( - "b4dc7fc57630d2a7be7f358cbefc1e52bd6d0f250d19647cf264ecf2d8764d7b", - ), - da: DaServiceConfig { - light_client_url: "http://127.0.0.1:8000".to_string(), - node_client_url: "wss://kate.avail.tools:443/ws".into(), - }, - rollup_config: RollupConfig { - start_height: 2, - runner: RunnerConfig { - storage: StorageConfig { - path: PathBuf::from("demo_data"), - }, - }, - rpc_config: RpcConfig { - bind_host: "127.0.0.1".to_string(), - bind_port: 12345, - }, - }, - }; - assert_eq!(config, expected); - } -} diff --git a/examples/demo-rollup-avail/src/ledger_rpc.rs b/examples/demo-rollup-avail/src/ledger_rpc.rs deleted file mode 100644 index 22e290db1..000000000 --- a/examples/demo-rollup-avail/src/ledger_rpc.rs +++ /dev/null @@ -1,93 +0,0 @@ -use jsonrpsee::RpcModule; -use serde::de::DeserializeOwned; -use serde::Serialize; -use sov_db::ledger_db::LedgerDB; -use sov_modules_api::utils::to_jsonrpsee_error_object; -use sov_rollup_interface::rpc::{ - BatchIdentifier, EventIdentifier, LedgerRpcProvider, SlotIdentifier, TxIdentifier, -}; - -const LEDGER_RPC_ERROR: &str = "LEDGER_RPC_ERROR"; - -use self::query_args::{extract_query_args, QueryArgs}; - -/// Registers the following RPC methods -/// - `ledger_head` -/// Example Query: `curl -X POST -H "Content-Type: application/json" -d '{"jsonrpc":"2.0","method":"ledger_head","params":[],"id":1}' http://127.0.0.1:12345` -/// - ledger_getSlots -/// Example Query: `curl -X POST -H "Content-Type: application/json" -d '{"jsonrpc":"2.0","method":"ledger_getSlots","params":[[1, 2], "Compact"],"id":1}' http://127.0.0.1:12345` -/// - ledger_getBatches -/// Example Query: `curl -X POST -H "Content-Type: application/json" -d 
'{"jsonrpc":"2.0","method":"ledger_getBatches","params":[[1, 2], "Standard"],"id":1}' http://127.0.0.1:12345` -/// - ledger_getTransactions -/// Example Query: `curl -X POST -H "Content-Type: application/json" -d '{"jsonrpc":"2.0","method":"ledger_getBatches","params":[[1, 2], "Full"],"id":1}' http://127.0.0.1:12345` -/// - ledger_getEvents -/// Example Query: `curl -X POST -H "Content-Type: application/json" -d '{"jsonrpc":"2.0","method":"ledger_getBatches","params":[1, 2],"id":1}' http://127.0.0.1:12345` -fn register_ledger_rpc_methods< - B: Serialize + DeserializeOwned + Clone + 'static, - T: Serialize + DeserializeOwned + Clone + 'static, ->( - rpc: &mut RpcModule, -) -> Result<(), jsonrpsee::core::Error> { - rpc.register_method("ledger_getHead", move |_, db| { - db.get_head::() - .map_err(|e| to_jsonrpsee_error_object(e, LEDGER_RPC_ERROR)) - })?; - - rpc.register_method("ledger_getSlots", move |params, db| { - let args: QueryArgs = extract_query_args(params)?; - db.get_slots::(&args.0, args.1) - .map_err(|e| to_jsonrpsee_error_object(e, LEDGER_RPC_ERROR)) - })?; - - rpc.register_method("ledger_getBatches", move |params, db| { - let args: QueryArgs = extract_query_args(params)?; - db.get_batches::(&args.0, args.1) - .map_err(|e| to_jsonrpsee_error_object(e, LEDGER_RPC_ERROR)) - })?; - - rpc.register_method("ledger_getTransactions", move |params, db| { - let args: QueryArgs = extract_query_args(params)?; - db.get_transactions::(&args.0, args.1) - .map_err(|e| to_jsonrpsee_error_object(e, LEDGER_RPC_ERROR)) - })?; - - rpc.register_method("ledger_getEvents", move |params, db| { - let ids: Vec = params.parse()?; - db.get_events(&ids) - .map_err(|e| to_jsonrpsee_error_object(e, LEDGER_RPC_ERROR)) - })?; - - Ok(()) -} - -pub fn get_ledger_rpc< - B: Serialize + DeserializeOwned + Clone + 'static, - T: Serialize + DeserializeOwned + Clone + 'static, ->( - ledger_db: LedgerDB, -) -> RpcModule { - let mut rpc = RpcModule::new(ledger_db); - register_ledger_rpc_methods::(&mut rpc).expect("Failed to register ledger RPC methods"); - rpc -} - -mod query_args { - use jsonrpsee::types::ErrorObjectOwned; - use serde::de::DeserializeOwned; - use sov_rollup_interface::rpc::QueryMode; - - #[derive(serde::Deserialize)] - pub struct QueryArgs(pub Vec, #[serde(default)] pub QueryMode); - - /// Extract the args from an RPC query, being liberal in what is accepted. 
- /// To query for a list of items, users can either pass a list of ids, or tuple containing a list of ids and a query mode - pub fn extract_query_args( - params: jsonrpsee::types::Params, - ) -> Result, ErrorObjectOwned> { - if let Ok(args) = params.parse() { - return Ok(args); - } - let ids: Vec = params.parse()?; - Ok(QueryArgs(ids, Default::default())) - } -} diff --git a/examples/demo-rollup-avail/src/main.rs b/examples/demo-rollup-avail/src/main.rs index 35868ff2e..224980b7d 100644 --- a/examples/demo-rollup-avail/src/main.rs +++ b/examples/demo-rollup-avail/src/main.rs @@ -1,142 +1,26 @@ -mod config; use std::env; -use std::sync::Arc; +use std::str::FromStr; -use anyhow::Context; -use demo_stf::app::{App, DefaultContext, DefaultPrivateKey}; -use demo_stf::genesis_config::create_demo_genesis_config; -use demo_stf::runtime::{get_rpc_methods, GenesisConfig}; -use presence::service::DaProvider as AvailDaProvider; -use presence::spec::transaction::AvailBlobTransaction; -use risc0_adapter::host::Risc0Verifier; -use sov_db::ledger_db::LedgerDB; -use sov_modules_stf_template::{SequencerOutcome, TxEffect}; -use sov_rollup_interface::services::da::DaService; -use sov_sequencer::get_sequencer_rpc; -use sov_state::Storage; -use sov_stf_runner::{from_toml_path, get_ledger_rpc, StateTransitionRunner}; -use tracing::{debug, Level}; +use sov_demo_rollup::new_rollup_with_avail_da; +use tracing_subscriber::prelude::*; +use tracing_subscriber::{fmt, EnvFilter}; -use crate::config::Config; - -#[cfg(test)] -mod test_rpc; - -pub fn initialize_ledger(path: impl AsRef) -> LedgerDB { - LedgerDB::with_path(path).expect("Ledger DB failed to open") -} - -// TODO: Remove this when sov-cli is in its own crate. -#[derive(Debug, serde::Serialize, serde::Deserialize)] -struct HexKey { - hex_priv_key: String, - address: String, -} - -pub fn get_genesis_config(sequencer_da_address: &str) -> GenesisConfig { - let hex_key: HexKey = serde_json::from_slice(include_bytes!( - "../../test-data/keys/token_deployer_private_key.json" - )) - .expect("Broken key data file"); - let sequencer_private_key = DefaultPrivateKey::from_hex(&hex_key.hex_priv_key).unwrap(); - assert_eq!( - sequencer_private_key.default_address().to_string(), - hex_key.address, - "Inconsistent key data", - ); - create_demo_genesis_config( - 100000000, - sequencer_private_key.default_address(), - hex::decode(sequencer_da_address).unwrap(), - &sequencer_private_key, - &sequencer_private_key, - ) -} - -//TODO: Add validity checker? +/// Main demo runner. Initialize a DA chain, and starts a demo-rollup using the config provided +/// (or a default config if not provided). Then start checking the blocks sent to the DA layer in +/// the main event loop. 
#[tokio::main] async fn main() -> Result<(), anyhow::Error> { + // Initializing logging + tracing_subscriber::registry() + .with(fmt::layer()) + .with(EnvFilter::from_str("info,sov_sequencer=warn").unwrap()) + .init(); + let rollup_config_path = env::args() .nth(1) .unwrap_or_else(|| "rollup_config.toml".to_string()); - debug!("Starting demo rollup with config {}", rollup_config_path); - let config: Config = - from_toml_path(&rollup_config_path).context("Failed to read rollup configuration")?; - - // Initializing logging - let subscriber = tracing_subscriber::fmt() - .with_max_level(Level::INFO) - .finish(); - tracing::subscriber::set_global_default(subscriber) - .map_err(|_err| eprintln!("Unable to set global default subscriber")) - .expect("Cannot fail to set subscriber"); - - // Initialize the ledger database, which stores blocks, transactions, events, etc. - let ledger_db = initialize_ledger(&config.rollup_config.runner.storage.path); - - let node_client = presence::build_client(config.da.node_client_url.to_string(), false) - .await - .unwrap(); - let light_client_url = config.da.light_client_url.to_string(); - // Initialize the Avail service using the DaService interface - let da_service = AvailDaProvider { - node_client, - light_client_url, - }; - - let mut app = App::::new( - config.rollup_config.runner.storage.clone(), - ); - - let storage = app.get_storage(); - let mut methods = get_rpc_methods::(storage); - - // register rpc methods - { - register_ledger(ledger_db.clone(), &mut methods)?; - register_sequencer(da_service.clone(), &mut app, &mut methods)?; - } - - let storage = app.get_storage(); - let genesis_config = get_genesis_config(&config.sequencer_da_address); - - let mut runner = StateTransitionRunner::new( - config.rollup_config, - da_service, - ledger_db, - app.stf, - storage.is_empty(), - genesis_config, - )?; - - runner.start_rpc_server(methods).await; - runner.run().await?; - - Ok(()) -} - -fn register_sequencer( - da_service: DA, - demo_runner: &mut App, - methods: &mut jsonrpsee::RpcModule<()>, -) -> Result<(), anyhow::Error> -where - DA: DaService + Send + Sync + 'static, -{ - let batch_builder = demo_runner.batch_builder.take().unwrap(); - let sequencer_rpc = get_sequencer_rpc(batch_builder, Arc::new(da_service)); - methods - .merge(sequencer_rpc) - .context("Failed to merge Txs RPC modules") -} - -fn register_ledger( - ledger_db: LedgerDB, - methods: &mut jsonrpsee::RpcModule<()>, -) -> Result<(), anyhow::Error> { - let ledger_rpc = get_ledger_rpc::(ledger_db); - methods - .merge(ledger_rpc) - .context("Failed to merge ledger RPC modules") + + let rollup = new_rollup_with_avail_da(&rollup_config_path).await?; + rollup.run().await } diff --git a/examples/demo-stf/src/sov-cli/README.md b/examples/demo-rollup-avail/src/sov-cli/README.md similarity index 99% rename from examples/demo-stf/src/sov-cli/README.md rename to examples/demo-rollup-avail/src/sov-cli/README.md index 24cd0030f..75dcf0a4e 100644 --- a/examples/demo-stf/src/sov-cli/README.md +++ b/examples/demo-rollup-avail/src/sov-cli/README.md @@ -107,6 +107,6 @@ Options: demo-stf % cargo run --bin sov-cli generate-transaction-from-json my_private_key.json Bank src/sov-cli/test_data/create_token.json 1 ``` -- By default the file is formatted in `hex` and contains a blob ready for submission to celestia - the blob only contains a single transactions for now +- By default the file is formatted in `hex` and contains a blob ready for submission to celestia - the blob only contains a single transaction for now - 
Other formats include `borsh` - In order to know what the token is the `derive-token-address` command from the `utils` subcommand can be used diff --git a/examples/demo-rollup-avail/src/sov-cli/main.rs b/examples/demo-rollup-avail/src/sov-cli/main.rs new file mode 100644 index 000000000..ddd7a2d40 --- /dev/null +++ b/examples/demo-rollup-avail/src/sov-cli/main.rs @@ -0,0 +1,7 @@ +#[tokio::main] +async fn main() -> Result<(), anyhow::Error> { + demo_stf::cli::run::< + ::Spec, + >() + .await +} diff --git a/examples/demo-rollup-avail/src/test_rpc.rs b/examples/demo-rollup-avail/src/test_rpc.rs deleted file mode 100644 index 4180d6fab..000000000 --- a/examples/demo-rollup-avail/src/test_rpc.rs +++ /dev/null @@ -1,579 +0,0 @@ -use std::collections::HashMap; -use std::net::SocketAddr; - -use proptest::prelude::any_with; -use proptest::strategy::Strategy; -use proptest::{prop_compose, proptest}; -use reqwest::header::CONTENT_TYPE; -use serde_json::json; -use sov_db::ledger_db::{LedgerDB, SlotCommit}; -#[cfg(test)] -use sov_rollup_interface::mocks::{TestBlock, TestBlockHeader, TestHash}; -use sov_rollup_interface::services::da::SlotData; -use sov_rollup_interface::stf::fuzzing::BatchReceiptStrategyArgs; -use sov_rollup_interface::stf::{BatchReceipt, Event, TransactionReceipt}; -#[cfg(test)] -use sov_stf_runner::get_ledger_rpc; -use sov_stf_runner::RpcConfig; -use tendermint::crypto::Sha256; -use tokio::sync::oneshot; - -struct TestExpect { - payload: serde_json::Value, - expected: serde_json::Value, -} - -async fn queries_test_runner(test_queries: Vec, rpc_config: RpcConfig) { - let (addr, port) = (rpc_config.bind_host, rpc_config.bind_port); - let client = reqwest::Client::new(); - let url_str = format!("http://{addr}:{port}"); - - for query in test_queries { - let res = client - .post(url_str.clone()) - .header(CONTENT_TYPE, "application/json") - .body(query.payload.to_string()) - .send() - .await - .unwrap(); - - assert_eq!(res.status().as_u16(), 200); - - let response_body = res.text().await.unwrap(); - assert_eq!( - serde_json::from_str::(&response_body).unwrap(), - query.expected, - ); - } -} - -fn populate_ledger(ledger_db: &mut LedgerDB, slots: Vec>) { - for slot in slots { - ledger_db.commit_slot(slot).unwrap(); - } -} - -fn test_helper(test_queries: Vec, slots: Vec>) { - let rt = tokio::runtime::Builder::new_multi_thread() - .enable_io() - .enable_time() - .build() - .unwrap(); - - rt.block_on(async { - let (tx_start, rx_start) = oneshot::channel(); - let (tx_end, rx_end) = oneshot::channel(); - - let address = SocketAddr::new("127.0.0.1".parse().unwrap(), 0); - - // Initialize the ledger database, which stores blocks, transactions, events, etc. 
- let tmpdir = tempfile::tempdir().unwrap(); - let mut ledger_db = LedgerDB::with_path(tmpdir.path()).unwrap(); - - populate_ledger(&mut ledger_db, slots); - - let ledger_rpc_module = get_ledger_rpc::(ledger_db.clone()); - - rt.spawn(async move { - let server = jsonrpsee::server::ServerBuilder::default() - .build([address].as_ref()) - .await - .unwrap(); - let actual_address = server.local_addr().unwrap(); - let _server_handle = server.start(ledger_rpc_module).unwrap(); - tx_start.send(actual_address.port()).unwrap(); - rx_end.await.unwrap(); - }); - - let bind_port = rx_start.await.unwrap(); - let rpc_config = RpcConfig { - bind_host: "127.0.0.1".to_string(), - bind_port, - }; - - queries_test_runner(test_queries, rpc_config).await; - - tx_end.send("drop server").unwrap(); - }); -} - -fn batch2_tx_receipts() -> Vec> { - (0..260u64) - .map(|i| TransactionReceipt:: { - tx_hash: ::sha2::Sha256::digest(i.to_string()), - body_to_save: Some(b"tx body".to_vec()), - events: vec![], - receipt: 0, - }) - .collect() -} - -fn regular_test_helper(payload: serde_json::Value, expected: &serde_json::Value) { - let mut slots: Vec> = vec![SlotCommit::new(TestBlock { - curr_hash: sha2::Sha256::digest(b"slot_data"), - header: TestBlockHeader { - prev_hash: TestHash(sha2::Sha256::digest(b"prev_header")), - }, - height: 0, - })]; - - let batches = vec![ - BatchReceipt { - batch_hash: ::sha2::Sha256::digest(b"batch_receipt"), - tx_receipts: vec![ - TransactionReceipt:: { - tx_hash: ::sha2::Sha256::digest(b"tx1"), - body_to_save: Some(b"tx1 body".to_vec()), - events: vec![], - receipt: 0, - }, - TransactionReceipt:: { - tx_hash: ::sha2::Sha256::digest(b"tx2"), - body_to_save: Some(b"tx2 body".to_vec()), - events: vec![ - Event::new("event1_key", "event1_value"), - Event::new("event2_key", "event2_value"), - ], - receipt: 1, - }, - ], - inner: 0, - }, - BatchReceipt { - batch_hash: ::sha2::Sha256::digest(b"batch_receipt2"), - tx_receipts: batch2_tx_receipts(), - inner: 1, - }, - ]; - - for batch in batches { - slots.get_mut(0).unwrap().add_batch(batch) - } - - test_helper( - vec![TestExpect { - payload, - expected: expected.clone(), - }], - slots, - ) -} - -/// Concisely generate a [JSON-RPC 2.0](https://www.jsonrpc.org/specification) -/// request [`String`]. You must provide the method name and the parameters of -/// the request, using [`serde_json::json!`] syntax. -/// -/// ``` -/// let req: String = jsonrpc_req!("method", ["param1", "param2"]); -/// ``` -macro_rules! jsonrpc_req { - ($method:expr, $params:tt) => { - ::serde_json::json!({ - "jsonrpc": "2.0", - "method": $method, - "params": $params, - "id": 1 - }) - }; -} - -/// A counterpart to [`jsonrpc_req!`] which generates successful responses. -macro_rules! 
jsonrpc_result { - ($result:tt) => {{ - ::serde_json::json!({ - "jsonrpc": "2.0", - "result": $result, - "id": 1 - }) - }}; -} - -// These tests reproduce the README workflow for the ledger_rpc, ie: -// - It creates and populate a simple ledger with a few transactions -// - It initializes the rpc server -// - It successively calls the different rpc methods registered and tests the answer -#[test] -fn test_get_head() { - let payload = jsonrpc_req!("ledger_getHead", []); - let expected = jsonrpc_result!({"number":1,"hash":"0xd1231a38586e68d0405dc55ae6775e219f29fff1f7e0c6410d0ac069201e550b","batch_range":{"start":1,"end":3}}); - - regular_test_helper(payload, &expected); -} - -#[test] -fn test_get_transactions_offset_first_batch() { - // Tests for different types of argument - let payload = jsonrpc_req!("ledger_getTransactions", [[{"batch_id": 1, "offset": 0}]]); - let expected = jsonrpc_result!([{"hash":"0x709b55bd3da0f5a838125bd0ee20c5bfdd7caba173912d4281cae816b79a201b","event_range":{"start":1,"end":1},"body":[116,120,49,32,98,111,100,121],"custom_receipt":0}]); - regular_test_helper(payload, &expected); - - // Tests for flattened args - let payload = jsonrpc_req!("ledger_getTransactions", [1]); - regular_test_helper(payload, &expected); - - let payload = jsonrpc_req!("ledger_getTransactions", [[1]]); - regular_test_helper(payload, &expected); - - let payload = jsonrpc_req!("ledger_getTransactions", [[1], "Standard"]); - regular_test_helper(payload, &expected); - - let payload = jsonrpc_req!("ledger_getTransactions", [[1], "Compact"]); - regular_test_helper(payload, &expected); - - let payload = jsonrpc_req!("ledger_getTransactions", [[1], "Full"]); - regular_test_helper(payload, &expected); - - let payload = jsonrpc_req!("ledger_getTransactions", [[{ "batch_id": 1, "offset": 1}]]); - let expected = jsonrpc_result!([{"hash":"0x27ca64c092a959c7edc525ed45e845b1de6a7590d173fd2fad9133c8a779a1e3","event_range":{"start":1,"end":3},"body":[116,120,50,32,98,111,100,121],"custom_receipt":1}]); - regular_test_helper(payload, &expected); -} - -#[test] -fn test_get_batches() { - let payload = jsonrpc_req!("ledger_getBatches", [[2], "Standard"]); - let expected = jsonrpc_result!([{ - "hash":"0xf85fe0cb36fdaeca571c896ed476b49bb3c8eff00d935293a8967e1e9a62071e", - "tx_range":{"start":3,"end":263}, - "txs": batch2_tx_receipts().into_iter().map(|tx_receipt| format!("0x{}", hex::encode(tx_receipt.tx_hash) )).collect::>(), - "custom_receipt":1 - }]); - regular_test_helper(payload, &expected); - - let payload = jsonrpc_req!("ledger_getBatches", [[2]]); - regular_test_helper(payload, &expected); - - let payload = jsonrpc_req!("ledger_getBatches", [2]); - regular_test_helper(payload, &expected); - - let payload = jsonrpc_req!("ledger_getBatches", [[1], "Compact"]); - let expected = jsonrpc_result!([{"hash":"0xb5515a80204963f7db40e98af11aedb49a394b1c7e3d8b5b7a33346b8627444f","tx_range":{"start":1,"end":3},"custom_receipt":0}]); - regular_test_helper(payload, &expected); - - let payload = jsonrpc_req!("ledger_getBatches", [[1], "Full"]); - let expected = 
jsonrpc_result!([{"hash":"0xb5515a80204963f7db40e98af11aedb49a394b1c7e3d8b5b7a33346b8627444f","tx_range":{"start":1,"end":3},"txs":[{"hash":"0x709b55bd3da0f5a838125bd0ee20c5bfdd7caba173912d4281cae816b79a201b","event_range":{"start":1,"end":1},"body":[116,120,49,32,98,111,100,121],"custom_receipt":0},{"hash":"0x27ca64c092a959c7edc525ed45e845b1de6a7590d173fd2fad9133c8a779a1e3","event_range":{"start":1,"end":3},"body":[116,120,50,32,98,111,100,121],"custom_receipt":1}],"custom_receipt":0}]); - regular_test_helper(payload, &expected); - - let payload = jsonrpc_req!("ledger_getBatches", [[0], "Compact"]); - let expected = jsonrpc_result!([null]); - regular_test_helper(payload, &expected); -} - -#[test] -fn test_get_events() { - let payload = jsonrpc_req!("ledger_getEvents", [1]); - let expected = jsonrpc_result!([{ - "key":[101,118,101,110,116,49,95,107,101,121], - "value":[101,118,101,110,116,49,95,118,97,108,117,101] - }]); - regular_test_helper(payload, &expected); - - let payload = jsonrpc_req!("ledger_getEvents", [2]); - let expected = jsonrpc_result!([{ - "key":[101,118,101,110,116,50,95,107,101,121], - "value":[101,118,101,110,116,50,95,118,97,108,117,101] - }]); - regular_test_helper(payload, &expected); - - let payload = jsonrpc_req!("ledger_getEvents", [3]); - let expected = jsonrpc_result!([null]); - regular_test_helper(payload, &expected); -} - -fn batch_receipt_without_hasher() -> impl Strategy> { - let mut args = BatchReceiptStrategyArgs { - hasher: None, - ..Default::default() - }; - args.transaction_strategy_args.hasher = None; - any_with::>(args) -} - -prop_compose! { - fn arb_batches_and_slot_hash(max_batches : usize) - (slot_hash in proptest::array::uniform32(0_u8..), batches in proptest::collection::vec(batch_receipt_without_hasher(), 1..max_batches)) -> - (Vec>, [u8;32]) { - (batches, slot_hash) - } -} - -prop_compose! 
{ - fn arb_slots(max_slots: usize, max_batches: usize) - (batches_and_hashes in proptest::collection::vec(arb_batches_and_slot_hash(max_batches), 1..max_slots)) -> (Vec>, HashMap, usize) - { - let mut slots = std::vec::Vec::with_capacity(max_slots); - - let mut total_num_batches = 1; - - let mut prev_hash = TestHash([0;32]); - - let mut curr_tx_id = 1; - let mut curr_event_id = 1; - - let mut tx_id_to_event_range = HashMap::new(); - - for (batches, hash) in batches_and_hashes{ - let mut new_slot = SlotCommit::new(TestBlock { - curr_hash: hash, - header: TestBlockHeader { - prev_hash, - }, - height: 0 - }); - - total_num_batches += batches.len(); - - for batch in batches { - for tx in &batch.tx_receipts{ - tx_id_to_event_range.insert(curr_tx_id, (curr_event_id, curr_event_id + tx.events.len())); - - curr_event_id += tx.events.len(); - curr_tx_id += 1; - } - - new_slot.add_batch(batch); - } - - - slots.push(new_slot); - - prev_hash = TestHash(hash); - } - - (slots, tx_id_to_event_range, total_num_batches) - } -} - -fn full_tx_json( - tx_id: usize, - tx: &TransactionReceipt, - tx_id_to_event_range: &HashMap, -) -> serde_json::Value { - let (event_range_begin, event_range_end) = tx_id_to_event_range.get(&tx_id).unwrap(); - let tx_hash_hex = hex::encode(tx.tx_hash); - match &tx.body_to_save { - None => json!({ - "hash": format!("0x{tx_hash_hex}"), - "event_range": { - "start": event_range_begin, - "end": event_range_end - }, - "custom_receipt": tx.receipt, - }), - Some(body) => { - json!({ - "hash": format!("0x{tx_hash_hex}"), - "event_range": { - "start": event_range_begin, - "end": event_range_end - }, - "body": body, - "custom_receipt": tx.receipt, - }) - } - } -} - -proptest!( - #[test] - fn proptest_get_head((slots, _, total_num_batches) in arb_slots(10, 10)){ - let last_slot = slots.last().unwrap(); - let last_slot_num_batches = last_slot.batch_receipts().len(); - - let last_slot_start_batch = total_num_batches - last_slot_num_batches; - let last_slot_end_batch = total_num_batches; - - let payload = jsonrpc_req!("ledger_getHead", []); - let expected = jsonrpc_result!({ - "number": slots.len(), - "hash": format!("0x{}", hex::encode(last_slot.slot_data().hash())), - "batch_range": { - "start": last_slot_start_batch, - "end": last_slot_end_batch - } - }); - test_helper(vec![TestExpect{ payload, expected }], slots); - } - - - #[test] - fn proptest_get_batches((slots, tx_id_to_event_range, _total_num_batches) in arb_slots(10, 10), random_batch_num in 1..100){ - let mut curr_batch_num = 1; - let mut curr_tx_num = 1; - - let random_batch_num_usize = usize::try_from(random_batch_num).unwrap(); - - for slot in &slots { - if curr_batch_num > random_batch_num_usize { - break; - } - - if curr_batch_num + slot.batch_receipts().len() > random_batch_num_usize { - let curr_slot_batches = slot.batch_receipts(); - - let batch_index = random_batch_num_usize - curr_batch_num; - - for i in 0..batch_index{ - curr_tx_num += curr_slot_batches.get(i).unwrap().tx_receipts.len(); - } - - let first_tx_num = curr_tx_num; - - let curr_batch = curr_slot_batches.get(batch_index).unwrap(); - let last_tx_num = first_tx_num + curr_batch.tx_receipts.len(); - - let batch_hash = hex::encode(curr_batch.batch_hash); - let batch_receipt= curr_batch.inner; - - let tx_hashes: Vec = curr_batch.tx_receipts.iter().map(|tx| { - format!("0x{}", hex::encode(tx.tx_hash)) - }).collect(); - - let full_txs = curr_batch.tx_receipts.iter().enumerate().map(|(tx_id, tx)| - full_tx_json(curr_tx_num + tx_id, tx, &tx_id_to_event_range) - 
).collect::>(); - - test_helper( - vec![TestExpect{ - payload: - jsonrpc_req!("ledger_getBatches", [[random_batch_num], "Compact"]), - expected: - jsonrpc_result!([{"hash": format!("0x{batch_hash}"),"tx_range": {"start":first_tx_num,"end":last_tx_num},"custom_receipt": batch_receipt}])}, - TestExpect{ - payload: - jsonrpc_req!("ledger_getBatches", [[random_batch_num], "Standard"]), - expected: - jsonrpc_result!([{"hash":format!("0x{batch_hash}"),"tx_range":{"start":first_tx_num,"end":last_tx_num},"txs":tx_hashes,"custom_receipt":batch_receipt}])}, - TestExpect{ - payload: - jsonrpc_req!("ledger_getBatches", [[random_batch_num]]), - expected: - jsonrpc_result!([{"hash":format!("0x{batch_hash}"),"tx_range":{"start":first_tx_num,"end":last_tx_num},"txs":tx_hashes,"custom_receipt":batch_receipt}])}, - TestExpect{ - payload: - jsonrpc_req!("ledger_getBatches", [random_batch_num]), - expected: - jsonrpc_result!([{"hash":format!("0x{batch_hash}"),"tx_range":{"start":first_tx_num,"end":last_tx_num},"txs":tx_hashes,"custom_receipt":batch_receipt}])}, - TestExpect{ - payload: - jsonrpc_req!("ledger_getBatches", [[random_batch_num], "Full"]), - expected: - jsonrpc_result!([{"hash":format!("0x{batch_hash}"),"tx_range":{"start":first_tx_num,"end":last_tx_num},"txs":full_txs,"custom_receipt":batch_receipt}])}, - ], - slots); - - return Ok(()); - } - - curr_batch_num += slot.batch_receipts().len(); - - for batch in slot.batch_receipts(){ - curr_tx_num += batch.tx_receipts.len(); - } - - } - - let payload = jsonrpc_req!("ledger_getBatches", [[random_batch_num], "Compact"]); - let expected = jsonrpc_result!([null]); - test_helper(vec![TestExpect{payload, expected}], slots); - } - - #[test] - fn proptest_get_transactions((slots, tx_id_to_event_range, _total_num_batches) in arb_slots(10, 10), random_tx_num in 1..1000){ - let mut curr_tx_num = 1; - - let random_tx_num_usize = usize::try_from(random_tx_num).unwrap(); - - for slot in &slots{ - for batch in slot.batch_receipts(){ - if curr_tx_num > random_tx_num_usize { - break; - } - - if curr_tx_num + batch.tx_receipts.len() > random_tx_num_usize { - let tx_index = random_tx_num_usize - curr_tx_num; - let tx = batch.tx_receipts.get(tx_index).unwrap(); - - let tx_formatted = full_tx_json(curr_tx_num + tx_index, tx, &tx_id_to_event_range); - - test_helper(vec![TestExpect{ - payload: - jsonrpc_req!("ledger_getTransactions", [[random_tx_num]]), - expected: - jsonrpc_result!([tx_formatted])}, - TestExpect{ - payload: - jsonrpc_req!("ledger_getTransactions", [random_tx_num]), - expected: - jsonrpc_result!([tx_formatted])}, - TestExpect{ - payload: - jsonrpc_req!("ledger_getTransactions", [[random_tx_num], "Compact"]), - expected: - jsonrpc_result!([tx_formatted])}, - TestExpect{ - payload: - jsonrpc_req!("ledger_getTransactions", [[random_tx_num], "Standard"]), - expected: - jsonrpc_result!([tx_formatted])}, - TestExpect{ - payload: - jsonrpc_req!("ledger_getTransactions", [[random_tx_num], "Full"]), - expected: - jsonrpc_result!([tx_formatted])}, - ] - , slots); - - return Ok(()); - } - - curr_tx_num += batch.tx_receipts.len(); - } - } - - let payload = jsonrpc_req!("ledger_getTransactions", [[random_tx_num]]); - let expected = jsonrpc_result!([null]); - test_helper(vec![TestExpect{payload, expected}], slots); - - } - - #[test] - fn proptest_get_events((slots, tx_id_to_event_range, _total_num_batches) in arb_slots(10, 10), random_event_num in 1..10000){ - let mut curr_tx_num = 1; - - let random_event_num_usize = usize::try_from(random_event_num).unwrap(); - - for 
slot in &slots { - for batch in slot.batch_receipts(){ - for tx in &batch.tx_receipts{ - let (start_event_range, end_event_range) = tx_id_to_event_range.get(&curr_tx_num).unwrap(); - if *start_event_range > random_event_num_usize { - break; - } - - if random_event_num_usize < *end_event_range { - let event_index = random_event_num_usize - *start_event_range; - let event: &Event = tx.events.get(event_index).unwrap(); - let event_json = json!({ - "key": event.key().inner(), - "value": event.value().inner(), - }); - - test_helper(vec![TestExpect{ - payload: - jsonrpc_req!("ledger_getEvents", [random_event_num_usize]), - expected: - jsonrpc_result!([event_json])}] - , slots); - - return Ok(()); - } - curr_tx_num += 1; - } - } - } - - let payload = jsonrpc_req!("ledger_getEvents", [random_event_num]); - let expected = jsonrpc_result!([null]); - test_helper(vec![TestExpect{payload, expected}], slots); - } -); diff --git a/examples/demo-rollup/.gitignore b/examples/demo-rollup/.gitignore index 5d583c8e1..ca274e832 100644 --- a/examples/demo-rollup/.gitignore +++ b/examples/demo-rollup/.gitignore @@ -1,2 +1,4 @@ /target /demo_data +/path_readme +/tests/test_data/tmp \ No newline at end of file diff --git a/examples/demo-rollup/Cargo.toml b/examples/demo-rollup/Cargo.toml index bc6b7211b..e14bd8ce3 100644 --- a/examples/demo-rollup/Cargo.toml +++ b/examples/demo-rollup/Cargo.toml @@ -7,6 +7,7 @@ license = { workspace = true } homepage = "sovereign.xyz" publish = false resolver = "2" +default-run = "sov-demo-rollup" # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html @@ -25,26 +26,26 @@ futures = "0.3" # Crates which only this package depends on tokio = { workspace = true } -tracing-subscriber = "0.3.17" +tracing-subscriber = { version = "0.3.17", features = ["env-filter"] } -jupiter = { path = "../../adapters/celestia" } +celestia = { path = "../../adapters/celestia", features = ["native"] } +presence = { path = "../../adapters/avail" } demo-stf = { path = "../demo-stf", features = ["native"] } sov-rollup-interface = { path = "../../rollup-interface" } sov-db = { path = "../../full-node/db/sov-db" } sov-ethereum = { path = "../../full-node/sov-ethereum", optional = true } sov-sequencer = { path = "../../full-node/sov-sequencer" } sov-stf-runner = { path = "../../full-node/sov-stf-runner" } -risc0-adapter = { path = "../../adapters/risc0" } -sov-modules-stf-template = { path = "../../module-system/sov-modules-stf-template" } - -sov-bank = { path = "../../module-system/module-implementations/sov-bank", default-features = false } -sov-election = { path = "../../module-system/module-implementations/examples/sov-election", default-features = false } -sov-value-setter = { path = "../../module-system/module-implementations/examples/sov-value-setter", default-features = false } +risc0-adapter = { path = "../../adapters/risc0", features = ["native"] } +sov-modules-stf-template = { path = "../../module-system/sov-modules-stf-template", features = ["native"] } sov-modules-api = { path = "../../module-system/sov-modules-api", features = ["native"] } sov-state = { path = "../../module-system/sov-state", features = ["native"] } const-rollup-config = { path = "../const-rollup-config" } +sov-cli = { path = "../../module-system/sov-cli" } [dev-dependencies] +sov-evm = { path = "../../module-system/module-implementations/sov-evm", features = ["smart_contracts"] } +sov-bank = { path = "../../module-system/module-implementations/sov-bank", features = ["native"] } sha2 = { 
workspace = true } reqwest = "0.11" tendermint = "0.32" @@ -56,7 +57,19 @@ prometheus = "0.11.0" prettytable-rs = "^0.10" criterion = "0.5.1" +ethereum-types = "0.14.1" +ethers-core = { workspace = true } +ethers-contract = { workspace = true } +ethers-middleware = { workspace = true } +ethers-providers = { workspace = true } +ethers-signers = { workspace = true } +ethers = { workspace = true } +revm = { workspace = true } + +sov-demo-rollup = { path = ".", features = ["experimental"] } + [features] +default = [] experimental = ["sov-ethereum/experimental"] [[bench]] @@ -66,3 +79,7 @@ harness = false [[bench]] name = "rollup_coarse_measure" harness = false + +[[bin]] +name = "sov-cli" +path = "src/sov-cli/main.rs" \ No newline at end of file diff --git a/examples/demo-rollup/Makefile b/examples/demo-rollup/Makefile index 72a387db2..b44d83d28 100644 --- a/examples/demo-rollup/Makefile +++ b/examples/demo-rollup/Makefile @@ -91,16 +91,22 @@ else endif build-sov-cli: - cd ../demo-stf && cargo build --bin sov-cli + cargo build --bin sov-cli test-generate-create-token-tx: check-container-running build-sov-cli - $(SOV_CLI_REL_PATH) generate-transaction-from-json ../test-data/keys/token_deployer_private_key.json Bank ../test-data/requests/create_token.json 0 + $(SOV_CLI_REL_PATH) transactions import from-file bank --path ../test-data/requests/create_token.json -test-build-blob-from-create-token: test-generate-create-token-tx - $(SOV_CLI_REL_PATH) make-batch ../test-data/requests/create_token.dat > ../test-data/requests/test_blob.dat +set-rpc-url: build-sov-cli + $(SOV_CLI_REL_PATH) rpc set-url http://localhost:12345 -test-create-token: test-build-blob-from-create-token - $(MAKE) submit-txn SERIALIZED_BLOB_PATH=../test-data/requests/test_blob.dat +import-keys: build-sov-cli + $(SOV_CLI_REL_PATH) keys import --nickname DANGER__DO_NOT_USE_WITH_REAL_MONEY --path ../test-data/keys/minter_private_key.json + +test-create-token: set-rpc-url test-generate-create-token-tx import-keys + $(SOV_CLI_REL_PATH) rpc submit-batch + +remove-insecure-keys: build-sov-cli + $(SOV_CLI_REL_PATH) keys remove by-address sov15vspj48hpttzyvxu8kzq5klhvaczcpyxn6z6k0hwpwtzs4a6wkvqwr57gc clean-rollup-db: $(eval path := ./$(shell awk -F'=' '/^path/ {print $$2}' rollup_config.toml | tr -d '[:space:]"\n')) diff --git a/examples/demo-rollup/README.md b/examples/demo-rollup/README.md index 539f3edf0..490ee09ba 100644 --- a/examples/demo-rollup/README.md +++ b/examples/demo-rollup/README.md @@ -25,15 +25,13 @@ This is a demo full node running a simple Sovereign SDK rollup on [Celestia](htt - [How to Submit Transactions](#how-to-submit-transactions) - [1. Build `sov-cli`](#1-build-sov-cli) - [2. Generate the Transaction](#2-generate-the-transaction) - - [3. Bundle the Serialized Transaction](#3-bundle-the-serialized-transaction) - - [4. Submit the Transaction](#4-submit-the-transaction) - - [5. Verify the Token Supply](#5-verify-the-token-supply) + - [3. Submit the Transaction(s)](#3-submit-the-transactions) + - [4. Verify the Token Supply](#4-verify-the-token-supply) - [Makefile](#makefile) - [Remote setup](#remote-setup) - [How to Customize This Example](#how-to-customize-this-example) - [1. Initialize the DA Service](#1-initialize-the-da-service) - - [2. Initialize the State Transition Function](#2-initialize-the-state-transition-function) - - [3. Run the Main Loop](#3-run-the-main-loop) + - [2. 
Run the Main Loop](#2-run-the-main-loop) - [Disclaimer](#disclaimer) - [Interacting with your Node via RPC](#interacting-with-your-node-via-rpc) - [Key Concepts](#key-concepts) @@ -67,41 +65,41 @@ understand how to build your own state transition function, check out at the doc 2. Switch to the `examples/demo-rollup` directory (which is where this `README.md` is located!). - ``` - $ cd examples/demo-rollup/ - ``` +```shell +$ cd examples/demo-rollup/ +``` 3. Spin up a local Celestia instance as your DA layer. We've built a small Makefile to simplify that process: - ``` - $ make clean - $ make start # Make sure to run `make stop` when you're done with this demo! - ``` +```sh +$ make clean +$ make start # Make sure to run `make stop` when you're done with this demo! +``` - If interested, you can check out what the Makefile does [here](#Makefile). - The above command will also modify some configuration files: +If interested, you can check out what the Makefile does [here](#Makefile). + The above command will also modify some configuration files: - ``` - $ git status - .. - .. - modified: rollup_config.toml - ``` +```sh +$ git status +.. +.. + modified: rollup_config.toml +``` ### Start the Rollup Full Node Now run the demo-rollup full node, as shown below. You will see it consuming blocks from the Celestia node running inside Docker: -``` +```sh # Make sure you're still in the examples/demo-rollup directory. $ cargo run -2023-06-07T10:03:25.473920Z INFO jupiter::da_service: Fetching header at height=1... +2023-06-07T10:03:25.473920Z INFO celestia::da_service: Fetching header at height=1... 2023-06-07T10:03:25.496853Z INFO sov_demo_rollup: Received 0 blobs 2023-06-07T10:03:25.497700Z INFO sov_demo_rollup: Requesting data for height 2 and prev_state_root 0xa96745d3184e54d098982daf44923d84c358800bd22c1864734ccb978027a670 -2023-06-07T10:03:25.497719Z INFO jupiter::da_service: Fetching header at height=2... +2023-06-07T10:03:25.497719Z INFO celestia::da_service: Fetching header at height=2... 2023-06-07T10:03:25.505412Z INFO sov_demo_rollup: Received 0 blobs 2023-06-07T10:03:25.505992Z INFO sov_demo_rollup: Requesting data for height 3 and prev_state_root 0xa96745d3184e54d098982daf44923d84c358800bd22c1864734ccb978027a670 -2023-06-07T10:03:25.506003Z INFO jupiter::da_service: Fetching header at height=3... +2023-06-07T10:03:25.506003Z INFO celestia::da_service: Fetching header at height=3... 2023-06-07T10:03:25.511237Z INFO sov_demo_rollup: Received 0 blobs 2023-06-07T10:03:25.511815Z INFO sov_demo_rollup: Requesting data for height 4 and prev_state_root 0xa96745d3184e54d098982daf44923d84c358800bd22c1864734ccb978027a670 ``` @@ -112,14 +110,14 @@ Leave it running while you proceed with the rest of the demo. After switching to a new terminal tab, let's submit our first transaction by creating a token: -``` +```sh $ make test-create-token ``` ...wait a few seconds and you will see the transaction receipt in the output of the demo-rollup full node: -``` -2023-07-12T15:04:52.291073Z INFO jupiter::da_service: Fetching header at height=31... +```sh +2023-07-12T15:04:52.291073Z INFO celestia::da_service: Fetching header at height=31... 
2023-07-12T15:05:02.304393Z INFO sov_demo_rollup: Received 1 blobs at height 31 2023-07-12T15:05:02.305257Z INFO sov_demo_rollup: blob #0 at height 31 with blob_hash 0x4876c2258b57104356efa4630d3d9f901ccfda5dde426ba8aef81d4a3e357c79 has been applied with #1 transactions, sequencer outcome Rewarded(0) 2023-07-12T15:05:02.305280Z INFO sov_demo_rollup: tx #0 hash: 0x1e1892f77cf42c0abd2ca2acdd87eabb9aa65ec7497efea4ff9f5f33575f881a result Successful @@ -134,7 +132,7 @@ The `make test-create-token` command above was useful to test if everything is r You'll need the `sov-cli` binary in order to create transactions. Build it with these commands: -```console +```sh $ cd ../demo-stf # Assuming you're still in examples/demo-rollup/ $ cargo build --bin sov-cli $ cd ../.. # Go back to the root of the repository @@ -144,13 +142,10 @@ Main entry point for CLI Usage: sov-cli Commands: - generate-transaction-from-json Serialize a call to a module. This creates a .dat file containing the serialized transaction - submit-transaction Submits transaction to sequencer - publish-batch Tells Sequencer to publish batch - make-batch Combine a list of files generated by GenerateTransaction into a blob for submission to Celestia - util Utility commands - generate-transaction Generate a transaction from the command line - help Print this message or the help of the given subcommand(s) + transactions Generate, sign, and send transactions + keys View and manage keys associated with this wallet + rpc Query the current state of the rollup and send transactions + help Print this message or the help of the given subcommand(s) Options: -h, --help Print help @@ -160,10 +155,14 @@ Options: Each transaction that we want to submit is a member of the `CallMessage` enum defined as part of creating a module. For example, let's consider the `Bank` module's `CallMessage`: ```rust +use sov_bank::CallMessage::Transfer; +use sov_bank::Coins; +use sov_bank::Amount; + pub enum CallMessage { /// Creates a new token with the specified name and initial balance. CreateToken { - /// Random value use to create a unique token address. + /// Random value used to create a unique token address. salt: u64, /// The name of the new token. token_name: String, @@ -180,19 +179,19 @@ pub enum CallMessage { /// The address to which the tokens will be transferred. to: C::Address, /// The amount of tokens to transfer. - coins: Coins, + coins: Coins::, }, /// Burns a specified amount of tokens. Burn { /// The amount of tokens to burn. - coins: Coins, + coins: Coins::, }, /// Mints a specified amount of tokens. Mint { /// The amount of tokens to mint. - coins: Coins, + coins: Coins::, /// Address to mint tokens to minter_address: C::Address, }, @@ -205,10 +204,12 @@ pub enum CallMessage { } ``` -In the above snippet, we can see that `CallMessage` in `Bank` support five different types of calls. The `sov-cli` has the ability to parse a JSON file that aligns with any of these calls and subsequently serialize them. The structure of the JSON file, which represents the call, closely mirrors that of the Enum member. Consider the `Transfer` message as an example: +In the above snippet, we can see that `CallMessage` in `Bank` supports five different types of calls. The `sov-cli` has the ability to parse a JSON file that aligns with any of these calls and subsequently serialize them. The structure of the JSON file, which represents the call, closely mirrors that of the Enum member. 
Consider the `Transfer` message as an example: ```rust -Transfer { +use sov_bank::Coins; + +struct Transfer { /// The address to which the tokens will be transferred. to: C::Address, /// The amount of tokens to transfer. @@ -224,7 +225,7 @@ Here's an example of a JSON representing the above call: "to": "sov1zgfpyysjzgfpyysjzgfpyysjzgfpyysjzgfpyysjzgfpyysjzgfqve8h6h", "coins": { "amount": 200, - "token_address": "sov1zdwj8thgev2u3yyrrlekmvtsz4av4tp3m7dm5mx5peejnesga27svq9m72" + "token_address": "sov16m8fxq0x5wc5aw75fx9rus2p7g2l22zf4re72c3m058g77cdjemsavg2ft" } } } @@ -232,80 +233,68 @@ Here's an example of a JSON representing the above call: #### 2. Generate the Transaction -The JSON above is the contents of the file `examples/test-data/requests/transfer.json`. We'll use this transaction as our example for the rest of the tutorial. In order to serialize the transaction JSON to submit to our local Celestia node, we need to perform 2 operations: +The JSON above is the contents of the file `examples/test-data/requests/transfer.json`. We'll use this transaction as our example for the rest of the tutorial. In order to send the transaction, we need to perform 2 operations: -- Serialize the JSON representation of the transaction. -- Bundle serialized transaction files into a blob (since DA layers accept blobs which can contain multiple transactions). +- Import the transaction data into the wallet +- Sign and submit the transaction Note: we're able to make a `Transfer` call here because we already created the token as part of the sanity check above, using `make test-create-token`. -To generate transactions you can use the `sov-cli generate-transaction-from-json` subcommand, as shown below: - -``` -$ ./target/debug/sov-cli generate-transaction-from-json -h -Serialize a call to a module. This creates a .dat file containing the serialized transaction - -Usage: sov-cli generate-transaction-from-json - -Arguments: - Path to the json file containing the private key of the sender - Name of the module to generate the call. Modules defined in your Runtime are supported. (eg: Bank, Accounts) - Path to the json file containing the parameters for a module call - Nonce for the transaction -``` - -For our test, we'll use the test private key located at `examples/test-data/keys/minter_private_key.json`. This private key also corresponds to the address used in the `minter_address` field of the `create_token.json` file. This was the address that `make test-create-token` minted the new tokens to. 
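+As a rough illustration (a hedged sketch, not part of the CLI flow), the JSON above maps onto the Rust enum through serde's externally tagged enum encoding, so it could be decoded along these lines, assuming `CallMessage` implements `serde::Deserialize` as the JSON import in `sov-cli` suggests:
+
+```rust
+use sov_bank::CallMessage;
+use sov_modules_api::default_context::DefaultContext;
+
+// Parse the contents of a file like `transfer.json` into the Bank call message.
+// The `Deserialize` bound on `CallMessage` is an assumption in this sketch.
+fn parse_transfer_call(json: &str) -> anyhow::Result<CallMessage<DefaultContext>> {
+    let call: CallMessage<DefaultContext> = serde_json::from_str(json)?;
+    Ok(call)
+}
+```
+
+In practice you never need to do this by hand; the `sov-cli` wallet parses and stores the JSON for you, as the next step shows.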
+To generate transactions you can use the `transactions import from-file` subcommand, as shown below: -Let's go ahead and serialize the transaction: +```sh +$ ./target/debug/sov-cli transactions import from-file -h +Import a transaction from a JSON file at the provided path -``` -$ ./target/debug/sov-cli generate-transaction-from-json ./examples/test-data/keys/minter_private_key.json Bank ./examples/test-data/requests/transfer.json 0 -``` +Usage: sov-cli transactions import from-file -Once the above command executes successfully, there will be a file named `./examples/test-data/requests/transfer.dat`: +Commands: + bank Generates a transaction for the `bank` module + sequencer-registry Generates a transaction for the `sequencer_registry` module + election Generates a transaction for the `election` module + value-setter Generates a transaction for the `value_setter` module + accounts Generates a transaction for the `accounts` module + help Print this message or the help of the given subcommand(s) -``` -$ cat ./examples/test-data/requests/transfer.dat -5ef848746e8d2b9c27ee46210e185dc9f3b690d5cef42a13fb9c336bd40c798210bf7af613997f7af57c9681a242f5fe4121a1539ba4f5f32f14c49f978b990a7b758bf2e7670fafaf6bf0015ce0ff5aa802306fc7e3f45762853ffc37180fe64a0000000001fea6ac5b8751120fb62fff67b54d2eac66aef307c7dde1d394dea1e09e43dd44c800000000000000135d23aee8cb15c890831ff36db170157acaac31df9bba6cd40e7329e608eabd0000000000000000 +Options: + -h, --help Print help ``` -The above is the hex representation of the serialized transaction. +Let's go ahead and import the transaction into the wallet -#### 3. Bundle the Serialized Transaction - -After serializing your transactions (just one in this case), you must bundle them into a blob. You can use the `sov-cli make-batch` subcommand: - -``` -$ ./target/debug/sov-cli make-batch -h -Usage: sov-cli make-batch [PATH_LIST]... - -Arguments: - [PATH_LIST]... List of serialized transactions +```bash +$ ./target/debug/sov-cli transactions import from-file bank --path ./examples/test-data/requests/transfer.json +Adding the following transaction to batch: +{ + "bank": { + "Transfer": { + "to": "sov1l6n2cku82yfqld30lanm2nfw43n2auc8clw7r5u5m6s7p8jrm4zqrr8r94", + "coins": { + "amount": 200, + "token_address": "sov16m8fxq0x5wc5aw75fx9rus2p7g2l22zf4re72c3m058g77cdjemsavg2ft" + } + } + } +} ``` -Use the command below to store the serialized blob in `./examples/test-data/requests/tx_blob`: +This output indicates that the wallet has saved the transaction details for later signing. -``` -$ ./target/debug/sov-cli make-batch ./examples/test-data/requests/transfer.dat > ./examples/test-data/requests/tx_blob -$ cat ./examples/test-data/requests/tx_blob -01000000b60000005ef848746e8d2b9c27ee46210e185dc9f3b690d5cef42a13fb9c336bd40c798210bf7af613997f7af57c9681a242f5fe4121a1539ba4f5f32f14c49f978b990a7b758bf2e7670fafaf6bf0015ce0ff5aa802306fc7e3f45762853ffc37180fe64a0000000001fea6ac5b8751120fb62fff67b54d2eac66aef307c7dde1d394dea1e09e43dd44c800000000000000135d23aee8cb15c890831ff36db170157acaac31df9bba6cd40e7329e608eabd0000000000000000 -``` +#### 3. Submit the Transaction(s) -#### 4. Submit the Transaction +You now have a batch with a single transaction in your wallet. If you want to submit any more transactions as part of this +batch, you can import them now. Finally, let's submit your transaction to the rollup. -You now have a blob with one serialized transaction in `./examples/test-data/requests/tx_blob`. 
Switch back to the `examples/demo-rollup` directory and use the Makefile to submit it: - -``` -$ cd examples/demo-rollup -$ SERIALIZED_BLOB_PATH=../test-data/requests/tx_blob make submit-txn +```bash +$ cargo run rpc submit-batch by-address sov15vspj48hpttzyvxu8kzq5klhvaczcpyxn6z6k0hwpwtzs4a6wkvqwr57gc ``` -Here the `make submit-txn` command locates the Docker container the Celestia instance is running in, and runs the Celestia-specific command to submit the transaction. +This command will use your default private key -#### 5. Verify the Token Supply +#### 4. Verify the Token Supply -``` -$ curl -X POST -H "Content-Type: application/json" -d '{"jsonrpc":"2.0","method":"bank_supplyOf","params":["sov1zdwj8thgev2u3yyrrlekmvtsz4av4tp3m7dm5mx5peejnesga27svq9m72"],"id":1}' http://127.0.0.1:12345 +```bash +$ curl -X POST -H "Content-Type: application/json" -d '{"jsonrpc":"2.0","method":"bank_supplyOf","params":["sov16m8fxq0x5wc5aw75fx9rus2p7g2l22zf4re72c3m058g77cdjemsavg2ft"],"id":1}' http://127.0.0.1:12345 {"jsonrpc":"2.0","result":{"amount":1000},"id":1} ``` @@ -374,17 +363,7 @@ a remote node. Whichever option you pick, simply place the URL and authenticatio in the `rollup_config.toml` file and it will be automatically picked up by the node implementation. For this tutorial, the Makefile below (which also helps start a local Celestia instance) handles this step for you. -### 2. Initialize the State Transition Function - -The next step is to initialize your state transition function. - -```rust - let mut app: App = - App::new(config); -``` - - -### 3. Run the Main Loop +### 2. Run the Main Loop The full node implements a simple loop for processing blocks. The workflow is: @@ -422,7 +401,7 @@ Most queries for ledger information accept an optional `QueryMode` argument. The **Identifiers** -There are a several ways to uniquely identify items in the Ledger DB. +There are several ways to uniquely identify items in the Ledger DB. - By _number_. Each family of structs (`slots`, `blocks`, `transactions`, and `events`) is numbered in order starting from `1`. So, for example, the first transaction to appear on the DA layer will be numered `1` and might emit events `1`-`5`. Or, slot `17` might contain batches `41` - `44`. @@ -433,7 +412,7 @@ There are a several ways to uniquely identify items in the Ledger DB. To request an item from the ledger DB, you can provide any identifier - and even mix and match different identifiers. We recommend using item number wherever possible, though, since resolving other identifiers may require additional database lookups. -Some examples will make this clearer. Suppose that slot number `5` contaisn batches `9`, `10`, and `11`, that batch `10` contains +Some examples will make this clearer. Suppose that slot number `5` contains batches `9`, `10`, and `11`, that batch `10` contains transactions `50`-`81`, and that transaction `52` emits event number `17`. If we want to fetch events number `17`, we can use any of the following queries: - `{"jsonrpc":"2.0","method":"ledger_getEvents","params":[[17]], ... 
}` diff --git a/examples/demo-rollup/benches/README.md b/examples/demo-rollup/benches/README.md index 4ea03ba91..25714be02 100644 --- a/examples/demo-rollup/benches/README.md +++ b/examples/demo-rollup/benches/README.md @@ -1,7 +1,17 @@ + + +**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)* + +- [Native Benchmarks](#native-benchmarks) + - [Methodology](#methodology) +- [Makefile](#makefile) + + + # Native Benchmarks Native benchmarks refer to the performance of the rollup SDK in native mode - this does not involve proving ## Methodology -* We use the Bank module's Transfer call as the main transaction for running this benchmark. So what we're measuring is the number of value transfers can be done per second. +* We use the Bank module's Transfer call as the main transaction for running this benchmark. So what we're measuring is the number of value transfers that can be done per second. * We do not connect to the DA layer since that will be the bottleneck if we do. We pre-populate 10 blocks (configurable via env var BLOCKS) with 1 blob each containing 10,000 transactions each (configurable via env var TXNS_PER_BLOCK). * The first block only contains a "CreateToken" transaction. Subsequent blocks contain "Transfer" transactions. * All token transfers are initiated from the created token's mint address @@ -61,4 +71,4 @@ The Makefile is located in the demo-rollup/benches folder and supports the follo The Makefile supports setting number of blocks and transactions per block using BLOCKS and TXNS_PER_BLOCK env vars. Defaults are 100 blocks and 10,000 transactions per block when using the Makefile -![Flamgraph](flamegraph_sample.svg) \ No newline at end of file +![Flamegraph](flamegraph_sample.svg) diff --git a/examples/demo-rollup/src/rng_xfers.rs b/examples/demo-rollup/benches/rng_xfers.rs similarity index 77% rename from examples/demo-rollup/src/rng_xfers.rs rename to examples/demo-rollup/benches/rng_xfers.rs index e07efe174..cfa5a0a04 100644 --- a/examples/demo-rollup/src/rng_xfers.rs +++ b/examples/demo-rollup/benches/rng_xfers.rs @@ -2,18 +2,22 @@ use std::env; use async_trait::async_trait; use borsh::ser::BorshSerialize; -use const_rollup_config::SEQUENCER_DA_ADDRESS; use demo_stf::runtime::Runtime; -use jupiter::verifier::address::CelestiaAddress; -use sov_bank::{CallMessage, Coins}; +use sov_bank::{Bank, CallMessage, Coins}; use sov_modules_api::default_context::DefaultContext; use sov_modules_api::default_signature::private_key::DefaultPrivateKey; use sov_modules_api::transaction::Transaction; -use sov_modules_api::{Address, AddressBech32, PrivateKey, PublicKey, Spec}; +use sov_modules_api::{Address, AddressBech32, EncodeCall, PrivateKey, PublicKey, Spec}; use sov_rollup_interface::da::DaSpec; -use sov_rollup_interface::mocks::{TestBlob, TestBlock, TestBlockHeader, TestHash}; +use sov_rollup_interface::mocks::{ + MockAddress, MockBlob, MockBlock, MockBlockHeader, MockHash, MockValidityCond, +}; use sov_rollup_interface::services::da::DaService; +pub(crate) const SEQUENCER_DA_ADDRESS: [u8; 32] = [99; 32]; + +#[derive(Clone)] +/// A simple DaService for a random number generator. 
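+// Usage sketch (an assumption, not part of this change): because the benches are
+// declared with `harness = false`, they are plain binaries and can be driven
+// through Cargo with the knobs the benches README mentions, for example:
+//
+//     BLOCKS=20 TXNS_PER_BLOCK=1000 cargo bench --bench rollup_bench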
pub struct RngDaService; fn generate_transfers(n: usize, start_nonce: u64) -> Vec { @@ -34,10 +38,13 @@ fn generate_transfers(n: usize, start_nonce: u64) -> Vec { to: address, coins: Coins { amount: 1, - token_address: token_address.clone(), + token_address, }, }; - let enc_msg = Runtime::::encode_bank_call(msg); + let enc_msg = + as EncodeCall>>::encode_call( + msg, + ); let tx = Transaction::::new_signed_tx(&pk, enc_msg, start_nonce + (i as u64)); let ser_tx = tx.try_to_vec().unwrap(); @@ -60,10 +67,11 @@ fn generate_create(start_nonce: u64) -> Vec { salt: 11, token_name: "sov-test-token".to_string(), initial_balance: 100000000, - minter_address: minter_address.clone(), + minter_address, authorized_minters: vec![minter_address], }; - let enc_msg = Runtime::::encode_bank_call(msg); + let enc_msg = + as EncodeCall>>::encode_call(msg); let tx = Transaction::::new_signed_tx(&pk, enc_msg, start_nonce); let ser_tx = tx.try_to_vec().unwrap(); message_vec.push(ser_tx); @@ -71,6 +79,7 @@ fn generate_create(start_nonce: u64) -> Vec { } impl RngDaService { + /// Instantiate a new [`RngDaService`] pub fn new() -> Self { RngDaService } @@ -82,12 +91,15 @@ impl Default for RngDaService { } } +/// A simple DaSpec for a random number generator. +#[derive(serde::Serialize, serde::Deserialize)] pub struct RngDaSpec; impl DaSpec for RngDaSpec { - type SlotHash = TestHash; - type BlockHeader = TestBlockHeader; - type BlobTransaction = TestBlob; + type SlotHash = MockHash; + type BlockHeader = MockBlockHeader; + type BlobTransaction = MockBlob; + type ValidityCondition = MockValidityCond; type InclusionMultiProof = [u8; 32]; type CompletenessProof = (); type ChainParams = (); @@ -95,29 +107,23 @@ impl DaSpec for RngDaSpec { #[async_trait] impl DaService for RngDaService { - type RuntimeConfig = (); type Spec = RngDaSpec; - type FilteredBlock = TestBlock; + type FilteredBlock = MockBlock; type Error = anyhow::Error; - async fn new( - _config: Self::RuntimeConfig, - _chain_params: ::ChainParams, - ) -> Self { - RngDaService::new() - } - async fn get_finalized_at(&self, height: u64) -> Result { let num_bytes = height.to_le_bytes(); let mut barray = [0u8; 32]; barray[..num_bytes.len()].copy_from_slice(&num_bytes); - let block = TestBlock { + let block = MockBlock { curr_hash: barray, - header: TestBlockHeader { - prev_hash: TestHash([0u8; 32]), + header: MockBlockHeader { + prev_hash: MockHash([0u8; 32]), }, height, + validity_cond: MockValidityCond { is_valid: true }, + blobs: Default::default(), }; Ok(block) @@ -146,8 +152,8 @@ impl DaService for RngDaService { generate_transfers(num_txns, (block.height - 1) * (num_txns as u64)) }; - let address = CelestiaAddress::try_from(&SEQUENCER_DA_ADDRESS[..]).unwrap(); - let blob = TestBlob::new(data, address, [0u8; 32]); + let address = MockAddress::from(SEQUENCER_DA_ADDRESS); + let blob = MockBlob::new(data, address, [0u8; 32]); vec![blob] } diff --git a/examples/demo-rollup/benches/rollup_bench.rs b/examples/demo-rollup/benches/rollup_bench.rs index ab54b659f..502d43c5f 100644 --- a/examples/demo-rollup/benches/rollup_bench.rs +++ b/examples/demo-rollup/benches/rollup_bench.rs @@ -1,20 +1,19 @@ +mod rng_xfers; use std::env; use std::path::PathBuf; use std::sync::Arc; use std::time::Duration; use anyhow::Context; -use const_rollup_config::SEQUENCER_DA_ADDRESS; use criterion::{criterion_group, criterion_main, Criterion}; use demo_stf::app::App; use demo_stf::genesis_config::create_demo_genesis_config; -use jupiter::verifier::address::CelestiaAddress; use 
risc0_adapter::host::Risc0Verifier; +use rng_xfers::{RngDaService, RngDaSpec, SEQUENCER_DA_ADDRESS}; use sov_db::ledger_db::{LedgerDB, SlotCommit}; -use sov_demo_rollup::rng_xfers::RngDaService; use sov_modules_api::default_signature::private_key::DefaultPrivateKey; use sov_modules_api::PrivateKey; -use sov_rollup_interface::mocks::{TestBlob, TestBlock, TestBlockHeader, TestHash}; +use sov_rollup_interface::mocks::{MockAddress, MockBlock, MockBlockHeader, MockHash}; use sov_rollup_interface::services::da::DaService; use sov_rollup_interface::stf::StateTransitionFunction; use sov_stf_runner::{from_toml_path, RollupConfig}; @@ -31,36 +30,31 @@ fn rollup_bench(_bench: &mut Criterion) { .sample_size(10) .measurement_time(Duration::from_secs(20)); let rollup_config_path = "benches/rollup_config.toml".to_string(); - let mut rollup_config: RollupConfig = from_toml_path(&rollup_config_path) - .context("Failed to read rollup configuration") - .unwrap(); + let mut rollup_config: RollupConfig = + from_toml_path(&rollup_config_path) + .context("Failed to read rollup configuration") + .unwrap(); let temp_dir = TempDir::new().expect("Unable to create temporary directory"); - rollup_config.runner.storage.path = PathBuf::from(temp_dir.path()); + rollup_config.storage.path = PathBuf::from(temp_dir.path()); let ledger_db = - LedgerDB::with_path(&rollup_config.runner.storage.path).expect("Ledger DB failed to open"); + LedgerDB::with_path(&rollup_config.storage.path).expect("Ledger DB failed to open"); let da_service = Arc::new(RngDaService::new()); - let demo_runner = - App::>::new(rollup_config.runner.storage); + let demo_runner = App::::new(rollup_config.storage); let mut demo = demo_runner.stf; let sequencer_private_key = DefaultPrivateKey::generate(); + let sequencer_da_address = MockAddress::from(SEQUENCER_DA_ADDRESS); let demo_genesis_config = create_demo_genesis_config( 100000000, sequencer_private_key.default_address(), - SEQUENCER_DA_ADDRESS.to_vec(), - &sequencer_private_key, + sequencer_da_address.as_ref().to_vec(), &sequencer_private_key, ); - let _prev_state_root = { - // Check if the rollup has previously been initialized - demo.init_chain(demo_genesis_config); - let apply_block_result = demo.apply_slot(Default::default(), []); - let prev_state_root = apply_block_result.state_root; - prev_state_root.0 - }; + + demo.init_chain(demo_genesis_config); // data generation let mut blobs = vec![]; @@ -69,12 +63,14 @@ fn rollup_bench(_bench: &mut Criterion) { let num_bytes = height.to_le_bytes(); let mut barray = [0u8; 32]; barray[..num_bytes.len()].copy_from_slice(&num_bytes); - let filtered_block = TestBlock { + let filtered_block = MockBlock { curr_hash: barray, - header: TestBlockHeader { - prev_hash: TestHash([0u8; 32]), + header: MockBlockHeader { + prev_hash: MockHash([0u8; 32]), }, height, + validity_cond: Default::default(), + blobs: Default::default(), }; blocks.push(filtered_block.clone()); @@ -88,9 +84,11 @@ fn rollup_bench(_bench: &mut Criterion) { let filtered_block = &blocks[height as usize]; let mut data_to_commit = SlotCommit::new(filtered_block.clone()); - - let apply_block_result = - demo.apply_slot(Default::default(), &mut blobs[height as usize]); + let apply_block_result = demo.apply_slot( + Default::default(), + data_to_commit.slot_data(), + &mut blobs[height as usize], + ); for receipts in apply_block_result.batch_receipts { data_to_commit.add_batch(receipts); } diff --git a/examples/demo-rollup/benches/rollup_coarse_measure.rs 
b/examples/demo-rollup/benches/rollup_coarse_measure.rs index 63a79c618..651b084be 100644 --- a/examples/demo-rollup/benches/rollup_coarse_measure.rs +++ b/examples/demo-rollup/benches/rollup_coarse_measure.rs @@ -1,20 +1,22 @@ +mod rng_xfers; use std::env; use std::path::PathBuf; +use std::str::FromStr; use std::sync::Arc; use std::time::{Duration, Instant}; use anyhow::Context; +use celestia::verifier::address::CelestiaAddress; use const_rollup_config::SEQUENCER_DA_ADDRESS; use demo_stf::app::App; use demo_stf::genesis_config::create_demo_genesis_config; -use jupiter::verifier::address::CelestiaAddress; use prometheus::{Histogram, HistogramOpts, Registry}; use risc0_adapter::host::Risc0Verifier; +use rng_xfers::{RngDaService, RngDaSpec}; use sov_db::ledger_db::{LedgerDB, SlotCommit}; -use sov_demo_rollup::rng_xfers::RngDaService; use sov_modules_api::default_signature::private_key::DefaultPrivateKey; use sov_modules_api::PrivateKey; -use sov_rollup_interface::mocks::{TestBlob, TestBlock, TestBlockHeader, TestHash}; +use sov_rollup_interface::mocks::{MockBlock, MockBlockHeader, MockHash}; use sov_rollup_interface::services::da::DaService; use sov_rollup_interface::stf::StateTransitionFunction; use sov_stf_runner::{from_toml_path, RollupConfig}; @@ -79,36 +81,31 @@ async fn main() -> Result<(), anyhow::Error> { } let rollup_config_path = "benches/rollup_config.toml".to_string(); - let mut rollup_config: RollupConfig = from_toml_path(&rollup_config_path) - .context("Failed to read rollup configuration") - .unwrap(); + let mut rollup_config: RollupConfig = + from_toml_path(&rollup_config_path) + .context("Failed to read rollup configuration") + .unwrap(); let temp_dir = TempDir::new().expect("Unable to create temporary directory"); - rollup_config.runner.storage.path = PathBuf::from(temp_dir.path()); + rollup_config.storage.path = PathBuf::from(temp_dir.path()); let ledger_db = - LedgerDB::with_path(&rollup_config.runner.storage.path).expect("Ledger DB failed to open"); + LedgerDB::with_path(&rollup_config.storage.path).expect("Ledger DB failed to open"); let da_service = Arc::new(RngDaService::new()); - let demo_runner = - App::>::new(rollup_config.runner.storage); + let demo_runner = App::::new(rollup_config.storage); let mut demo = demo_runner.stf; let sequencer_private_key = DefaultPrivateKey::generate(); + let sequencer_da_address = CelestiaAddress::from_str(SEQUENCER_DA_ADDRESS).unwrap(); let demo_genesis_config = create_demo_genesis_config( 100000000, sequencer_private_key.default_address(), - SEQUENCER_DA_ADDRESS.to_vec(), - &sequencer_private_key, + sequencer_da_address.as_ref().to_vec(), &sequencer_private_key, ); - let _prev_state_root = { - // Check if the rollup has previously been initialized - demo.init_chain(demo_genesis_config); - let apply_block_result = demo.apply_slot(Default::default(), []); - let prev_state_root = apply_block_result.state_root; - prev_state_root.0 - }; + + demo.init_chain(demo_genesis_config); // data generation let mut blobs = vec![]; @@ -118,12 +115,14 @@ async fn main() -> Result<(), anyhow::Error> { let num_bytes = height.to_le_bytes(); let mut barray = [0u8; 32]; barray[..num_bytes.len()].copy_from_slice(&num_bytes); - let filtered_block = TestBlock { + let filtered_block = MockBlock { curr_hash: barray, - header: TestBlockHeader { - prev_hash: TestHash([0u8; 32]), + header: MockBlockHeader { + prev_hash: MockHash([0u8; 32]), }, height, + validity_cond: Default::default(), + blobs: Default::default(), }; blocks.push(filtered_block.clone()); @@ 
-141,7 +140,11 @@ async fn main() -> Result<(), anyhow::Error> { let now = Instant::now(); - let apply_block_results = demo.apply_slot(Default::default(), &mut blobs[height as usize]); + let apply_block_results = demo.apply_slot( + Default::default(), + data_to_commit.slot_data(), + &mut blobs[height as usize], + ); apply_block_time += now.elapsed(); h_apply_block.observe(now.elapsed().as_secs_f64()); diff --git a/examples/demo-rollup/benches/rollup_config.toml b/examples/demo-rollup/benches/rollup_config.toml index d2fa8b654..1f8e74250 100644 --- a/examples/demo-rollup/benches/rollup_config.toml +++ b/examples/demo-rollup/benches/rollup_config.toml @@ -1,7 +1,3 @@ -# We define the rollup's genesis to occur at Celestia block number `start_height`. The rollup will ignore -# any Celestia blocks before this height -start_height = 1 - [da] # The JWT used to authenticate with the celestia light client. Instructions for generating this token can be found in the README celestia_rpc_auth_token = "MY.SECRET.TOKEN" @@ -12,11 +8,16 @@ max_celestia_response_body_size = 104_857_600 # The maximum time to wait for a response to an RPC query against Celestia node. Defaults to 60 seconds. celestia_rpc_timeout_seconds = 60 -[runner.storage] +[storage] # The path to the rollup's data directory. Paths that do not begin with `/` are interpreted as relative paths. path = "benches/demo_data" -[rpc_config] +[runner] +# We define the rollup's genesis to occur at block number `start_height`. The rollup will ignore +# any blocks before this height +start_height = 1 + +[runner.rpc_config] # the host and port to bind the rpc server for bind_host = "127.0.0.1" bind_port = 12345 diff --git a/examples/demo-rollup/remote_setup.md b/examples/demo-rollup/remote_setup.md index c11e05377..633542aa5 100644 --- a/examples/demo-rollup/remote_setup.md +++ b/examples/demo-rollup/remote_setup.md @@ -1,3 +1,18 @@ + + +**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)* + +- [Remote setup](#remote-setup) + - [Set up Celestia](#set-up-celestia) + - [Submitting transactions](#submitting-transactions) + - [Install celestia-appd](#install-celestia-appd) + - [Create local keypair](#create-local-keypair) + - [Create bank transaction](#create-bank-transaction) + - [Submit blob to celestia](#submit-blob-to-celestia) + - [Verify the supply of the new token created](#verify-the-supply-of-the-new-token-created) + + + ## Remote setup This readme covers the steps necessary to setup the sovereign-sdk to work with a remote DA network which includes @@ -18,7 +33,7 @@ as of Mar 18, 2023. To get started, you'll need to sync a Celestia light node ru 1. Initialize the node: `celestia light init --p2p.network arabica` 1. Start the node with rpc enabled. Our default config uses port 11111: `celestia light start --core.ip https://limani.celestia-devops.dev --p2p.network arabica --gateway --rpc.port 11111`. If you want to use a different port, you can adjust the rollup's configuration in rollup_config.toml. 1. Obtain a JWT for RPC access: `celestia light auth admin --p2p.network arabica` -1. Copy the JWT and and store it in the `celestia_rpc_auth_token` field of the rollup's config file (`rollup_config.toml`). Be careful to paste the entire JWT - it may wrap across several lines in your terminal. +1. Copy the JWT and store it in the `celestia_rpc_auth_token` field of the rollup's config file (`rollup_config.toml`). Be careful to paste the entire JWT - it may wrap across several lines in your terminal. 1. 
Wait a few minutes for your Celestia node to sync. It needs to have synced to the rollup's configured `start_height `671431` before the demo can run properly. Once your Celestia node is up and running, simply `cargo +nightly run` to test out the prototype. diff --git a/examples/demo-rollup/rollup_config.toml b/examples/demo-rollup/rollup_config.toml index ff579fcbe..b2cd95734 100644 --- a/examples/demo-rollup/rollup_config.toml +++ b/examples/demo-rollup/rollup_config.toml @@ -1,11 +1,6 @@ -# We define the rollup's genesis to occur at Celestia block number `start_height`. The rollup will ignore -# any Celestia blocks before this height -[rollup_config] -start_height = 1 - [da] # The JWT used to authenticate with the celestia light client. Instructions for generating this token can be found in the README -celestia_rpc_auth_token = "MY.SECRET.TOKEN" +celestia_rpc_auth_token = "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJBbGxvdyI6WyJwdWJsaWMiLCJyZWFkIiwid3JpdGUiLCJhZG1pbiJdfQ.Y4MAobLjkH5Rp9EdcFWmL4V8TVUabsrAocR0Xf5tjEo" # The address of the *trusted* Celestia light client to interact with celestia_rpc_address = "http://127.0.0.1:26658" # The largest response the rollup will accept from the Celestia node. Defaults to 100 MB @@ -13,11 +8,16 @@ max_celestia_response_body_size = 104_857_600 # The maximum time to wait for a response to an RPC query against Celestia node. Defaults to 60 seconds. celestia_rpc_timeout_seconds = 60 -[rollup_config.runner.storage] +[storage] # The path to the rollup's data directory. Paths that do not begin with `/` are interpreted as relative paths. path = "demo_data" -[rollup_config.rpc_config] +# We define the rollup's genesis to occur at block number `start_height`. The rollup will ignore +# any blocks before this height +[runner] +start_height = 1 + +[runner.rpc_config] # the host and port to bind the rpc server for bind_host = "127.0.0.1" bind_port = 12345 diff --git a/examples/demo-rollup/src/config.rs b/examples/demo-rollup/src/config.rs deleted file mode 100644 index 5542d4c0a..000000000 --- a/examples/demo-rollup/src/config.rs +++ /dev/null @@ -1,9 +0,0 @@ -use jupiter::da_service::DaServiceConfig; -use sov_stf_runner::RollupConfig; -use serde::Deserialize; - -#[derive(Debug, Clone, PartialEq, Deserialize)] -pub struct Config { - pub rollup_config: RollupConfig, - pub da: DaServiceConfig, -} diff --git a/examples/demo-rollup/src/ledger_rpc.rs b/examples/demo-rollup/src/ledger_rpc.rs index 22e290db1..c436f4baf 100644 --- a/examples/demo-rollup/src/ledger_rpc.rs +++ b/examples/demo-rollup/src/ledger_rpc.rs @@ -12,8 +12,8 @@ const LEDGER_RPC_ERROR: &str = "LEDGER_RPC_ERROR"; use self::query_args::{extract_query_args, QueryArgs}; /// Registers the following RPC methods -/// - `ledger_head` -/// Example Query: `curl -X POST -H "Content-Type: application/json" -d '{"jsonrpc":"2.0","method":"ledger_head","params":[],"id":1}' http://127.0.0.1:12345` +/// - `ledger_getHead` +/// Example Query: `curl -X POST -H "Content-Type: application/json" -d '{"jsonrpc":"2.0","method":"ledger_getHead","params":[],"id":1}' http://127.0.0.1:12345` /// - ledger_getSlots /// Example Query: `curl -X POST -H "Content-Type: application/json" -d '{"jsonrpc":"2.0","method":"ledger_getSlots","params":[[1, 2], "Compact"],"id":1}' http://127.0.0.1:12345` /// - ledger_getBatches @@ -60,6 +60,8 @@ fn register_ledger_rpc_methods< Ok(()) } +/// Register rpc methods for the provided `ledger_db`. +/// Calls the internal [`register_ledger_rpc_methods`] function. 
pub fn get_ledger_rpc< B: Serialize + DeserializeOwned + Clone + 'static, T: Serialize + DeserializeOwned + Clone + 'static, @@ -76,6 +78,7 @@ mod query_args { use serde::de::DeserializeOwned; use sov_rollup_interface::rpc::QueryMode; + /// A structure containing serialized query arguments for RPC queries. #[derive(serde::Deserialize)] pub struct QueryArgs(pub Vec, #[serde(default)] pub QueryMode); diff --git a/examples/demo-rollup/src/lib.rs b/examples/demo-rollup/src/lib.rs index 12bcceea9..a45fe5c27 100644 --- a/examples/demo-rollup/src/lib.rs +++ b/examples/demo-rollup/src/lib.rs @@ -1 +1,59 @@ -pub mod rng_xfers; +#![deny(missing_docs)] +#![doc = include_str!("../README.md")] + +pub mod register_rpc; +mod rollup; + +use celestia::types::NamespaceId; +use const_rollup_config::ROLLUP_NAMESPACE_RAW; +use demo_stf::genesis_config::create_demo_genesis_config; +use demo_stf::runtime::GenesisConfig; +#[cfg(feature = "experimental")] +pub use rollup::read_tx_signer_priv_key; +pub use rollup::{new_rollup_with_celestia_da, Rollup, new_rollup_with_avail_da}; +use sov_cli::wallet_state::{HexPrivateAndAddress, PrivateKeyAndAddress}; +use sov_db::ledger_db::LedgerDB; +use sov_modules_api::default_context::DefaultContext; +use sov_rollup_interface::da::{BlobReaderTrait, DaSpec}; + +/// The rollup stores its data in the namespace b"sov-test" on Celestia +/// You can change this constant to point your rollup at a different namespace +pub const ROLLUP_NAMESPACE: NamespaceId = NamespaceId(ROLLUP_NAMESPACE_RAW); + +/// Initializes a [`LedgerDB`] using the provided `path`. +pub fn initialize_ledger(path: impl AsRef) -> LedgerDB { + LedgerDB::with_path(path).expect("Ledger DB failed to open") +} + +/// Configure our rollup with a centralized sequencer using the SEQUENCER_DA_ADDRESS +/// address constant. Since the centralize sequencer's address is consensus critical, +/// it has to be hardcoded as a constant, rather than read from the config at runtime. +/// +/// If you want to customize the rollup to accept transactions from your own celestia +/// address, simply change the value of the SEQUENCER_DA_ADDRESS to your own address. 
+/// For example: +/// ```rust,no_run +/// const SEQUENCER_DA_ADDRESS: &str = "celestia1qp09ysygcx6npted5yc0au6k9lner05yvs9208"; +/// ``` +pub fn get_genesis_config( + sequencer_da_address: <::BlobTransaction as BlobReaderTrait>::Address, +) -> GenesisConfig { + let hex_key: HexPrivateAndAddress = serde_json::from_slice(include_bytes!( + "../../test-data/keys/token_deployer_private_key.json" + )) + .expect("Broken key data file"); + let key_and_address: PrivateKeyAndAddress = hex_key + .try_into() + .expect("Failed to parse sequencer private key and address"); + assert!( + key_and_address.is_matching_to_default(), + "Inconsistent key data" + ); + + create_demo_genesis_config( + 100000000, + key_and_address.address, + sequencer_da_address.as_ref().to_vec(), + &key_and_address.private_key, + ) +} diff --git a/examples/demo-rollup/src/main.rs b/examples/demo-rollup/src/main.rs index 7cc5955ff..0bcd1ea50 100644 --- a/examples/demo-rollup/src/main.rs +++ b/examples/demo-rollup/src/main.rs @@ -1,181 +1,29 @@ use std::env; -use std::sync::Arc; +use std::str::FromStr; -use anyhow::Context; -use const_rollup_config::{ROLLUP_NAMESPACE_RAW, SEQUENCER_DA_ADDRESS}; -use demo_stf::app::{App, DefaultContext, DefaultPrivateKey}; -use demo_stf::genesis_config::create_demo_genesis_config; -use demo_stf::runtime::{get_rpc_methods, GenesisConfig}; -use jupiter::da_service::CelestiaService; -#[cfg(feature = "experimental")] -use jupiter::da_service::DaServiceConfig; -use jupiter::types::NamespaceId; -use jupiter::verifier::RollupParams; -use risc0_adapter::host::Risc0Verifier; -use sov_db::ledger_db::LedgerDB; -#[cfg(feature = "experimental")] -use sov_ethereum::get_ethereum_rpc; -use sov_modules_stf_template::{SequencerOutcome, TxEffect}; -use sov_rollup_interface::services::da::DaService; -use sov_sequencer::get_sequencer_rpc; -use sov_state::storage::Storage; -use sov_stf_runner::{from_toml_path, get_ledger_rpc, StateTransitionRunner}; -use tracing::{debug, Level}; -use config::Config; +use sov_demo_rollup::new_rollup_with_celestia_da; +use tracing_subscriber::prelude::*; +use tracing_subscriber::{fmt, EnvFilter}; #[cfg(test)] mod test_rpc; -mod config; -#[cfg(feature = "experimental")] -const TX_SIGNER_PRIV_KEY_PATH: &str = "../test-data/keys/tx_signer_private_key.json"; - -// The rollup stores its data in the namespace b"sov-test" on Celestia -// You can change this constant to point your rollup at a different namespace -const ROLLUP_NAMESPACE: NamespaceId = NamespaceId(ROLLUP_NAMESPACE_RAW); - -pub fn initialize_ledger(path: impl AsRef) -> LedgerDB { - LedgerDB::with_path(path).expect("Ledger DB failed to open") -} - -// TODO: Remove this when sov-cli is in its own crate. -#[derive(Debug, serde::Serialize, serde::Deserialize)] -struct HexKey { - hex_priv_key: String, - address: String, -} - -/// Configure our rollup with a centralized sequencer using the SEQUENCER_DA_ADDRESS -/// address constant. Since the centralize sequencer's address is consensus critical, -/// it has to be hardcoded as a constant, rather than read from the config at runtime. -/// -/// If you want to customize the rollup to accept transactions from your own celestia -/// address, simply change the value of the SEQUENCER_DA_ADDRESS to your own address. 
-/// For example: -/// ```rust,no_run -/// const SEQUENCER_DA_ADDRESS: [u8;47] = *b"celestia1qp09ysygcx6npted5yc0au6k9lner05yvs9208" -/// ``` -pub fn get_genesis_config() -> GenesisConfig { - let hex_key: HexKey = serde_json::from_slice(include_bytes!( - "../../test-data/keys/token_deployer_private_key.json" - )) - .expect("Broken key data file"); - let sequencer_private_key = DefaultPrivateKey::from_hex(&hex_key.hex_priv_key).unwrap(); - assert_eq!( - sequencer_private_key.default_address().to_string(), - hex_key.address, - "Inconsistent key data", - ); - create_demo_genesis_config( - 100000000, - sequencer_private_key.default_address(), - SEQUENCER_DA_ADDRESS.to_vec(), - &sequencer_private_key, - &sequencer_private_key, - ) -} +/// Main demo runner. Initialize a DA chain, and starts a demo-rollup using the config provided +/// (or a default config if not provided). Then start checking the blocks sent to the DA layer in +/// the main event loop. #[tokio::main] async fn main() -> Result<(), anyhow::Error> { + // Initializing logging + tracing_subscriber::registry() + .with(fmt::layer()) + .with(EnvFilter::from_str("info,sov_sequencer=warn").unwrap()) + .init(); + let rollup_config_path = env::args() .nth(1) .unwrap_or_else(|| "rollup_config.toml".to_string()); - debug!("Starting demo rollup with config {}", rollup_config_path); - let config: Config = - from_toml_path(&rollup_config_path).context("Failed to read rollup configuration")?; - - // Initializing logging - let subscriber = tracing_subscriber::fmt() - .with_max_level(Level::INFO) - .finish(); - tracing::subscriber::set_global_default(subscriber) - .map_err(|_err| eprintln!("Unable to set global default subscriber")) - .expect("Cannot fail to set subscriber"); - - let ledger_db = initialize_ledger(&config.rollup_config.runner.storage.path); - - let da_service = CelestiaService::new( - config.da.clone(), - RollupParams { - namespace: ROLLUP_NAMESPACE, - }, - ) - .await; - - let mut app: App = - App::new(config.rollup_config.runner.storage.clone()); - - let storage = app.get_storage(); - let mut methods = get_rpc_methods::(storage); - - // register rpc methods - { - register_ledger(ledger_db.clone(), &mut methods)?; - register_sequencer(da_service.clone(), &mut app, &mut methods)?; - #[cfg(feature = "experimental")] - register_ethereum(config.da.clone(), &mut methods)?; - } - - let storage = app.get_storage(); - let genesis_config = get_genesis_config(); - - let mut runner = StateTransitionRunner::new( - config.rollup_config, - da_service, - ledger_db, - app.stf, - storage.is_empty(), - genesis_config, - )?; - - runner.start_rpc_server(methods).await; - runner.run().await?; - - Ok(()) -} - -fn register_sequencer( - da_service: DA, - demo_runner: &mut App, - methods: &mut jsonrpsee::RpcModule<()>, -) -> Result<(), anyhow::Error> -where - DA: DaService + Send + Sync + 'static, -{ - let batch_builder = demo_runner.batch_builder.take().unwrap(); - let sequencer_rpc = get_sequencer_rpc(batch_builder, Arc::new(da_service)); - methods - .merge(sequencer_rpc) - .context("Failed to merge Txs RPC modules") -} - -fn register_ledger( - ledger_db: LedgerDB, - methods: &mut jsonrpsee::RpcModule<()>, -) -> Result<(), anyhow::Error> { - let ledger_rpc = get_ledger_rpc::(ledger_db); - methods - .merge(ledger_rpc) - .context("Failed to merge ledger RPC modules") -} - -#[cfg(feature = "experimental")] -fn register_ethereum( - da_config: DaServiceConfig, - methods: &mut jsonrpsee::RpcModule<()>, -) -> Result<(), anyhow::Error> { - use std::fs; - - let 
data = fs::read_to_string(TX_SIGNER_PRIV_KEY_PATH).context("Unable to read file")?; - - let hex_key: HexKey = - serde_json::from_str(&data).context("JSON does not have correct format.")?; - - let tx_signer_private_key = DefaultPrivateKey::from_hex(&hex_key.hex_priv_key).unwrap(); - - let ethereum_rpc = get_ethereum_rpc(da_config, tx_signer_private_key); - methods - .merge(ethereum_rpc) - .context("Failed to merge Ethereum RPC modules") + let rollup = new_rollup_with_celestia_da(&rollup_config_path).await?; + rollup.run().await } diff --git a/examples/demo-rollup/src/register_rpc.rs b/examples/demo-rollup/src/register_rpc.rs new file mode 100644 index 000000000..40ccb340b --- /dev/null +++ b/examples/demo-rollup/src/register_rpc.rs @@ -0,0 +1,55 @@ +//! Full-Node specific RPC methods. + +use anyhow::Context; +use celestia::verifier::address::CelestiaAddress; +use demo_stf::app::App; +use sov_db::ledger_db::LedgerDB; +#[cfg(feature = "experimental")] +use sov_ethereum::experimental::EthRpcConfig; +use sov_modules_stf_template::{SequencerOutcome, TxEffect}; +use sov_rollup_interface::services::da::DaService; +use sov_rollup_interface::zk::Zkvm; +use sov_sequencer::get_sequencer_rpc; +use sov_stf_runner::get_ledger_rpc; + +/// register sequencer rpc methods. +pub fn register_sequencer( + da_service: Da, + app: &mut App, + methods: &mut jsonrpsee::RpcModule<()>, +) -> Result<(), anyhow::Error> +where + Da: DaService, + Vm: Zkvm, +{ + let batch_builder = app.batch_builder.take().unwrap(); + let sequencer_rpc = get_sequencer_rpc(batch_builder, da_service); + methods + .merge(sequencer_rpc) + .context("Failed to merge Txs RPC modules") +} + +/// register ledger rpc methods. +pub fn register_ledger( + ledger_db: LedgerDB, + methods: &mut jsonrpsee::RpcModule<()>, +) -> Result<(), anyhow::Error> { + let ledger_rpc = get_ledger_rpc::, TxEffect>(ledger_db); + methods + .merge(ledger_rpc) + .context("Failed to merge ledger RPC modules") +} + +#[cfg(feature = "experimental")] +/// register ethereum methods. 
+pub fn register_ethereum( + da_service: Da, + eth_rpc_config: EthRpcConfig, + methods: &mut jsonrpsee::RpcModule<()>, +) -> Result<(), anyhow::Error> { + let ethereum_rpc = sov_ethereum::get_ethereum_rpc(da_service, eth_rpc_config); + + methods + .merge(ethereum_rpc) + .context("Failed to merge Ethereum RPC modules") +} diff --git a/examples/demo-rollup/src/rollup.rs b/examples/demo-rollup/src/rollup.rs new file mode 100644 index 000000000..209f83f9f --- /dev/null +++ b/examples/demo-rollup/src/rollup.rs @@ -0,0 +1,175 @@ +use std::net::SocketAddr; +use std::str::FromStr; + +use anyhow::Context; +use celestia::verifier::address::CelestiaAddress; +use celestia::verifier::RollupParams; +use celestia::CelestiaService; +use presence::service::{DaProvider as AvailService, DaServiceConfig as AvailServiceConfig}; +use presence::spec::address::AvailAddress; +use const_rollup_config::SEQUENCER_DA_ADDRESS; +use const_rollup_config::SEQUENCER_AVAIL_DA_ADDRESS; +#[cfg(feature = "experimental")] +use demo_stf::app::DefaultPrivateKey; +use demo_stf::app::{App, DefaultContext}; +use demo_stf::runtime::{get_rpc_methods, GenesisConfig}; +use risc0_adapter::host::Risc0Verifier; +use sov_db::ledger_db::LedgerDB; +#[cfg(feature = "experimental")] +use sov_ethereum::experimental::EthRpcConfig; +use sov_rollup_interface::services::da::DaService; +use sov_rollup_interface::zk::Zkvm; +use sov_state::storage::Storage; +use sov_stf_runner::{from_toml_path, RollupConfig, RunnerConfig, StateTransitionRunner}; +use tokio::sync::oneshot; +use tracing::debug; + +#[cfg(feature = "experimental")] +use crate::register_rpc::register_ethereum; +use crate::register_rpc::{register_ledger, register_sequencer}; +use crate::{get_genesis_config, initialize_ledger, ROLLUP_NAMESPACE}; + +#[cfg(feature = "experimental")] +const TX_SIGNER_PRIV_KEY_PATH: &str = "../test-data/keys/tx_signer_private_key.json"; + +/// Dependencies needed to run the rollup. +pub struct Rollup { + /// Implementation of the STF. + pub app: App, + /// Data availability service. + pub da_service: Da, + /// Ledger db. + pub ledger_db: LedgerDB, + /// Runner configuration. + pub runner_config: RunnerConfig, + /// Initial rollup configuration. + pub genesis_config: GenesisConfig, + #[cfg(feature = "experimental")] + /// Configuration for the Ethereum RPC. + pub eth_rpc_config: EthRpcConfig, +} + +/// Creates celestia based rollup. +pub async fn new_rollup_with_celestia_da( + rollup_config_path: &str, +) -> Result, anyhow::Error> { + debug!("Starting demo rollup with config {}", rollup_config_path); + let rollup_config: RollupConfig = + from_toml_path(rollup_config_path).context("Failed to read rollup configuration")?; + + let ledger_db = initialize_ledger(&rollup_config.storage.path); + + let da_service = CelestiaService::new( + rollup_config.da.clone(), + RollupParams { + namespace: ROLLUP_NAMESPACE, + }, + ) + .await; + + let app = App::new(rollup_config.storage); + let sequencer_da_address = CelestiaAddress::from_str(SEQUENCER_DA_ADDRESS)?; + let genesis_config = get_genesis_config(sequencer_da_address); + + Ok(Rollup { + app, + da_service, + ledger_db, + runner_config: rollup_config.runner, + genesis_config, + #[cfg(feature = "experimental")] + eth_rpc_config: EthRpcConfig { + min_blob_size: Some(1), + tx_signer_priv_key: read_tx_signer_priv_key()?, + }, + }) +} + +/// Creates Avail based rollup.
+pub async fn new_rollup_with_avail_da( + rollup_config_path: &str, +) -> Result, anyhow::Error> { + debug!("Starting demo rollup with config {}", rollup_config_path); + let rollup_config: RollupConfig = + from_toml_path(rollup_config_path).context("Failed to read rollup configuration")?; + + let ledger_db = initialize_ledger(&rollup_config.storage.path); + + let da_service = AvailService::new( + rollup_config.da.clone() + ) + .await; + + let app = App::new(rollup_config.storage); + let sequencer_da_address = AvailAddress::from_str(SEQUENCER_AVAIL_DA_ADDRESS)?; + let genesis_config = get_genesis_config(sequencer_da_address); + + Ok(Rollup { + app, + da_service, + ledger_db, + runner_config: rollup_config.runner, + genesis_config, + #[cfg(feature = "experimental")] + eth_rpc_config: EthRpcConfig { + min_blob_size: Some(1), + tx_signer_priv_key: read_tx_signer_priv_key()?, + }, + }) +} + +#[cfg(feature = "experimental")] +/// Ethereum RPC wraps EVM transaction in a rollup transaction. +/// This function reads the private key of the rollup transaction signer. +pub fn read_tx_signer_priv_key() -> Result { + let data = std::fs::read_to_string(TX_SIGNER_PRIV_KEY_PATH).context("Unable to read file")?; + + let hex_key: crate::HexPrivateAndAddress = + serde_json::from_str(&data).context("JSON does not have correct format.")?; + + let priv_key = sov_modules_api::default_signature::private_key::DefaultPrivateKey::from_hex( + &hex_key.hex_priv_key, + )?; + + Ok(priv_key) +} + +impl + Clone> Rollup { + /// Runs the rollup. + pub async fn run(self) -> Result<(), anyhow::Error> { + self.run_and_report_rpc_port(None).await + } + + /// Runs the rollup. Reports rpc port to the caller using the provided channel. + pub async fn run_and_report_rpc_port( + mut self, + channel: Option>, + ) -> Result<(), anyhow::Error> { + let storage = self.app.get_storage(); + let mut methods = get_rpc_methods::(storage); + + // register rpc methods + { + register_ledger(self.ledger_db.clone(), &mut methods)?; + register_sequencer(self.da_service.clone(), &mut self.app, &mut methods)?; + #[cfg(feature = "experimental")] + register_ethereum(self.da_service.clone(), self.eth_rpc_config, &mut methods)?; + } + + let storage = self.app.get_storage(); + + let mut runner = StateTransitionRunner::new( + self.runner_config, + self.da_service, + self.ledger_db, + self.app.stf, + storage.is_empty(), + self.genesis_config, + )?; + + runner.start_rpc_server(methods, channel).await; + runner.run().await?; + + Ok(()) + } +} diff --git a/examples/demo-rollup/src/sov-cli/README.md b/examples/demo-rollup/src/sov-cli/README.md new file mode 100644 index 000000000..75dcf0a4e --- /dev/null +++ b/examples/demo-rollup/src/sov-cli/README.md @@ -0,0 +1,112 @@ +# sov-cli + +- The sov-cli binary is used to generate serialized transactions that are ready for submitting to celestia (or other DA Layers) +- The sov-cli also has a "utils" subcommand to + - Generate a new private key + - View the public address of a private key + - View the derived token address + +``` +Main entry point for CLI + +Usage: sov-cli + +Commands: + generate-transaction-from-json Generate and serialize a call to a module. This creates a .dat file containing the serialized transaction + util Utility commands + help Print this message or the help of the given subcommand(s) +``` + +## Utils + +``` +Usage: sov-cli util + +Commands: +derive-token-address Compute the address of a derived token. 
This follows a deterministic algorithm +show-public-key Display the public key associated with a private key +create-private-key Create a new private key +help Print this message or the help of the given subcommand(s) +``` + +- To submit a transaction, first generate a private key + +``` +% cargo run --bin sov-cli util create-private-key . +private key written to path: sov1693hp77wx0kp8um6dumlvtm3jzhckk74l7w4qtd5llhkpdtf0d6sm7my76.json +``` + +- By default the file is named with the public key, but the file can be moved/renamed + +``` +% mv sov1693hp77wx0kp8um6dumlvtm3jzhckk74l7w4qtd5llhkpdtf0d6sm7my76.json my_private_key.json +``` + +- The show-public-key subcommand can be used to view the public key of the private key + +``` +% cargo run --bin sov-cli util show-public-key my_private_key.json +sov1693hp77wx0kp8um6dumlvtm3jzhckk74l7w4qtd5llhkpdtf0d6sm7my76 +``` + +- You can view the token address of a new token that you wish to create using the derive-token-address subcommand. + - token addresses are derived deterministically using the following params + - : a string that you choose + - : the address submitting the transaction to create the token + - : a random number of your choosing + +``` + % cargo run --bin sov-cli util derive-token-address sov-test-token sov1693hp77wx0kp8um6dumlvtm3jzhckk74l7w4qtd5llhkpdtf0d6sm7my76 11 +sov1g5htl6zvplygcsjfnt47tk6gmashsj8j9gu5jzg99wtm4ekuazrqaha4nj +``` + +## Generate Transaction + +- The `generate-transaction-from-json` subcommand is used to generate serialized transactions for a module +- The modules that are supported by `sov-cli` are the ones that are part of the `Runtime` struct and the code to create the transaction is generated from the `derive(CliWallet)` macro that annotates `Runtime` + +```rust +#[cfg_attr(feature = "native", derive(CliWallet)] +#[serialization(borsh::BorshDeserialize, borsh::BorshSerialize)] +pub struct Runtime { + pub sequencer: sov_sequencer_registry::Sequencer, + pub bank: sov_bank::Bank, + pub election: sov_election::Election, + pub value_setter: sov_value_setter::ValueSetter, + pub accounts: sov_accounts::Accounts, +} +``` + +- From the above code we can see which modules are supported, for an example we will generate transactions for the "Bank" module +- `generate-transaction-from-json` takes 4 parameters + +``` +Usage: sov-cli generate-transaction-from-json [OPTIONS] + +Arguments: + Path to the json file containing the private key of the sender + Name of the module to generate the call. Modules defined in your Runtime are supported. (eg: Bank, Accounts) + Path to the json file containing the parameters for a module call + Nonce for the transaction + +Options: + --format Output file format. borsh and hex are supported [default: hex] + -h, --help Print help + +``` + +- `` is the path to the private key generated in utils. can also use an existing private key +- `` is based on the type of the fields in the `Runtime` struct. in the above example, the supported modules are `Bank`, `Sequencer`, `Election`, `Accounts`, `ValueSetter` +- `` this is the path to the json containing the CallMessage for your modules. +- `` Nonce which has to be non-duplicate and in increasing order. 
+ +- An example for the `` for the `Bank` module's `CreateToken` instruction is available at `sov-cli/test_data/create_token.json` +- The complete command for generating the create token transaction is + +``` +demo-stf % cargo run --bin sov-cli generate-transaction-from-json my_private_key.json Bank src/sov-cli/test_data/create_token.json 1 +``` + +- By default the file is formatted in `hex` and contains a blob ready for submission to celestia - the blob only contains a single transaction for now +- Other formats include `borsh` +- In order to know what the token is the `derive-token-address` command from the `utils` subcommand can be used diff --git a/examples/demo-rollup/src/sov-cli/main.rs b/examples/demo-rollup/src/sov-cli/main.rs new file mode 100644 index 000000000..701a74c5a --- /dev/null +++ b/examples/demo-rollup/src/sov-cli/main.rs @@ -0,0 +1,7 @@ +#[tokio::main] +async fn main() -> Result<(), anyhow::Error> { + demo_stf::cli::run::< + ::Spec, + >() + .await +} diff --git a/examples/demo-rollup/src/test_rpc.rs b/examples/demo-rollup/src/test_rpc.rs index 4180d6fab..d853b7109 100644 --- a/examples/demo-rollup/src/test_rpc.rs +++ b/examples/demo-rollup/src/test_rpc.rs @@ -8,7 +8,7 @@ use reqwest::header::CONTENT_TYPE; use serde_json::json; use sov_db::ledger_db::{LedgerDB, SlotCommit}; #[cfg(test)] -use sov_rollup_interface::mocks::{TestBlock, TestBlockHeader, TestHash}; +use sov_rollup_interface::mocks::{MockBlock, MockBlockHeader, MockHash}; use sov_rollup_interface::services::da::SlotData; use sov_rollup_interface::stf::fuzzing::BatchReceiptStrategyArgs; use sov_rollup_interface::stf::{BatchReceipt, Event, TransactionReceipt}; @@ -47,13 +47,13 @@ async fn queries_test_runner(test_queries: Vec, rpc_config: RpcConfi } } -fn populate_ledger(ledger_db: &mut LedgerDB, slots: Vec>) { +fn populate_ledger(ledger_db: &mut LedgerDB, slots: Vec>) { for slot in slots { ledger_db.commit_slot(slot).unwrap(); } } -fn test_helper(test_queries: Vec, slots: Vec>) { +fn test_helper(test_queries: Vec, slots: Vec>) { let rt = tokio::runtime::Builder::new_multi_thread() .enable_io() .enable_time() @@ -109,12 +109,14 @@ fn batch2_tx_receipts() -> Vec> { } fn regular_test_helper(payload: serde_json::Value, expected: &serde_json::Value) { - let mut slots: Vec> = vec![SlotCommit::new(TestBlock { + let mut slots: Vec> = vec![SlotCommit::new(MockBlock { curr_hash: sha2::Sha256::digest(b"slot_data"), - header: TestBlockHeader { - prev_hash: TestHash(sha2::Sha256::digest(b"prev_header")), + header: MockBlockHeader { + prev_hash: MockHash(sha2::Sha256::digest(b"prev_header")), }, height: 0, + validity_cond: Default::default(), + blobs: Default::default(), })]; let batches = vec![ @@ -298,13 +300,13 @@ prop_compose! { prop_compose! { fn arb_slots(max_slots: usize, max_batches: usize) - (batches_and_hashes in proptest::collection::vec(arb_batches_and_slot_hash(max_batches), 1..max_slots)) -> (Vec>, HashMap, usize) + (batches_and_hashes in proptest::collection::vec(arb_batches_and_slot_hash(max_batches), 1..max_slots)) -> (Vec>, HashMap, usize) { let mut slots = std::vec::Vec::with_capacity(max_slots); let mut total_num_batches = 1; - let mut prev_hash = TestHash([0;32]); + let mut prev_hash = MockHash([0;32]); let mut curr_tx_id = 1; let mut curr_event_id = 1; @@ -312,12 +314,14 @@ prop_compose! 
{ let mut tx_id_to_event_range = HashMap::new(); for (batches, hash) in batches_and_hashes{ - let mut new_slot = SlotCommit::new(TestBlock { + let mut new_slot = SlotCommit::new(MockBlock { curr_hash: hash, - header: TestBlockHeader { + header: MockBlockHeader { prev_hash, }, - height: 0 + height: 0, + validity_cond: Default::default(), + blobs: Default::default() }); total_num_batches += batches.len(); @@ -336,7 +340,7 @@ prop_compose! { slots.push(new_slot); - prev_hash = TestHash(hash); + prev_hash = MockHash(hash); } (slots, tx_id_to_event_range, total_num_batches) @@ -374,6 +378,8 @@ fn full_tx_json( } proptest!( + // Reduce the cases from 256 to 100 to speed up these tests + #![proptest_config(proptest::prelude::ProptestConfig::with_cases(100))] #[test] fn proptest_get_head((slots, _, total_num_batches) in arb_slots(10, 10)){ let last_slot = slots.last().unwrap(); diff --git a/examples/demo-rollup/submitting_1.sh b/examples/demo-rollup/submitting_1.sh new file mode 100755 index 000000000..69d4db1b5 --- /dev/null +++ b/examples/demo-rollup/submitting_1.sh @@ -0,0 +1,21 @@ +#!/usr/bin/env bash + +RPC_ENDPOINT="http://127.0.0.1:12345" +PRIVATE_KEY="../test-data/keys/token_deployer_private_key.json" +SOV_CLI="../../target/debug/sov-cli" + +echo "Preparing..." +$SOV_CLI submit-transaction "$PRIVATE_KEY" Bank ../test-data/requests/create_token.json 0 "$RPC_ENDPOINT" +$SOV_CLI submit-transaction "$PRIVATE_KEY" SequencerRegistry ../test-data/requests/register_sequencer.json 1 "$RPC_ENDPOINT" +$SOV_CLI publish-batch "$RPC_ENDPOINT" + + +sleep 1 +echo "Starting submitting transfers" +for nonce in {2..30}; do + echo "Submitting transaction with nonce $nonce" + $SOV_CLI submit-transaction "$PRIVATE_KEY" Bank ../test-data/requests/transfer.json "$nonce" "$RPC_ENDPOINT" + if [ $((nonce % 3)) -eq 0 ]; then + $SOV_CLI publish-batch "$RPC_ENDPOINT" + fi +done \ No newline at end of file diff --git a/examples/demo-rollup/submitting_2.sh b/examples/demo-rollup/submitting_2.sh new file mode 100755 index 000000000..6dc1adb56 --- /dev/null +++ b/examples/demo-rollup/submitting_2.sh @@ -0,0 +1,15 @@ +#!/usr/bin/env bash + +RPC_ENDPOINT="http://127.0.0.1:12346" +PRIVATE_KEY="../test-data/keys/minter_private_key.json" +SOV_CLI="../../target/debug/sov-cli" + +echo "Starting !!!" 
+ +for nonce in {0..30}; do + echo "Submitting transaction with nonce $nonce" + $SOV_CLI submit-transaction "$PRIVATE_KEY" Bank ../test-data/requests/transfer.json "$nonce" "$RPC_ENDPOINT" + if [ $((nonce % 3)) -eq 0 ]; then + $SOV_CLI publish-batch "$RPC_ENDPOINT" + fi +done \ No newline at end of file diff --git a/examples/demo-rollup/tests/all_tests.rs b/examples/demo-rollup/tests/all_tests.rs new file mode 100644 index 000000000..03f5afb91 --- /dev/null +++ b/examples/demo-rollup/tests/all_tests.rs @@ -0,0 +1,4 @@ +mod bank; +mod evm; + +mod test_helpers; diff --git a/examples/demo-rollup/tests/bank/mod.rs b/examples/demo-rollup/tests/bank/mod.rs new file mode 100644 index 000000000..c2c5cb42a --- /dev/null +++ b/examples/demo-rollup/tests/bank/mod.rs @@ -0,0 +1,80 @@ +use std::net::SocketAddr; + +use borsh::BorshSerialize; +use demo_stf::app::DefaultPrivateKey; +use demo_stf::runtime::RuntimeCall; +use jsonrpsee::core::client::{Subscription, SubscriptionClientT}; +use jsonrpsee::rpc_params; +use sov_modules_api::default_context::DefaultContext; +use sov_modules_api::transaction::Transaction; +use sov_modules_api::{PrivateKey, Spec}; +use sov_rollup_interface::mocks::MockDaSpec; +use sov_sequencer::utils::SimpleClient; + +use super::test_helpers::start_rollup; +const TOKEN_SALT: u64 = 0; +const TOKEN_NAME: &str = "test_token"; + +async fn send_test_create_token_tx(rpc_address: SocketAddr) -> Result<(), anyhow::Error> { + let key = DefaultPrivateKey::generate(); + let user_address: ::Address = key.to_address(); + + let token_address = sov_bank::get_token_address::( + TOKEN_NAME, + user_address.as_ref(), + TOKEN_SALT, + ); + + let msg = RuntimeCall::::bank(sov_bank::CallMessage::< + DefaultContext, + >::CreateToken { + salt: TOKEN_SALT, + token_name: TOKEN_NAME.to_string(), + initial_balance: 1000, + minter_address: user_address, + authorized_minters: vec![], + }); + let tx = Transaction::::new_signed_tx(&key, msg.try_to_vec().unwrap(), 0); + + let port = rpc_address.port(); + let client = SimpleClient::new("localhost", port).await?; + + let mut slot_processed_subscription: Subscription = client + .ws() + .subscribe( + "ledger_subscribeSlots", + rpc_params![], + "ledger_unsubscribeSlots", + ) + .await?; + + client.send_transaction(tx).await?; + + // Wait until the rollup has processed the next slot + let _ = slot_processed_subscription.next().await; + + let balance_response = sov_bank::query::BankRpcClient::::balance_of( + client.http(), + user_address, + token_address, + ) + .await?; + assert_eq!(balance_response.amount.unwrap_or_default(), 1000); + Ok(()) +} + +#[tokio::test] +async fn bank_tx_tests() -> Result<(), anyhow::Error> { + let (port_tx, port_rx) = tokio::sync::oneshot::channel(); + + let rollup_task = tokio::spawn(async { + start_rollup(port_tx).await; + }); + + // Wait for rollup task to start: + let port = port_rx.await.unwrap(); + + send_test_create_token_tx(port).await?; + rollup_task.abort(); + Ok(()) +} diff --git a/examples/demo-rollup/tests/evm/mod.rs b/examples/demo-rollup/tests/evm/mod.rs new file mode 100644 index 000000000..b4c99fc5b --- /dev/null +++ b/examples/demo-rollup/tests/evm/mod.rs @@ -0,0 +1,206 @@ +use std::net::SocketAddr; +use std::str::FromStr; + +use ethereum_types::H160; +use ethers_core::abi::Address; +use ethers_core::k256::ecdsa::SigningKey; +use ethers_core::types::transaction::eip2718::TypedTransaction; +use ethers_core::types::Eip1559TransactionRequest; +use ethers_middleware::SignerMiddleware; +use ethers_providers::{Http, Middleware, 
PendingTransaction, Provider}; +use ethers_signers::{LocalWallet, Signer, Wallet}; +use jsonrpsee::core::client::ClientT; +use jsonrpsee::http_client::{HttpClient, HttpClientBuilder}; +use jsonrpsee::rpc_params; +use sov_evm::smart_contracts::SimpleStorageContract; + +use super::test_helpers::start_rollup; + +const MAX_FEE_PER_GAS: u64 = 100000001; + +struct TestClient { + chain_id: u64, + from_addr: Address, + contract: SimpleStorageContract, + client: SignerMiddleware, Wallet>, + http_client: HttpClient, +} + +impl TestClient { + #[allow(dead_code)] + async fn new( + chain_id: u64, + key: Wallet, + from_addr: Address, + contract: SimpleStorageContract, + rpc_addr: std::net::SocketAddr, + ) -> Self { + let host = format!("http://localhost:{}", rpc_addr.port()); + + let provider = Provider::try_from(&host).unwrap(); + let client = SignerMiddleware::new_with_provider_chain(provider, key) + .await + .unwrap(); + + let http_client = HttpClientBuilder::default().build(host).unwrap(); + + Self { + chain_id, + from_addr, + contract, + client, + http_client, + } + } + + async fn send_publish_batch_request(&self) { + let _: String = self + .http_client + .request("eth_publishBatch", rpc_params![]) + .await + .unwrap(); + } + + async fn deploy_contract( + &self, + ) -> Result, Box> { + let req = Eip1559TransactionRequest::new() + .from(self.from_addr) + .chain_id(self.chain_id) + .nonce(0u64) + .max_priority_fee_per_gas(10u64) + .max_fee_per_gas(MAX_FEE_PER_GAS) + .gas(900000u64) + .data(self.contract.byte_code()); + + let typed_transaction = TypedTransaction::Eip1559(req); + + let receipt_req = self + .client + .send_transaction(typed_transaction, None) + .await?; + + Ok(receipt_req) + } + + async fn set_value( + &self, + contract_address: H160, + set_arg: u32, + nonce: u64, + ) -> PendingTransaction<'_, Http> { + let req = Eip1559TransactionRequest::new() + .from(self.from_addr) + .to(contract_address) + .chain_id(self.chain_id) + .nonce(nonce) + .data(self.contract.set_call_data(set_arg)) + .max_priority_fee_per_gas(10u64) + .max_fee_per_gas(MAX_FEE_PER_GAS) + .gas(900000u64); + + let typed_transaction = TypedTransaction::Eip1559(req); + + self.client + .send_transaction(typed_transaction, None) + .await + .unwrap() + } + + async fn query_contract( + &self, + contract_address: H160, + nonce: u64, + ) -> Result> { + let req = Eip1559TransactionRequest::new() + .from(self.from_addr) + .to(contract_address) + .chain_id(self.chain_id) + .nonce(nonce) + .data(self.contract.get_call_data()) + .gas(900000u64); + + let typed_transaction = TypedTransaction::Eip1559(req); + + let response = self.client.call(&typed_transaction, None).await?; + + let resp_array: [u8; 32] = response.to_vec().try_into().unwrap(); + Ok(ethereum_types::U256::from(resp_array)) + } + + async fn execute(self) -> Result<(), Box> { + let contract_address = { + let deploy_contract_req = self.deploy_contract().await?; + self.send_publish_batch_request().await; + + deploy_contract_req + .await? + .unwrap() + .contract_address + .unwrap() + }; + + let set_arg = 923; + { + let set_value_req = self.set_value(contract_address, set_arg, 1).await; + self.send_publish_batch_request().await; + set_value_req.await.unwrap(); + } + + { + let get_arg = self.query_contract(contract_address, 2).await?; + assert_eq!(set_arg, get_arg.as_u32()); + } + + // Create a blob with multiple transactions. 
+ let mut requests = Vec::default(); + let mut nonce = 2; + for value in 100..103 { + let set_value_req = self.set_value(contract_address, value, nonce).await; + requests.push(set_value_req); + nonce += 1 + } + + self.send_publish_batch_request().await; + + for req in requests { + req.await.unwrap(); + } + + { + let get_arg = self.query_contract(contract_address, nonce).await?; + assert_eq!(102, get_arg.as_u32()); + } + + Ok(()) + } +} + +async fn send_tx_test_to_eth(rpc_address: SocketAddr) -> Result<(), Box> { + let chain_id: u64 = 1; + let key = "0xac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80" + .parse::() + .unwrap() + .with_chain_id(chain_id); + + let contract = SimpleStorageContract::default(); + let from_addr = Address::from_str("0xf39Fd6e51aad88F6F4ce6aB8827279cffFb92266").unwrap(); + + let test_client = TestClient::new(chain_id, key, from_addr, contract, rpc_address).await; + test_client.execute().await +} + +#[tokio::test] +async fn evm_tx_tests() -> Result<(), anyhow::Error> { + let (port_tx, port_rx) = tokio::sync::oneshot::channel(); + + let rollup_task = tokio::spawn(async { + start_rollup(port_tx).await; + }); + + // Wait for rollup task to start: + let port = port_rx.await.unwrap(); + send_tx_test_to_eth(port).await.unwrap(); + rollup_task.abort(); + Ok(()) +} diff --git a/examples/demo-rollup/tests/test_helpers.rs b/examples/demo-rollup/tests/test_helpers.rs new file mode 100644 index 000000000..44aeffac2 --- /dev/null +++ b/examples/demo-rollup/tests/test_helpers.rs @@ -0,0 +1,62 @@ +use std::fs::remove_dir_all; +use std::net::SocketAddr; + +use demo_stf::app::App; +use risc0_adapter::host::Risc0Verifier; +use sov_demo_rollup::{get_genesis_config, initialize_ledger, Rollup}; +#[cfg(feature = "experimental")] +use sov_ethereum::experimental::EthRpcConfig; +use sov_rollup_interface::mocks::{MockAddress, MockDaService}; +use sov_stf_runner::{RollupConfig, RpcConfig, RunnerConfig, StorageConfig}; +use tokio::sync::oneshot; + +fn create_mock_da_rollup(rollup_config: RollupConfig<()>) -> Rollup { + let _ = remove_dir_all(&rollup_config.storage.path); + let ledger_db = initialize_ledger(rollup_config.storage.path.clone()); + let sequencer_da_address = MockAddress { addr: [99; 32] }; + let da_service = MockDaService::new(sequencer_da_address); + + let app = App::new(rollup_config.storage); + let genesis_config = get_genesis_config(sequencer_da_address); + + Rollup { + app, + da_service, + ledger_db, + runner_config: rollup_config.runner, + genesis_config, + #[cfg(feature = "experimental")] + eth_rpc_config: EthRpcConfig { + min_blob_size: None, + tx_signer_priv_key: sov_demo_rollup::read_tx_signer_priv_key() + .expect("Unable to read signer private key"), + }, + } +} + +pub async fn start_rollup(rpc_reporting_channel: oneshot::Sender) { + let temp_dir = tempfile::tempdir().unwrap(); + let temp_path = temp_dir.path(); + + let rollup_config = RollupConfig { + storage: StorageConfig { + path: temp_path.to_path_buf(), + }, + runner: RunnerConfig { + start_height: 0, + rpc_config: RpcConfig { + bind_host: "127.0.0.1".into(), + bind_port: 0, + }, + }, + da: (), + }; + let rollup = create_mock_da_rollup(rollup_config); + rollup + .run_and_report_rpc_port(Some(rpc_reporting_channel)) + .await + .unwrap(); + + // Close the tempdir explicitly to ensure that rustc doesn't see that it's unused and drop it unexpectedly + temp_dir.close().unwrap(); +} diff --git a/examples/demo-simple-stf/Cargo.toml b/examples/demo-simple-stf/Cargo.toml index fbd146cad..40f86de59 100644 --- 
a/examples/demo-simple-stf/Cargo.toml +++ b/examples/demo-simple-stf/Cargo.toml @@ -16,6 +16,5 @@ hex = { workspace = true } sov-rollup-interface = { path = "../../rollup-interface" } - [dev-dependencies] sov-rollup-interface = { path = "../../rollup-interface", features = ["mocks"] } diff --git a/examples/demo-simple-stf/README.md b/examples/demo-simple-stf/README.md index 933d0a9aa..3ccf89397 100644 --- a/examples/demo-simple-stf/README.md +++ b/examples/demo-simple-stf/README.md @@ -154,37 +154,39 @@ The first transaction that finds the correct hash would break the loop and retur The `sov_rollup_interface::mocks` crate provides two utilities that are useful for testing: 1. The `MockZkvm` is an implementation of the `Zkvm` trait that can be used in tests. -1. The `TestBlob` is an implementation of the `BlobTransactionTrait` trait that can be used in tests. It accepts an `Address` as a generic parameter. For testing purposes, we implement our own `Address` type as follows: - -```rust, ignore -#[derive(PartialEq, Debug, Clone, Eq, serde::Serialize, serde::Deserialize)] -pub struct DaAddress { - pub addr: [u8; 32], -} - -impl AddressTrait for DaAddress {} - -``` +1. The `MockBlob` is an implementation of the `BlobTransactionTrait` trait that can be used in tests. It accepts an `A: BasicAddress` as a generic parameter. For testing purposes, we use `MockAddress` struct from the same `mocks` module You can find more details in the `stf_test.rs` file. The following test checks the rollup logic. In the test, we call `init_chain, begin_slot, and end_slot` for completeness, even though these methods do nothing. -```rust, ignore +```rust +use demo_simple_stf::{ApplySlotResult, CheckHashPreimageStf}; +use sov_rollup_interface::mocks::{MockAddress, MockBlob, MockBlock, MockValidityCond, MockZkvm}; +use sov_rollup_interface::stf::StateTransitionFunction; + #[test] fn test_stf() { - let address = DaAddress { addr: [1; 32] }; + let address = MockAddress { addr: [1; 32] }; let preimage = vec![0; 32]; - let test_blob = TestBlob::::new(preimage, address); - let stf = &mut CheckHashPreimageStf {}; + let test_blob = MockBlob::new(preimage, address, [0; 32]); + let stf = &mut CheckHashPreimageStf::::default(); + + let data = MockBlock::default(); + let mut blobs = [test_blob]; - StateTransitionFunction::::init_chain(stf, ()); - StateTransitionFunction::::begin_slot(stf, ()); + StateTransitionFunction::::init_chain(stf, ()); - let receipt = StateTransitionFunction::::apply_blob(stf, test_blob, None); - assert_eq!(receipt.inner, ApplyBlobResult::Success); + let result = StateTransitionFunction::::apply_slot( + stf, + (), + &data, + &mut blobs, + ); - StateTransitionFunction::::end_slot(stf); + assert_eq!(1, result.batch_receipts.len()); + let receipt = result.batch_receipts[0].clone(); + assert_eq!(receipt.inner, ApplySlotResult::Success); } ``` diff --git a/examples/demo-simple-stf/src/lib.rs b/examples/demo-simple-stf/src/lib.rs index 1bd2ce1f0..03089d6a4 100644 --- a/examples/demo-simple-stf/src/lib.rs +++ b/examples/demo-simple-stf/src/lib.rs @@ -1,18 +1,21 @@ #![deny(missing_docs)] #![doc = include_str!("../README.md")] use std::io::Read; +use std::marker::PhantomData; use sha2::Digest; use sov_rollup_interface::da::BlobReaderTrait; +use sov_rollup_interface::services::da::SlotData; use sov_rollup_interface::stf::{BatchReceipt, SlotResult, StateTransitionFunction}; -use sov_rollup_interface::zk::Zkvm; - -#[derive(PartialEq, Debug, Clone, Eq, serde::Serialize, serde::Deserialize)] +use 
sov_rollup_interface::zk::{ValidityCondition, Zkvm}; /// An implementation of the /// [`StateTransitionFunction`](sov_rollup_interface::stf::StateTransitionFunction) /// that is specifically designed to check if someone knows a preimage of a specific hash. -pub struct CheckHashPreimageStf {} +#[derive(PartialEq, Debug, Clone, Eq, serde::Serialize, serde::Deserialize, Default)] +pub struct CheckHashPreimageStf { + phantom_data: PhantomData, +} /// Outcome of the apply_slot method. #[derive(serde::Serialize, serde::Deserialize, Clone, Debug, PartialEq, Eq)] @@ -23,7 +26,9 @@ pub enum ApplySlotResult { Success, } -impl StateTransitionFunction for CheckHashPreimageStf { +impl StateTransitionFunction + for CheckHashPreimageStf +{ // Since our rollup is stateless, we don't need to consider the StateRoot. type StateRoot = (); @@ -40,14 +45,17 @@ impl StateTransitionFunction for CheckHashP // However, in this tutorial, we won't use it. type Witness = (); + type Condition = Cond; + // Perform one-time initialization for the genesis block. fn init_chain(&mut self, _params: Self::InitialState) { // Do nothing } - fn apply_slot<'a, I>( + fn apply_slot<'a, I, Data>( &mut self, _witness: Self::Witness, + _slot_data: &Data, blobs: I, ) -> SlotResult< Self::StateRoot, @@ -57,6 +65,7 @@ impl StateTransitionFunction for CheckHashP > where I: IntoIterator, + Data: SlotData, { let mut receipts = vec![]; for blob in blobs { @@ -99,4 +108,8 @@ impl StateTransitionFunction for CheckHashP witness: (), } } + + fn get_current_state_root(&self) -> anyhow::Result { + Ok(()) + } } diff --git a/examples/demo-simple-stf/tests/stf_test.rs b/examples/demo-simple-stf/tests/stf_test.rs index 9620e98ed..b9d1f48a1 100644 --- a/examples/demo-simple-stf/tests/stf_test.rs +++ b/examples/demo-simple-stf/tests/stf_test.rs @@ -1,77 +1,36 @@ -use std::fmt::Display; -use std::str::FromStr; - use demo_simple_stf::{ApplySlotResult, CheckHashPreimageStf}; -use sov_rollup_interface::mocks::{MockZkvm, TestBlob}; +use sov_rollup_interface::mocks::{MockAddress, MockBlob, MockBlock, MockValidityCond, MockZkvm}; use sov_rollup_interface::stf::StateTransitionFunction; -use sov_rollup_interface::AddressTrait; - -#[derive(PartialEq, Debug, Clone, Eq, serde::Serialize, serde::Deserialize, Hash)] -pub struct DaAddress { - pub addr: [u8; 32], -} - -impl AddressTrait for DaAddress {} - -impl AsRef<[u8]> for DaAddress { - fn as_ref(&self) -> &[u8] { - &self.addr - } -} - -impl From<[u8; 32]> for DaAddress { - fn from(addr: [u8; 32]) -> Self { - DaAddress { addr } - } -} -impl FromStr for DaAddress { - type Err = hex::FromHexError; - - fn from_str(s: &str) -> Result { - // Remove the "0x" prefix, if it exists. 
- let s = s.strip_prefix("0x").unwrap_or(s); - let mut addr = [0u8; 32]; - hex::decode_to_slice(s, &mut addr)?; - Ok(DaAddress { addr }) - } -} - -impl<'a> TryFrom<&'a [u8]> for DaAddress { - type Error = anyhow::Error; - - fn try_from(addr: &'a [u8]) -> Result { - if addr.len() != 32 { - anyhow::bail!("Address must be 32 bytes long"); - } - let mut addr_bytes = [0u8; 32]; - addr_bytes.copy_from_slice(addr); - Ok(Self { addr: addr_bytes }) - } -} +#[test] +fn test_stf_success() { + let address = MockAddress { addr: [1; 32] }; -impl Display for DaAddress { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - write!(f, "{:?}", self.addr) - } -} + let stf = &mut CheckHashPreimageStf::::default(); + StateTransitionFunction::::init_chain(stf, ()); -#[test] -fn test_stf() { - let address = DaAddress { addr: [1; 32] }; - let preimage = vec![0; 32]; + let mut blobs = { + let incorrect_preimage = vec![1; 32]; + let correct_preimage = vec![0; 32]; - let test_blob = TestBlob::::new(preimage, address, [0; 32]); - let stf = &mut CheckHashPreimageStf {}; + [ + MockBlob::new(incorrect_preimage, address, [0; 32]), + MockBlob::new(correct_preimage, address, [0; 32]), + ] + }; - let mut blobs = [test_blob]; + let result = StateTransitionFunction::::apply_slot( + stf, + (), + &MockBlock::default(), + &mut blobs, + ); - StateTransitionFunction::>::init_chain(stf, ()); + assert_eq!(2, result.batch_receipts.len()); - let result = - StateTransitionFunction::>::apply_slot(stf, (), &mut blobs); + let receipt = &result.batch_receipts[0]; + assert_eq!(receipt.inner, ApplySlotResult::Failure); - assert_eq!(1, result.batch_receipts.len()); - let receipt = result.batch_receipts[0].clone(); + let receipt = &result.batch_receipts[1]; assert_eq!(receipt.inner, ApplySlotResult::Success); } diff --git a/examples/demo-stf/Cargo.toml b/examples/demo-stf/Cargo.toml index a98699989..bbc6dd33f 100644 --- a/examples/demo-stf/Cargo.toml +++ b/examples/demo-stf/Cargo.toml @@ -8,10 +8,6 @@ license = { workspace = true } homepage = "sovereign.xyz" publish = false -[[bin]] -name = "sov-cli" -path = "src/sov-cli/main.rs" - # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] @@ -28,37 +24,42 @@ tracing = { workspace = true } const-rollup-config = { path = "../const-rollup-config" } sov-rollup-interface = { path = "../../rollup-interface" } -sov-election = { path = "../../module-system/module-implementations/examples/sov-election", default-features = false } -sov-sequencer-registry = { path = "../../module-system/module-implementations/sov-sequencer-registry", default-features = false } -sov-bank = { path = "../../module-system/module-implementations/sov-bank", default-features = false } -sov-modules-stf-template = { path = "../../module-system/sov-modules-stf-template" } # no features available -sov-value-setter = { path = "../../module-system/module-implementations/examples/sov-value-setter", default-features = false } -sov-accounts = { path = "../../module-system/module-implementations/sov-accounts", default-features = false } -sov-state = { path = "../../module-system/sov-state", default-features = false } -sov-modules-api = { path = "../../module-system/sov-modules-api", default-features = false, features = ["macros"] } +sov-cli = { path = "../../module-system/sov-cli", optional = true } +sov-sequencer-registry = { path = "../../module-system/module-implementations/sov-sequencer-registry" } +sov-blob-storage = { path = 
"../../module-system/module-implementations/sov-blob-storage" } +sov-bank = { path = "../../module-system/module-implementations/sov-bank" } +sov-chain-state = { path = "../../module-system/module-implementations/sov-chain-state" } +sov-modules-stf-template = { path = "../../module-system/sov-modules-stf-template" } +sov-value-setter = { path = "../../module-system/module-implementations/examples/sov-value-setter" } +sov-accounts = { path = "../../module-system/module-implementations/sov-accounts" } +sov-state = { path = "../../module-system/sov-state" } +sov-modules-api = { path = "../../module-system/sov-modules-api" } sov-sequencer = { path = "../../full-node/sov-sequencer", optional = true } sov-stf-runner = { path = "../../full-node/sov-stf-runner", optional = true } - # Only enable the evm on "experimental" feature -sov-evm = { path = "../../module-system/module-implementations/sov-evm", default-features = false, optional = true } +sov-evm = { path = "../../module-system/module-implementations/sov-evm", optional = true } [dev-dependencies] sov-rollup-interface = { path = "../../rollup-interface", features = ["mocks"] } +sov-data-generators = { path = "../../module-system/utils/sov-data-generators" } +demo-stf = { path = ".", features = ["native"] } tempfile = { workspace = true } rand = "0.8" [features] -default = ["native"] +default = [] experimental = ["sov-evm/experimental"] - native = [ "sov-bank/native", + "sov-cli", "sov-accounts/native", - "sov-election/native", "sov-sequencer-registry/native", + "sov-blob-storage/native", + "sov-chain-state/native", "sov-value-setter/native", "sov-modules-api/native", "sov-rollup-interface/mocks", + "sov-modules-stf-template/native", "sov-sequencer", "sov-stf-runner", "clap", diff --git a/examples/demo-stf/README.md b/examples/demo-stf/README.md index 457244701..152a4aa69 100644 --- a/examples/demo-stf/README.md +++ b/examples/demo-stf/README.md @@ -1,3 +1,18 @@ + + + +- [Demo State Transition Function](#demo-state-transition-function) + - [Overview](#overview) + - [Implementing State Transition _Function_](#implementing-state-transition-_function_) + - [Implementing Runtime: Pick Your Modules](#implementing-runtime-pick-your-modules) + - [Implementing Hooks for the Runtime:](#implementing-hooks-for-the-runtime) + - [Exposing RPC](#exposing-rpc) + - [Make Full Node Itegrations Simpler with the State Transition Runner:](#make-full-node-itegrations-simpler-with-the-state-transition-runner) + - [Using State Transition Runner](#using-state-transition-runner) + - [Wrapping Up](#wrapping-up) + + + # Demo State Transition Function This package shows how you can combine modules to build a custom state transition function. We provide several module implementations @@ -35,18 +50,8 @@ several parameters that specify its exact behavior. In order, these generics are native mode we just read values straight from disk. 2. `Runtime`: a collection of modules which make up the rollup's public interface - To implement your state transition function, you simply need to specify values for each of these fields. -So, a typical app definition looks like this: - -```rust -pub type MyNativeStf = AppTemplate>; -pub type MyZkStf = AppTemplate>; -``` - -Note that `DefaultContext` and `ZkDefaultContext` are exported by the `sov_modules_api` crate. - In the remainder of this section, we'll walk you through implementing each of the remaining generics. 
## Implementing Runtime: Pick Your Modules @@ -78,22 +83,25 @@ initialization code for each module which will get run at your rollup's genesis. allow your runtime to dispatch transactions and queries, and tell it which serialization scheme to use. We recommend borsh, since it's both fast and safe for hashing. -### Implementing Hooks for the Runtime: -The next step is to implement `Hooks` for `MyRuntime`. Hooks are abstractions that allows for the injection of custom logic into the transaction processing pipeline. +### Implementing Hooks for the Runtime: + +The next step is to implement `Hooks` for `MyRuntime`. Hooks are abstractions that allow for the injection of custom logic into the transaction processing pipeline. There are two kinds of hooks: `TxHooks`, which has the following methods: + 1. `pre_dispatch_tx_hook`: Invoked immediately before each transaction is processed. This is a good time to apply stateful transaction verification, like checking the nonce. 2. `post_dispatch_tx_hook`: Invoked immediately after each transaction is executed. This is a good place to perform any post-execution operations, like incrementing the nonce. -`ApplyBlobHooks`, which has the following methods: +`ApplyBlobHooks`, which has the following methods: + 1. `begin_blob_hook`: Invoked at the beginning of the `apply_blob` function, before the blob is deserialized into a group of transactions. This is a good time to ensure that the sequencer is properly bonded. 2. `end_blob_hook`: Invoked at the end of the `apply_blob` function. This is a good place to reward sequencers. To use the `AppTemplate`, the runtime needs to provide implementations of these hooks, which specify what needs to happen at each of these four stages. -In this demo, we only rely on two modules which need access to the hooks - `sov-accounts` and `sequencer-registry`. +In this demo, we only rely on two modules which need access to the hooks - `sov-accounts` and `sequencer-registry`. The `sov-accounts` module implements `TxHooks` because it needs to check and increment the sender nonce for every transaction. The `sequencer-registry` implements `ApplyBlobHooks` since it is responsible for managing the sequencer bond. @@ -152,15 +160,14 @@ complete State Transition Function! Your modules implement rpc methods via the `rpc_gen` macro. In order to enable the full-node to expose them, annotate the `Runtime` with `expose_rpc`. In the example above, you can see how to use the `expose_rpc` macro on the `native` `Runtime`. - -## Make Full Node Itegrations Simpler with the State Transition Runner: +## Make Full Node Integrations Simpler with the State Transition Runner: Now that we have an app, we want to be able to run it. For any custom state transition, your full node implementation is going to need a little customization. At the very least, you'll have to modify our `demo-rollup` example code to import your custom STF! But, when you're building an STF it's useful to stick as closely as possible to some standard interfaces. That way, you can minimize the changeset for your custom node implementation, which reduces the risk of bugs. -To help you integrate with full node implementations, we provide standard tools for intitializing an app (`StateTransitionRunner`). In this section, we'll briefly show how to use them. Again it is not strictly +To help you integrate with full node implementations, we provide standard tools for initializing an app (`StateTransitionRunner`). In this section, we'll briefly show how to use them. 
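For a concrete end-to-end picture before diving into the details, this is roughly what the demo-rollup binary's new `main.rs` in this diff boils down to once the logging setup and the optional config-path argument are stripped away (a sketch, not the verbatim file):

```rust
use sov_demo_rollup::new_rollup_with_celestia_da;

#[tokio::main]
async fn main() -> Result<(), anyhow::Error> {
    // Build a Rollup wired to the Celestia DA service from the TOML config,
    // then serve RPC and drive the main event loop until it exits.
    let rollup = new_rollup_with_celestia_da("rollup_config.toml").await?;
    rollup.run().await
}
```

Everything `run` does internally (registering the ledger, sequencer, and optional Ethereum RPC modules, then handing control to `StateTransitionRunner`) is spelled out in `rollup.rs` above.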
Again it is not strictly required - just by implementing STF, you get the capability to integrate with DA layers and ZKVMs. But, using these structures makes you more compatible with full node implementations out of the box. @@ -173,26 +180,8 @@ The State Transition Runner struct contains logic related to initialization and 3. `start_rpc_server` - which exposes an RPC server. -```rust -let mut app: App = - App::new(rollup_config.runner.storage.clone()); -... -let mut runner = StateTransitionRunner::new( - rollup_config, - da_service, - ledger_db, - app.stf, - storage.is_empty(), - genesis_config, -)?; - -runner.start_rpc_server(methods).await; -runner.run().await?; - -``` - ## Wrapping Up Whew, that was a lot of information. To recap, implementing your own state transition function is as simple as plugging a Runtime, a Transaction Verifier, and some Transaction Hooks into the pre-built app template. Once you've done that, -you can integrate with any DA layer and ZKVM to create a Sovereign Rollup. \ No newline at end of file +you can integrate with any DA layer and ZKVM to create a Sovereign Rollup. diff --git a/examples/demo-stf/src/app.rs b/examples/demo-stf/src/app.rs index 84efd30f3..c326f2a9f 100644 --- a/examples/demo-stf/src/app.rs +++ b/examples/demo-stf/src/app.rs @@ -7,7 +7,7 @@ pub use sov_modules_api::default_signature::private_key::DefaultPrivateKey; use sov_modules_api::Spec; use sov_modules_stf_template::AppTemplate; pub use sov_modules_stf_template::Batch; -use sov_rollup_interface::da::BlobReaderTrait; +use sov_rollup_interface::da::DaSpec; use sov_rollup_interface::zk::Zkvm; #[cfg(feature = "native")] use sov_state::ProverStorage; @@ -20,13 +20,13 @@ use sov_stf_runner::StorageConfig; use crate::runtime::Runtime; #[cfg(feature = "native")] -pub struct App { - pub stf: AppTemplate, Vm, B>, - pub batch_builder: Option, DefaultContext>>, +pub struct App { + pub stf: AppTemplate>, + pub batch_builder: Option, DefaultContext>>, } #[cfg(feature = "native")] -impl App { +impl App { pub fn new(storage_config: StorageConfig) -> Self { let storage = ProverStorage::with_config(storage_config).expect("Failed to open prover storage"); @@ -49,9 +49,9 @@ impl App { } } -pub fn create_zk_app_template( +pub fn create_zk_app_template( runtime_config: [u8; 32], -) -> AppTemplate, Vm, B> { +) -> AppTemplate> { let storage = ZkStorage::with_config(runtime_config).expect("Failed to open zk storage"); AppTemplate::new(storage, Runtime::default()) } diff --git a/examples/demo-stf/src/cli.rs b/examples/demo-stf/src/cli.rs new file mode 100644 index 000000000..86523900f --- /dev/null +++ b/examples/demo-stf/src/cli.rs @@ -0,0 +1,61 @@ +use sov_cli::wallet_state::WalletState; +use sov_cli::workflows::keys::KeyWorkflow; +use sov_cli::workflows::rpc::RpcWorkflows; +use sov_cli::workflows::transactions::TransactionWorkflow; +use sov_cli::{clap, wallet_dir}; +use sov_modules_api::clap::Parser; +use sov_modules_api::cli::{FileNameArg, JsonStringArg}; +use sov_modules_api::default_context::DefaultContext; +use sov_rollup_interface::da::DaSpec; + +use crate::runtime::{Runtime, RuntimeCall, RuntimeSubcommand}; + +type Ctx = DefaultContext; + +#[derive(clap::Subcommand)] +#[command(author, version, about, long_about = None)] +pub enum Workflows { + #[clap(subcommand)] + Transactions( + TransactionWorkflow< + RuntimeSubcommand, + RuntimeSubcommand, + >, + ), + #[clap(subcommand)] + Keys(KeyWorkflow), + #[clap(subcommand)] + Rpc(RpcWorkflows), +} + +#[derive(clap::Parser)] +#[command(author, version, about, 
long_about = None)] +pub struct App { + #[clap(subcommand)] + workflow: Workflows, +} + +pub async fn run( +) -> Result<(), anyhow::Error> { + let app_dir = wallet_dir()?; + std::fs::create_dir_all(app_dir.as_ref())?; + let wallet_state_path = app_dir.as_ref().join("wallet_state.json"); + + let mut wallet_state: WalletState, Ctx> = + WalletState::load(&wallet_state_path)?; + + let invocation = App::::parse(); + + match invocation.workflow { + Workflows::Transactions(tx) => tx + .run::, DefaultContext, JsonStringArg, _, _, _>( + &mut wallet_state, + app_dir, + )?, + Workflows::Keys(inner) => inner.run(&mut wallet_state, app_dir)?, + Workflows::Rpc(inner) => { + inner.run(&mut wallet_state, app_dir).await?; + } + } + wallet_state.save(wallet_state_path) +} diff --git a/examples/demo-stf/src/genesis_config.rs b/examples/demo-stf/src/genesis_config.rs index 0284374b2..3b87f4325 100644 --- a/examples/demo-stf/src/genesis_config.rs +++ b/examples/demo-stf/src/genesis_config.rs @@ -1,10 +1,11 @@ -use sov_election::ElectionConfig; +use sov_chain_state::ChainStateConfig; #[cfg(feature = "experimental")] use sov_evm::{AccountData, EvmConfig, SpecId}; pub use sov_modules_api::default_context::DefaultContext; use sov_modules_api::default_signature::private_key::DefaultPrivateKey; -use sov_modules_api::test_utils::generate_address; +use sov_modules_api::utils::generate_address; use sov_modules_api::{Context, PrivateKey, PublicKey}; +use sov_rollup_interface::da::DaSpec; pub use sov_state::config::Config as StorageConfig; use sov_value_setter::ValueSetterConfig; @@ -16,13 +17,12 @@ pub const LOCKED_AMOUNT: u64 = 50; pub const DEMO_SEQ_PUB_KEY_STR: &str = "seq_pub_key"; pub const DEMO_TOKEN_NAME: &str = "sov-demo-token"; -pub fn create_demo_genesis_config( +pub fn create_demo_genesis_config( initial_sequencer_balance: u64, sequencer_address: C::Address, sequencer_da_address: Vec, value_setter_admin_private_key: &DefaultPrivateKey, - election_admin_private_key: &DefaultPrivateKey, -) -> GenesisConfig { +) -> GenesisConfig { let token_config: sov_bank::TokenConfig = sov_bank::TokenConfig { token_name: DEMO_TOKEN_NAME.to_owned(), address_and_balances: vec![(sequencer_address.clone(), initial_sequencer_balance)], @@ -46,27 +46,29 @@ pub fn create_demo_genesis_config( amount: LOCKED_AMOUNT, token_address, }, - preferred_sequencer: None, + is_preferred_sequencer: true, }; let value_setter_config = ValueSetterConfig { admin: value_setter_admin_private_key.pub_key().to_address(), }; - let election_config = ElectionConfig { - admin: election_admin_private_key.pub_key().to_address(), - }; - #[cfg(feature = "experimental")] let genesis_evm_address = hex::decode("f39Fd6e51aad88F6F4ce6aB8827279cffFb92266") .unwrap() .try_into() .expect("EVM module initialized with invalid address"); + let chain_state_config = ChainStateConfig { + // TODO: Put actual value + initial_slot_height: 0, + }; + GenesisConfig::new( bank_config, sequencer_registry_config, - election_config, + (), + chain_state_config, value_setter_config, sov_accounts::AccountConfig { pub_keys: vec![] }, #[cfg(feature = "experimental")] @@ -85,16 +87,14 @@ pub fn create_demo_genesis_config( ) } -pub fn create_demo_config( +pub fn create_demo_config( initial_sequencer_balance: u64, value_setter_admin_private_key: &DefaultPrivateKey, - election_admin_private_key: &DefaultPrivateKey, -) -> GenesisConfig { - create_demo_genesis_config::( +) -> GenesisConfig { + create_demo_genesis_config::( initial_sequencer_balance, generate_address::(DEMO_SEQ_PUB_KEY_STR), 
DEMO_SEQUENCER_DA_ADDRESS.to_vec(), value_setter_admin_private_key, - election_admin_private_key, ) } diff --git a/examples/demo-stf/src/hooks_impl.rs b/examples/demo-stf/src/hooks_impl.rs index 2b9fe518c..8db6162d3 100644 --- a/examples/demo-stf/src/hooks_impl.rs +++ b/examples/demo-stf/src/hooks_impl.rs @@ -2,13 +2,14 @@ use sov_modules_api::hooks::{ApplyBlobHooks, TxHooks}; use sov_modules_api::transaction::Transaction; use sov_modules_api::{Context, Spec}; use sov_modules_stf_template::SequencerOutcome; -use sov_rollup_interface::da::BlobReaderTrait; +use sov_rollup_interface::da::{BlobReaderTrait, DaSpec}; +use sov_sequencer_registry::SequencerRegistry; use sov_state::WorkingSet; use tracing::info; use crate::runtime::Runtime; -impl TxHooks for Runtime { +impl TxHooks for Runtime { type Context = C; fn pre_dispatch_tx_hook( @@ -28,13 +29,14 @@ impl TxHooks for Runtime { } } -impl ApplyBlobHooks for Runtime { +impl ApplyBlobHooks for Runtime { type Context = C; - type BlobResult = SequencerOutcome; + type BlobResult = + SequencerOutcome<<::BlobTransaction as BlobReaderTrait>::Address>; fn begin_blob_hook( &self, - blob: &mut impl BlobReaderTrait, + blob: &mut Da::BlobTransaction, working_set: &mut WorkingSet<::Storage>, ) -> anyhow::Result<()> { self.sequencer_registry.begin_blob_hook(blob, working_set) @@ -48,7 +50,8 @@ impl ApplyBlobHooks for Runtime { match result { SequencerOutcome::Rewarded(_reward) => { // TODO: Process reward here or above. - self.sequencer_registry.end_blob_hook( + as ApplyBlobHooks>::end_blob_hook( + &self.sequencer_registry, sov_sequencer_registry::SequencerOutcome::Completed, working_set, ) @@ -58,10 +61,11 @@ impl ApplyBlobHooks for Runtime { reason, sequencer_da_address, } => { - info!("Sequencer slashed: {:?}", reason); - self.sequencer_registry.end_blob_hook( + info!("Sequencer {} slashed: {:?}", sequencer_da_address, reason); + as ApplyBlobHooks>::end_blob_hook( + &self.sequencer_registry, sov_sequencer_registry::SequencerOutcome::Slashed { - sequencer: sequencer_da_address, + sequencer: sequencer_da_address.as_ref().to_vec(), }, working_set, ) diff --git a/examples/demo-stf/src/lib.rs b/examples/demo-stf/src/lib.rs index 24b2cf573..cc9131986 100644 --- a/examples/demo-stf/src/lib.rs +++ b/examples/demo-stf/src/lib.rs @@ -7,4 +7,7 @@ pub mod runtime; #[cfg(test)] pub mod tests; +#[cfg(feature = "native")] +pub mod cli; + pub use sov_state::ArrayWitness; diff --git a/examples/demo-stf/src/runtime.rs b/examples/demo-stf/src/runtime.rs index f81d1465d..c3129df16 100644 --- a/examples/demo-stf/src/runtime.rs +++ b/examples/demo-stf/src/runtime.rs @@ -1,29 +1,43 @@ #[cfg(feature = "native")] -use sov_accounts::{AccountsRpcImpl, AccountsRpcServer}; +use sov_accounts::query::{AccountsRpcImpl, AccountsRpcServer}; #[cfg(feature = "native")] -use sov_bank::{BankRpcImpl, BankRpcServer}; +use sov_bank::query::{BankRpcImpl, BankRpcServer}; #[cfg(feature = "native")] -use sov_election::{ElectionRpcImpl, ElectionRpcServer}; +use sov_blob_storage::{BlobStorageRpcImpl, BlobStorageRpcServer}; +#[cfg(feature = "native")] +use sov_chain_state::{ChainStateRpcImpl, ChainStateRpcServer}; #[cfg(feature = "native")] #[cfg(feature = "experimental")] use sov_evm::query::{EvmRpcImpl, EvmRpcServer}; +use sov_modules_api::capabilities::{BlobRefOrOwned, BlobSelector}; #[cfg(feature = "native")] pub use sov_modules_api::default_context::DefaultContext; +use sov_modules_api::hooks::SlotHooks; use sov_modules_api::macros::DefaultRuntime; #[cfg(feature = "native")] use 
sov_modules_api::macros::{expose_rpc, CliWallet}; -use sov_modules_api::{Context, DispatchCall, Genesis, MessageCodec}; +use sov_modules_api::{Context, DispatchCall, Genesis, MessageCodec, Spec}; +use sov_rollup_interface::da::DaSpec; #[cfg(feature = "native")] use sov_sequencer_registry::{SequencerRegistryRpcImpl, SequencerRegistryRpcServer}; #[cfg(feature = "native")] -use sov_value_setter::{ValueSetterRpcImpl, ValueSetterRpcServer}; +use sov_value_setter::query::{ValueSetterRpcImpl, ValueSetterRpcServer}; + +#[cfg(feature = "native")] +pub mod query { + pub use sov_accounts::query as accounts; + pub use sov_bank::query as bank; + pub use sov_chain_state::query as chain_state; + pub use sov_sequencer_registry::query as sequencer_registry; + pub use sov_value_setter::query as value_setter; +} /// The Rollup entrypoint. /// /// On a high level, the rollup node receives serialized call messages from the DA layer and executes them as atomic transactions. /// Upon reception, the message has to be deserialized and forwarded to an appropriate module. /// -/// The module specific logic is implemented by module creators, but all the glue code responsible for message +/// The module-specific logic is implemented by module creators, but all the glue code responsible for message /// deserialization/forwarding is handled by a rollup `runtime`. /// /// In order to define the runtime we need to specify all the modules supported by our rollup (see the `Runtime` struct bellow) @@ -54,37 +68,89 @@ use sov_value_setter::{ValueSetterRpcImpl, ValueSetterRpcServer}; /// instead of going through the DA layer. #[cfg(not(feature = "experimental"))] -#[cfg_attr(feature = "native", derive(CliWallet), expose_rpc(DefaultContext))] +#[cfg_attr(feature = "native", derive(CliWallet), expose_rpc)] #[derive(Genesis, DispatchCall, MessageCodec, DefaultRuntime)] #[serialization(borsh::BorshDeserialize, borsh::BorshSerialize)] #[cfg_attr( feature = "native", serialization(serde::Serialize, serde::Deserialize) )] -pub struct Runtime { +pub struct Runtime { pub bank: sov_bank::Bank, pub sequencer_registry: sov_sequencer_registry::SequencerRegistry, - pub election: sov_election::Election, + #[cfg_attr(feature = "native", cli_skip)] + pub blob_storage: sov_blob_storage::BlobStorage, + #[cfg_attr(feature = "native", cli_skip)] + pub chain_state: sov_chain_state::ChainState, pub value_setter: sov_value_setter::ValueSetter, pub accounts: sov_accounts::Accounts, } #[cfg(feature = "experimental")] -#[cfg_attr(feature = "native", derive(CliWallet), expose_rpc(DefaultContext))] +#[cfg_attr(feature = "native", derive(CliWallet), expose_rpc)] #[derive(Genesis, DispatchCall, MessageCodec, DefaultRuntime)] #[serialization(borsh::BorshDeserialize, borsh::BorshSerialize)] #[cfg_attr( feature = "native", serialization(serde::Serialize, serde::Deserialize) )] -pub struct Runtime { +pub struct Runtime { pub bank: sov_bank::Bank, pub sequencer_registry: sov_sequencer_registry::SequencerRegistry, - pub election: sov_election::Election, + #[cfg_attr(feature = "native", cli_skip)] + pub blob_storage: sov_blob_storage::BlobStorage, + #[cfg_attr(feature = "native", cli_skip)] + pub chain_state: sov_chain_state::ChainState, pub value_setter: sov_value_setter::ValueSetter, pub accounts: sov_accounts::Accounts, #[cfg_attr(feature = "native", cli_skip)] pub evm: sov_evm::Evm, } -impl sov_modules_stf_template::Runtime for Runtime {} +impl SlotHooks for Runtime { + type Context = C; + + fn begin_slot_hook( + &self, + _slot_data: &impl 
sov_rollup_interface::services::da::SlotData, + _working_set: &mut sov_state::WorkingSet<::Storage>, + ) { + } + + fn end_slot_hook( + &self, + #[allow(unused_variables)] root_hash: [u8; 32], + #[allow(unused_variables)] working_set: &mut sov_state::WorkingSet< + ::Storage, + >, + ) { + #[cfg(feature = "experimental")] + self.evm.end_slot_hook(root_hash, working_set); + } +} + +impl sov_modules_stf_template::Runtime for Runtime +where + C: Context, + Da: DaSpec, +{ +} + +impl BlobSelector for Runtime { + type Context = C; + + fn get_blobs_for_this_slot<'a, I>( + &self, + current_blobs: I, + working_set: &mut sov_state::WorkingSet<::Storage>, + ) -> anyhow::Result>> + where + I: IntoIterator, + { + as BlobSelector>::get_blobs_for_this_slot( + &self.blob_storage, + current_blobs, + working_set, + ) + } +} diff --git a/examples/demo-stf/src/sov-cli/main.rs b/examples/demo-stf/src/sov-cli/main.rs deleted file mode 100644 index e6d3a07fa..000000000 --- a/examples/demo-stf/src/sov-cli/main.rs +++ /dev/null @@ -1,12 +0,0 @@ -#[cfg(feature = "native")] -mod native; - -#[cfg(feature = "native")] -fn main() -> Result<(), anyhow::Error> { - native::main() -} - -#[cfg(not(feature = "native"))] -fn main() -> Result<(), anyhow::Error> { - Err(anyhow::format_err!("CLI support is only available when the app is compiled with the 'native' flag. You can recompile with 'cargo build --features=native' to use the CLI")) -} diff --git a/examples/demo-stf/src/sov-cli/native.rs b/examples/demo-stf/src/sov-cli/native.rs deleted file mode 100644 index 5f27c459e..000000000 --- a/examples/demo-stf/src/sov-cli/native.rs +++ /dev/null @@ -1,549 +0,0 @@ -use std::fs::File; -use std::io::{Read, Write}; -use std::path::{Path, PathBuf}; -use std::{fs, vec}; - -use anyhow::Context; -use borsh::BorshSerialize; -use clap::Parser; -use const_rollup_config::ROLLUP_NAMESPACE_RAW; -use demo_stf::runtime::{borsh_encode_cli_tx, parse_call_message_json, CliTransactionParser}; -use jsonrpsee::core::client::ClientT; -use jsonrpsee::http_client::HttpClientBuilder; -use sov_modules_api::default_context::DefaultContext; -use sov_modules_api::default_signature::private_key::DefaultPrivateKey; -use sov_modules_api::transaction::Transaction; -use sov_modules_api::{AddressBech32, PrivateKey, PublicKey, Spec}; -use sov_modules_stf_template::RawTx; -use sov_sequencer::SubmitTransaction; - -type C = DefaultContext; -type Address = ::Address; - -/// Main entry point for CLI -#[derive(Parser)] -#[clap(version = "1.0", author = "Sovereign")] -struct Cli { - #[clap(subcommand)] - /// Commands to perform operations - command: Commands, -} - -/// Main commands -#[derive(Parser)] -enum Commands { - /// Serialize a call to a module. - /// This creates a .dat file containing the serialized transaction - GenerateTransactionFromJson { - /// Path to the json file containing the private key of the sender - sender_priv_key_path: String, - /// Name of the module to generate the call. - /// Modules defined in your Runtime are supported. - /// (eg: Bank, Accounts) - module_name: String, - /// Path to the json file containing the parameters for a module call - call_data_path: String, - /// Nonce for the transaction - nonce: u64, - }, - /// Submits transaction to sequencer - SubmitTransaction { - /// Path to the json file containing the private key of the sender - sender_priv_key_path: String, - /// Name of the module to generate the call. - /// Modules defined in your Runtime are supported. 
- /// (eg: Bank, Accounts) - module_name: String, - /// Path to the json file containing the parameters for a module call - call_data_path: String, - /// Nonce for the transaction - nonce: u64, - /// RPC endpoint with sequencer RPC - rpc_endpoint: String, - }, - /// Tells Sequencer to publish batch - PublishBatch { - /// RPC endpoint with sequencer RPC - rpc_endpoint: String, - }, - /// Combine a list of files generated by GenerateTransaction into a blob for submission to Celestia - MakeBatch { - /// List of files containing serialized transactions - path_list: Vec, - }, - /// Utility commands - Util(UtilArgs), - /// Generate a transaction from the command line - #[clap(subcommand)] - GenerateTransaction(CliTransactionParser), -} - -#[derive(Parser)] -struct UtilArgs { - #[clap(subcommand)] - /// Commands under utilities - command: UtilCommands, -} - -/// List of utility commands -#[derive(Parser)] -enum UtilCommands { - /// Compute the address of a derived token. This follows a deterministic algorithm - DeriveTokenAddress { - /// Name of the token - token_name: String, - /// Address of the sender (can be obtained using the show-public-key subcommand) - sender_address: String, - /// A random number chosen by the token deployer - salt: u64, - }, - /// Display the public key associated with a private key - ShowPublicKey { - /// Path to the json file containing the private key - private_key_path: String, - }, - /// Create a new private key - CreatePrivateKey { - /// Folder to store the new private key json file. The filename is auto-generated - priv_key_path: String, - }, - PrintNamespace, -} - -struct SerializedTx { - raw: RawTx, - #[allow(dead_code)] - sender: Address, -} - -#[derive(serde::Serialize, serde::Deserialize, Debug)] -struct PrivKeyAndAddress { - hex_priv_key: String, - address: Address, -} - -impl PrivKeyAndAddress { - fn generate() -> Self { - let priv_key = DefaultPrivateKey::generate(); - let address = priv_key.pub_key().to_address(); - Self { - hex_priv_key: priv_key.as_hex(), - address, - } - } - - fn generate_and_save_to_file(priv_key_path: &Path) -> anyhow::Result<()> { - let priv_key = Self::generate(); - let data = serde_json::to_string(&priv_key)?; - fs::create_dir_all(priv_key_path)?; - let path = Path::new(priv_key_path).join(format!("{}.json", priv_key.address)); - fs::write(&path, data)?; - println!( - "private key written to path: {}", - path.into_os_string().into_string().unwrap() - ); - Ok(()) - } -} - -impl SerializedTx { - fn new>( - sender_priv_key_path: P, - module_name: &str, - call_data_path: P, - nonce: u64, - ) -> anyhow::Result { - let sender_priv_key = Self::deserialize_priv_key(sender_priv_key_path)?; - let sender_address = sender_priv_key.pub_key().to_address(); - let message = Self::serialize_call_message(module_name, call_data_path)?; - - let tx = Transaction::::new_signed_tx(&sender_priv_key, message, nonce); - - Ok(SerializedTx { - raw: RawTx { - data: tx.try_to_vec()?, - }, - sender: sender_address, - }) - } - - fn deserialize_priv_key>( - sender_priv_key_path: P, - ) -> anyhow::Result { - let priv_key_data = std::fs::read_to_string(&sender_priv_key_path).with_context(|| { - format!( - "Failed to read private key from {:?}", - sender_priv_key_path.as_ref() - ) - })?; - - let sender_priv_key_data = serde_json::from_str::(&priv_key_data)?; - - Ok(DefaultPrivateKey::from_hex( - &sender_priv_key_data.hex_priv_key, - )?) 
- } - - fn serialize_call_message>( - module_name: &str, - call_data_path: P, - ) -> anyhow::Result> { - let call_data = std::fs::read_to_string(&call_data_path).with_context(|| { - format!( - "Failed to read call data from {:?}", - call_data_path.as_ref() - ) - })?; - parse_call_message_json::(module_name, &call_data) - } -} - -fn serialize_call(command: &Commands) -> Result { - if let Commands::GenerateTransactionFromJson { - sender_priv_key_path, - module_name, - call_data_path, - nonce, - } = command - { - let serialized = - SerializedTx::new(&sender_priv_key_path, module_name, &call_data_path, *nonce) - .context("Call message serialization error")?; - - Ok(hex::encode(serialized.raw.data)) - } else { - Ok(Default::default()) - } -} - -fn make_hex_blob(txs: impl Iterator) -> Result { - // decode the hex string to bytes - let mut batch = vec![]; - for tx in txs { - let bytes = hex::decode(tx.as_bytes()) - .with_context(|| format!("Unable to decode {} as hex", tx))?; - batch.push(bytes); - } - Ok(hex::encode( - batch - .try_to_vec() - .expect("Serializing to a vector is infallible."), - )) -} - -#[tokio::main] -pub async fn main() -> Result<(), anyhow::Error> { - let cli = Cli::parse(); - - match cli.command { - Commands::GenerateTransactionFromJson { - ref call_data_path, .. - } => { - let raw_contents = serialize_call(&cli.command)?; - let mut bin_path = PathBuf::from(call_data_path); - bin_path.set_extension("dat"); - - let mut file = File::create(&bin_path) - .with_context(|| format!("Unable to create file {}", bin_path.display()))?; - file.write_all(raw_contents.as_bytes()) - .with_context(|| format!("Unable to save file {}", bin_path.display()))?; - } - Commands::SubmitTransaction { - sender_priv_key_path, - module_name, - call_data_path, - nonce, - rpc_endpoint, - } => { - let serialized = - SerializedTx::new(&sender_priv_key_path, &module_name, &call_data_path, nonce) - .context("Unable to serialize call transaction")?; - - let request = SubmitTransaction::new(serialized.raw.data); - let client = HttpClientBuilder::default().build(rpc_endpoint).unwrap(); - let response: String = client - .request("sequencer_acceptTx", [request]) - .await - .context("Unable to submit transaction")?; - - println!( - "Your transaction was submitted to the sequencer. Response: {}", - response - ); - } - Commands::PublishBatch { rpc_endpoint } => { - let client = HttpClientBuilder::default().build(rpc_endpoint).unwrap(); - - let response: String = client - .request("sequencer_publishBatch", [1u32]) - .await - .context("Unable to publish batch")?; - - // Print the result - println!( - "Your batch was submitted to the sequencer for publication. 
Response: {:?}", - response - ); - } - Commands::MakeBatch { path_list } => { - let mut hex_encoded_txs = vec![]; - for path in path_list { - let mut file = - File::open(&path).with_context(|| format!("Unable to create file {}", path))?; - let mut hex_string = String::new(); - file.read_to_string(&mut hex_string) - .context("Unable to read the file")?; - hex_encoded_txs.push(hex_string); - } - - let blob = make_hex_blob(hex_encoded_txs.into_iter())?; - println!("{}", blob) - } - Commands::Util(util_args) => match util_args.command { - UtilCommands::DeriveTokenAddress { - token_name, - sender_address, - salt, - } => { - let sender_address = Address::from( - AddressBech32::try_from(sender_address.clone()).with_context(|| { - format!( - "Could not parse {} as a valid bech32 address", - sender_address, - ) - })?, - ); - let token_address = - sov_bank::get_token_address::(&token_name, sender_address.as_ref(), salt); - println!("{}", token_address); - } - - UtilCommands::ShowPublicKey { private_key_path } => { - let sender_priv_key = SerializedTx::deserialize_priv_key(private_key_path) - .context("Failed to get private key from file")?; - let sender_address: Address = sender_priv_key.pub_key().to_address(); - println!("{}", sender_address); - } - - UtilCommands::CreatePrivateKey { priv_key_path } => { - PrivKeyAndAddress::generate_and_save_to_file(priv_key_path.as_ref()) - .context("Could not create private key")?; - } - UtilCommands::PrintNamespace => { - println!("{}", hex::encode(ROLLUP_NAMESPACE_RAW)); - } - }, - Commands::GenerateTransaction(cli_tx) => { - println!("{}", hex::encode(borsh_encode_cli_tx(cli_tx))); - } - } - - Ok(()) -} - -#[cfg(test)] -mod test { - use borsh::BorshDeserialize; - use demo_stf::app::App; - use demo_stf::genesis_config::{create_demo_config, DEMO_SEQUENCER_DA_ADDRESS, LOCKED_AMOUNT}; - use demo_stf::runtime::{GenesisConfig, Runtime}; - use sov_modules_api::Address; - use sov_modules_stf_template::{AppTemplate, Batch, RawTx, SequencerOutcome}; - use sov_rollup_interface::mocks::MockZkvm; - use sov_rollup_interface::stf::StateTransitionFunction; - use sov_state::WorkingSet; - use sov_stf_runner::Config; - - use super::*; - - type TestBlob = sov_rollup_interface::mocks::TestBlob
; - - fn new_test_blob(batch: Batch, address: &[u8]) -> TestBlob { - let address = Address::try_from(address).unwrap(); - let data = batch.try_to_vec().unwrap(); - TestBlob::new(data, address, [0; 32]) - } - - #[test] - fn test_sov_cli() { - // Tempdir is created here, so it will be deleted only after test is finished. - let tempdir = tempfile::tempdir().unwrap(); - let mut test_demo = TestDemo::with_path(tempdir.path().to_path_buf()); - let test_data = read_test_data(); - - execute_txs(&mut test_demo.demo, test_demo.config, test_data.data); - - // get minter balance - let balance = get_balance( - &mut test_demo.demo, - &test_data.token_deployer_address, - test_data.minter_address, - ); - - // The minted amount was 1000 and we transferred 200 and burned 300. - assert_eq!(balance, Some(500)) - } - - #[test] - fn test_create_token() { - let tempdir = tempfile::tempdir().unwrap(); - let mut test_demo = TestDemo::with_path(tempdir.path().to_path_buf()); - let test_tx = serialize_call(&Commands::GenerateTransactionFromJson { - sender_priv_key_path: make_test_path("keys/token_deployer_private_key.json") - .to_str() - .unwrap() - .into(), - module_name: "Bank".into(), - call_data_path: make_test_path("requests/create_token.json") - .to_str() - .unwrap() - .into(), - nonce: 0, - }) - .unwrap(); - - let mut test_data = read_test_data(); - test_data.data.pop(); - test_data.data.pop(); - - let batch = Batch { - txs: test_data.data.clone(), - }; - - println!("batch: {}", hex::encode(batch.try_to_vec().unwrap())); - - let blob = make_hex_blob(vec![test_tx].into_iter()).unwrap(); - println!("generated: {}", &blob); - - let blob = hex::decode(blob.as_bytes()).unwrap(); - - let batch = Batch::deserialize(&mut &blob[..]).expect("must be valid blob"); - execute_txs(&mut test_demo.demo, test_demo.config, batch.txs); - } - - // Test helpers - struct TestDemo { - config: GenesisConfig, - demo: AppTemplate, MockZkvm, TestBlob>, - } - - impl TestDemo { - fn with_path(path: PathBuf) -> Self { - let value_setter_admin_private_key = DefaultPrivateKey::generate(); - let election_admin_private_key = DefaultPrivateKey::generate(); - - let genesis_config = create_demo_config( - LOCKED_AMOUNT + 1, - &value_setter_admin_private_key, - &election_admin_private_key, - ); - - let runner_config = Config { - storage: sov_state::config::Config { path }, - }; - - Self { - config: genesis_config, - demo: App::::new(runner_config.storage).stf, - } - } - } - - struct TestData { - token_deployer_address: Address, - minter_address: Address, - data: Vec, - } - - fn make_test_path>(path: P) -> PathBuf { - let mut sender_path = PathBuf::from(env!("CARGO_MANIFEST_DIR")); - sender_path.push(".."); - sender_path.push("test-data"); - - sender_path.push(path); - - sender_path - } - - fn read_test_data() -> TestData { - let create_token = SerializedTx::new( - make_test_path("keys/token_deployer_private_key.json"), - "Bank", - make_test_path("requests/create_token.json"), - 0, - ) - .unwrap(); - - let transfer = SerializedTx::new( - make_test_path("keys/minter_private_key.json"), - "Bank", - make_test_path("requests/transfer.json"), - 0, - ) - .unwrap(); - - let burn = SerializedTx::new( - make_test_path("keys/minter_private_key.json"), - "Bank", - make_test_path("requests/burn.json"), - 1, - ) - .unwrap(); - - let data = vec![create_token.raw, transfer.raw, burn.raw]; - - TestData { - token_deployer_address: create_token.sender, - minter_address: transfer.sender, - data, - } - } - - fn execute_txs( - demo: &mut AppTemplate, MockZkvm, 
TestBlob>, - config: GenesisConfig, - txs: Vec, - ) { - StateTransitionFunction::::init_chain(demo, config); - - let blob = new_test_blob(Batch { txs }, &DEMO_SEQUENCER_DA_ADDRESS); - let mut blobs = [blob]; - - let apply_block_result = StateTransitionFunction::::apply_slot( - demo, - Default::default(), - &mut blobs, - ); - - assert_eq!(1, apply_block_result.batch_receipts.len()); - let apply_blob_outcome = apply_block_result.batch_receipts[0].clone(); - - assert_eq!( - SequencerOutcome::Rewarded(0), - apply_blob_outcome.inner, - "Sequencer execution should have succeeded but failed", - ); - } - - fn get_balance( - demo: &mut AppTemplate, MockZkvm, TestBlob>, - token_deployer_address: &Address, - user_address: Address, - ) -> Option { - let token_address = create_token_address(token_deployer_address); - - let mut working_set = WorkingSet::new(demo.current_storage.clone()); - - let balance = demo - .runtime - .bank - .balance_of(user_address, token_address, &mut working_set) - .unwrap(); - - balance.amount - } - - fn create_token_address(token_deployer_address: &Address) -> Address { - sov_bank::get_token_address::("sov-test-token", token_deployer_address.as_ref(), 11) - } -} diff --git a/examples/demo-stf/src/tests/da_simulation.rs b/examples/demo-stf/src/tests/da_simulation.rs new file mode 100644 index 000000000..4d9478cbf --- /dev/null +++ b/examples/demo-stf/src/tests/da_simulation.rs @@ -0,0 +1,55 @@ +use std::rc::Rc; + +use sov_data_generators::bank_data::{ + BadNonceBankCallMessages, BadSerializationBankCallMessages, BadSignatureBankCallMessages, + BankMessageGenerator, +}; +use sov_data_generators::value_setter_data::{ValueSetterMessage, ValueSetterMessages}; +use sov_data_generators::MessageGenerator; +use sov_modules_api::default_context::DefaultContext; +use sov_modules_api::default_signature::private_key::DefaultPrivateKey; +use sov_modules_stf_template::RawTx; +use sov_rollup_interface::mocks::MockDaSpec; + +use crate::runtime::Runtime; + +type C = DefaultContext; +type Da = MockDaSpec; + +pub fn simulate_da(value_setter_admin: DefaultPrivateKey) -> Vec { + let mut messages = Vec::default(); + + let bank_generator = BankMessageGenerator::::default(); + let bank_txs = bank_generator.create_raw_txs::>(); + + let value_setter = ValueSetterMessages::new(vec![ValueSetterMessage { + admin: Rc::new(value_setter_admin), + messages: vec![99, 33], + }]); + messages.extend(value_setter.create_raw_txs::>()); + messages.extend(bank_txs); + messages +} + +pub fn simulate_da_with_revert_msg() -> Vec { + let mut messages = Vec::default(); + let bank_generator = BankMessageGenerator::::create_invalid_transfer(); + let bank_txns = bank_generator.create_raw_txs::>(); + messages.extend(bank_txns); + messages +} + +pub fn simulate_da_with_bad_sig() -> Vec { + let b: BadSignatureBankCallMessages = Default::default(); + b.create_raw_txs::>() +} + +pub fn simulate_da_with_bad_nonce() -> Vec { + let b: BadNonceBankCallMessages = Default::default(); + b.create_raw_txs::>() +} + +pub fn simulate_da_with_bad_serialization() -> Vec { + let b: BadSerializationBankCallMessages = Default::default(); + b.create_raw_txs::>() +} diff --git a/examples/demo-stf/src/tests/data_generation/election_data.rs b/examples/demo-stf/src/tests/data_generation/election_data.rs deleted file mode 100644 index 2eb68d9c9..000000000 --- a/examples/demo-stf/src/tests/data_generation/election_data.rs +++ /dev/null @@ -1,294 +0,0 @@ -use std::rc::Rc; - -use sov_modules_api::default_context::DefaultContext; -use 
sov_modules_api::PrivateKey; - -use super::*; - -struct CallGenerator { - election_admin_nonce: u64, - election_admin: Rc, - voters: Vec>, -} - -impl CallGenerator { - fn new(election_admin: Rc) -> Self { - let voters = vec![ - Rc::new(DefaultPrivateKey::generate()), - Rc::new(DefaultPrivateKey::generate()), - Rc::new(DefaultPrivateKey::generate()), - ]; - Self { - election_admin_nonce: 0, - election_admin, - voters, - } - } - - fn inc_nonce(&mut self) { - self.election_admin_nonce += 1; - } - - fn create_voters_and_vote( - &mut self, - ) -> Vec<( - Rc, - sov_election::CallMessage, - u64, - )> { - let mut messages = Vec::default(); - - let set_candidates_message = sov_election::CallMessage::SetCandidates { - names: vec!["candidate_1".to_owned(), "candidate_2".to_owned()], - }; - - messages.push(( - self.election_admin.clone(), - set_candidates_message, - self.election_admin_nonce, - )); - self.inc_nonce(); - - for voter in self.voters.clone() { - let add_voter_message = - sov_election::CallMessage::AddVoter(voter.pub_key().to_address()); - - messages.push(( - self.election_admin.clone(), - add_voter_message, - self.election_admin_nonce, - )); - - let vote_message = sov_election::CallMessage::Vote(1); - messages.push((voter, vote_message, 0)); - self.inc_nonce(); - } - - messages - } - - fn freeze_vote( - &mut self, - ) -> Vec<( - Rc, - sov_election::CallMessage, - u64, - )> { - let mut messages = Vec::default(); - - let freeze_message = sov_election::CallMessage::FreezeElection; - messages.push(( - self.election_admin.clone(), - freeze_message, - self.election_admin_nonce, - )); - self.inc_nonce(); - - messages - } - - fn all_messages( - &mut self, - ) -> Vec<( - Rc, - sov_election::CallMessage, - u64, - )> { - let mut messages = Vec::default(); - - messages.extend(self.create_voters_and_vote()); - messages.extend(self.freeze_vote()); - messages - } -} - -pub struct ElectionCallMessages { - election_admin: Rc, -} - -impl ElectionCallMessages { - pub fn new(election_admin: DefaultPrivateKey) -> Self { - Self { - election_admin: Rc::new(election_admin), - } - } -} - -impl MessageGenerator for ElectionCallMessages { - type Call = sov_election::CallMessage; - - fn create_messages(&self) -> Vec<(Rc, Self::Call, u64)> { - let call_generator = &mut CallGenerator::new(self.election_admin.clone()); - call_generator.all_messages() - } - - fn create_tx( - &self, - sender: &DefaultPrivateKey, - message: Self::Call, - nonce: u64, - _is_last: bool, - ) -> Transaction { - let message = Runtime::::encode_election_call(message); - Transaction::::new_signed_tx(sender, message, nonce) - } -} - -pub struct InvalidElectionCallMessages { - election_admin: Rc, -} - -impl InvalidElectionCallMessages { - pub fn new(election_admin: DefaultPrivateKey) -> Self { - Self { - election_admin: Rc::new(election_admin), - } - } -} - -impl MessageGenerator for InvalidElectionCallMessages { - type Call = sov_election::CallMessage; - - fn create_messages(&self) -> Vec<(Rc, Self::Call, u64)> { - let call_generator = &mut CallGenerator::new(self.election_admin.clone()); - - let mut messages = Vec::default(); - - messages.extend(call_generator.create_voters_and_vote()); - - // Additional invalid message: This voter already voted. 
- { - let voter = call_generator.voters[0].clone(); - let vote_message = sov_election::CallMessage::Vote(1); - messages.push((voter, vote_message, 1)); - } - - messages.extend(call_generator.freeze_vote()); - messages - } - - fn create_tx( - &self, - sender: &DefaultPrivateKey, - message: Self::Call, - nonce: u64, - _is_last: bool, - ) -> Transaction { - let message = Runtime::::encode_election_call(message); - Transaction::::new_signed_tx(sender, message, nonce) - } -} - -pub struct BadSigElectionCallMessages { - election_admin: Rc, -} - -impl BadSigElectionCallMessages { - pub fn new(election_admin: DefaultPrivateKey) -> Self { - Self { - election_admin: Rc::new(election_admin), - } - } -} - -impl MessageGenerator for BadSigElectionCallMessages { - type Call = sov_election::CallMessage; - - fn create_messages(&self) -> Vec<(Rc, Self::Call, u64)> { - let call_generator = &mut CallGenerator::new(self.election_admin.clone()); - call_generator.all_messages() - } - - fn create_tx( - &self, - sender: &DefaultPrivateKey, - message: Self::Call, - nonce: u64, - is_last: bool, - ) -> Transaction { - let message = Runtime::::encode_election_call(message); - - if is_last { - let tx = Transaction::::new_signed_tx(sender, message.clone(), nonce); - Transaction::new( - DefaultPrivateKey::generate().pub_key(), - message, - tx.signature().clone(), - nonce, - ) - } else { - Transaction::::new_signed_tx(sender, message, nonce) - } - } -} - -pub struct BadNonceElectionCallMessages { - election_admin: Rc, -} - -impl BadNonceElectionCallMessages { - pub fn new(election_admin: DefaultPrivateKey) -> Self { - Self { - election_admin: Rc::new(election_admin), - } - } -} - -impl MessageGenerator for BadNonceElectionCallMessages { - type Call = sov_election::CallMessage; - - fn create_messages(&self) -> Vec<(Rc, Self::Call, u64)> { - let call_generator = &mut CallGenerator::new(self.election_admin.clone()); - call_generator.all_messages() - } - - fn create_tx( - &self, - sender: &DefaultPrivateKey, - message: Self::Call, - nonce: u64, - flag: bool, - ) -> Transaction { - let nonce = if flag { nonce + 1 } else { nonce }; - - let message = Runtime::::encode_election_call(message); - Transaction::::new_signed_tx(sender, message, nonce) - } -} - -pub struct BadSerializationElectionCallMessages { - election_admin: Rc, -} - -impl BadSerializationElectionCallMessages { - pub fn new(election_admin: DefaultPrivateKey) -> Self { - Self { - election_admin: Rc::new(election_admin), - } - } -} - -impl MessageGenerator for BadSerializationElectionCallMessages { - type Call = sov_election::CallMessage; - - fn create_messages(&self) -> Vec<(Rc, Self::Call, u64)> { - let call_generator = &mut CallGenerator::new(self.election_admin.clone()); - call_generator.all_messages() - } - - fn create_tx( - &self, - sender: &DefaultPrivateKey, - message: Self::Call, - nonce: u64, - is_last: bool, - ) -> Transaction { - let call_data = if is_last { - vec![1, 2, 3] - } else { - Runtime::::encode_election_call(message) - }; - - Transaction::::new_signed_tx(sender, call_data, nonce) - } -} diff --git a/examples/demo-stf/src/tests/data_generation/mod.rs b/examples/demo-stf/src/tests/data_generation/mod.rs deleted file mode 100644 index 8f248551a..000000000 --- a/examples/demo-stf/src/tests/data_generation/mod.rs +++ /dev/null @@ -1,80 +0,0 @@ -use std::rc::Rc; - -use borsh::BorshSerialize; -use sov_modules_api::default_context::DefaultContext; -use sov_modules_api::default_signature::private_key::DefaultPrivateKey; -use 
sov_modules_api::transaction::Transaction; -use sov_modules_api::PublicKey; -use sov_modules_stf_template::RawTx; - -use crate::runtime::Runtime; - -mod election_data; -mod value_setter_data; - -pub fn simulate_da( - value_setter_admin: DefaultPrivateKey, - election_admin: DefaultPrivateKey, -) -> Vec { - let election = election_data::ElectionCallMessages::new(election_admin); - - let mut messages = Vec::default(); - messages.extend(election.create_raw_txs()); - - let value_setter = value_setter_data::ValueSetterMessages::new(value_setter_admin); - messages.extend(value_setter.create_raw_txs()); - - messages -} - -pub fn simulate_da_with_revert_msg(election_admin: DefaultPrivateKey) -> Vec { - let election = election_data::InvalidElectionCallMessages::new(election_admin); - election.create_raw_txs() -} - -pub fn simulate_da_with_bad_sig(election_admin: DefaultPrivateKey) -> Vec { - let election = election_data::BadSigElectionCallMessages::new(election_admin); - election.create_raw_txs() -} - -// TODO: Remove once we fix test with bad nonce -// https://github.com/Sovereign-Labs/sovereign-sdk/issues/235 -#[allow(unused)] -pub fn simulate_da_with_bad_nonce(election_admin: DefaultPrivateKey) -> Vec { - let election = election_data::BadNonceElectionCallMessages::new(election_admin); - election.create_raw_txs() -} - -pub fn simulate_da_with_bad_serialization(election_admin: DefaultPrivateKey) -> Vec { - let election = election_data::BadSerializationElectionCallMessages::new(election_admin); - election.create_raw_txs() -} - -trait MessageGenerator { - type Call; - - fn create_messages(&self) -> Vec<(Rc, Self::Call, u64)>; - - fn create_tx( - &self, - sender: &DefaultPrivateKey, - message: Self::Call, - nonce: u64, - is_last: bool, - ) -> Transaction; - - fn create_raw_txs(&self) -> Vec { - let mut messages_iter = self.create_messages().into_iter().peekable(); - let mut serialized_messages = Vec::default(); - while let Some((sender, m, nonce)) = messages_iter.next() { - let is_last = messages_iter.peek().is_none(); - - let tx = self.create_tx(&sender, m, nonce, is_last); - - serialized_messages.push(RawTx { - data: tx.try_to_vec().unwrap(), - }) - } - serialized_messages - } -} diff --git a/examples/demo-stf/src/tests/data_generation/value_setter_data.rs b/examples/demo-stf/src/tests/data_generation/value_setter_data.rs deleted file mode 100644 index a63796a50..000000000 --- a/examples/demo-stf/src/tests/data_generation/value_setter_data.rs +++ /dev/null @@ -1,52 +0,0 @@ -use sov_modules_api::default_context::DefaultContext; -use sov_modules_api::default_signature::private_key::DefaultPrivateKey; - -use super::*; - -pub struct ValueSetterMessages { - admin: Rc, -} - -impl ValueSetterMessages { - pub fn new(admin: DefaultPrivateKey) -> Self { - Self { - admin: Rc::new(admin), - } - } -} - -impl MessageGenerator for ValueSetterMessages { - type Call = sov_value_setter::CallMessage; - - fn create_messages(&self) -> Vec<(Rc, Self::Call, u64)> { - let admin = self.admin.clone(); - let mut value_setter_admin_nonce = 0; - let mut messages = Vec::default(); - - let new_value = 99; - - let set_value_msg_1: sov_value_setter::CallMessage = - sov_value_setter::CallMessage::SetValue(new_value); - - let new_value = 33; - let set_value_msg_2 = sov_value_setter::CallMessage::SetValue(new_value); - - messages.push((admin.clone(), set_value_msg_1, value_setter_admin_nonce)); - - value_setter_admin_nonce += 1; - messages.push((admin, set_value_msg_2, value_setter_admin_nonce)); - - messages - } - - fn create_tx( - 
&self, - sender: &DefaultPrivateKey, - message: Self::Call, - nonce: u64, - _is_last: bool, - ) -> Transaction { - let message = Runtime::::encode_value_setter_call(message); - Transaction::::new_signed_tx(sender, message, nonce) - } -} diff --git a/examples/demo-stf/src/tests/mod.rs b/examples/demo-stf/src/tests/mod.rs index b100992bf..ef737dd00 100644 --- a/examples/demo-stf/src/tests/mod.rs +++ b/examples/demo-stf/src/tests/mod.rs @@ -1,45 +1,27 @@ use std::path::Path; -use borsh::BorshSerialize; use sov_modules_api::default_context::DefaultContext; -use sov_modules_api::Address; -use sov_modules_stf_template::{AppTemplate, Batch, SequencerOutcome, TxEffect}; -use sov_rollup_interface::stf::BatchReceipt; +use sov_modules_stf_template::AppTemplate; +use sov_rollup_interface::mocks::MockDaSpec; use sov_state::ProverStorage; use crate::runtime::Runtime; -mod data_generation; +mod da_simulation; mod stf_tests; mod tx_revert_tests; pub(crate) type C = DefaultContext; - -pub type TestBlob = sov_rollup_interface::mocks::TestBlob
; +pub(crate) type Da = MockDaSpec; pub fn create_new_demo( path: impl AsRef, ) -> AppTemplate< DefaultContext, - Runtime, + Da, sov_rollup_interface::mocks::MockZkvm, - TestBlob, + Runtime, > { let runtime = Runtime::default(); let storage = ProverStorage::with_path(path).unwrap(); AppTemplate::new(storage, runtime) } - -pub fn has_tx_events(apply_blob_outcome: &BatchReceipt) -> bool { - let events = apply_blob_outcome - .tx_receipts - .iter() - .flat_map(|receipts| receipts.events.iter()); - - events.peekable().peek().is_some() -} - -pub fn new_test_blob(batch: Batch, address: &[u8]) -> TestBlob { - let address = Address::try_from(address).unwrap(); - let data = batch.try_to_vec().unwrap(); - TestBlob::new(data, address, [0; 32]) -} diff --git a/examples/demo-stf/src/tests/stf_tests.rs b/examples/demo-stf/src/tests/stf_tests.rs index 340cd185f..3e9f310ff 100644 --- a/examples/demo-stf/src/tests/stf_tests.rs +++ b/examples/demo-stf/src/tests/stf_tests.rs @@ -1,47 +1,46 @@ #[cfg(test)] pub mod test { + + use sov_data_generators::bank_data::get_default_token_address; + use sov_data_generators::{has_tx_events, new_test_blob_from_batch}; use sov_modules_api::default_context::DefaultContext; use sov_modules_api::default_signature::private_key::DefaultPrivateKey; use sov_modules_api::PrivateKey; use sov_modules_stf_template::{Batch, SequencerOutcome}; - use sov_rollup_interface::mocks::MockZkvm; + use sov_rollup_interface::mocks::{MockBlock, MockDaSpec}; use sov_rollup_interface::stf::StateTransitionFunction; use sov_state::{ProverStorage, WorkingSet}; use crate::genesis_config::{create_demo_config, DEMO_SEQUENCER_DA_ADDRESS, LOCKED_AMOUNT}; use crate::runtime::Runtime; - use crate::tests::data_generation::simulate_da; - use crate::tests::{create_new_demo, has_tx_events, new_test_blob, TestBlob, C}; + use crate::tests::da_simulation::simulate_da; + use crate::tests::{create_new_demo, C}; #[test] fn test_demo_values_in_db() { let tempdir = tempfile::tempdir().unwrap(); let path = tempdir.path(); let value_setter_admin_private_key = DefaultPrivateKey::generate(); - let election_admin_private_key = DefaultPrivateKey::generate(); - let config = create_demo_config( - LOCKED_AMOUNT + 1, - &value_setter_admin_private_key, - &election_admin_private_key, - ); + let config = create_demo_config(LOCKED_AMOUNT + 1, &value_setter_admin_private_key); { let mut demo = create_new_demo(path); - StateTransitionFunction::::init_chain(&mut demo, config); + demo.init_chain(config); - let txs = simulate_da(value_setter_admin_private_key, election_admin_private_key); - let blob = new_test_blob(Batch { txs }, &DEMO_SEQUENCER_DA_ADDRESS); + let txs = simulate_da(value_setter_admin_private_key); + let blob = new_test_blob_from_batch(Batch { txs }, &DEMO_SEQUENCER_DA_ADDRESS, [0; 32]); let mut blobs = [blob]; - let result = StateTransitionFunction::::apply_slot( - &mut demo, - Default::default(), - &mut blobs, - ); + let data = MockBlock::default(); + let result = demo.apply_slot(Default::default(), &data, &mut blobs); assert_eq!(1, result.batch_receipts.len()); + // 2 transactions from value setter + // 2 transactions from bank + assert_eq!(4, result.batch_receipts[0].tx_receipts.len()); + let apply_blob_outcome = result.batch_receipts[0].clone(); assert_eq!( SequencerOutcome::Rewarded(0), @@ -54,22 +53,21 @@ pub mod test { // Generate a new storage instance after dumping data to the db. 
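// The block below (in the updated test) re-opens `ProverStorage` at the same
// path and checks that state persisted across the slot boundary: the bank
// reports a total supply of 1000 and the value setter returns 33.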
{ - let runtime = &mut Runtime::::default(); + let runtime = &mut Runtime::::default(); let storage = ProverStorage::with_path(path).unwrap(); let mut working_set = WorkingSet::new(storage); - - let resp = runtime.election.results(&mut working_set).unwrap(); - + let resp = runtime + .bank + .supply_of(get_default_token_address(), &mut working_set) + .unwrap(); assert_eq!( resp, - sov_election::GetResultResponse::Result(Some(sov_election::Candidate { - name: "candidate_2".to_owned(), - count: 3 - })) + sov_bank::query::TotalSupplyResponse { amount: Some(1000) } ); + let resp = runtime.value_setter.query_value(&mut working_set).unwrap(); - assert_eq!(resp, sov_value_setter::Response { value: Some(33) }); + assert_eq!(resp, sov_value_setter::query::Response { value: Some(33) }); } } @@ -80,26 +78,18 @@ pub mod test { let mut demo = create_new_demo(path); let value_setter_admin_private_key = DefaultPrivateKey::generate(); - let election_admin_private_key = DefaultPrivateKey::generate(); - let config = create_demo_config( - LOCKED_AMOUNT + 1, - &value_setter_admin_private_key, - &election_admin_private_key, - ); + let config = create_demo_config(LOCKED_AMOUNT + 1, &value_setter_admin_private_key); - StateTransitionFunction::::init_chain(&mut demo, config); + demo.init_chain(config); - let txs = simulate_da(value_setter_admin_private_key, election_admin_private_key); + let txs = simulate_da(value_setter_admin_private_key); - let blob = new_test_blob(Batch { txs }, &DEMO_SEQUENCER_DA_ADDRESS); + let blob = new_test_blob_from_batch(Batch { txs }, &DEMO_SEQUENCER_DA_ADDRESS, [0; 32]); let mut blobs = [blob]; + let data = MockBlock::default(); - let apply_block_result = StateTransitionFunction::::apply_slot( - &mut demo, - Default::default(), - &mut blobs, - ); + let apply_block_result = demo.apply_slot(Default::default(), &data, &mut blobs); assert_eq!(1, apply_block_result.batch_receipts.len()); let apply_blob_outcome = apply_block_result.batch_receipts[0].clone(); @@ -112,22 +102,21 @@ pub mod test { assert!(has_tx_events(&apply_blob_outcome),); - let runtime = &mut Runtime::::default(); + let runtime = &mut Runtime::::default(); let mut working_set = WorkingSet::new(demo.current_storage.clone()); - let resp = runtime.election.results(&mut working_set).unwrap(); - + let resp = runtime + .bank + .supply_of(get_default_token_address(), &mut working_set) + .unwrap(); assert_eq!( resp, - sov_election::GetResultResponse::Result(Some(sov_election::Candidate { - name: "candidate_2".to_owned(), - count: 3 - })) + sov_bank::query::TotalSupplyResponse { amount: Some(1000) } ); let resp = runtime.value_setter.query_value(&mut working_set).unwrap(); - assert_eq!(resp, sov_value_setter::Response { value: Some(33) }); + assert_eq!(resp, sov_value_setter::query::Response { value: Some(33) }); } #[test] @@ -137,27 +126,18 @@ pub mod test { let path = tempdir.path(); let value_setter_admin_private_key = DefaultPrivateKey::generate(); - let election_admin_private_key = DefaultPrivateKey::generate(); - let config = create_demo_config( - LOCKED_AMOUNT + 1, - &value_setter_admin_private_key, - &election_admin_private_key, - ); + let config = create_demo_config(LOCKED_AMOUNT + 1, &value_setter_admin_private_key); { let mut demo = create_new_demo(path); + demo.init_chain(config); - StateTransitionFunction::::init_chain(&mut demo, config); - - let txs = simulate_da(value_setter_admin_private_key, election_admin_private_key); - let blob = new_test_blob(Batch { txs }, &DEMO_SEQUENCER_DA_ADDRESS); + let txs = 
simulate_da(value_setter_admin_private_key); + let blob = new_test_blob_from_batch(Batch { txs }, &DEMO_SEQUENCER_DA_ADDRESS, [0; 32]); let mut blobs = [blob]; + let data = MockBlock::default(); - let apply_block_result = StateTransitionFunction::::apply_slot( - &mut demo, - Default::default(), - &mut blobs, - ); + let apply_block_result = demo.apply_slot(Default::default(), &data, &mut blobs); assert_eq!(1, apply_block_result.batch_receipts.len()); let apply_blob_outcome = apply_block_result.batch_receipts[0].clone(); @@ -169,22 +149,24 @@ pub mod test { ); } - // Generate a new storage instance, value are missing because we didn't call `end_slot()`; + // Generate a new storage instance, values are missing because we didn't call `end_slot()`; { - let runtime = &mut Runtime::::default(); + let runtime = &mut Runtime::::default(); let storage = ProverStorage::with_path(path).unwrap(); let mut working_set = WorkingSet::new(storage); - let resp = runtime.election.results(&mut working_set).unwrap(); - + let resp = runtime + .bank + .supply_of(get_default_token_address(), &mut working_set) + .unwrap(); assert_eq!( resp, - sov_election::GetResultResponse::Err("Election is not frozen".to_owned()) + sov_bank::query::TotalSupplyResponse { amount: Some(1000) } ); let resp = runtime.value_setter.query_value(&mut working_set).unwrap(); - assert_eq!(resp, sov_value_setter::Response { value: None }); + assert_eq!(resp, sov_value_setter::query::Response { value: None }); } } @@ -194,28 +176,20 @@ pub mod test { let path = tempdir.path(); let value_setter_admin_private_key = DefaultPrivateKey::generate(); - let election_admin_private_key = DefaultPrivateKey::generate(); - let config = create_demo_config( - LOCKED_AMOUNT + 1, - &value_setter_admin_private_key, - &election_admin_private_key, - ); + let mut config = create_demo_config(LOCKED_AMOUNT + 1, &value_setter_admin_private_key); + config.sequencer_registry.is_preferred_sequencer = false; let mut demo = create_new_demo(path); - - StateTransitionFunction::::init_chain(&mut demo, config); + demo.init_chain(config); let some_sequencer: [u8; 32] = [121; 32]; - let txs = simulate_da(value_setter_admin_private_key, election_admin_private_key); - let blob = new_test_blob(Batch { txs }, &some_sequencer); + let txs = simulate_da(value_setter_admin_private_key); + let blob = new_test_blob_from_batch(Batch { txs }, &some_sequencer, [0; 32]); let mut blobs = [blob]; + let data = MockBlock::default(); - let apply_block_result = StateTransitionFunction::::apply_slot( - &mut demo, - Default::default(), - &mut blobs, - ); + let apply_block_result = demo.apply_slot(Default::default(), &data, &mut blobs); assert_eq!(1, apply_block_result.batch_receipts.len()); let apply_blob_outcome = apply_block_result.batch_receipts[0].clone(); diff --git a/examples/demo-stf/src/tests/tx_revert_tests.rs b/examples/demo-stf/src/tests/tx_revert_tests.rs index e46cbe5bd..ba2f4d0cd 100644 --- a/examples/demo-stf/src/tests/tx_revert_tests.rs +++ b/examples/demo-stf/src/tests/tx_revert_tests.rs @@ -1,55 +1,50 @@ -use borsh::BorshSerialize; -use const_rollup_config::SEQUENCER_DA_ADDRESS; -use sov_accounts::Response; +use sov_accounts::query::Response; +use sov_data_generators::bank_data::{get_default_private_key, get_default_token_address}; +use sov_data_generators::{has_tx_events, new_test_blob_from_batch}; use sov_modules_api::default_context::DefaultContext; use sov_modules_api::default_signature::private_key::DefaultPrivateKey; -use sov_modules_api::transaction::Transaction; -use 
sov_modules_api::{PrivateKey, PublicKey}; -use sov_modules_stf_template::{Batch, RawTx, SequencerOutcome, SlashingReason}; -use sov_rollup_interface::mocks::MockZkvm; +use sov_modules_api::PrivateKey; +use sov_modules_stf_template::{Batch, SequencerOutcome, SlashingReason, TxEffect}; +use sov_rollup_interface::da::BlobReaderTrait; +use sov_rollup_interface::mocks::{MockBlock, MockDaSpec}; use sov_rollup_interface::stf::StateTransitionFunction; use sov_state::{ProverStorage, WorkingSet}; use super::create_new_demo; -use super::data_generation::{simulate_da_with_bad_sig, simulate_da_with_revert_msg}; use crate::genesis_config::{create_demo_config, DEMO_SEQUENCER_DA_ADDRESS, LOCKED_AMOUNT}; use crate::runtime::Runtime; -use crate::tests::data_generation::simulate_da_with_bad_serialization; -use crate::tests::{has_tx_events, new_test_blob, TestBlob}; +use crate::tests::da_simulation::{ + simulate_da_with_bad_nonce, simulate_da_with_bad_serialization, simulate_da_with_bad_sig, + simulate_da_with_revert_msg, +}; const SEQUENCER_BALANCE_DELTA: u64 = 1; const SEQUENCER_BALANCE: u64 = LOCKED_AMOUNT + SEQUENCER_BALANCE_DELTA; +// Assume there was proper address and we converted it to bytes already. +const SEQUENCER_DA_ADDRESS: [u8; 32] = [1; 32]; #[test] fn test_tx_revert() { let tempdir = tempfile::tempdir().unwrap(); let path = tempdir.path(); - let value_setter_admin_private_key = DefaultPrivateKey::generate(); - let election_admin_private_key = DefaultPrivateKey::generate(); + let admin_private_key = DefaultPrivateKey::generate(); - let config = create_demo_config( - SEQUENCER_BALANCE, - &value_setter_admin_private_key, - &election_admin_private_key, - ); - let sequencer_rollup_address = config.sequencer_registry.seq_rollup_address.clone(); + let config = create_demo_config(SEQUENCER_BALANCE, &admin_private_key); + let sequencer_rollup_address = config.sequencer_registry.seq_rollup_address; { let mut demo = create_new_demo(path); + // TODO: Maybe complete with actual block data + let _data = MockBlock::default(); + demo.init_chain(config); - StateTransitionFunction::::init_chain(&mut demo, config); - - let txs = simulate_da_with_revert_msg(election_admin_private_key); - let blob = new_test_blob(Batch { txs }, &DEMO_SEQUENCER_DA_ADDRESS); + let txs = simulate_da_with_revert_msg(); + let blob = new_test_blob_from_batch(Batch { txs }, &DEMO_SEQUENCER_DA_ADDRESS, [0; 32]); let mut blobs = [blob]; + let data = MockBlock::default(); - let apply_block_result = StateTransitionFunction::::apply_slot( - &mut demo, - Default::default(), - &mut blobs, - ); + let apply_block_result = demo.apply_slot(Default::default(), &data, &mut blobs); - // TODO: Check witness. 
assert_eq!(1, apply_block_result.batch_receipts.len()); let apply_blob_outcome = apply_block_result.batch_receipts[0].clone(); @@ -59,30 +54,31 @@ fn test_tx_revert() { "Unexpected outcome: Batch execution should have succeeded", ); - // Some events were observed - assert!(has_tx_events(&apply_blob_outcome), "No events were taken"); + let txn_receipts = apply_block_result.batch_receipts[0].tx_receipts.clone(); + // 3 transactions + // create 1000 tokens + // transfer 15 tokens + // transfer 5000 tokens // this should be reverted + assert_eq!(txn_receipts[0].receipt, TxEffect::Successful); + assert_eq!(txn_receipts[1].receipt, TxEffect::Successful); + assert_eq!(txn_receipts[2].receipt, TxEffect::Reverted); } // Checks { - let runtime = &mut Runtime::::default(); + let runtime = &mut Runtime::::default(); let storage = ProverStorage::with_path(path).unwrap(); let mut working_set = WorkingSet::new(storage); + let resp = runtime + .bank + .balance_of( + get_default_private_key().default_address(), + get_default_token_address(), + &mut working_set, + ) + .unwrap(); - // We sent 4 vote messages but one of them is invalid and should be reverted. - let resp = runtime.election.number_of_votes(&mut working_set).unwrap(); - - assert_eq!(resp, sov_election::GetNbOfVotesResponse::Result(3)); - - let resp = runtime.election.results(&mut working_set).unwrap(); - - assert_eq!( - resp, - sov_election::GetResultResponse::Result(Some(sov_election::Candidate { - name: "candidate_2".to_owned(), - count: 3 - })) - ); + assert_eq!(resp.amount, Some(985)); let resp = runtime .sequencer_registry @@ -94,69 +90,25 @@ fn test_tx_revert() { } #[test] -// In this test single call is invalid, which means it returned error on dispatch, -// But nonce of the account should be increased. 
fn test_nonce_incremented_on_revert() { let tempdir = tempfile::tempdir().unwrap(); let path = tempdir.path(); - let value_setter_admin_private_key = DefaultPrivateKey::generate(); - let election_admin_private_key = DefaultPrivateKey::generate(); - let voter = DefaultPrivateKey::generate(); - let original_nonce = 0; + let admin_private_key = DefaultPrivateKey::generate(); - let config = create_demo_config( - SEQUENCER_BALANCE, - &value_setter_admin_private_key, - &election_admin_private_key, - ); + let config = create_demo_config(SEQUENCER_BALANCE, &admin_private_key); { let mut demo = create_new_demo(path); - StateTransitionFunction::::init_chain(&mut demo, config); - - let set_candidates_message = Runtime::::encode_election_call( - sov_election::CallMessage::SetCandidates { - names: vec!["candidate_1".to_owned(), "candidate_2".to_owned()], - }, - ); + // TODO: Maybe complete with actual block data + let _data = MockBlock::default(); + demo.init_chain(config); - let set_candidates_message = Transaction::::new_signed_tx( - &election_admin_private_key, - set_candidates_message, - 0, - ); - - let add_voter_message = Runtime::::encode_election_call( - sov_election::CallMessage::AddVoter(voter.pub_key().to_address()), - ); - let add_voter_message = Transaction::::new_signed_tx( - &election_admin_private_key, - add_voter_message, - 1, - ); - - // There's only 2 candidates - let vote_message = - Runtime::::encode_election_call(sov_election::CallMessage::Vote(100)); - let vote_message = - Transaction::::new_signed_tx(&voter, vote_message, original_nonce); - - let txs = vec![set_candidates_message, add_voter_message, vote_message]; - let txs = txs - .into_iter() - .map(|t| RawTx { - data: t.try_to_vec().unwrap(), - }) - .collect(); - - let blob = new_test_blob(Batch { txs }, &DEMO_SEQUENCER_DA_ADDRESS); + let txs = simulate_da_with_revert_msg(); + let blob = new_test_blob_from_batch(Batch { txs }, &DEMO_SEQUENCER_DA_ADDRESS, [0; 32]); let mut blobs = [blob]; + let data = MockBlock::default(); - let apply_block_result = StateTransitionFunction::::apply_slot( - &mut demo, - Default::default(), - &mut blobs, - ); + let apply_block_result = demo.apply_slot(Default::default(), &data, &mut blobs); assert_eq!(1, apply_block_result.batch_receipts.len()); let apply_blob_outcome = apply_block_result.batch_receipts[0].clone(); @@ -166,29 +118,36 @@ fn test_nonce_incremented_on_revert() { apply_blob_outcome.inner, "Unexpected outcome: Batch execution should have succeeded", ); + + let txn_receipts = apply_block_result.batch_receipts[0].tx_receipts.clone(); + // 3 transactions + // create 1000 tokens + // transfer 15 tokens + // transfer 5000 tokens // this should be reverted + assert_eq!(txn_receipts[0].receipt, TxEffect::Successful); + assert_eq!(txn_receipts[1].receipt, TxEffect::Successful); + assert_eq!(txn_receipts[2].receipt, TxEffect::Reverted); } + // with 3 transactions, the final nonce should be 3 + // 0 -> 1 + // 1 -> 2 + // 2 -> 3 { - let runtime = &mut Runtime::::default(); + let runtime = &mut Runtime::::default(); let storage = ProverStorage::with_path(path).unwrap(); let mut working_set = WorkingSet::new(storage); - - // No votes actually recorded, because there was invalid vote - let resp = runtime.election.number_of_votes(&mut working_set).unwrap(); - - assert_eq!(resp, sov_election::GetNbOfVotesResponse::Result(0)); - let nonce = match runtime .accounts - .get_account(voter.pub_key(), &mut working_set) + .get_account(get_default_private_key().pub_key(), &mut working_set) .unwrap() { 
Response::AccountExists { nonce, .. } => nonce, Response::AccountEmpty => 0, }; - // Voter should have its nonce increased - assert_eq!(original_nonce + 1, nonce); + // minter account should have its nonce increased for 3 transactions + assert_eq!(3, nonce); } } @@ -196,30 +155,24 @@ fn test_nonce_incremented_on_revert() { fn test_tx_bad_sig() { let tempdir = tempfile::tempdir().unwrap(); let path = tempdir.path(); - let value_setter_admin_private_key = DefaultPrivateKey::generate(); - let election_admin_private_key = DefaultPrivateKey::generate(); + let admin_private_key = DefaultPrivateKey::generate(); - let config = create_demo_config( - SEQUENCER_BALANCE, - &value_setter_admin_private_key, - &election_admin_private_key, - ); + let config = create_demo_config(SEQUENCER_BALANCE, &admin_private_key); { let mut demo = create_new_demo(path); + // TODO: Maybe complete with actual block data + let _data = MockBlock::default(); + demo.init_chain(config); - StateTransitionFunction::::init_chain(&mut demo, config); + let txs = simulate_da_with_bad_sig(); - let txs = simulate_da_with_bad_sig(election_admin_private_key); - - let blob = new_test_blob(Batch { txs }, &DEMO_SEQUENCER_DA_ADDRESS); + let blob = new_test_blob_from_batch(Batch { txs }, &DEMO_SEQUENCER_DA_ADDRESS, [0; 32]); + let blob_sender = blob.sender(); let mut blobs = [blob]; - let apply_block_result = StateTransitionFunction::::apply_slot( - &mut demo, - Default::default(), - &mut blobs, - ); + let data = MockBlock::default(); + let apply_block_result = demo.apply_slot(Default::default(), &data, &mut blobs); assert_eq!(1, apply_block_result.batch_receipts.len()); let apply_blob_outcome = apply_block_result.batch_receipts[0].clone(); @@ -227,7 +180,7 @@ fn test_tx_bad_sig() { assert_eq!( SequencerOutcome::Slashed{ reason:SlashingReason::StatelessVerificationFailed, - sequencer_da_address: DEMO_SEQUENCER_DA_ADDRESS.to_vec(), + sequencer_da_address: blob_sender, }, apply_blob_outcome.inner, "Unexpected outcome: Stateless verification should have failed due to invalid signature" @@ -236,20 +189,44 @@ fn test_tx_bad_sig() { // The batch receipt contains no events. 
assert!(!has_tx_events(&apply_blob_outcome)); } +} + +#[test] +fn test_tx_bad_nonce() { + let tempdir = tempfile::tempdir().unwrap(); + let path = tempdir.path(); + let admin_private_key = DefaultPrivateKey::generate(); + + let config = create_demo_config(SEQUENCER_BALANCE, &admin_private_key); { - let runtime = &mut Runtime::::default(); - let storage = ProverStorage::with_path(path).unwrap(); - let mut working_set = WorkingSet::new(storage); + let mut demo = create_new_demo(path); + // TODO: Maybe complete with actual block data + let _data = MockBlock::default(); + demo.init_chain(config); - let resp = runtime.election.results(&mut working_set).unwrap(); + let txs = simulate_da_with_bad_nonce(); + let blob = new_test_blob_from_batch(Batch { txs }, &DEMO_SEQUENCER_DA_ADDRESS, [0; 32]); + let mut blobs = [blob]; + + let data = MockBlock::default(); + let apply_block_result = demo.apply_slot(Default::default(), &data, &mut blobs); + + assert_eq!(1, apply_block_result.batch_receipts.len()); + let tx_receipts = apply_block_result.batch_receipts[0].tx_receipts.clone(); + // Bad nonce means that the transaction has to be reverted + assert_eq!(tx_receipts[0].receipt, TxEffect::Reverted); + + // We don't expect the sequencer to be slashed for a bad nonce + // The reason for this is that in cases such as based sequencing, the sequencer can + // still post under the assumption that the nonce is valid (It doesn't know other sequencers + // are also doing this) so it needs to be rewarded. + // We're asserting that here to track if the logic changes assert_eq!( - resp, - sov_election::GetResultResponse::Err("Election is not frozen".to_owned()) + apply_block_result.batch_receipts[0].inner, + SequencerOutcome::Rewarded(0) ); - - // TODO: Sequencer is slashed } } @@ -259,17 +236,13 @@ fn test_tx_bad_serialization() { let path = tempdir.path(); let value_setter_admin_private_key = DefaultPrivateKey::generate(); - let election_admin_private_key = DefaultPrivateKey::generate(); - - let config = create_demo_config( - SEQUENCER_BALANCE, - &value_setter_admin_private_key, - &election_admin_private_key, - ); - let sequencer_rollup_address = config.sequencer_registry.seq_rollup_address.clone(); + + let config = create_demo_config(SEQUENCER_BALANCE, &value_setter_admin_private_key); + let sequencer_rollup_address = config.sequencer_registry.seq_rollup_address; let sequencer_balance_before = { let mut demo = create_new_demo(path); - StateTransitionFunction::::init_chain(&mut demo, config); + demo.init_chain(config); + let mut working_set = WorkingSet::new(demo.current_storage); let coins = demo .runtime @@ -280,7 +253,7 @@ fn test_tx_bad_serialization() { demo.runtime .bank .get_balance_of( - sequencer_rollup_address.clone(), + sequencer_rollup_address, coins.token_address, &mut working_set, ) @@ -288,17 +261,18 @@ fn test_tx_bad_serialization() { }; { + // TODO: Maybe complete with actual block data + let _data = MockBlock::default(); + let mut demo = create_new_demo(path); - let txs = simulate_da_with_bad_serialization(election_admin_private_key); - let blob = new_test_blob(Batch { txs }, &DEMO_SEQUENCER_DA_ADDRESS); + let txs = simulate_da_with_bad_serialization(); + let blob = new_test_blob_from_batch(Batch { txs }, &DEMO_SEQUENCER_DA_ADDRESS, [0; 32]); + let blob_sender = blob.sender(); let mut blobs = [blob]; - let apply_block_result = StateTransitionFunction::::apply_slot( - &mut demo, - Default::default(), - &mut blobs, - ); + let data = MockBlock::default(); + let apply_block_result = 
demo.apply_slot(Default::default(), &data, &mut blobs); assert_eq!(1, apply_block_result.batch_receipts.len()); let apply_blob_outcome = apply_block_result.batch_receipts[0].clone(); @@ -306,7 +280,7 @@ fn test_tx_bad_serialization() { assert_eq!( SequencerOutcome::Slashed { reason: SlashingReason::InvalidTransactionEncoding , - sequencer_da_address: DEMO_SEQUENCER_DA_ADDRESS.to_vec(), + sequencer_da_address: blob_sender, }, apply_blob_outcome.inner, "Unexpected outcome: Stateless verification should have failed due to invalid signature" @@ -317,18 +291,12 @@ fn test_tx_bad_serialization() { } { - let runtime = &mut Runtime::::default(); + let runtime = &mut Runtime::::default(); let storage = ProverStorage::with_path(path).unwrap(); let mut working_set = WorkingSet::new(storage); - let resp = runtime.election.results(&mut working_set).unwrap(); - - assert_eq!( - resp, - sov_election::GetResultResponse::Err("Election is not frozen".to_owned()) - ); + // Sequencer is not in the list of allowed sequencers - // Sequencer is not in list of allowed sequencers let allowed_sequencer = runtime .sequencer_registry .sequencer_address(SEQUENCER_DA_ADDRESS.to_vec(), &mut working_set) diff --git a/examples/test-data/keys/minter_private_key.json b/examples/test-data/keys/minter_private_key.json index e7642f5d9..117c31efb 100644 --- a/examples/test-data/keys/minter_private_key.json +++ b/examples/test-data/keys/minter_private_key.json @@ -1,4 +1,39 @@ { - "hex_priv_key": "236e80cb222c4ed0431b093b3ac53e6aa7a2273fe1f4351cd354989a823432a27b758bf2e7670fafaf6bf0015ce0ff5aa802306fc7e3f45762853ffc37180fe6", + "private_key": { + "key_pair": [ + 35, + 110, + 128, + 203, + 34, + 44, + 78, + 208, + 67, + 27, + 9, + 59, + 58, + 197, + 62, + 106, + 167, + 162, + 39, + 63, + 225, + 244, + 53, + 28, + 211, + 84, + 152, + 154, + 130, + 52, + 50, + 162 + ] + }, "address": "sov15vspj48hpttzyvxu8kzq5klhvaczcpyxn6z6k0hwpwtzs4a6wkvqwr57gc" } \ No newline at end of file diff --git a/examples/test-data/keys/tx_signer_private_key.json b/examples/test-data/keys/tx_signer_private_key.json index f01dde9ad..b237947bc 100644 --- a/examples/test-data/keys/tx_signer_private_key.json +++ b/examples/test-data/keys/tx_signer_private_key.json @@ -1,4 +1,4 @@ { - "hex_priv_key": "27c3774d52e71ea266a9c5256cd98b9ae67e62f2ae5ed34a668db8eaa83e1bac61fcf0f466bc20ca3882d46ae07d65227e31cfaefb852bc8f579415247565dd4", - "address": "sov1dnhqk4mdsj2kwv4xymt8a624xuahfx8906j9usdkx7ensfghndkq8p33f7" - } \ No newline at end of file + "hex_priv_key": "27c3774d52e71ea266a9c5256cd98b9ae67e62f2ae5ed34a668db8eaa83e1bac61fcf0f466bc20ca3882d46ae07d65227e31cfaefb852bc8f579415247565dd4", + "address": "sov1dnhqk4mdsj2kwv4xymt8a624xuahfx8906j9usdkx7ensfghndkq8p33f7" +} \ No newline at end of file diff --git a/examples/test-data/requests/burn.json b/examples/test-data/requests/burn.json index f371c0998..e3e7f9f88 100644 --- a/examples/test-data/requests/burn.json +++ b/examples/test-data/requests/burn.json @@ -1,8 +1,8 @@ { - "Burn":{ - "coins":{ - "amount":300, - "token_address":"sov1zdwj8thgev2u3yyrrlekmvtsz4av4tp3m7dm5mx5peejnesga27svq9m72" + "Burn": { + "coins": { + "amount": 300, + "token_address": "sov16m8fxq0x5wc5aw75fx9rus2p7g2l22zf4re72c3m058g77cdjemsavg2ft" } } -} \ No newline at end of file +} diff --git a/examples/test-data/requests/create_token.json b/examples/test-data/requests/create_token.json index f8bf76c2d..7b792ac5f 100644 --- a/examples/test-data/requests/create_token.json +++ b/examples/test-data/requests/create_token.json @@ -1,12 +1,12 @@ { - 
"CreateToken": { - "salt": 11, - "token_name": "sov-test-token", - "initial_balance": 1000, - "minter_address": "sov15vspj48hpttzyvxu8kzq5klhvaczcpyxn6z6k0hwpwtzs4a6wkvqwr57gc", - "authorized_minters": [ - "sov1l6n2cku82yfqld30lanm2nfw43n2auc8clw7r5u5m6s7p8jrm4zqrr8r94", - "sov15vspj48hpttzyvxu8kzq5klhvaczcpyxn6z6k0hwpwtzs4a6wkvqwr57gc" - ] - } + "CreateToken": { + "salt": 11, + "token_name": "sov-test-token", + "initial_balance": 1000, + "minter_address": "sov15vspj48hpttzyvxu8kzq5klhvaczcpyxn6z6k0hwpwtzs4a6wkvqwr57gc", + "authorized_minters": [ + "sov1l6n2cku82yfqld30lanm2nfw43n2auc8clw7r5u5m6s7p8jrm4zqrr8r94", + "sov15vspj48hpttzyvxu8kzq5klhvaczcpyxn6z6k0hwpwtzs4a6wkvqwr57gc" + ] + } } diff --git a/examples/test-data/requests/mint.json b/examples/test-data/requests/mint.json index aeb6e9236..10436e602 100644 --- a/examples/test-data/requests/mint.json +++ b/examples/test-data/requests/mint.json @@ -2,7 +2,7 @@ "Mint": { "coins": { "amount": 3000, - "token_address": "sov1zdwj8thgev2u3yyrrlekmvtsz4av4tp3m7dm5mx5peejnesga27svq9m72" + "token_address": "sov16m8fxq0x5wc5aw75fx9rus2p7g2l22zf4re72c3m058g77cdjemsavg2ft" }, "minter_address": "sov15vspj48hpttzyvxu8kzq5klhvaczcpyxn6z6k0hwpwtzs4a6wkvqwr57gc" } diff --git a/examples/test-data/requests/register_sequencer.json b/examples/test-data/requests/register_sequencer.json index 1da7886bf..2d2aacf37 100644 --- a/examples/test-data/requests/register_sequencer.json +++ b/examples/test-data/requests/register_sequencer.json @@ -1,53 +1,38 @@ { "Register": { "da_address": [ - 99, - 101, - 108, - 101, - 115, - 116, - 105, - 97, - 49, - 104, - 115, - 118, - 113, - 56, - 101, - 116, - 53, - 117, - 117, - 117, - 97, - 51, - 112, - 48, - 52, - 118, - 107, - 107, - 99, - 118, - 119, - 51, - 114, - 115, - 122, - 107, - 54, - 100, - 56, - 55, - 52, - 119, - 119, - 99, - 116, - 100, - 53 + 13, + 5, + 25, + 31, + 28, + 30, + 5, + 27, + 20, + 6, + 29, + 10, + 14, + 29, + 20, + 12, + 22, + 13, + 19, + 1, + 0, + 11, + 9, + 15, + 23, + 13, + 14, + 1, + 9, + 27, + 9, + 14 ] } } \ No newline at end of file diff --git a/examples/test-data/requests/transfer.json b/examples/test-data/requests/transfer.json index 1bba402cc..92411d14c 100644 --- a/examples/test-data/requests/transfer.json +++ b/examples/test-data/requests/transfer.json @@ -3,8 +3,7 @@ "to": "sov1l6n2cku82yfqld30lanm2nfw43n2auc8clw7r5u5m6s7p8jrm4zqklh0qh", "coins": { "amount": 200, - "token_address": "sov1zdwj8thgev2u3yyrrlekmvtsz4av4tp3m7dm5mx5peejnesga27svq9m72" + "token_address": "sov16m8fxq0x5wc5aw75fx9rus2p7g2l22zf4re72c3m058g77cdjemsavg2ft" } } } - diff --git a/full-node/db/sov-db/Cargo.toml b/full-node/db/sov-db/Cargo.toml index 8caaa2733..29218907a 100644 --- a/full-node/db/sov-db/Cargo.toml +++ b/full-node/db/sov-db/Cargo.toml @@ -17,7 +17,7 @@ resolver = "2" # Maintained by sovereign labs jmt = { workspace = true } sov-schema-db = { path = "../sov-schema-db", version = "0.1" } -sov-rollup-interface = { path = "../../../rollup-interface", version = "0.1" } +sov-rollup-interface = { path = "../../../rollup-interface", version = "0.1", features = ["native", "mocks"] } # External anyhow = { workspace = true } @@ -26,10 +26,8 @@ borsh = { workspace = true } serde = { workspace = true, features = ["derive"] } rocksdb = { workspace = true } bincode = { workspace = true } +tokio = { workspace = true } [dev-dependencies] tempfile = { workspace = true } - -[features] -default = [] diff --git a/full-node/db/sov-db/src/ledger_db/mod.rs b/full-node/db/sov-db/src/ledger_db/mod.rs index b1bcfc340..84c3add77 
100644 --- a/full-node/db/sov-db/src/ledger_db/mod.rs +++ b/full-node/db/sov-db/src/ledger_db/mod.rs @@ -29,6 +29,7 @@ pub struct LedgerDB { /// requires transactions to be executed before being committed. db: Arc, next_item_numbers: Arc>, + slot_subscriptions: tokio::sync::broadcast::Sender, } /// A SlotNumber, BatchNumber, TxNumber, and EventNumber which are grouped together, typically representing @@ -107,6 +108,7 @@ impl LedgerDB { Ok(Self { db: Arc::new(inner), next_item_numbers: Arc::new(Mutex::new(next_item_numbers)), + slot_subscriptions: tokio::sync::broadcast::channel(10).0, }) } @@ -288,6 +290,11 @@ impl LedgerDB { self.db.write_schemas(schema_batch)?; + // Notify subscribers. This call returns an error IFF there are no subscribers, so we don't need to check the result + let _ = self + .slot_subscriptions + .send(current_item_numbers.slot_number); + Ok(()) } diff --git a/full-node/db/sov-db/src/ledger_db/rpc.rs b/full-node/db/sov-db/src/ledger_db/rpc.rs index fb3ea5be6..26ece24da 100644 --- a/full-node/db/sov-db/src/ledger_db/rpc.rs +++ b/full-node/db/sov-db/src/ledger_db/rpc.rs @@ -5,6 +5,7 @@ use sov_rollup_interface::rpc::{ TxIdentifier, TxResponse, }; use sov_rollup_interface::stf::Event; +use tokio::sync::broadcast::Receiver; use crate::schema::tables::{ BatchByHash, BatchByNumber, EventByNumber, SlotByHash, SlotByNumber, TxByHash, TxByNumber, @@ -267,6 +268,10 @@ impl LedgerRpcProvider for LedgerDB { let ids: Vec<_> = (start..=end).map(TxIdentifier::Number).collect(); self.get_transactions(&ids, query_mode) } + + fn subscribe_slots(&self) -> Result, anyhow::Error> { + Ok(self.slot_subscriptions.subscribe()) + } } impl LedgerDB { @@ -419,3 +424,23 @@ impl LedgerDB { }) } } + +#[cfg(test)] +mod tests { + use sov_rollup_interface::mocks::{MockBlob, MockBlock}; + use sov_rollup_interface::rpc::LedgerRpcProvider; + + use crate::ledger_db::{LedgerDB, SlotCommit}; + #[test] + fn test_slot_subscription() { + let temp_dir = tempfile::tempdir().unwrap(); + let path = temp_dir.path(); + let db = LedgerDB::with_path(path).unwrap(); + + let mut rx = db.subscribe_slots().unwrap(); + db.commit_slot(SlotCommit::<_, MockBlob, Vec>::new(MockBlock::default())) + .unwrap(); + + assert_eq!(rx.blocking_recv().unwrap(), 1); + } +} diff --git a/full-node/sov-ethereum/Cargo.toml b/full-node/sov-ethereum/Cargo.toml index d6c60c743..3e6c69a2b 100644 --- a/full-node/sov-ethereum/Cargo.toml +++ b/full-node/sov-ethereum/Cargo.toml @@ -11,13 +11,15 @@ readme = "README.md" resolver = "2" [dependencies] +anyhow = { workspace = true } jsonrpsee = { workspace = true, features = ["http-client", "server"] } +sov-rollup-interface = { path = "../../rollup-interface" } -sov-evm = { path = "../../module-system/module-implementations/sov-evm", default-features = false } +sov-evm = { path = "../../module-system/module-implementations/sov-evm" } demo-stf = { path = "../../examples/demo-stf", features = ["native"] } -sov-modules-api = { path = "../../module-system/sov-modules-api", default-features = false } +sov-modules-api = { path = "../../module-system/sov-modules-api" } const-rollup-config = { path = "../../examples/const-rollup-config" } -jupiter = { path = "../../adapters/celestia", features = ["native"] } +celestia = { path = "../../adapters/celestia", features = ["native"] } borsh = { workspace = true } serde_json = { workspace = true } @@ -33,7 +35,6 @@ tokio = { workspace = true } [features] -default = ["native"] +default = [] experimental = ["demo-stf/experimental", "sov-evm/experimental"] - native = 
["demo-stf/native", "sov-evm/native"] \ No newline at end of file diff --git a/full-node/sov-ethereum/src/batch_builder.rs b/full-node/sov-ethereum/src/batch_builder.rs new file mode 100644 index 000000000..39d8c70a5 --- /dev/null +++ b/full-node/sov-ethereum/src/batch_builder.rs @@ -0,0 +1,34 @@ +use std::collections::VecDeque; + +#[derive(Default)] +pub struct EthBatchBuilder { + mempool: VecDeque>, +} + +impl EthBatchBuilder { + fn make_blob(&mut self) -> Vec> { + let mut txs = Vec::new(); + + while let Some(raw_tx) = self.mempool.pop_front() { + txs.push(raw_tx); + } + txs + } + + /// Adds `txs` to the mempool and attempts to create a blob with a minimum size of `min_blob_size`. + pub fn add_transactions_and_get_next_blob( + &mut self, + min_blob_size: Option, + txs: Vec>, + ) -> Vec> { + for tx in txs { + self.mempool.push_back(tx); + } + if let Some(min_blob_size) = min_blob_size { + if self.mempool.len() >= min_blob_size { + return self.make_blob(); + } + } + Vec::default() + } +} diff --git a/full-node/sov-ethereum/src/lib.rs b/full-node/sov-ethereum/src/lib.rs index 9f6bfec23..f87ec8ab5 100644 --- a/full-node/sov-ethereum/src/lib.rs +++ b/full-node/sov-ethereum/src/lib.rs @@ -1,134 +1,177 @@ #[cfg(feature = "experimental")] +mod batch_builder; +#[cfg(feature = "experimental")] pub use experimental::{get_ethereum_rpc, Ethereum}; #[cfg(feature = "experimental")] pub mod experimental { use std::collections::HashMap; - use std::sync::Mutex; + use std::sync::{Arc, Mutex}; use borsh::ser::BorshSerialize; - use const_rollup_config::ROLLUP_NAMESPACE_RAW; use demo_stf::app::DefaultPrivateKey; use demo_stf::runtime::{DefaultContext, Runtime}; use ethers::types::{Bytes, H256}; - use jsonrpsee::core::client::ClientT; - use jsonrpsee::core::params::ArrayParams; - use jsonrpsee::http_client::{HeaderMap, HttpClient}; use jsonrpsee::types::ErrorObjectOwned; use jsonrpsee::RpcModule; - use jupiter::da_service::DaServiceConfig; - use reth_primitives::Bytes as RethBytes; + use reth_primitives::TransactionSignedNoHash as RethTransactionSignedNoHash; + use reth_rpc::eth::error::EthApiError; use sov_evm::call::CallMessage; - use sov_evm::evm::{EthAddress, EvmTransaction}; + use sov_evm::evm::{EthAddress, RawEvmTransaction}; use sov_modules_api::transaction::Transaction; use sov_modules_api::utils::to_jsonrpsee_error_object; + use sov_modules_api::EncodeCall; + use sov_rollup_interface::services::da::DaService; + + use super::batch_builder::EthBatchBuilder; - const GAS_PER_BYTE: usize = 120; const ETH_RPC_ERROR: &str = "ETH_RPC_ERROR"; - pub fn get_ethereum_rpc( - config: DaServiceConfig, - tx_signer_prov_key: DefaultPrivateKey, - ) -> RpcModule { - let mut rpc = RpcModule::new(Ethereum { - config, - nonces: Default::default(), - tx_signer_prov_key, - }); + pub struct EthRpcConfig { + pub min_blob_size: Option, + pub tx_signer_priv_key: DefaultPrivateKey, + } + + pub fn get_ethereum_rpc( + da_service: Da, + eth_rpc_config: EthRpcConfig, + ) -> RpcModule> { + let mut rpc = RpcModule::new(Ethereum::new( + Default::default(), + da_service, + Arc::new(Mutex::new(EthBatchBuilder::default())), + eth_rpc_config, + )); + register_rpc_methods(&mut rpc).expect("Failed to register sequencer RPC methods"); rpc } - pub struct Ethereum { - config: DaServiceConfig, + pub struct Ethereum { nonces: Mutex>, - tx_signer_prov_key: DefaultPrivateKey, + da_service: Da, + batch_builder: Arc>, + eth_rpc_config: EthRpcConfig, + } + + impl Ethereum { + fn new( + nonces: Mutex>, + da_service: Da, + batch_builder: Arc>, + 
eth_rpc_config: EthRpcConfig, + ) -> Self { + Self { + nonces, + da_service, + batch_builder, + eth_rpc_config, + } + } } - impl Ethereum { - fn make_raw_tx(&self, evm_tx: EvmTransaction) -> Result, std::io::Error> { + impl Ethereum { + fn make_raw_tx( + &self, + raw_tx: RawEvmTransaction, + ) -> Result<(H256, Vec), jsonrpsee::core::Error> { + let signed_transaction: RethTransactionSignedNoHash = + raw_tx.clone().try_into().map_err(EthApiError::from)?; + + let tx_hash = signed_transaction.hash(); + let sender = signed_transaction + .recover_signer() + .ok_or(EthApiError::InvalidTransactionSignature)?; + let mut nonces = self.nonces.lock().unwrap(); let nonce = *nonces - .entry(evm_tx.sender) + .entry(sender.into()) .and_modify(|n| *n += 1) .or_insert(0); - let tx = CallMessage { tx: evm_tx }; - let message = Runtime::::encode_evm_call(tx); + let tx = CallMessage { tx: raw_tx }; + let message = as EncodeCall< + sov_evm::Evm, + >>::encode_call(tx); + let tx = Transaction::::new_signed_tx( - &self.tx_signer_prov_key, + &self.eth_rpc_config.tx_signer_priv_key, message, nonce, ); - tx.try_to_vec() + Ok((H256::from(tx_hash), tx.try_to_vec()?)) } - } - - impl Ethereum { - fn make_client(&self) -> HttpClient { - let mut headers = HeaderMap::new(); - headers.insert( - "Authorization", - format!("Bearer {}", self.config.celestia_rpc_auth_token.clone()) - .parse() - .unwrap(), - ); - jsonrpsee::http_client::HttpClientBuilder::default() - .set_headers(headers) - .max_request_size(default_max_response_size()) - .build(self.config.celestia_rpc_address.clone()) - .expect("Client initialization is valid") - } + async fn submit_batch(&self, raw_txs: Vec>) -> Result<(), jsonrpsee::core::Error> { + let blob = raw_txs + .try_to_vec() + .map_err(|e| to_jsonrpsee_error_object(e, ETH_RPC_ERROR))?; - async fn send_tx_to_da( - &self, - raw: Vec, - ) -> Result { - let blob = vec![raw].try_to_vec()?; - let client = self.make_client(); - let fee: u64 = 2000; - let namespace = ROLLUP_NAMESPACE_RAW.to_vec(); - let gas_limit = (blob.len() + 512) * GAS_PER_BYTE + 1060; - - let mut params = ArrayParams::new(); - params.insert(namespace)?; - params.insert(blob)?; - params.insert(fee.to_string())?; - params.insert(gas_limit)?; - client - .request::("state.SubmitPayForBlob", params) + self.da_service + .send_transaction(&blob) .await + .map_err(|e| to_jsonrpsee_error_object(e, ETH_RPC_ERROR))?; + + Ok(()) } } - fn register_rpc_methods(rpc: &mut RpcModule) -> Result<(), jsonrpsee::core::Error> { + fn register_rpc_methods( + rpc: &mut RpcModule>, + ) -> Result<(), jsonrpsee::core::Error> { + rpc.register_async_method("eth_publishBatch", |params, ethereum| async move { + let mut params_iter = params.sequence(); + + let mut txs = Vec::default(); + while let Some(tx) = params_iter.optional_next::>()? 
{ + txs.push(tx) + } + + let blob = ethereum + .batch_builder + .lock() + .unwrap() + .add_transactions_and_get_next_blob(Some(1), txs); + + if !blob.is_empty() { + ethereum + .submit_batch(blob) + .await + .map_err(|e| to_jsonrpsee_error_object(e, ETH_RPC_ERROR))?; + } + Ok::("Submitted transaction".to_string()) + })?; + rpc.register_async_method( "eth_sendRawTransaction", |parameters, ethereum| async move { let data: Bytes = parameters.one().unwrap(); - let data = RethBytes::from(data.as_ref()); - let evm_transaction: EvmTransaction = data.try_into()?; + let raw_evm_tx = RawEvmTransaction { rlp: data.to_vec() }; - let tx_hash = evm_transaction.hash; - let raw_tx = ethereum - .make_raw_tx(evm_transaction) + let (tx_hash, raw_tx) = ethereum + .make_raw_tx(raw_evm_tx) .map_err(|e| to_jsonrpsee_error_object(e, ETH_RPC_ERROR))?; - ethereum - .send_tx_to_da(raw_tx) - .await - .map_err(|e| to_jsonrpsee_error_object(e, ETH_RPC_ERROR))?; - - Ok::<_, ErrorObjectOwned>(H256::from(tx_hash)) + let blob = ethereum + .batch_builder + .lock() + .unwrap() + .add_transactions_and_get_next_blob( + ethereum.eth_rpc_config.min_blob_size, + vec![raw_tx], + ); + + if !blob.is_empty() { + ethereum + .submit_batch(blob) + .await + .map_err(|e| to_jsonrpsee_error_object(e, ETH_RPC_ERROR))?; + } + Ok::<_, ErrorObjectOwned>(tx_hash) }, )?; Ok(()) } - - fn default_max_response_size() -> u32 { - 1024 * 1024 * 100 // 100 MB - } } diff --git a/full-node/sov-sequencer/Cargo.toml b/full-node/sov-sequencer/Cargo.toml index a988949c3..4b0bdee71 100644 --- a/full-node/sov-sequencer/Cargo.toml +++ b/full-node/sov-sequencer/Cargo.toml @@ -16,12 +16,10 @@ resolver = "2" anyhow = { workspace = true } borsh = { workspace = true } hex = { workspace = true } -jsonrpsee = { workspace = true, features = ["http-client", "server"] } +jsonrpsee = { workspace = true, features = ["client", "server",] } serde = { workspace = true, features = ["derive"] } tracing = { workspace = true } - sov-rollup-interface = { path = "../../rollup-interface" } -sov-modules-api = { path = "../../module-system/sov-modules-api" } [dev-dependencies] async-trait = { workspace = true } diff --git a/full-node/sov-sequencer/README.md b/full-node/sov-sequencer/README.md index 4dd6bd118..d11e6fc08 100644 --- a/full-node/sov-sequencer/README.md +++ b/full-node/sov-sequencer/README.md @@ -4,7 +4,7 @@ Simple implementation of based sequencer generic over batch builder and DA servi Exposes 2 RPC methods: -1. `sequencer_acceptTx` where input is suppose to be signed and serialized transaction. This transaction is stored in mempool +1. `sequencer_acceptTx` where input is supposed to be signed and serialized transaction. This transaction is stored in mempool 2. `sequencer_publishBatch` without any input, which builds the batch using batch builder and publishes it on DA layer. 
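As a rough illustration, these two methods can be exercised over JSON-RPC with `curl` (a sketch only: the endpoint `http://127.0.0.1:12345` follows the address printed by demo-rollup below, and the transaction bytes are placeholder values, not a real signed transaction):

```bash
# Hypothetical example: submit a signed, serialized transaction (placeholder bytes) to the mempool.
curl -X POST -H "Content-Type: application/json" \
  -d '{"jsonrpc":"2.0","method":"sequencer_acceptTx","params":[{"body":[1,2,3,4,5]}],"id":1}' \
  http://127.0.0.1:12345

# Build a batch from everything in the mempool and publish it to the DA layer.
curl -X POST -H "Content-Type: application/json" \
  -d '{"jsonrpc":"2.0","method":"sequencer_publishBatch","params":[],"id":1}' \
  http://127.0.0.1:12345
```

The publish call responds with a short status string of the form `Submitted N transactions`.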
## How to use it with `sov-cli` @@ -24,7 +24,7 @@ This command is similar to serialize call from [`demo-rollup` README](../../exam When demo-rollup with enabled sequencer starts, it prints on which endpoint it listens: -``` +```bash 2023-07-07T14:53:02.280562Z INFO sov_demo_rollup: Starting RPC server at 127.0.0.1:12345 ``` diff --git a/full-node/sov-sequencer/src/lib.rs b/full-node/sov-sequencer/src/lib.rs index 8cb52d19e..93507b781 100644 --- a/full-node/sov-sequencer/src/lib.rs +++ b/full-node/sov-sequencer/src/lib.rs @@ -1,34 +1,39 @@ -use std::sync::{Arc, Mutex}; +#![deny(missing_docs)] +#![doc = include_str!("../README.md")] +use std::sync::Mutex; +/// Utilities for the sequencer rpc +pub mod utils; use anyhow::anyhow; use jsonrpsee::types::ErrorObjectOwned; use jsonrpsee::RpcModule; -use sov_modules_api::utils::to_jsonrpsee_error_object; use sov_rollup_interface::services::batch_builder::BatchBuilder; use sov_rollup_interface::services::da::DaService; +use utils::to_jsonrpsee_error_object; const SEQUENCER_RPC_ERROR: &str = "SEQUENCER_RPC_ERROR"; /// Single data structure that manages mempool and batch producing. pub struct Sequencer { batch_builder: Mutex, - da_service: Arc, + da_service: T, } impl Sequencer { /// Creates new Sequencer from BatchBuilder and DaService - pub fn new(batch_builder: B, da_service: Arc) -> Self { + pub fn new(batch_builder: B, da_service: T) -> Self { Self { batch_builder: Mutex::new(batch_builder), da_service, } } - async fn submit_batch(&self) -> anyhow::Result<()> { - // Need to release lock before await, so Future is `Send`. - // But potentially it can create blobs that sent out of order. - // Can be improved with atomics, so new batch is only created after previous was submitted. - tracing::info!("Going to submit batch!"); + async fn submit_batch(&self) -> anyhow::Result { + // Need to release lock before await, so the Future is `Send`. + // But potentially it can create blobs that are sent out of order. + // It can be improved with atomics, + // so a new batch is only created after previous was submitted. + tracing::info!("Submit batch request has been received!"); let blob = { let mut batch_builder = self .batch_builder @@ -36,9 +41,12 @@ impl Sequencer .map_err(|e| anyhow!("failed to lock mempool: {}", e.to_string()))?; batch_builder.get_next_blob()? }; + let num_txs = blob.len(); let blob: Vec = borsh::to_vec(&blob)?; + + println!("Sending avial DA"); match self.da_service.send_transaction(&blob).await { - Ok(_) => Ok(()), + Ok(_) => Ok(num_txs), Err(e) => Err(anyhow!("failed to submit batch: {:?}", e)), } } @@ -59,14 +67,25 @@ fn register_txs_rpc_methods( ) -> Result<(), jsonrpsee::core::Error> where B: BatchBuilder + Send + Sync + 'static, - D: DaService + Send + Sync + 'static, + D: DaService, { - rpc.register_async_method("sequencer_publishBatch", |_, batch_builder| async move { - batch_builder - .submit_batch() - .await - .map_err(|e| to_jsonrpsee_error_object(e, SEQUENCER_RPC_ERROR)) - })?; + rpc.register_async_method( + "sequencer_publishBatch", + |params, batch_builder| async move { + let mut params_iter = params.sequence(); + while let Some(tx) = params_iter.optional_next::>()? 
{ + batch_builder + .accept_tx(tx) + .map_err(|e| to_jsonrpsee_error_object(e, SEQUENCER_RPC_ERROR))?; + } + let num_txs = batch_builder + .submit_batch() + .await + .map_err(|e| to_jsonrpsee_error_object(e, SEQUENCER_RPC_ERROR))?; + + Ok::(format!("Submitted {} transactions", num_txs)) + }, + )?; rpc.register_method("sequencer_acceptTx", move |params, sequencer| { let tx: SubmitTransaction = params.one()?; let response = match sequencer.accept_tx(tx.body) { @@ -79,10 +98,11 @@ where Ok(()) } -pub fn get_sequencer_rpc(batch_builder: B, da_service: Arc) -> RpcModule> +/// Creates an RPC module with the sequencer's methods +pub fn get_sequencer_rpc(batch_builder: B, da_service: D) -> RpcModule> where B: BatchBuilder + Send + Sync + 'static, - D: DaService + Send + Sync + 'static, + D: DaService, { let sequencer = Sequencer::new(batch_builder, da_service); let mut rpc = RpcModule::new(sequencer); @@ -90,106 +110,46 @@ where rpc } +/// A transaction to be submitted to the rollup #[derive(serde::Serialize, serde::Deserialize)] pub struct SubmitTransaction { body: Vec, } impl SubmitTransaction { + /// Creates a new transaction for submission to the rollup pub fn new(body: Vec) -> Self { SubmitTransaction { body } } } +/// The result of submitting a transaction to the rollup #[derive(Debug, Clone, PartialEq, serde::Serialize, serde::Deserialize)] pub enum SubmitTransactionResponse { + /// Submission succeeded Registered, + /// Submission faileds Failed(String), } #[cfg(test)] mod tests { - use std::sync::Arc; - - use anyhow::bail; - use async_trait::async_trait; - use sov_rollup_interface::da::DaSpec; - use sov_rollup_interface::mocks::{MockDaSpec, TestBlock}; - use super::*; + use std::io::Read; - struct MockDaService { - submitted: Arc>>>, - } + use sov_rollup_interface::da::BlobReaderTrait; + use sov_rollup_interface::mocks::{MockAddress, MockDaService}; - impl MockDaService { - fn new() -> Self { - MockDaService { - submitted: Arc::new(Mutex::new(Vec::new())), - } - } - - fn is_empty(&self) -> bool { - self.submitted.lock().unwrap().is_empty() - } - - fn get_submitted(&self) -> Vec> { - self.submitted.lock().unwrap().clone() - } - } - - #[async_trait] - impl DaService for MockDaService { - type RuntimeConfig = (); - type Spec = MockDaSpec; - type FilteredBlock = TestBlock; - type Error = anyhow::Error; - - async fn new( - _config: Self::RuntimeConfig, - _chain_params: ::ChainParams, - ) -> Self { - MockDaService::new() - } - - async fn get_finalized_at(&self, _height: u64) -> Result { - todo!() - } - - async fn get_block_at(&self, _height: u64) -> Result { - todo!() - } - - fn extract_relevant_txs( - &self, - _block: &Self::FilteredBlock, - ) -> Vec<::BlobTransaction> { - todo!() - } - - async fn get_extraction_proof( - &self, - _block: &Self::FilteredBlock, - _blobs: &[::BlobTransaction], - ) -> ( - ::InclusionMultiProof, - ::CompletenessProof, - ) { - todo!() - } - - async fn send_transaction(&self, blob: &[u8]) -> Result<(), Self::Error> { - self.submitted.lock().unwrap().push(blob.to_vec()); - Ok(()) - } - } + use super::*; - struct MockBatchBuilder { - mempool: Vec>, + /// BatchBuilder used in tests. + pub struct MockBatchBuilder { + /// Mempool with transactions. + pub mempool: Vec>, } - /// It only takes the first byte of the tx, when submits it. - /// This allows to show effect of batch builder + // It only takes the first byte of the tx, when submits it. 
+ // This allows to show effect of batch builder impl BatchBuilder for MockBatchBuilder { fn accept_tx(&mut self, tx: Vec) -> anyhow::Result<()> { self.mempool.push(tx); @@ -198,7 +158,7 @@ mod tests { fn get_next_blob(&mut self) -> anyhow::Result>> { if self.mempool.is_empty() { - bail!("Mock mempool is empty"); + anyhow::bail!("Mock mempool is empty"); } let txs = std::mem::take(&mut self.mempool) .into_iter() @@ -217,12 +177,12 @@ mod tests { #[tokio::test] async fn test_submit_on_empty_mempool() { let batch_builder = MockBatchBuilder { mempool: vec![] }; - let da_service = Arc::new(MockDaService::new()); - assert!(da_service.is_empty()); + let da_service = MockDaService::new(MockAddress::default()); let rpc = get_sequencer_rpc(batch_builder, da_service.clone()); - let result: Result<(), jsonrpsee::core::Error> = - rpc.call("sequencer_publishBatch", [1u64]).await; + let arg: &[u8] = &[]; + let result: Result = + rpc.call("sequencer_publishBatch", arg).await; assert!(result.is_err()); let error = result.err().unwrap(); @@ -239,29 +199,31 @@ mod tests { let batch_builder = MockBatchBuilder { mempool: vec![tx1.clone(), tx2.clone()], }; - let da_service = Arc::new(MockDaService::new()); - assert!(da_service.is_empty()); + let da_service = MockDaService::new(MockAddress::default()); let rpc = get_sequencer_rpc(batch_builder, da_service.clone()); - let _: () = rpc.call("sequencer_publishBatch", [1u64]).await.unwrap(); + let arg: &[u8] = &[]; + let _: String = rpc.call("sequencer_publishBatch", arg).await.unwrap(); - assert!(!da_service.is_empty()); + let mut block = vec![]; + let mut submitted_block = da_service.get_block_at(0).await.unwrap(); + let _ = submitted_block.blobs[0] + .data_mut() + .read_to_end(&mut block) + .unwrap(); - let submitted = da_service.get_submitted(); - assert_eq!(1, submitted.len()); // First bytes of each tx, flattened let blob: Vec> = vec![vec![tx1[0]], vec![tx2[0]]]; let expected: Vec = borsh::to_vec(&blob).unwrap(); - assert_eq!(expected, submitted[0]); + assert_eq!(expected, block); } #[tokio::test] async fn test_accept_tx() { let batch_builder = MockBatchBuilder { mempool: vec![] }; - let da_service = Arc::new(MockDaService::new()); + let da_service = MockDaService::new(MockAddress::default()); let rpc = get_sequencer_rpc(batch_builder, da_service.clone()); - assert!(da_service.is_empty()); let tx: Vec = vec![1, 2, 3, 4, 5]; let request = SubmitTransaction { body: tx.clone() }; @@ -269,19 +231,20 @@ mod tests { rpc.call("sequencer_acceptTx", [request]).await.unwrap(); assert_eq!(SubmitTransactionResponse::Registered, result); - // Check that it got passed to DA service - assert!(da_service.is_empty()); - - let _: () = rpc.call("sequencer_publishBatch", [1u64]).await.unwrap(); + let arg: &[u8] = &[]; + let _: String = rpc.call("sequencer_publishBatch", arg).await.unwrap(); - assert!(!da_service.is_empty()); + let mut block = vec![]; + let mut submitted_block = da_service.get_block_at(0).await.unwrap(); + let _ = submitted_block.blobs[0] + .data_mut() + .read_to_end(&mut block) + .unwrap(); - let submitted = da_service.get_submitted(); - assert_eq!(1, submitted.len()); // First bytes of each tx, flattened let blob: Vec> = vec![vec![tx[0]]]; let expected: Vec = borsh::to_vec(&blob).unwrap(); - assert_eq!(expected, submitted[0]); + assert_eq!(expected, block); } #[tokio::test] diff --git a/full-node/sov-sequencer/src/utils.rs b/full-node/sov-sequencer/src/utils.rs new file mode 100644 index 000000000..aa59f500e --- /dev/null +++ 
b/full-node/sov-sequencer/src/utils.rs @@ -0,0 +1,58 @@ +use borsh::BorshSerialize; +use jsonrpsee::core::client::ClientT; +use jsonrpsee::http_client::{HttpClient, HttpClientBuilder}; +use jsonrpsee::types::ErrorObjectOwned; +use jsonrpsee::ws_client::{WsClient, WsClientBuilder}; + +/// A simple client for the sequencer RPC. +pub struct SimpleClient { + http_client: HttpClient, + ws_client: WsClient, +} + +impl SimpleClient { + /// Creates a new client at the given endpoint + pub async fn new(address: &str, port: u16) -> Result { + let http_client = HttpClientBuilder::default() + .build(format!("http://{address}:{port}")) + .unwrap(); + let ws_client = WsClientBuilder::default() + .build(&format!("ws://{address}:{port}")) + .await?; + Ok(Self { + http_client, + ws_client, + }) + } + + /// Sends a transaction to the sequencer for immediate publication. + pub async fn send_transaction(&self, tx: Tx) -> Result<(), anyhow::Error> { + let batch = vec![tx.try_to_vec()?]; + + let response: String = self + .http_client + .request("sequencer_publishBatch", batch) + .await?; + println!("response: {:?}", response); + Ok(()) + } + + /// Get a reference to the underlying [`HttpClient`] + pub fn http(&self) -> &HttpClient { + &self.http_client + } + + /// Get a reference to the underlying [`WsClient`] + pub fn ws(&self) -> &WsClient { + &self.ws_client + } +} + +/// Creates an jsonrpsee ErrorObject +pub fn to_jsonrpsee_error_object(err: impl ToString, message: &str) -> ErrorObjectOwned { + ErrorObjectOwned::owned( + jsonrpsee::types::error::UNKNOWN_ERROR_CODE, + message, + Some(err.to_string()), + ) +} diff --git a/full-node/sov-stf-runner/Cargo.toml b/full-node/sov-stf-runner/Cargo.toml index 9eb1a8d8e..446b9f775 100644 --- a/full-node/sov-stf-runner/Cargo.toml +++ b/full-node/sov-stf-runner/Cargo.toml @@ -25,17 +25,26 @@ tracing-subscriber = "0.3.17" sov-db = { path = "../db/sov-db" } sov-rollup-interface = { path = "../../rollup-interface", version = "0.1" } -sov-state = { path = "../../module-system/sov-state", version = "0.1"} -sov-modules-api = { path = "../../module-system/sov-modules-api", version = "0.1" } -jupiter = { path = "../../adapters/celestia" } +sov-state = { path = "../../module-system/sov-state", version = "0.1", features = ["native"] } +sov-modules-api = { path = "../../module-system/sov-modules-api", version = "0.1", features = ["native"] } +celestia = { path = "../../adapters/celestia", features = ["native"] } [dev-dependencies] tempfile = { workspace = true } rand = { workspace = true } -sov-election = { path = "../../module-system/module-implementations/examples/sov-election", default-features = false } -sov-sequencer-registry = { path = "../../module-system/module-implementations/sov-sequencer-registry", default-features = false } -sov-bank = { path = "../../module-system/module-implementations/sov-bank", default-features = false } -sov-modules-stf-template = { path = "../../module-system/sov-modules-stf-template" } -sov-value-setter = { path = "../../module-system/module-implementations/examples/sov-value-setter", default-features = false } -sov-accounts = { path = "../../module-system/module-implementations/sov-accounts", default-features = false } +sov-sequencer-registry = { path = "../../module-system/module-implementations/sov-sequencer-registry", features = ["native"] } +sov-bank = { path = "../../module-system/module-implementations/sov-bank", features = ["native"] } +sov-modules-stf-template = { path = "../../module-system/sov-modules-stf-template", features = 
["native"] } +sov-value-setter = { path = "../../module-system/module-implementations/examples/sov-value-setter", features = ["native"] } +sov-accounts = { path = "../../module-system/module-implementations/sov-accounts", features = ["native"] } + +#[features] +#default = [] +#native = [ +# "sov-sequencer-registry/native", +# "sov-bank/native", +# "sov-value-setter/native", +# "sov-accounts/native", +# "jupiter/native", +#] diff --git a/full-node/sov-stf-runner/src/batch_builder.rs b/full-node/sov-stf-runner/src/batch_builder.rs index 0af49152e..8c29a0144 100644 --- a/full-node/sov-stf-runner/src/batch_builder.rs +++ b/full-node/sov-stf-runner/src/batch_builder.rs @@ -3,11 +3,12 @@ use std::io::Cursor; use anyhow::bail; use borsh::BorshDeserialize; +use sov_modules_api::digest::Digest; use sov_modules_api::transaction::Transaction; -use sov_modules_api::{Context, DispatchCall, PublicKey}; +use sov_modules_api::{Context, DispatchCall, PublicKey, Spec}; use sov_rollup_interface::services::batch_builder::BatchBuilder; use sov_state::WorkingSet; -use tracing::warn; +use tracing::{info, warn}; /// BatchBuilder that creates batches of transactions in the order they were submitted /// Only transactions that were successfully dispatched are included. @@ -110,6 +111,11 @@ where // In order to fill batch as big as possible, // we only check if valid tx can fit in the batch. if current_batch_size + tx_len <= self.max_batch_size_bytes { + let tx_hash: [u8; 32] = ::Hasher::digest(&raw_tx[..]).into(); + info!( + "Tx with hash 0x{} has been included in the batch", + hex::encode(tx_hash) + ); txs.push(raw_tx); } else { self.mempool.push_front(raw_tx); @@ -141,10 +147,10 @@ mod tests { use sov_modules_api::default_signature::DefaultPublicKey; use sov_modules_api::macros::DefaultRuntime; use sov_modules_api::transaction::Transaction; - use sov_modules_api::{Context, DispatchCall, Genesis, MessageCodec, PrivateKey}; + use sov_modules_api::{Context, DispatchCall, EncodeCall, Genesis, MessageCodec, PrivateKey}; use sov_rollup_interface::services::batch_builder::BatchBuilder; use sov_state::{DefaultStorageSpec, ProverStorage, Storage}; - use sov_value_setter::{CallMessage, ValueSetterConfig}; + use sov_value_setter::{CallMessage, ValueSetter, ValueSetterConfig}; use tempfile::TempDir; use super::*; @@ -167,9 +173,9 @@ mod tests { fn generate_valid_tx(private_key: &DefaultPrivateKey, value: u32) -> Vec { let msg = CallMessage::SetValue(value); - let msg = TestRuntime::::encode_value_setter_call(msg); + let msg = as EncodeCall>>::encode_call(msg); - Transaction::new_signed_tx(private_key, msg, 1) + Transaction::::new_signed_tx(private_key, msg, 1) .try_to_vec() .unwrap() } @@ -184,7 +190,7 @@ mod tests { fn generate_signed_tx_with_invalid_payload(private_key: &DefaultPrivateKey) -> Vec { let msg = generate_random_bytes(); - Transaction::new_signed_tx(private_key, msg, 1) + Transaction::::new_signed_tx(private_key, msg, 1) .try_to_vec() .unwrap() } diff --git a/full-node/sov-stf-runner/src/config.rs b/full-node/sov-stf-runner/src/config.rs index d17338b19..f0c16e4c8 100644 --- a/full-node/sov-stf-runner/src/config.rs +++ b/full-node/sov-stf-runner/src/config.rs @@ -1,6 +1,19 @@ +use std::fs::File; +use std::io::Read; +use std::path::Path; + +use serde::de::DeserializeOwned; use serde::Deserialize; +pub use sov_state::config::Config as StorageConfig; -use crate::runner_config::Config as RunnerConfig; +/// Configuration for StateTransitionRunner. 
+#[derive(Debug, Clone, PartialEq, Deserialize)] +pub struct RunnerConfig { + /// DA start height. + pub start_height: u64, + /// RPC configuration. + pub rpc_config: RpcConfig, +} /// RPC configuration. #[derive(Debug, Clone, PartialEq, Deserialize)] @@ -13,13 +26,26 @@ pub struct RpcConfig { /// Rollup Configuration #[derive(Debug, Clone, PartialEq, Deserialize)] -pub struct RollupConfig { - /// DA start height. - pub start_height: u64, +pub struct RollupConfig { /// Runner configuration. + pub storage: StorageConfig, + /// TODO pub runner: RunnerConfig, - /// RPC configuration. - pub rpc_config: RpcConfig, + /// DA configuration. + pub da: DaServiceConfig, +} + +/// Reads toml file as a specific type. +pub fn from_toml_path, R: DeserializeOwned>(path: P) -> anyhow::Result { + let mut contents = String::new(); + { + let mut file = File::open(path)?; + file.read_to_string(&mut contents)?; + } + + let result: R = toml::from_str(&contents)?; + + Ok(result) } #[cfg(test)] @@ -30,7 +56,6 @@ mod tests { use tempfile::NamedTempFile; use super::*; - use crate::runner_config::{from_toml_path, StorageConfig}; fn create_config_from(content: &str) -> NamedTempFile { let mut config_file = NamedTempFile::new().unwrap(); @@ -41,37 +66,40 @@ mod tests { #[test] fn test_correct_config() { let config = r#" - start_height = 31337 [da] celestia_rpc_auth_token = "SECRET_RPC_TOKEN" celestia_rpc_address = "http://localhost:11111/" max_celestia_response_body_size = 980 - [runner.storage] + [storage] path = "/tmp" - [rpc_config] + [runner] + start_height = 1 + [runner.rpc_config] bind_host = "127.0.0.1" bind_port = 12345 "#; let config_file = create_config_from(config); - let config: RollupConfig = from_toml_path(config_file.path()).unwrap(); + let config: RollupConfig = + from_toml_path(config_file.path()).unwrap(); let expected = RollupConfig { - start_height: 31337, - da: DaServiceConfig { + runner: RunnerConfig { + start_height: 1, + rpc_config: RpcConfig { + bind_host: "127.0.0.1".to_string(), + bind_port: 12345, + }, + }, + + da: celestia::DaServiceConfig { celestia_rpc_auth_token: "SECRET_RPC_TOKEN".to_string(), celestia_rpc_address: "http://localhost:11111/".into(), max_celestia_response_body_size: 980, celestia_rpc_timeout_seconds: 60, }, - runner: RunnerConfig { - storage: StorageConfig { - path: PathBuf::from("/tmp"), - }, - }, - rpc_config: RpcConfig { - bind_host: "127.0.0.1".to_string(), - bind_port: 12345, + storage: StorageConfig { + path: PathBuf::from("/tmp"), }, }; assert_eq!(config, expected); diff --git a/full-node/sov-stf-runner/src/ledger_rpc.rs b/full-node/sov-stf-runner/src/ledger_rpc.rs index 18fb52111..4b0be4d73 100644 --- a/full-node/sov-stf-runner/src/ledger_rpc.rs +++ b/full-node/sov-stf-runner/src/ledger_rpc.rs @@ -1,4 +1,5 @@ -use jsonrpsee::RpcModule; +use futures::future::{select, Either}; +use jsonrpsee::{RpcModule, SubscriptionMessage}; use serde::de::DeserializeOwned; use serde::Serialize; use sov_db::ledger_db::LedgerDB; @@ -12,8 +13,8 @@ const LEDGER_RPC_ERROR: &str = "LEDGER_RPC_ERROR"; use self::query_args::{extract_query_args, QueryArgs}; /// Registers the following RPC methods -/// - `ledger_head` -/// Example Query: `curl -X POST -H "Content-Type: application/json" -d '{"jsonrpc":"2.0","method":"ledger_head","params":[],"id":1}' http://127.0.0.1:12345` +/// - `ledger_getHead` +/// Example Query: `curl -X POST -H "Content-Type: application/json" -d '{"jsonrpc":"2.0","method":"ledger_getHead","params":[],"id":1}' http://127.0.0.1:12345` /// - ledger_getSlots /// Example 
Query: `curl -X POST -H "Content-Type: application/json" -d '{"jsonrpc":"2.0","method":"ledger_getSlots","params":[[1, 2], "Compact"],"id":1}' http://127.0.0.1:12345` /// - ledger_getBatches @@ -57,6 +58,42 @@ fn register_ledger_rpc_methods< .map_err(|e| to_jsonrpsee_error_object(e, LEDGER_RPC_ERROR)) })?; + rpc.register_subscription( + "ledger_subscribeSlots", + "ledger_slotProcessed", + "ledger_unsubscribeSlots", + |_, pending_subscription, db| async move { + // Register with the ledgerDB to receive callbacks + let mut rx = db + .subscribe_slots() + .map_err(|e| to_jsonrpsee_error_object(e, LEDGER_RPC_ERROR))?; + + // Accept the subscription. This message is sent immediately + let subscription = pending_subscription.accept().await?; + let closed = subscription.closed(); + futures::pin_mut!(closed); + + // This loop continues running until the subscription ends. + loop { + let next_msg = rx.recv(); + futures::pin_mut!(next_msg); + match select(closed, next_msg).await { + // If the subscription closed, we're done + Either::Left(_) => break Ok(()), + // Otherwise, we need to send the message + Either::Right((outcome, channel_closing_future)) => { + let msg = SubscriptionMessage::from_json(&outcome?)?; + // Sending only fails if the subscriber has canceled, so we can stop sending messages + if subscription.send(msg).await.is_err() { + break Ok(()); + } + closed = channel_closing_future; + } + } + } + }, + )?; + Ok(()) } diff --git a/full-node/sov-stf-runner/src/lib.rs b/full-node/sov-stf-runner/src/lib.rs index 173e2cccf..7ae3e4307 100644 --- a/full-node/sov-stf-runner/src/lib.rs +++ b/full-node/sov-stf-runner/src/lib.rs @@ -3,20 +3,20 @@ mod batch_builder; mod config; -pub use config::RpcConfig; -mod runner_config; use std::net::SocketAddr; + +pub use config::RpcConfig; mod ledger_rpc; pub use batch_builder::FiFoStrictBatchBuilder; -pub use config::RollupConfig; +pub use config::{from_toml_path, RollupConfig, RunnerConfig, StorageConfig}; use jsonrpsee::RpcModule; pub use ledger_rpc::get_ledger_rpc; -pub use runner_config::{from_toml_path, Config, StorageConfig}; use sov_db::ledger_db::{LedgerDB, SlotCommit}; -use sov_rollup_interface::da::DaSpec; +use sov_rollup_interface::da::{BlobReaderTrait, DaSpec}; use sov_rollup_interface::services::da::DaService; use sov_rollup_interface::stf::StateTransitionFunction; use sov_rollup_interface::zk::Zkvm; +use tokio::sync::oneshot; use tracing::{debug, info}; type StateRoot = = >::InitialState; /// Combines `DaService` with `StateTransitionFunction` and "runs" the rollup. -pub struct StateTransitionRunner +pub struct StateTransitionRunner where - DA: DaService, + Da: DaService, Vm: Zkvm, - ST: StateTransitionFunction::Spec as DaSpec>::BlobTransaction>, + ST: StateTransitionFunction< + Vm, + <::Spec as DaSpec>::BlobTransaction, + Condition = ::ValidityCondition, + >, { start_height: u64, - da_service: DA, + da_service: Da, app: ST, ledger_db: LedgerDB, - state_root: StateRoot, + state_root: StateRoot, listen_address: SocketAddr, } -impl StateTransitionRunner +impl StateTransitionRunner where - DA: DaService + Clone + Send + Sync + 'static, + Da: DaService + Clone + Send + Sync + 'static, Vm: Zkvm, - ST: StateTransitionFunction::Spec as DaSpec>::BlobTransaction>, + ST: StateTransitionFunction< + Vm, + <::Spec as DaSpec>::BlobTransaction, + Condition = ::ValidityCondition, + >, { /// Creates a new `StateTransitionRunner` runner. 
pub fn new( - rollup_config: RollupConfig, - da_service: DA, + runner_config: RunnerConfig, + da_service: Da, ledger_db: LedgerDB, mut app: ST, should_init_chain: bool, - genesis_config: InitialState, + genesis_config: InitialState, ) -> Result { - let rpc_config = rollup_config.rpc_config; + let rpc_config = runner_config.rpc_config; let prev_state_root = { // Check if the rollup has previously been initialized if should_init_chain { info!("No history detected. Initializing chain..."); - app.init_chain(genesis_config); + let ret_hash = app.init_chain(genesis_config); info!("Chain initialization is done."); + ret_hash } else { debug!("Chain is already initialized. Skipping initialization."); + app.get_current_state_root()? } - - let res = app.apply_slot(Default::default(), []); - // HACK: Tell the rollup that you're running an empty DA layer block so that it will return the latest state root. - // This will be removed shortly. - res.state_root }; let listen_address = SocketAddr::new(rpc_config.bind_host.parse()?, rpc_config.bind_port); @@ -82,7 +87,7 @@ where // Start the main rollup loop let item_numbers = ledger_db.get_next_items_numbers(); let last_slot_processed_before_shutdown = item_numbers.slot_number - 1; - let start_height = rollup_config.start_height + last_slot_processed_before_shutdown; + let start_height = runner_config.start_height + last_slot_processed_before_shutdown; Ok(Self { start_height, @@ -94,8 +99,12 @@ where }) } - /// Starts an rpc server with provided rpc methods. - pub async fn start_rpc_server(&self, methods: RpcModule<()>) { + /// Starts a RPC server with provided rpc methods. + pub async fn start_rpc_server( + &self, + methods: RpcModule<()>, + channel: Option>, + ) { let listen_address = self.listen_address; let _handle = tokio::spawn(async move { let server = jsonrpsee::server::ServerBuilder::default() @@ -103,7 +112,12 @@ where .await .unwrap(); - info!("Starting RPC server at {} ", server.local_addr().unwrap()); + let bound_address = server.local_addr().unwrap(); + if let Some(channel) = channel { + channel.send(bound_address).unwrap(); + } + info!("Starting RPC server at {} ", &bound_address); + let _server_handle = server.start(methods).unwrap(); futures::future::pending::<()>().await; }); @@ -112,21 +126,30 @@ where /// Runs the rollup. pub async fn run(&mut self) -> Result<(), anyhow::Error> { for height in self.start_height.. 
{ - info!("Requesting data for height {}", height,); + debug!("Requesting data for height {}", height,); let filtered_block = self.da_service.get_finalized_at(height).await?; - let mut blobs = self.da_service.extract_relevant_txs(&filtered_block); info!( - "Extracted {} relevant blobs at height {}", + "Extracted {} relevant blobs at height {}: {:?}", blobs.len(), - height + height, + blobs + .iter() + .map(|b| format!( + "sequencer={} blob_hash=0x{}", + b.sender(), + hex::encode(b.hash()) + )) + .collect::>() ); let mut data_to_commit = SlotCommit::new(filtered_block.clone()); - let slot_result = self.app.apply_slot(Default::default(), &mut blobs); + let slot_result = self + .app + .apply_slot(Default::default(), &filtered_block, &mut blobs); for receipt in slot_result.batch_receipts { data_to_commit.add_batch(receipt); } diff --git a/full-node/sov-stf-runner/src/runner_config.rs b/full-node/sov-stf-runner/src/runner_config.rs deleted file mode 100644 index fb7d4ebdc..000000000 --- a/full-node/sov-stf-runner/src/runner_config.rs +++ /dev/null @@ -1,93 +0,0 @@ -use std::fs::File; -use std::io::Read; -use std::path::Path; - -use serde::de::DeserializeOwned; -pub use sov_state::config::Config as StorageConfig; - -/// Reads toml file as a specific type. -pub fn from_toml_path, R: DeserializeOwned>(path: P) -> anyhow::Result { - let mut contents = String::new(); - { - let mut file = File::open(path)?; - file.read_to_string(&mut contents)?; - } - - let result: R = toml::from_str(&contents)?; - - Ok(result) -} - -/// StateTransitionRunner configuration -#[derive(serde::Deserialize, Debug, Clone, PartialEq, Eq)] -pub struct Config { - /// Storage config - pub storage: StorageConfig, -} - -#[cfg(test)] -mod tests { - use std::io::Write; - use std::path::PathBuf; - - use tempfile::{tempdir, NamedTempFile}; - - use super::*; - - fn create_config_from(content: &str) -> NamedTempFile { - let mut config_file = NamedTempFile::new().unwrap(); - config_file.write_all(content.as_bytes()).unwrap(); - config_file - } - - #[test] - fn test_correct_config() { - let config = r#" - [storage] - path = "/tmp" - "#; - - let config_file = create_config_from(config); - - let config: Config = from_toml_path(config_file.path()).unwrap(); - let expected = Config { - storage: StorageConfig { - path: PathBuf::from("/tmp"), - }, - }; - assert_eq!(config, expected); - } - - #[test] - fn test_incorrect_path() { - // Not closed quote - let config = r#" - [storage] - path = "/tmp - "#; - let config_file = create_config_from(config); - - let config: anyhow::Result = from_toml_path(config_file.path()); - - assert!(config.is_err()); - let error = config.unwrap_err().to_string(); - let expected_error = format!( - "{}{}{}", - "TOML parse error at line 3, column 25\n |\n3 |", - " path = \"/tmp\n | ^\n", - "invalid basic string\n" - ); - assert_eq!(error, expected_error); - } - // - #[test] - fn test_non_existent_config() { - let dir = tempdir().unwrap(); - let path = dir.path().join("non_existing_config.toml"); - - let config: anyhow::Result = from_toml_path(path); - - assert!(config.is_err()); - assert!(config.unwrap_err().to_string().ends_with("(os error 2)")); - } -} diff --git a/fuzz/Cargo.toml b/fuzz/Cargo.toml new file mode 100644 index 000000000..16a41c54f --- /dev/null +++ b/fuzz/Cargo.toml @@ -0,0 +1,91 @@ +[package] +name = "sovereign-sdk-fuzz" +version = "0.1.0" +publish = false +edition = "2021" + +[package.metadata] +cargo-fuzz = true + +[dependencies] +libfuzzer-sys = "0.4" +serde_json = "1" +tempfile = "3" +rand = 
"0.8" + +# Sovereign-maintained dependencies. +celestia = { path = "../adapters/celestia" } +sov-modules-api = { path = "../module-system/sov-modules-api", features = ["arbitrary", "native"] } +sov-accounts = { path = "../module-system/module-implementations/sov-accounts", features = ["arbitrary", "native"] } +sov-bank = { path = "../module-system/module-implementations/sov-bank", features = ["native"] } +sov-state = { path = "../module-system/sov-state" } + +# Prevent this from interfering with workspaces. +[workspace] +members = ["."] + +[[bin]] +name = "namespace_group_from_b64" +path = "fuzz_targets/namespace_group_from_b64.rs" +test = false +doc = false + +[[bin]] +name = "parse_address" +path = "fuzz_targets/parse_address.rs" +test = false +doc = false + +[[bin]] +name = "address_bech_32_parse_serde" +path = "fuzz_targets/address_bech_32_parse_serde.rs" +test = false +doc = false + +[[bin]] +name = "address_bech_32_try_from_bytes" +path = "fuzz_targets/address_bech_32_try_from_bytes.rs" +test = false +doc = false + +[[bin]] +name = "share_deserialize" +path = "fuzz_targets/share_deserialize.rs" +test = false +doc = false + +[[bin]] +name = "bank_call" +path = "fuzz_targets/bank_call.rs" +test = false +doc = false + +[[bin]] +name = "accounts_call" +path = "fuzz_targets/accounts_call.rs" +test = false +doc = false + +[[bin]] +name = "accounts_call_random" +path = "fuzz_targets/accounts_call_random.rs" +test = false +doc = false + +[[bin]] +name = "bank_parse_call_message" +path = "fuzz_targets/bank_parse_call_message.rs" +test = false +doc = false + +[[bin]] +name = "accounts_parse_call_message" +path = "fuzz_targets/accounts_parse_call_message.rs" +test = false +doc = false + +[[bin]] +name = "accounts_parse_call_message_random" +path = "fuzz_targets/accounts_parse_call_message_random.rs" +test = false +doc = false diff --git a/fuzz/Makefile b/fuzz/Makefile new file mode 100644 index 000000000..f0e01a141 --- /dev/null +++ b/fuzz/Makefile @@ -0,0 +1,33 @@ +.PHONY: help + +BINARIES := $(shell sed -n '/^\[\[bin\]\]/,/^$$/ { /name\s*=\s*"\(.*\)"/s//\1/p }' Cargo.toml) +PROFILE ?= debug +ARGS ?= + +help: ## Display this help message + @awk 'BEGIN {FS = ":.*?## "} /^[a-zA-Z_-]+:.*?## / {printf "\033[36m%-30s\033[0m %s\n", $$1, $$2}' $(MAKEFILE_LIST) + +check: ## Checks that fuzz member compiles + cargo check + +build-target: ## Build the target fuzz + cargo rustc --bin $(TARGET) $(ARGS) -- \ + -C debuginfo=full \ + -C debug-assertions \ + -C passes='sancov-module' \ + -C llvm-args='-sanitizer-coverage-level=3' \ + -C llvm-args='-sanitizer-coverage-inline-8bit-counters' \ + -Z sanitizer=address + +build: ## Build the fuzz targets + @for t in $(BINARIES); do \ + $(MAKE) build-target TARGET=$$t; \ + done + +targets: ## Prints all fuzz targets + @for t in $(BINARIES); do \ + echo $$t; \ + done + +run: build-target ## Run the fuzz target + ./target/$(PROFILE)/$(TARGET) -artifact_prefix=artifacts/ diff --git a/fuzz/README.md b/fuzz/README.md new file mode 100644 index 000000000..78efd5e93 --- /dev/null +++ b/fuzz/README.md @@ -0,0 +1,66 @@ +# LLVM's libFuzzer + +This implementation is built upon [libfuzzer-sys](https://crates.io/crates/libfuzzer-sys). For more information, check [LLVM](https://llvm.org/docs/LibFuzzer.html) documentation. + +## Build + +To build the fuzz target, run the following command: + +```sh +make build +``` + +You can build in release mode via: + +```sh +make build ARGS=--release +``` + +Some special parameters are required to build the fuzz target. 
As an example, let's build the `namespace_group_from_b64` fuzz target: + +```sh +cargo rustc --bin namespace_group_from_b64 \ + --manifest-path fuzz/Cargo.toml -- \ + -C debuginfo=full \ + -C debug-assertions \ + -C passes='sancov-module' \ + -C llvm-args='-sanitizer-coverage-level=3' \ + -C llvm-args='-sanitizer-coverage-inline-8bit-counters' \ + -Z sanitizer=address +``` + +We don't set these options by default because they depend on the `rustc` version and might change in the future. For the list of available targets, check [Cargo.toml](./fuzz/Cargo.toml) under the `bin` section. We are currently not using optimized binaries because optimization might affect how rocksdb is built. If you want to activate optimization, add `--release` after `rustc`. + +Unfortunately, rustc doesn't support the `--bins` argument to build multiple binaries with custom compiler directives. We have to build every target individually. Below is a convenience [sed](https://www.gnu.org/software/sed/) script to build all targets. + +```sh +for t in `sed -n '/^\[\[bin\]\]/,/^$/ { /name\s*=\s*"\(.*\)"/s//\1/p }' fuzz/Cargo.toml` ; do cargo rustc --bin $t --manifest-path fuzz/Cargo.toml -- -C debuginfo=full -C debug-assertions -C passes='sancov-module' -C llvm-args='-sanitizer-coverage-level=3' -C llvm-args='-sanitizer-coverage-inline-8bit-counters' -Z sanitizer=address ; done +``` + +## Run + +Here is a sample command to fuzz the `namespace_group_from_b64` target: + +```sh +make run TARGET=namespace_group_from_b64 +``` + +To run in release mode: + +```sh +make run TARGET=namespace_group_from_b64 PROFILE=release +``` + +To list the available targets, run: + +```sh +make targets +``` + +Once built, you can run the targets under the `fuzz/target/` directory. + +```sh +./fuzz/target/debug/namespace_group_from_b64 +``` + +It will run the fuzzer until you interrupt the command (i.e. `CTRL-C`) and will record crashes under `fuzz/artifacts/*/crash-*`. If you find a crash, please report a new [bug](https://github.com/Sovereign-Labs/sovereign-sdk/issues/new?assignees=&labels=&projects=&template=bug_report.md&title=).
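A note that may help when triaging the crash artifacts committed in this change: a libFuzzer binary replays any input files passed as arguments instead of fuzzing, so a recorded crash can usually be reproduced by pointing the built target at the artifact. The command below is a sketch; the `<hash>` placeholder stands for an actual file name under `fuzz/artifacts/`.

```sh
# Replay a recorded crash input once and exit (no fuzzing).
# Replace <hash> with a real file name from fuzz/artifacts/.
./fuzz/target/debug/namespace_group_from_b64 fuzz/artifacts/fuzz_namespace_group_from_b64/crash-<hash>
```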
diff --git a/fuzz/artifacts/accounts_call/crash-da39a3ee5e6b4b0d3255bfef95601890afd80709 b/fuzz/artifacts/accounts_call/crash-da39a3ee5e6b4b0d3255bfef95601890afd80709 new file mode 100644 index 000000000..e69de29bb diff --git a/fuzz/artifacts/bank_call/crash-55fd4bd554ac3dcc6a2a2719335bd95869b9f6f5 b/fuzz/artifacts/bank_call/crash-55fd4bd554ac3dcc6a2a2719335bd95869b9f6f5 new file mode 100644 index 000000000..f871261f0 Binary files /dev/null and b/fuzz/artifacts/bank_call/crash-55fd4bd554ac3dcc6a2a2719335bd95869b9f6f5 differ diff --git a/fuzz/artifacts/crash-03ff9dbf9c64fb7c125f0aec0b8b80a972907eb8 b/fuzz/artifacts/crash-03ff9dbf9c64fb7c125f0aec0b8b80a972907eb8 new file mode 100644 index 000000000..21499eb41 --- /dev/null +++ b/fuzz/artifacts/crash-03ff9dbf9c64fb7c125f0aec0b8b80a972907eb8 @@ -0,0 +1,2 @@ + + \ No newline at end of file diff --git a/fuzz/artifacts/crash-1261a81d957460d69943ae02e30528372736fc15 b/fuzz/artifacts/crash-1261a81d957460d69943ae02e30528372736fc15 new file mode 100644 index 000000000..d8698bc8c --- /dev/null +++ b/fuzz/artifacts/crash-1261a81d957460d69943ae02e30528372736fc15 @@ -0,0 +1 @@ +ËËËËËËËËËËËËËËËËËËËËËËËËËËËËËËËËËŠe' \ No newline at end of file diff --git a/fuzz/artifacts/crash-1f039bacf8f860eb5507d9ee3a9879dfe316cf5e b/fuzz/artifacts/crash-1f039bacf8f860eb5507d9ee3a9879dfe316cf5e new file mode 100644 index 000000000..25fa3ed32 --- /dev/null +++ b/fuzz/artifacts/crash-1f039bacf8f860eb5507d9ee3a9879dfe316cf5e @@ -0,0 +1,2 @@ +››› +ÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿ \ No newline at end of file diff --git a/fuzz/artifacts/crash-44796f5e67307b5b18e648fdd016e885ebf50da9 b/fuzz/artifacts/crash-44796f5e67307b5b18e648fdd016e885ebf50da9 new file mode 100644 index 000000000..428f2581d --- /dev/null +++ b/fuzz/artifacts/crash-44796f5e67307b5b18e648fdd016e885ebf50da9 @@ -0,0 +1 @@ +>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>> \ No newline at end of file diff --git a/fuzz/artifacts/crash-5e665cf3759c0aabfc3e898f6294840582421b32 b/fuzz/artifacts/crash-5e665cf3759c0aabfc3e898f6294840582421b32 new file mode 100644 index 000000000..c1558a21a --- /dev/null +++ b/fuzz/artifacts/crash-5e665cf3759c0aabfc3e898f6294840582421b32 @@ -0,0 +1,2 @@ + +888888888888888ª88888888888888888888 \ No newline at end of file diff --git a/fuzz/artifacts/crash-6929e000e5891085cf17a75de96dd505b6499083 b/fuzz/artifacts/crash-6929e000e5891085cf17a75de96dd505b6499083 new file mode 100644 index 000000000..e71d37245 --- /dev/null +++ b/fuzz/artifacts/crash-6929e000e5891085cf17a75de96dd505b6499083 @@ -0,0 +1,2 @@ +ÖÖÖÖ~ÖÖÖÖÖÖÖÖÖÖÖÖÖÖÖÖÖÖÖÖÖÖÖÖÖÖÖÖ +¥ \ No newline at end of file diff --git a/fuzz/artifacts/crash-816595d1445fb45b609cca5417ba5f537c74ab43 b/fuzz/artifacts/crash-816595d1445fb45b609cca5417ba5f537c74ab43 new file mode 100644 index 000000000..7c80f26e7 Binary files /dev/null and b/fuzz/artifacts/crash-816595d1445fb45b609cca5417ba5f537c74ab43 differ diff --git a/fuzz/artifacts/crash-98daf6c68def387cffda2937a318cdfd9e956627 b/fuzz/artifacts/crash-98daf6c68def387cffda2937a318cdfd9e956627 new file mode 100644 index 000000000..fa179e477 Binary files /dev/null and b/fuzz/artifacts/crash-98daf6c68def387cffda2937a318cdfd9e956627 differ diff --git a/fuzz/artifacts/crash-b4a051390ba551b5349b6f233930f32f9e16bd85 b/fuzz/artifacts/crash-b4a051390ba551b5349b6f233930f32f9e16bd85 new file mode 100644 index 000000000..e74c45da3 Binary files /dev/null and b/fuzz/artifacts/crash-b4a051390ba551b5349b6f233930f32f9e16bd85 differ diff --git a/fuzz/artifacts/crash-e5f45f193d720ae7264383fcc0763cd945120fc1 
b/fuzz/artifacts/crash-e5f45f193d720ae7264383fcc0763cd945120fc1 new file mode 100644 index 000000000..bd96e04d8 --- /dev/null +++ b/fuzz/artifacts/crash-e5f45f193d720ae7264383fcc0763cd945120fc1 @@ -0,0 +1,4 @@ + + + +òòòòòòòòòòòòòòòòòòòòòòòòòòòòòòòò \ No newline at end of file diff --git a/fuzz/artifacts/fuzz_namespace_group_from_b64/crash-2221b8862d9d37ec7c714a5df89b570c1356cdba b/fuzz/artifacts/fuzz_namespace_group_from_b64/crash-2221b8862d9d37ec7c714a5df89b570c1356cdba new file mode 100644 index 000000000..c17cfdc13 --- /dev/null +++ b/fuzz/artifacts/fuzz_namespace_group_from_b64/crash-2221b8862d9d37ec7c714a5df89b570c1356cdba @@ -0,0 +1 @@ +zgggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRzggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggegggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggRRRRggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggg4 \ No newline at end of file diff --git a/fuzz/artifacts/fuzz_namespace_group_from_b64/crash-da39a3ee5e6b4b0d3255bfef95601890afd80709 b/fuzz/artifacts/fuzz_namespace_group_from_b64/crash-da39a3ee5e6b4b0d3255bfef95601890afd80709 new file mode 100644 index 000000000..e69de29bb diff --git a/fuzz/artifacts/fuzz_namespace_group_from_b64/crash-ef43788e032a15a049005ce4fd839b3777597338 b/fuzz/artifacts/fuzz_namespace_group_from_b64/crash-ef43788e032a15a049005ce4fd839b3777597338 new file mode 100644 index 000000000..80ed51c71 --- /dev/null +++ b/fuzz/artifacts/fuzz_namespace_group_from_b64/crash-ef43788e032a15a049005ce4fd839b3777597338 @@ -0,0 +1 @@ +zggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggeggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRRgggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggRRRRRRRRRRRRRRRRRRRRRRRRRRRgggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggg4 \ No newline at end of file diff --git a/fuzz/artifacts/slow-unit-727720324b031a1f6408810b07628b558866fb00 b/fuzz/artifacts/slow-unit-727720324b031a1f6408810b07628b558866fb00 new file mode 100644 index 000000000..c94d0e853 --- /dev/null +++ b/fuzz/artifacts/slow-unit-727720324b031a1f6408810b07628b558866fb00 @@ -0,0 +1,2 @@ + +ïïï±±±±±±±±±±±±±±±³³³³³³³³³³“³³³ïï \ No newline at end of file diff --git a/fuzz/artifacts/slow-unit-a5b5c4ac13d3d8cbc8b4696cb715160998407a8f b/fuzz/artifacts/slow-unit-a5b5c4ac13d3d8cbc8b4696cb715160998407a8f new file mode 100644 index 000000000..7fa179b79 --- /dev/null +++ b/fuzz/artifacts/slow-unit-a5b5c4ac13d3d8cbc8b4696cb715160998407a8f @@ -0,0 +1 @@ +Rÿÿÿÿ diff --git a/fuzz/fuzz_targets/accounts_call.rs b/fuzz/fuzz_targets/accounts_call.rs new file mode 100644 index 000000000..6dff3c2d1 --- /dev/null +++ b/fuzz/fuzz_targets/accounts_call.rs @@ 
-0,0 +1,84 @@ +#![no_main] + +use std::collections::{HashMap, HashSet}; + +use libfuzzer_sys::arbitrary::{Arbitrary, Unstructured}; +use libfuzzer_sys::{fuzz_target, Corpus}; +use rand::rngs::StdRng; +use rand::seq::SliceRandom; +use rand::{RngCore, SeedableRng}; +use sov_accounts::{AccountConfig, Accounts, CallMessage, UPDATE_ACCOUNT_MSG}; +use sov_modules_api::default_context::DefaultContext; +use sov_modules_api::default_signature::private_key::DefaultPrivateKey; +use sov_modules_api::{Context, Module, PrivateKey, Spec}; +use sov_state::WorkingSet; + +type C = DefaultContext; + +// Check well-formed calls +fuzz_target!(|input: (u16, [u8; 32], Vec)| -> Corpus { + let (iterations, seed, keys) = input; + if iterations < 1024 { + // pointless to setup & run a small iterations count + return Corpus::Reject; + } + + // this is a workaround to the restriction where `ed25519_dalek::Keypair` doesn't implement + // `Eq` or `Sort`; reduce the set to a unique collection of keys so duplicated accounts are not + // used. + let keys = keys + .into_iter() + .map(|k| (k.as_hex(), k)) + .collect::>() + .into_values() + .collect::>(); + + if keys.is_empty() { + return Corpus::Reject; + } + + let rng = &mut StdRng::from_seed(seed); + let mut seed = [0u8; 32]; + let tmpdir = tempfile::tempdir().unwrap(); + let storage = ::Storage::with_path(tmpdir.path()).unwrap(); + let working_set = &mut WorkingSet::new(storage); + + let config: AccountConfig = keys.iter().map(|k| k.pub_key()).collect(); + let accounts: Accounts = Accounts::default(); + accounts.genesis(&config, working_set).unwrap(); + + // address list is constant for this test + let mut used = keys.iter().map(|k| k.as_hex()).collect::>(); + let mut state: HashMap<_, _> = keys.into_iter().map(|k| (k.default_address(), k)).collect(); + let addresses: Vec<_> = state.keys().copied().collect(); + + for _ in 0..iterations { + // we use slices for better select performance + let sender = addresses.choose(rng).unwrap(); + let context = C::new(*sender); + + // clear previous state + let previous = state.get(sender).unwrap().as_hex(); + used.remove(&previous); + + // generate an unused key + rng.fill_bytes(&mut seed); + let u = &mut Unstructured::new(&seed); + let mut secret = DefaultPrivateKey::arbitrary(u).unwrap(); + while used.contains(&secret.as_hex()) { + rng.fill_bytes(&mut seed); + let u = &mut Unstructured::new(&seed); + secret = DefaultPrivateKey::arbitrary(u).unwrap(); + } + used.insert(secret.as_hex()); + + let public = secret.pub_key(); + let sig = secret.sign(&UPDATE_ACCOUNT_MSG); + state.insert(*sender, secret); + + let msg = CallMessage::::UpdatePublicKey(public.clone(), sig); + accounts.call(msg, &context, working_set).unwrap(); + } + + Corpus::Keep +}); diff --git a/fuzz/fuzz_targets/accounts_call_random.rs b/fuzz/fuzz_targets/accounts_call_random.rs new file mode 100644 index 000000000..66a896538 --- /dev/null +++ b/fuzz/fuzz_targets/accounts_call_random.rs @@ -0,0 +1,26 @@ +#![no_main] + +use libfuzzer_sys::arbitrary::Unstructured; +use libfuzzer_sys::fuzz_target; +use sov_accounts::{Accounts, CallMessage}; +use sov_modules_api::default_context::DefaultContext; +use sov_modules_api::{Module, Spec}; +use sov_state::WorkingSet; + +type C = DefaultContext; + +// Check arbitrary, random calls +fuzz_target!(|input: (&[u8], Vec<(C, CallMessage)>)| { + let tmpdir = tempfile::tempdir().unwrap(); + let storage = ::Storage::with_path(tmpdir.path()).unwrap(); + let working_set = &mut WorkingSet::new(storage); + + let (seed, msgs) = input; + let u = &mut 
Unstructured::new(seed); + let accounts: Accounts = Accounts::arbitrary_workset(u, working_set).unwrap(); + + for (ctx, msg) in msgs { + // assert malformed calls won't panic + accounts.call(msg, &ctx, working_set).ok(); + } +}); diff --git a/fuzz/fuzz_targets/accounts_parse_call_message.rs b/fuzz/fuzz_targets/accounts_parse_call_message.rs new file mode 100644 index 000000000..0467112e1 --- /dev/null +++ b/fuzz/fuzz_targets/accounts_parse_call_message.rs @@ -0,0 +1,13 @@ +#![no_main] + +use libfuzzer_sys::fuzz_target; +use sov_accounts::CallMessage; +use sov_modules_api::default_context::DefaultContext; + +type C = DefaultContext; + +fuzz_target!(|input: CallMessage| { + let json = serde_json::to_vec(&input).unwrap(); + let msg = serde_json::from_slice::>(&json).unwrap(); + assert_eq!(input, msg); +}); diff --git a/fuzz/fuzz_targets/accounts_parse_call_message_random.rs b/fuzz/fuzz_targets/accounts_parse_call_message_random.rs new file mode 100644 index 000000000..a928f4c7d --- /dev/null +++ b/fuzz/fuzz_targets/accounts_parse_call_message_random.rs @@ -0,0 +1,11 @@ +#![no_main] + +use libfuzzer_sys::fuzz_target; +use sov_accounts::CallMessage; +use sov_modules_api::default_context::DefaultContext; + +type C = DefaultContext; + +fuzz_target!(|input: &[u8]| { + serde_json::from_slice::>(input).ok(); +}); diff --git a/fuzz/fuzz_targets/address_bech_32_parse_serde.rs b/fuzz/fuzz_targets/address_bech_32_parse_serde.rs new file mode 100644 index 000000000..a01d11178 --- /dev/null +++ b/fuzz/fuzz_targets/address_bech_32_parse_serde.rs @@ -0,0 +1,8 @@ +#![no_main] + +use libfuzzer_sys::fuzz_target; +use sov_modules_api::AddressBech32; + +fuzz_target!(|data: &[u8]| { + serde_json::from_slice::(data).ok(); +}); diff --git a/fuzz/fuzz_targets/address_bech_32_try_from_bytes.rs b/fuzz/fuzz_targets/address_bech_32_try_from_bytes.rs new file mode 100644 index 000000000..9546a74f3 --- /dev/null +++ b/fuzz/fuzz_targets/address_bech_32_try_from_bytes.rs @@ -0,0 +1,8 @@ +#![no_main] + +use libfuzzer_sys::fuzz_target; +use sov_modules_api::AddressBech32; + +fuzz_target!(|data: &[u8]| { + let _ = AddressBech32::try_from(data); +}); diff --git a/fuzz/fuzz_targets/bank_call.rs b/fuzz/fuzz_targets/bank_call.rs new file mode 100644 index 000000000..c999c262b --- /dev/null +++ b/fuzz/fuzz_targets/bank_call.rs @@ -0,0 +1,25 @@ +#![no_main] + +use libfuzzer_sys::fuzz_target; +use sov_bank::{Bank, CallMessage}; +use sov_modules_api::default_context::DefaultContext; +use sov_modules_api::Module; +use sov_state::{ProverStorage, WorkingSet}; + +type C = DefaultContext; + +fuzz_target!(|input: (&[u8], [u8; 32])| { + let (data, sender) = input; + if let Ok(msgs) = serde_json::from_slice::>>(data) { + let tmpdir = tempfile::tempdir().unwrap(); + let mut working_set = WorkingSet::new(ProverStorage::with_path(tmpdir.path()).unwrap()); + let ctx = C { + sender: sender.into(), + }; + + let bank = Bank::default(); + for msg in msgs { + bank.call(msg, &ctx, &mut working_set).ok(); + } + } +}); diff --git a/fuzz/fuzz_targets/bank_parse_call_message.rs b/fuzz/fuzz_targets/bank_parse_call_message.rs new file mode 100644 index 000000000..13962b9cd --- /dev/null +++ b/fuzz/fuzz_targets/bank_parse_call_message.rs @@ -0,0 +1,11 @@ +#![no_main] + +use libfuzzer_sys::fuzz_target; +use sov_bank::CallMessage; +use sov_modules_api::default_context::DefaultContext; + +type C = DefaultContext; + +fuzz_target!(|input: &[u8]| { + serde_json::from_slice::>(input).ok(); +}); diff --git a/fuzz/fuzz_targets/namespace_group_from_b64.rs 
b/fuzz/fuzz_targets/namespace_group_from_b64.rs new file mode 100644 index 000000000..50e8d3e1a --- /dev/null +++ b/fuzz/fuzz_targets/namespace_group_from_b64.rs @@ -0,0 +1,11 @@ +#![no_main] +#[macro_use] +extern crate libfuzzer_sys; + +use celestia::shares::NamespaceGroup; + +fuzz_target!(|data: &[u8]| { + if let Ok(s) = std::str::from_utf8(data) { + let _ = NamespaceGroup::from_b64(s).ok(); + } +}); diff --git a/fuzz/fuzz_targets/parse_address.rs b/fuzz/fuzz_targets/parse_address.rs new file mode 100644 index 000000000..a5b74959d --- /dev/null +++ b/fuzz/fuzz_targets/parse_address.rs @@ -0,0 +1,15 @@ +#![no_main] +#[macro_use] +extern crate libfuzzer_sys; + +use std::str::FromStr; + +use sov_modules_api::AddressBech32; + +fuzz_target!(|data: &[u8]| { + if let Ok(data) = std::str::from_utf8(data) { + if let Ok(addr) = AddressBech32::from_str(data) { + addr.to_string(); + } + } +}); diff --git a/fuzz/fuzz_targets/share_deserialize.rs b/fuzz/fuzz_targets/share_deserialize.rs new file mode 100644 index 000000000..e08cfd1ec --- /dev/null +++ b/fuzz/fuzz_targets/share_deserialize.rs @@ -0,0 +1,8 @@ +#![no_main] + +use celestia::shares::Share; +use libfuzzer_sys::fuzz_target; + +fuzz_target!(|data: &[u8]| { + serde_json::from_slice::(data).ok(); +}); diff --git a/module-system/RPC_WALKTHROUGH.md b/module-system/RPC_WALKTHROUGH.md index b7bea1617..c0486deb9 100644 --- a/module-system/RPC_WALKTHROUGH.md +++ b/module-system/RPC_WALKTHROUGH.md @@ -6,7 +6,7 @@ from scratch. There are 5 steps that need to be completed to enable RPC on the full node: -1. Annotate you modules with `rpc_gen` and `rpc_method`. +1. Annotate your modules with `rpc_gen` and `rpc_method`. 2. Annotate your `native` `Runtime` with the `expose_rpc` macro. 3. Import and call `get_rpc_methods` in your full node implementation. 4. Configure and start your RPC server in your full node implementation. @@ -49,7 +49,7 @@ This example code will generate an RPC module which can process the `bank_balanc Under the hood `rpc_gen` and `rpc_method` create two traits - one called RpcImpl and one called RpcServer. It's important to note that the \_RpcImpl and \_RpcServer traits do not need to be implemented - this is done automatically by the SDK. -However, the do need to be imported to the file where the `expose_rpc` macro is called. +However, they do need to be imported to the file where the `expose_rpc` macro is called. ### Step 2: Expose Your RPC Server diff --git a/module-system/module-implementations/examples/sov-election/README.md b/module-system/module-implementations/examples/sov-election/README.md deleted file mode 100644 index bf764f9bb..000000000 --- a/module-system/module-implementations/examples/sov-election/README.md +++ /dev/null @@ -1,15 +0,0 @@ -# An example of a `SOV-MODULE` - -It demonstrates the following concepts: -### 1. Module structure: -- `lib.rs` contains `sov_election` module definition and `sov_modules_api::Module` trait implementation for `sov_election`. -- `genesis.rs` contains the module initialization logic. -- `call.rs` contains methods that change module state in response to `CallMessage`. -- `query.rs` contains functions for querying the module state. - -### 2. Functionality: -This module demonstrates the functionality of an election where a group of 'voters' vote for 'candidates' to determine a winner. Please note that this module serves only as an example and should not be used in real-life scenarios. As an exercise, check how the winner is chosen in the case of a tie between multiple candidates. 
- -The `sov_election` module has a special role called `admin` that is set in `sov_election` genesis method. Only the `admin` can set `candidates` and register `voters`. Once registered, a `voter` votes for a chosen `candidate`. After some period of time the `admin` freezes the election and anyone can query who the winner is. The `sov_election` module determines the winner, and ensures that the election was fair. For example, it checks that each `voter` voted only once. - -For implementation details, please check comments in the `genesis.rs, call.rs & query.rs`. \ No newline at end of file diff --git a/module-system/module-implementations/examples/sov-election/src/call.rs b/module-system/module-implementations/examples/sov-election/src/call.rs deleted file mode 100644 index a5fa6569a..000000000 --- a/module-system/module-implementations/examples/sov-election/src/call.rs +++ /dev/null @@ -1,191 +0,0 @@ -use anyhow::{anyhow, bail, ensure, Result}; -use sov_modules_api::{CallResponse, Context}; -use sov_state::WorkingSet; - -use super::types::{Candidate, Voter}; -use super::Election; - -/// Call actions supported byte the module. -#[cfg_attr( - feature = "native", - derive(serde::Serialize), - derive(serde::Deserialize), - derive(schemars::JsonSchema), - derive(sov_modules_api::macros::CliWalletArg), - schemars(bound = "C: sov_modules_api::Context", rename = "CallMessage") -)] -#[derive(borsh::BorshDeserialize, borsh::BorshSerialize, Debug, PartialEq, Clone)] -pub enum CallMessage { - SetCandidates { names: Vec }, - AddVoter(C::Address), - Vote(usize), - ClearElection, - FreezeElection, -} - -impl Election { - /// Sets the candidates. Must be called by the Admin. - pub(crate) fn set_candidates( - &self, - candidate_names: Vec, - context: &C, - working_set: &mut WorkingSet, - ) -> Result { - self.exit_if_frozen(working_set)?; - self.exit_if_not_admin(context, working_set)?; - self.exit_if_candidates_already_set(working_set)?; - - let candidates = candidate_names.into_iter().map(Candidate::new).collect(); - self.candidates.set(&candidates, working_set); - working_set.add_event("Election: set_candidates", "Candidate was set"); - - Ok(CallResponse::default()) - } - - /// Adds voter to the allow list. Must be called by the Admin. - pub(crate) fn add_voter( - &self, - voter_address: C::Address, - context: &C, - working_set: &mut WorkingSet, - ) -> Result { - self.exit_if_frozen(working_set)?; - self.exit_if_not_admin(context, working_set)?; - self.exit_if_voter_already_set(&voter_address, working_set)?; - - self.allowed_voters - .set(&voter_address, &Voter::fresh(), working_set); - - working_set.add_event( - "Election: add_voter", - &format!("Voter was added: {voter_address}"), - ); - - Ok(CallResponse::default()) - } - - /// Votes for a candidate. Must be called by the Voter. - pub(crate) fn make_vote( - &self, - // TODO the candidates are stored in `Vec` which allows iteration, but it forces us - // to use candidate_index instead of candidate_name here. We will change it once - // we have iterator for `StateMap`. 
- candidate_index: usize, - context: &C, - working_set: &mut WorkingSet, - ) -> Result { - working_set.add_event( - "Election: make_vote", - &format!("Attempt to make a from: {}", context.sender()), - ); - - let new_number_of_votes = self - .number_of_votes - .get(working_set) - .unwrap_or_default() - .checked_add(1) - .ok_or(anyhow!("Vote count overflow"))?; - - self.number_of_votes.set(&new_number_of_votes, working_set); - self.exit_if_frozen(working_set)?; - - let voter = self - .allowed_voters - .get_or_err(context.sender(), working_set)?; - - match voter { - Voter::Voted => bail!("Voter tried voting a second time!"), - Voter::Fresh => { - self.allowed_voters - .set(context.sender(), &Voter::voted(), working_set); - - let mut candidates = self.candidates.get_or_err(working_set)?; - - // Check if a candidate exist. - let candidate = candidates - .get_mut(candidate_index) - .ok_or(anyhow!("Candidate doesn't exist"))?; - - candidate.count = candidate - .count - .checked_add(1) - .ok_or(anyhow!("Vote count overflow"))?; - - self.candidates.set(&candidates, working_set); - - working_set.add_event( - "Election: make_vote", - &format!("Vote from: {} accepted", context.sender()), - ); - Ok(CallResponse::default()) - } - } - } - - /// Freezes the election. - pub(crate) fn freeze_election( - &self, - context: &C, - working_set: &mut WorkingSet, - ) -> Result { - self.exit_if_not_admin(context, working_set)?; - self.is_frozen.set(&true, working_set); - working_set.add_event("Election: freeze_election", "Election was frozen"); - Ok(CallResponse::default()) - } - - /// Clears the election. - pub(crate) fn clear(&self) -> Result { - // TODO: https://github.com/Sovereign-Labs/sovereign-sdk/issues/62 - todo!() - } - - fn exit_if_not_admin( - &self, - context: &C, - working_set: &mut WorkingSet, - ) -> Result<()> { - let admin = self.admin.get_or_err(working_set)?; - - ensure!( - &admin == context.sender(), - "Only admin can trigger this action." - ); - Ok(()) - } - - fn exit_if_frozen(&self, working_set: &mut WorkingSet) -> Result<()> { - let is_frozen = self.is_frozen.get_or_err(working_set)?; - - if is_frozen { - bail!("Election is frozen.") - } - - Ok(()) - } - - fn exit_if_candidates_already_set( - &self, - working_set: &mut WorkingSet, - ) -> Result<()> { - ensure!( - self.candidates.get(working_set).is_none(), - "Candidate already set." - ); - Ok(()) - } - - fn exit_if_voter_already_set( - &self, - voter_address: &C::Address, - working_set: &mut WorkingSet, - ) -> Result<()> { - ensure!( - self.allowed_voters - .get(voter_address, working_set) - .is_none(), - "Voter already has the right to vote." 
- ); - Ok(()) - } -} diff --git a/module-system/module-implementations/examples/sov-election/src/genesis.rs b/module-system/module-implementations/examples/sov-election/src/genesis.rs deleted file mode 100644 index 8a31db785..000000000 --- a/module-system/module-implementations/examples/sov-election/src/genesis.rs +++ /dev/null @@ -1,17 +0,0 @@ -use anyhow::Result; -use sov_state::WorkingSet; - -use super::Election; - -impl Election { - pub(crate) fn init_module( - &self, - config: &::Config, - working_set: &mut WorkingSet, - ) -> Result<()> { - self.admin.set(&config.admin, working_set); - self.is_frozen.set(&false, working_set); - - Ok(()) - } -} diff --git a/module-system/module-implementations/examples/sov-election/src/lib.rs b/module-system/module-implementations/examples/sov-election/src/lib.rs deleted file mode 100644 index f9033dab1..000000000 --- a/module-system/module-implementations/examples/sov-election/src/lib.rs +++ /dev/null @@ -1,92 +0,0 @@ -mod call; -mod genesis; -#[cfg(feature = "native")] -mod query; - -#[cfg(test)] -mod tests; - -mod types; - -pub use call::CallMessage; -#[cfg(feature = "native")] -pub use query::{ElectionRpcImpl, ElectionRpcServer, GetNbOfVotesResponse, GetResultResponse}; -use sov_modules_api::{Error, ModuleInfo}; -use sov_state::WorkingSet; -pub use types::Candidate; -use types::Voter; - -pub struct ElectionConfig { - pub admin: C::Address, -} - -#[cfg_attr(feature = "native", derive(sov_modules_api::ModuleCallJsonSchema))] -#[derive(ModuleInfo, Clone)] -pub struct Election { - #[address] - pub address: C::Address, - - #[state] - pub(crate) admin: sov_state::StateValue, - - #[state] - pub(crate) is_frozen: sov_state::StateValue, - - // There are two issues here: - // 1. We use `std::Vec` inside `StateValue` this might be inefficient because - // on every get, we are fetching the whole vector. We will add `StateVec` type in the future, - // see: https://github.com/Sovereign-Labs/sovereign-sdk/issues/33 - // - // 2. It would be better to use `StateMap`, but it doesn't support iteration, - // see: https://github.com/Sovereign-Labs/sovereign-sdk/issues/61 - #[state] - pub(crate) candidates: sov_state::StateValue>, - - #[state] - pub(crate) allowed_voters: sov_state::StateMap, - - // This is used for testing revert functionality in the demo-stf. - #[state] - pub(crate) number_of_votes: sov_state::StateValue, -} - -impl sov_modules_api::Module for Election { - type Context = C; - - type Config = ElectionConfig; - - type CallMessage = call::CallMessage; - - fn genesis( - &self, - config: &Self::Config, - working_set: &mut WorkingSet, - ) -> Result<(), Error> { - Ok(self.init_module(config, working_set)?) - } - - fn call( - &self, - msg: Self::CallMessage, - context: &Self::Context, - working_set: &mut WorkingSet, - ) -> Result { - match msg { - Self::CallMessage::SetCandidates { names } => { - Ok(self.set_candidates(names, context, working_set)?) - } - - Self::CallMessage::AddVoter(voter_address) => { - Ok(self.add_voter(voter_address, context, working_set)?) - } - - Self::CallMessage::Vote(candidate_index) => { - Ok(self.make_vote(candidate_index, context, working_set)?) 
- } - - Self::CallMessage::ClearElection => Ok(self.clear()?), - - Self::CallMessage::FreezeElection => Ok(self.freeze_election(context, working_set)?), - } - } -} diff --git a/module-system/module-implementations/examples/sov-election/src/query.rs b/module-system/module-implementations/examples/sov-election/src/query.rs deleted file mode 100644 index a5b109067..000000000 --- a/module-system/module-implementations/examples/sov-election/src/query.rs +++ /dev/null @@ -1,50 +0,0 @@ -use jsonrpsee::core::RpcResult; -use sov_modules_api::macros::rpc_gen; -use sov_state::WorkingSet; - -use super::types::Candidate; -use super::Election; - -#[derive(Debug, Eq, PartialEq, serde::Deserialize, serde::Serialize, Clone)] -pub enum GetResultResponse { - Result(Option), - Err(String), -} - -#[derive(Debug, Eq, PartialEq, serde::Deserialize, serde::Serialize, Clone)] -pub enum GetNbOfVotesResponse { - Result(u64), -} - -#[rpc_gen(client, server, namespace = "election")] -impl Election { - #[rpc_method(name = "results")] - pub fn results( - &self, - working_set: &mut WorkingSet, - ) -> RpcResult { - let is_frozen = self.is_frozen.get(working_set).unwrap_or_default(); - - if is_frozen { - let candidates = self.candidates.get(working_set).unwrap_or(Vec::default()); - - // In case of tie, returns the candidate with the higher index in the vec, it is ok for the example. - let candidate = candidates - .into_iter() - .max_by(|c1, c2| c1.count.cmp(&c2.count)); - - Ok(GetResultResponse::Result(candidate)) - } else { - Ok(GetResultResponse::Err("Election is not frozen".to_owned())) - } - } - - #[rpc_method(name = "numberOfVotes")] - pub fn number_of_votes( - &self, - working_set: &mut WorkingSet, - ) -> RpcResult { - let number_of_votes = self.number_of_votes.get(working_set).unwrap_or_default(); - Ok(GetNbOfVotesResponse::Result(number_of_votes)) - } -} diff --git a/module-system/module-implementations/examples/sov-election/src/tests.rs b/module-system/module-implementations/examples/sov-election/src/tests.rs deleted file mode 100644 index e0c71663b..000000000 --- a/module-system/module-implementations/examples/sov-election/src/tests.rs +++ /dev/null @@ -1,110 +0,0 @@ -use sov_modules_api::default_context::{DefaultContext, ZkDefaultContext}; -use sov_modules_api::default_signature::private_key::DefaultPrivateKey; -use sov_modules_api::{Address, Context, Module, PrivateKey, PublicKey}; -use sov_state::{ProverStorage, WorkingSet, ZkStorage}; - -use super::call::CallMessage; -use super::query::GetResultResponse; -use super::types::Candidate; -use super::Election; -use crate::ElectionConfig; - -#[test] -fn test_election() { - let admin = Address::from([1; 32]); - - let tmpdir = tempfile::tempdir().unwrap(); - let native_storage = ProverStorage::with_path(tmpdir.path()).unwrap(); - let mut native_working_set = WorkingSet::new(native_storage); - - test_module::(admin.clone(), &mut native_working_set); - - let (_log, witness) = native_working_set.checkpoint().freeze(); - let zk_storage = ZkStorage::new([0u8; 32]); - let mut zk_working_set = WorkingSet::with_witness(zk_storage, witness); - test_module::(admin, &mut zk_working_set); -} - -fn test_module(admin: C::Address, working_set: &mut WorkingSet) { - let admin_context = C::new(admin.clone()); - let election = &mut Election::::default(); - - // Init module - { - let config = ElectionConfig { admin }; - election.genesis(&config, working_set).unwrap(); - } - - // Send candidates - { - let set_candidates = CallMessage::SetCandidates { - names: 
vec!["candidate_1".to_owned(), "candidate_2".to_owned()], - }; - - election - .call(set_candidates, &admin_context, working_set) - .unwrap(); - } - - let voter_1 = DefaultPrivateKey::generate() - .pub_key() - .to_address::(); - - let voter_2 = DefaultPrivateKey::generate() - .pub_key() - .to_address::(); - - let voter_3 = DefaultPrivateKey::generate() - .pub_key() - .to_address::(); - - // Register voters - { - let add_voter = CallMessage::AddVoter(voter_1.clone()); - election - .call(add_voter, &admin_context, working_set) - .unwrap(); - - let add_voter = CallMessage::AddVoter(voter_2.clone()); - election - .call(add_voter, &admin_context, working_set) - .unwrap(); - - let add_voter = CallMessage::AddVoter(voter_3.clone()); - election - .call(add_voter, &admin_context, working_set) - .unwrap(); - } - - // Vote - { - let sender_context = C::new(voter_1); - let vote = CallMessage::Vote(0); - election.call(vote, &sender_context, working_set).unwrap(); - - let sender_context = C::new(voter_2); - let vote = CallMessage::Vote(1); - election.call(vote, &sender_context, working_set).unwrap(); - - let sender_context = C::new(voter_3); - let vote = CallMessage::Vote(1); - election.call(vote, &sender_context, working_set).unwrap(); - } - - election - .call(CallMessage::FreezeElection, &admin_context, working_set) - .unwrap(); - - // Get result - { - let query_response: GetResultResponse = election.results(working_set).unwrap(); - - assert_eq!( - query_response, - GetResultResponse::Result(Some(Candidate { - name: "candidate_2".to_owned(), - count: 2 - })) - ) - } -} diff --git a/module-system/module-implementations/examples/sov-election/src/types.rs b/module-system/module-implementations/examples/sov-election/src/types.rs deleted file mode 100644 index 1c0d8e7a9..000000000 --- a/module-system/module-implementations/examples/sov-election/src/types.rs +++ /dev/null @@ -1,32 +0,0 @@ -use borsh::{BorshDeserialize, BorshSerialize}; - -// Represents a candidate. -#[cfg_attr(feature = "native", derive(serde::Deserialize, serde::Serialize))] -#[derive(BorshDeserialize, BorshSerialize, Debug, Eq, PartialEq, Clone)] -pub struct Candidate { - pub name: String, - pub count: u32, -} - -impl Candidate { - pub fn new(name: String) -> Self { - Self { name, count: 0 } - } -} - -/// Represents a voter. 
-#[derive(BorshDeserialize, BorshSerialize, Clone)] -pub(crate) enum Voter { - Fresh, - Voted, -} - -impl Voter { - pub(crate) fn fresh() -> Self { - Self::Fresh - } - - pub(crate) fn voted() -> Self { - Self::Voted - } -} diff --git a/module-system/module-implementations/examples/sov-value-setter/Cargo.toml b/module-system/module-implementations/examples/sov-value-setter/Cargo.toml index 4bd3f006f..7524722b5 100644 --- a/module-system/module-implementations/examples/sov-value-setter/Cargo.toml +++ b/module-system/module-implementations/examples/sov-value-setter/Cargo.toml @@ -13,13 +13,12 @@ resolver = "2" publish = false [dev-dependencies] -sov-modules-api = { path = "../../../sov-modules-api" } tempfile = { workspace = true } [dependencies] anyhow = { workspace = true } -sov-modules-api = { path = "../../../sov-modules-api", default-features = false, features = ["macros"] } -sov-state = { path = "../../../sov-state", default-features = false } +sov-modules-api = { path = "../../../sov-modules-api" } +sov-state = { path = "../../../sov-state" } sov-rollup-interface = { path = "../../../../rollup-interface" } schemars = { workspace = true, optional = true } serde = { workspace = true, optional = true } @@ -30,6 +29,5 @@ jsonrpsee = { workspace = true, features = ["macros", "client-core", "server"], clap = { workspace = true, optional = true } [features] -default = ["native"] -serde = ["dep:serde", "dep:serde_json"] -native = ["serde", "sov-modules-api/native", "dep:jsonrpsee", "dep:schemars", "dep:clap"] +default = [] +native = ["serde", "serde_json", "jsonrpsee", "schemars", "clap", "sov-modules-api/native", "sov-state/native"] diff --git a/module-system/module-implementations/examples/sov-value-setter/src/lib.rs b/module-system/module-implementations/examples/sov-value-setter/src/lib.rs index f0eaa7398..e4766691d 100644 --- a/module-system/module-implementations/examples/sov-value-setter/src/lib.rs +++ b/module-system/module-implementations/examples/sov-value-setter/src/lib.rs @@ -7,11 +7,9 @@ mod genesis; mod tests; #[cfg(feature = "native")] -mod query; +pub mod query; pub use call::CallMessage; -#[cfg(feature = "native")] -pub use query::{Response, ValueSetterRpcImpl, ValueSetterRpcServer}; use sov_modules_api::{Error, ModuleInfo}; use sov_state::WorkingSet; diff --git a/module-system/module-implementations/examples/sov-value-setter/src/query.rs b/module-system/module-implementations/examples/sov-value-setter/src/query.rs index 092b02f67..07ad7a3ba 100644 --- a/module-system/module-implementations/examples/sov-value-setter/src/query.rs +++ b/module-system/module-implementations/examples/sov-value-setter/src/query.rs @@ -1,4 +1,4 @@ -#![allow(missing_docs)] +//! 
Defines rpc queries exposed by the module use jsonrpsee::core::RpcResult; use sov_modules_api::macros::rpc_gen; use sov_state::WorkingSet; diff --git a/module-system/module-implementations/examples/sov-value-setter/src/tests.rs b/module-system/module-implementations/examples/sov-value-setter/src/tests.rs index 6bb5ad002..c45a4e803 100644 --- a/module-system/module-implementations/examples/sov-value-setter/src/tests.rs +++ b/module-system/module-implementations/examples/sov-value-setter/src/tests.rs @@ -12,11 +12,10 @@ fn test_value_setter() { let mut working_set = WorkingSet::new(ProverStorage::with_path(tmpdir.path()).unwrap()); let admin = Address::from([1; 32]); // Test Native-Context + #[cfg(feature = "native")] { - let config = ValueSetterConfig { - admin: admin.clone(), - }; - let context = DefaultContext::new(admin.clone()); + let config = ValueSetterConfig { admin }; + let context = DefaultContext::new(admin); test_value_setter_helper(context, &config, &mut working_set); } @@ -24,9 +23,7 @@ fn test_value_setter() { // Test Zk-Context { - let config = ValueSetterConfig { - admin: admin.clone(), - }; + let config = ValueSetterConfig { admin }; let zk_context = ZkDefaultContext::new(admin); let mut zk_working_set = WorkingSet::with_witness(ZkStorage::new([0u8; 32]), witness); test_value_setter_helper(zk_context, &config, &mut zk_working_set); @@ -74,11 +71,12 @@ fn test_err_on_sender_is_not_admin() { let sender_not_admin = Address::from([2; 32]); // Test Native-Context + #[cfg(feature = "native")] { let config = ValueSetterConfig { - admin: sender_not_admin.clone(), + admin: sender_not_admin, }; - let context = DefaultContext::new(sender.clone()); + let context = DefaultContext::new(sender); test_err_on_sender_is_not_admin_helper(context, &config, &mut native_working_set); } let (_, witness) = native_working_set.checkpoint().freeze(); diff --git a/module-system/module-implementations/examples/sov-election/Cargo.toml b/module-system/module-implementations/examples/sov-vec-setter/Cargo.toml similarity index 76% rename from module-system/module-implementations/examples/sov-election/Cargo.toml rename to module-system/module-implementations/examples/sov-vec-setter/Cargo.toml index 11692c5a5..0f3d77719 100644 --- a/module-system/module-implementations/examples/sov-election/Cargo.toml +++ b/module-system/module-implementations/examples/sov-vec-setter/Cargo.toml @@ -1,6 +1,6 @@ [package] -name = "sov-election" -description = "A Sovereign SDK module for demonstrating election" +name = "sov-vec-setter" +description = "A Sovereign SDK example module for setting/reading vectors from state" authors = { workspace = true } edition = { workspace = true } homepage = { workspace = true } @@ -12,25 +12,23 @@ readme = "README.md" resolver = "2" publish = false +[dev-dependencies] +sov-modules-api = { path = "../../../sov-modules-api" } +tempfile = { workspace = true } + [dependencies] anyhow = { workspace = true } -borsh = { workspace = true, features = ["rc"] } -clap = { workspace = true, optional = true, features = ["derive"] } -hex = { workspace = true } -jsonrpsee = { workspace = true, features = ["macros", "client-core", "server"], optional = true } -schemars = { workspace = true, optional = true } -serde = { workspace = true, optional = true } -serde_json = { workspace = true, optional = true } - sov-modules-api = { path = "../../../sov-modules-api", default-features = false, features = ["macros"] } sov-state = { path = "../../../sov-state", default-features = false } sov-rollup-interface = { path 
= "../../../../rollup-interface" } - -[dev-dependencies] -sov-modules-api = { path = "../../../sov-modules-api" } -tempfile = { workspace = true } +schemars = { workspace = true, optional = true } +serde = { workspace = true, optional = true } +serde_json = { workspace = true, optional = true } +thiserror = { workspace = true } +borsh = { workspace = true, features = ["rc"] } +jsonrpsee = { workspace = true, features = ["macros", "client-core", "server"], optional = true } +clap = { workspace = true, optional = true } [features] default = ["native"] -serde = ["dep:serde", "dep:serde_json"] -native = ["serde", "sov-modules-api/native", "dep:jsonrpsee", "dep:schemars", "dep:clap"] +native = ["serde", "serde_json", "jsonrpsee", "schemars", "clap", "sov-modules-api/native"] diff --git a/module-system/module-implementations/examples/sov-vec-setter/README.md b/module-system/module-implementations/examples/sov-vec-setter/README.md new file mode 100644 index 000000000..ad83f8ed3 --- /dev/null +++ b/module-system/module-implementations/examples/sov-vec-setter/README.md @@ -0,0 +1,16 @@ +# An example of a `SOV-MODULE` + +It demonstrates the following concepts: + +### 1. Module structure: + +- `lib.rs` contains `VecSetter` module definition and `sov_modules_api::Module` trait implementation for `VecSetter`. +- `genesis.rs` contains the module initialization logic. +- `call.rs` contains methods that change module state in response to `CallMessage`. +- `query.rs` contains functions for querying the module state. + +### 2. Functionality: + +The `admin` (specified in the `VecSetter` genesis) manages a vector of `u32` values kept in state: `CallMessage::PushValue`, `CallMessage::SetValue { index, value }`, `CallMessage::SetAllValues` and `CallMessage::PopValue` modify it. Anyone can query the module state through the `vecSetter_queryVec` and `vecSetter_lenVec` endpoints. + +For implementation details, please check comments in the `genesis.rs, call.rs & query.rs`. \ No newline at end of file diff --git a/module-system/module-implementations/examples/sov-vec-setter/src/call.rs b/module-system/module-implementations/examples/sov-vec-setter/src/call.rs new file mode 100644 index 000000000..2e0fc900d --- /dev/null +++ b/module-system/module-implementations/examples/sov-vec-setter/src/call.rs @@ -0,0 +1,151 @@ +use std::fmt::Debug; + +use anyhow::Result; +#[cfg(feature = "native")] +use sov_modules_api::macros::CliWalletArg; +use sov_modules_api::CallResponse; +use sov_state::WorkingSet; +use thiserror::Error; + +use super::VecSetter; + +/// This enumeration represents the available call messages for interacting with the `sov-vec-setter` module. +#[cfg_attr( + feature = "native", + derive(serde::Serialize), + derive(serde::Deserialize), + derive(CliWalletArg), + derive(schemars::JsonSchema) +)] +#[derive(borsh::BorshDeserialize, borsh::BorshSerialize, Debug, PartialEq, Clone)] +pub enum CallMessage { + /// value to push + PushValue(u32), + /// value to set + SetValue { + /// index to set + index: usize, + /// value to set + value: u32, + }, + /// values to set + SetAllValues(Vec), + /// Pop + PopValue, +} + +/// Example of a custom error. +#[derive(Debug, Error)] +enum SetValueError { + #[error("Only admin can change the value")] + WrongSender, +} + +impl VecSetter { + /// Pushes `value` field to the `vector`, only admin is authorized to call this method.
+ pub(crate) fn push_value( + &self, + new_value: u32, + context: &C, + working_set: &mut WorkingSet, + ) -> Result { + // If admin is not then early return: + let admin = self.admin.get_or_err(working_set)?; + + if &admin != context.sender() { + // Here we use a custom error type. + Err(SetValueError::WrongSender)?; + } + + // This is how we push a new value to vector: + self.vector.push(&new_value, working_set); + + let new_length = self.vector.len(working_set); + + working_set.add_event( + "push", + &format!("value_push: {new_value:?}, new length: {new_length:?}"), + ); + + Ok(CallResponse::default()) + } + + /// Sets `value` field to the given index of `vector`, only admin is authorized to call this method. + pub(crate) fn set_value( + &self, + index: usize, + new_value: u32, + context: &C, + working_set: &mut WorkingSet, + ) -> Result { + // If admin is not then early return: + let admin = self.admin.get_or_err(working_set)?; + + if &admin != context.sender() { + // Here we use a custom error type. + Err(SetValueError::WrongSender)?; + } + + // This is how we set a new value: + self.vector.set(index, &new_value, working_set)?; + + working_set.add_event( + "set", + &format!("value_set: {new_value:?} for index: {index:?}"), + ); + + Ok(CallResponse::default()) + } + + /// Sets `values` completely to the `vector`, only admin is authorized to call this method. + pub(crate) fn set_all_values( + &self, + values: Vec, + context: &C, + working_set: &mut WorkingSet, + ) -> Result { + // If admin is not then early return: + let admin = self.admin.get_or_err(working_set)?; + + if &admin != context.sender() { + // Here we use a custom error type. + Err(SetValueError::WrongSender)?; + } + + // This is how we set all the vector: + self.vector.set_all(values, working_set); + + let new_length = self.vector.len(working_set); + + working_set.add_event("set_all", &format!("new length: {new_length:?}")); + + Ok(CallResponse::default()) + } + + /// Pops last value from the `vector`, only admin is authorized to call this method. + pub(crate) fn pop_value( + &self, + context: &C, + working_set: &mut WorkingSet, + ) -> Result { + // If admin is not then early return: + let admin = self.admin.get_or_err(working_set)?; + + if &admin != context.sender() { + // Here we use a custom error type. + Err(SetValueError::WrongSender)?; + } + + // This is how we pop last value value: + let pop_value = self.vector.pop(working_set); + + let new_length = self.vector.len(working_set); + + working_set.add_event( + "pop", + &format!("value_pop: {pop_value:?}, new length: {new_length:?}"), + ); + + Ok(CallResponse::default()) + } +} diff --git a/module-system/module-implementations/examples/sov-vec-setter/src/genesis.rs b/module-system/module-implementations/examples/sov-vec-setter/src/genesis.rs new file mode 100644 index 000000000..99f4cf1bf --- /dev/null +++ b/module-system/module-implementations/examples/sov-vec-setter/src/genesis.rs @@ -0,0 +1,16 @@ +use anyhow::Result; +use sov_state::WorkingSet; + +use super::VecSetter; + +impl VecSetter { + /// Initializes module with the `admin` role. 
+ pub(crate) fn init_module( + &self, + admin_config: &::Config, + working_set: &mut WorkingSet, + ) -> Result<()> { + self.admin.set(&admin_config.admin, working_set); + Ok(()) + } +} diff --git a/module-system/module-implementations/examples/sov-vec-setter/src/lib.rs b/module-system/module-implementations/examples/sov-vec-setter/src/lib.rs new file mode 100644 index 000000000..0fbf5b900 --- /dev/null +++ b/module-system/module-implementations/examples/sov-vec-setter/src/lib.rs @@ -0,0 +1,76 @@ +#![deny(missing_docs)] +#![doc = include_str!("../README.md")] +mod call; +mod genesis; + +#[cfg(feature = "native")] +mod query; + +pub use call::CallMessage; +#[cfg(feature = "native")] +pub use query::{VecSetterRpcImpl, VecSetterRpcServer}; +use sov_modules_api::{Error, ModuleInfo}; +use sov_state::WorkingSet; + +/// Initial configuration for sov-vec-setter module. +pub struct VecSetterConfig { + /// Admin of the module. + pub admin: C::Address, +} + +/// A new module: +/// - Must derive `ModuleInfo` +/// - Must contain `[address]` field +/// - Can contain any number of ` #[state]` or `[module]` fields +#[cfg_attr(feature = "native", derive(sov_modules_api::ModuleCallJsonSchema))] +#[derive(ModuleInfo)] +pub struct VecSetter { + /// Address of the module. + #[address] + pub address: C::Address, + + /// Some vector kept in the state. + #[state] + pub vector: sov_state::StateVec, + + /// Holds the address of the admin user who is allowed to update the vector. + #[state] + pub admin: sov_state::StateValue, +} + +impl sov_modules_api::Module for VecSetter { + type Context = C; + + type Config = VecSetterConfig; + + type CallMessage = call::CallMessage; + + fn genesis( + &self, + config: &Self::Config, + working_set: &mut WorkingSet, + ) -> Result<(), Error> { + // The initialization logic + Ok(self.init_module(config, working_set)?) + } + + fn call( + &self, + msg: Self::CallMessage, + context: &Self::Context, + working_set: &mut WorkingSet, + ) -> Result { + match msg { + call::CallMessage::PushValue(new_value) => { + Ok(self.push_value(new_value, context, working_set)?) + } + call::CallMessage::SetValue { index, value } => { + Ok(self.set_value(index, value, context, working_set)?) + } + call::CallMessage::SetAllValues(values) => { + Ok(self.set_all_values(values, context, working_set)?) + } + call::CallMessage::PopValue => Ok(self.pop_value(context, working_set)?), + } + } +} diff --git a/module-system/module-implementations/examples/sov-vec-setter/src/query.rs b/module-system/module-implementations/examples/sov-vec-setter/src/query.rs new file mode 100644 index 000000000..17e4d710a --- /dev/null +++ b/module-system/module-implementations/examples/sov-vec-setter/src/query.rs @@ -0,0 +1,41 @@ +use jsonrpsee::core::RpcResult; +use sov_modules_api::macros::rpc_gen; +use sov_state::WorkingSet; + +use super::VecSetter; + +/// Response returned from the vecSetter_queryVec endpoint. +#[derive(serde::Serialize, serde::Deserialize, Debug, Eq, PartialEq, Clone)] +pub struct QueryResponse { + /// Value saved in the module's state vector. + pub value: Option, +} + +/// Response returned from the vecSetter_lenVec endpoint +#[derive(serde::Serialize, serde::Deserialize, Debug, Eq, PartialEq, Clone)] +pub struct LenResponse { + /// Length of the vector + pub value: usize, +} + +#[rpc_gen(client, server, namespace = "vecSetter")] +impl VecSetter { + /// Queries the state vector of the module. 
+ #[rpc_method(name = "queryVec")] + pub fn query_vec( + &self, + index: usize, + working_set: &mut WorkingSet, + ) -> RpcResult { + Ok(QueryResponse { + value: self.vector.get(index, working_set), + }) + } + /// Queries the length of the vector + #[rpc_method(name = "lenVec")] + pub fn len_vec(&self, working_set: &mut WorkingSet) -> RpcResult { + Ok(LenResponse { + value: self.vector.len(working_set), + }) + } +} diff --git a/module-system/module-implementations/examples/sov-vec-setter/tests/tests.rs b/module-system/module-implementations/examples/sov-vec-setter/tests/tests.rs new file mode 100644 index 000000000..d8109fa7b --- /dev/null +++ b/module-system/module-implementations/examples/sov-vec-setter/tests/tests.rs @@ -0,0 +1,55 @@ +use sov_modules_api::default_context::DefaultContext; +use sov_modules_api::{Address, Context, Module}; +use sov_state::{ProverStorage, WorkingSet}; +use sov_vec_setter::{CallMessage, VecSetter, VecSetterConfig}; + +// rustfmt doesn't like long lines, but it's easier to read in this case. +#[rustfmt::skip] +fn test_cases() -> Vec<(Address, CallMessage, Option>)> { + let admin = Address::from([1; 32]); + let not_admin = Address::from([2; 32]); + + // (sender, call, expected vec contents or None if call should fail) + vec![ + (admin, CallMessage::PushValue(1), Some(vec![1])), + (admin, CallMessage::PushValue(2), Some(vec![1, 2])), + (admin, CallMessage::PopValue, Some(vec![1])), + (not_admin, CallMessage::PopValue, None), + (admin, CallMessage::PopValue, Some(vec![])), + (not_admin, CallMessage::SetValue { index: 0, value: 10 }, None), + (admin, CallMessage::SetValue { index: 0, value: 10 }, None), + (admin, CallMessage::PushValue(8), Some(vec![8])), + (admin, CallMessage::SetValue { index: 0, value: 10 }, Some(vec![10])), + (admin, CallMessage::PushValue(0), Some(vec![10, 0])), + (admin, CallMessage::SetAllValues(vec![11, 12]), Some(vec![11, 12])), + (not_admin, CallMessage::SetAllValues(vec![]), None), + ] +} + +#[test] +#[cfg(feature = "native")] +fn test_vec_setter_calls() { + let tmpdir = tempfile::tempdir().unwrap(); + + let storage = ProverStorage::with_path(tmpdir.path()).unwrap(); + let mut working_set = WorkingSet::new(storage); + + let admin = Address::from([1; 32]); + let config = VecSetterConfig { admin }; + + let vec_setter = VecSetter::default(); + vec_setter.genesis(&config, &mut working_set).unwrap(); + + for (sender, call, expected_contents) in test_cases().iter().cloned() { + let context = DefaultContext::new(sender); + + let call_result = vec_setter.call(call, &context, &mut working_set); + + if call_result.is_ok() { + let vec_contents = vec_setter.vector.iter(&mut working_set).collect::>(); + assert_eq!(Some(vec_contents), expected_contents); + } else { + assert_eq!(expected_contents, None); + } + } +} diff --git a/module-system/module-implementations/integration-tests/Cargo.toml b/module-system/module-implementations/integration-tests/Cargo.toml index 7c92d1b9c..6cbe0677d 100644 --- a/module-system/module-implementations/integration-tests/Cargo.toml +++ b/module-system/module-implementations/integration-tests/Cargo.toml @@ -8,15 +8,21 @@ repository = { workspace = true } rust-version = { workspace = true } version = { workspace = true } readme = "README.md" -resolver = "2" publish = false +resolver = "2" [dev-dependencies] anyhow = { workspace = true } borsh = { workspace = true, features = ["rc"] } tempfile = { workspace = true } -sov-modules-api = { path = "../../sov-modules-api" } -sov-state = { path = "../../sov-state" } 
+sov-modules-api = { path = "../../sov-modules-api", features = ["native"] } +sov-state = { path = "../../sov-state", features = ["native"] } sov-rollup-interface = { path = "../../../rollup-interface" } sov-schema-db = { path = "../../../full-node/db/sov-schema-db" } +sov-data-generators = { path = "../../utils/sov-data-generators" } +sov-modules-stf-template = { path = "../../sov-modules-stf-template", features = ["native"] } +sov-modules-macros = { path = "../../sov-modules-macros", features = ["native"] } + +sov-chain-state = { path = "../sov-chain-state", features = ["native"] } +sov-value-setter = { path = "../examples/sov-value-setter", features = ["native"] } diff --git a/module-system/module-implementations/integration-tests/src/chain_state/helpers.rs b/module-system/module-implementations/integration-tests/src/chain_state/helpers.rs new file mode 100644 index 000000000..4b345b520 --- /dev/null +++ b/module-system/module-implementations/integration-tests/src/chain_state/helpers.rs @@ -0,0 +1,116 @@ +use sov_chain_state::{ChainState, ChainStateConfig}; +use sov_modules_api::capabilities::{BlobRefOrOwned, BlobSelector}; +use sov_modules_api::hooks::{ApplyBlobHooks, SlotHooks, TxHooks}; +use sov_modules_api::transaction::Transaction; +use sov_modules_api::{Context, PublicKey, Spec}; +use sov_modules_macros::{DefaultRuntime, DispatchCall, Genesis, MessageCodec}; +use sov_modules_stf_template::{AppTemplate, Runtime, SequencerOutcome}; +use sov_rollup_interface::da::{BlobReaderTrait, DaSpec}; +use sov_rollup_interface::mocks::MockZkvm; +use sov_value_setter::{ValueSetter, ValueSetterConfig}; + +#[derive(Genesis, DispatchCall, MessageCodec, DefaultRuntime)] +#[serialization(borsh::BorshDeserialize, borsh::BorshSerialize)] +pub(crate) struct TestRuntime { + pub value_setter: ValueSetter, + pub chain_state: ChainState, +} + +impl TxHooks for TestRuntime { + type Context = C; + + fn pre_dispatch_tx_hook( + &self, + tx: &Transaction, + _working_set: &mut sov_state::WorkingSet<::Storage>, + ) -> anyhow::Result<::Address> { + Ok(tx.pub_key().to_address()) + } + + fn post_dispatch_tx_hook( + &self, + _tx: &Transaction, + _working_set: &mut sov_state::WorkingSet<::Storage>, + ) -> anyhow::Result<()> { + Ok(()) + } +} + +impl ApplyBlobHooks for TestRuntime { + type Context = C; + type BlobResult = + SequencerOutcome<<::BlobTransaction as BlobReaderTrait>::Address>; + + fn begin_blob_hook( + &self, + _blob: &mut Da::BlobTransaction, + _working_set: &mut sov_state::WorkingSet<::Storage>, + ) -> anyhow::Result<()> { + Ok(()) + } + + fn end_blob_hook( + &self, + _result: Self::BlobResult, + _working_set: &mut sov_state::WorkingSet<::Storage>, + ) -> anyhow::Result<()> { + Ok(()) + } +} + +impl SlotHooks for TestRuntime { + type Context = C; + + fn begin_slot_hook( + &self, + slot_data: &impl sov_rollup_interface::services::da::SlotData, + working_set: &mut sov_state::WorkingSet<::Storage>, + ) { + self.chain_state.begin_slot_hook(slot_data, working_set) + } + + fn end_slot_hook( + &self, + _root_hash: [u8; 32], + _working_set: &mut sov_state::WorkingSet<::Storage>, + ) { + } +} + +impl BlobSelector for TestRuntime +where + C: Context, + Da: DaSpec, +{ + type Context = C; + + fn get_blobs_for_this_slot<'a, I>( + &self, + current_blobs: I, + _working_set: &mut sov_state::WorkingSet<::Storage>, + ) -> anyhow::Result>> + where + I: IntoIterator, + { + Ok(current_blobs.into_iter().map(Into::into).collect()) + } +} + +impl Runtime for TestRuntime {} + +pub(crate) fn create_demo_genesis_config( + admin: 
::Address, +) -> GenesisConfig { + let value_setter_config = ValueSetterConfig { admin }; + let chain_state_config = ChainStateConfig { + initial_slot_height: 0, + }; + GenesisConfig::new(value_setter_config, chain_state_config) +} + +/// Clones the [`AppTemplate`]'s [`Storage`] and extract the underlying [`WorkingSet`] +pub(crate) fn get_working_set( + app_template: &AppTemplate>, +) -> sov_state::WorkingSet<::Storage> { + sov_state::WorkingSet::new(app_template.current_storage.clone()) +} diff --git a/module-system/module-implementations/integration-tests/src/chain_state/mod.rs b/module-system/module-implementations/integration-tests/src/chain_state/mod.rs new file mode 100644 index 000000000..8018c49a3 --- /dev/null +++ b/module-system/module-implementations/integration-tests/src/chain_state/mod.rs @@ -0,0 +1,3 @@ +pub mod helpers; + +pub mod tests; diff --git a/module-system/module-implementations/integration-tests/src/chain_state/tests.rs b/module-system/module-implementations/integration-tests/src/chain_state/tests.rs new file mode 100644 index 000000000..6ef675bfd --- /dev/null +++ b/module-system/module-implementations/integration-tests/src/chain_state/tests.rs @@ -0,0 +1,168 @@ +use sov_chain_state::{StateTransitionId, TransitionInProgress}; +use sov_data_generators::value_setter_data::ValueSetterMessages; +use sov_data_generators::{has_tx_events, new_test_blob_from_batch, MessageGenerator}; +use sov_modules_api::default_context::DefaultContext; +use sov_modules_stf_template::{AppTemplate, SequencerOutcome}; +use sov_rollup_interface::mocks::{ + MockBlock, MockBlockHeader, MockDaSpec, MockHash, MockValidityCond, MockZkvm, +}; +use sov_rollup_interface::stf::StateTransitionFunction; +use sov_state::{ProverStorage, Storage}; + +use crate::chain_state::helpers::{create_demo_genesis_config, get_working_set, TestRuntime}; + +type C = DefaultContext; + +/// This test generates a new mock rollup having a simple value setter module +/// with an associated chain state, and checks that the height, the genesis hash +/// and the state transitions are correctly stored and updated. 
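// Expected progression, as asserted below: the slot height reads 0 right after genesis,
// 1 after the first `apply_slot`, and 2 after the second, while `get_genesis_hash` keeps
// returning the root produced by `init_chain`.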
+#[test] +fn test_simple_value_setter_with_chain_state() { + // Build an app template with the module configurations + let runtime = TestRuntime::default(); + + let tmpdir = tempfile::tempdir().unwrap(); + + let storage: ProverStorage = + ProverStorage::with_path(tmpdir.path()).unwrap(); + + let mut app_template = + AppTemplate::>::new(storage, runtime); + + let value_setter_messages = ValueSetterMessages::default(); + let value_setter = value_setter_messages.create_raw_txs::>(); + + let admin_pub_key = value_setter_messages.messages[0].admin.default_address(); + + // Genesis + let init_root_hash = app_template.init_chain(create_demo_genesis_config(admin_pub_key)); + + const MOCK_SEQUENCER_DA_ADDRESS: [u8; 32] = [1_u8; 32]; + + let blob = new_test_blob_from_batch( + sov_modules_stf_template::Batch { txs: value_setter }, + &MOCK_SEQUENCER_DA_ADDRESS, + [2; 32], + ); + + let slot_data: MockBlock = MockBlock { + curr_hash: [10; 32], + header: MockBlockHeader { + prev_hash: MockHash([0; 32]), + }, + height: 0, + validity_cond: MockValidityCond::default(), + blobs: Default::default(), + }; + + // Computes the initial working set + let mut working_set = get_working_set(&app_template); + + // Check the slot height before apply slot + let new_height_storage = app_template + .runtime + .chain_state + .get_slot_height(&mut working_set); + + assert_eq!(new_height_storage, 0, "The initial height was not computed"); + + let result = app_template.apply_slot(Default::default(), &slot_data, &mut [blob.clone()]); + + assert_eq!(1, result.batch_receipts.len()); + let apply_blob_outcome = result.batch_receipts[0].clone(); + assert_eq!( + SequencerOutcome::Rewarded(0), + apply_blob_outcome.inner, + "Sequencer execution should have succeeded but failed " + ); + + // Computes the new working set after slot application + let mut working_set = get_working_set(&app_template); + let chain_state_ref = &app_template.runtime.chain_state; + + // Get the new state root hash + let new_root_hash = app_template + .current_storage + .get_state_root(&Default::default()); + + // Check that the root hash has been stored correctly + let stored_root: [u8; 32] = chain_state_ref.get_genesis_hash(&mut working_set).unwrap(); + + assert_eq!(stored_root, init_root_hash.0, "Root hashes don't match"); + + // Check the slot height + let new_height_storage = chain_state_ref.get_slot_height(&mut working_set); + + assert_eq!(new_height_storage, 1, "The new height did not update"); + + // Check the tx in progress + let new_tx_in_progress: TransitionInProgress = chain_state_ref + .get_in_progress_transition(&mut working_set) + .unwrap(); + + assert_eq!( + new_tx_in_progress, + TransitionInProgress::::new([10; 32], MockValidityCond::default()), + "The new transition has not been correctly stored" + ); + + assert!(has_tx_events(&apply_blob_outcome),); + + // We apply a new transaction with the same values + let new_slot_data: MockBlock = MockBlock { + curr_hash: [20; 32], + header: MockBlockHeader { + prev_hash: MockHash([10; 32]), + }, + height: 1, + validity_cond: MockValidityCond::default(), + blobs: Default::default(), + }; + + let result = app_template.apply_slot(Default::default(), &new_slot_data, &mut [blob]); + + assert_eq!(1, result.batch_receipts.len()); + let apply_blob_outcome = result.batch_receipts[0].clone(); + assert_eq!( + SequencerOutcome::Rewarded(0), + apply_blob_outcome.inner, + "Sequencer execution should have succeeded but failed " + ); + + // Computes the new working set after slot application + let mut working_set = 
get_working_set(&app_template); + let chain_state_ref = &app_template.runtime.chain_state; + + // Check that the root hash has been stored correctly + let stored_root: [u8; 32] = chain_state_ref.get_genesis_hash(&mut working_set).unwrap(); + + assert_eq!(stored_root, init_root_hash.0, "Root hashes don't match"); + + // Check the slot height + let new_height_storage = chain_state_ref.get_slot_height(&mut working_set); + assert_eq!(new_height_storage, 2, "The new height did not update"); + + // Check the tx in progress + let new_tx_in_progress: TransitionInProgress = chain_state_ref + .get_in_progress_transition(&mut working_set) + .unwrap(); + + assert_eq!( + new_tx_in_progress, + TransitionInProgress::::new([20; 32], MockValidityCond::default()), + "The new transition has not been correctly stored" + ); + + let last_tx_stored: StateTransitionId = chain_state_ref + .get_historical_transitions(1, &mut working_set) + .unwrap(); + + assert_eq!( + last_tx_stored, + StateTransitionId::new( + [10; 32], + new_root_hash.unwrap(), + MockValidityCond::default() + ) + ); +} diff --git a/module-system/module-implementations/integration-tests/src/lib.rs b/module-system/module-implementations/integration-tests/src/lib.rs new file mode 100644 index 000000000..43b899ec3 --- /dev/null +++ b/module-system/module-implementations/integration-tests/src/lib.rs @@ -0,0 +1,5 @@ +#[cfg(test)] +mod chain_state; + +#[cfg(test)] +mod nested_modules; diff --git a/module-system/module-implementations/integration-tests/src/nested_modules/helpers.rs b/module-system/module-implementations/integration-tests/src/nested_modules/helpers.rs new file mode 100644 index 000000000..70122ab06 --- /dev/null +++ b/module-system/module-implementations/integration-tests/src/nested_modules/helpers.rs @@ -0,0 +1,82 @@ +use sov_modules_api::{Context, ModuleInfo}; +use sov_state::{StateMap, StateValue, WorkingSet}; + +pub mod module_a { + use super::*; + + #[derive(ModuleInfo)] + pub(crate) struct ModuleA { + #[address] + pub address_module_a: C::Address, + + #[state] + pub(crate) state_1_a: StateMap, + + #[state] + pub(crate) state_2_a: StateValue, + } + + impl ModuleA { + pub fn update(&mut self, key: &str, value: &str, working_set: &mut WorkingSet) { + working_set.add_event("module A", "update"); + self.state_1_a + .set(&key.to_owned(), &value.to_owned(), working_set); + self.state_2_a.set(&value.to_owned(), working_set) + } + } +} + +pub mod module_b { + use super::*; + + #[derive(ModuleInfo)] + pub(crate) struct ModuleB { + #[address] + pub address_module_b: C::Address, + + #[state] + state_1_b: StateMap, + + #[module] + pub(crate) mod_1_a: module_a::ModuleA, + } + + impl ModuleB { + pub fn update(&mut self, key: &str, value: &str, working_set: &mut WorkingSet) { + working_set.add_event("module B", "update"); + self.state_1_b + .set(&key.to_owned(), &value.to_owned(), working_set); + self.mod_1_a.update("key_from_b", value, working_set); + } + } +} + +pub(crate) mod module_c { + use super::*; + + #[derive(ModuleInfo)] + pub(crate) struct ModuleC { + #[address] + pub address: C::Address, + + #[module] + pub(crate) mod_1_a: module_a::ModuleA, + + #[module] + mod_1_b: module_b::ModuleB, + } + + impl ModuleC { + pub fn execute( + &mut self, + key: &str, + value: &str, + working_set: &mut WorkingSet, + ) { + working_set.add_event("module C", "execute"); + self.mod_1_a.update(key, value, working_set); + self.mod_1_b.update(key, value, working_set); + self.mod_1_a.update(key, value, working_set); + } + } +} diff --git 
a/module-system/module-implementations/integration-tests/src/nested_modules/mod.rs b/module-system/module-implementations/integration-tests/src/nested_modules/mod.rs new file mode 100644 index 000000000..e68e16bdf --- /dev/null +++ b/module-system/module-implementations/integration-tests/src/nested_modules/mod.rs @@ -0,0 +1,4 @@ +#[allow(dead_code)] +pub mod helpers; + +pub mod tests; diff --git a/module-system/module-implementations/integration-tests/src/nested_modules/tests.rs b/module-system/module-implementations/integration-tests/src/nested_modules/tests.rs new file mode 100644 index 000000000..5c5efc8f7 --- /dev/null +++ b/module-system/module-implementations/integration-tests/src/nested_modules/tests.rs @@ -0,0 +1,94 @@ +use sov_modules_api::default_context::{DefaultContext, ZkDefaultContext}; +use sov_modules_api::{Context, Prefix}; +use sov_rollup_interface::stf::Event; +use sov_state::{ProverStorage, StateMap, Storage, WorkingSet, ZkStorage}; + +use super::helpers::module_c; + +#[test] +fn nested_module_call_test() { + let tmpdir = tempfile::tempdir().unwrap(); + let native_storage = ProverStorage::with_path(tmpdir.path()).unwrap(); + let mut working_set = WorkingSet::new(native_storage.clone()); + + // Test the `native` execution. + { + execute_module_logic::(&mut working_set); + test_state_update::(&mut working_set); + } + assert_eq!( + working_set.events(), + &vec![ + Event::new("module C", "execute"), + Event::new("module A", "update"), + Event::new("module B", "update"), + Event::new("module A", "update"), + Event::new("module A", "update"), + ] + ); + + let (log, witness) = working_set.checkpoint().freeze(); + native_storage + .validate_and_commit(log, &witness) + .expect("State update is valid"); + + // Test the `zk` execution. + { + let zk_storage = ZkStorage::new([0u8; 32]); + let working_set = &mut WorkingSet::with_witness(zk_storage, witness); + execute_module_logic::(working_set); + test_state_update::(working_set); + } +} + +fn execute_module_logic(working_set: &mut WorkingSet) { + let module = &mut module_c::ModuleC::::default(); + module.execute("some_key", "some_value", working_set); +} + +fn test_state_update(working_set: &mut WorkingSet) { + let module = as Default>::default(); + + let expected_value = "some_value".to_owned(); + + { + let prefix = Prefix::new_storage( + "integration_tests::nested_modules::helpers::module_a", + "ModuleA", + "state_1_a", + ); + let state_map = StateMap::::new(prefix.into()); + let value = state_map.get(&"some_key".to_owned(), working_set).unwrap(); + + assert_eq!(expected_value, value); + } + + { + let prefix = Prefix::new_storage( + "integration_tests::nested_modules::helpers::module_b", + "ModuleB", + "state_1_b", + ); + let state_map = StateMap::::new(prefix.into()); + let value = state_map.get(&"some_key".to_owned(), working_set).unwrap(); + + assert_eq!(expected_value, value); + } + + { + let prefix = Prefix::new_storage( + "integration_tests::nested_modules::helpers::module_a", + "ModuleA", + "state_1_a", + ); + let state_map = StateMap::::new(prefix.into()); + let value = state_map.get(&"some_key".to_owned(), working_set).unwrap(); + + assert_eq!(expected_value, value); + } + + { + let value = module.mod_1_a.state_2_a.get(working_set).unwrap(); + assert_eq!(expected_value, value); + } +} diff --git a/module-system/module-implementations/integration-tests/tests/nested_modules_tests.rs b/module-system/module-implementations/integration-tests/tests/nested_modules_tests.rs deleted file mode 100644 index f8774ed03..000000000 --- 
a/module-system/module-implementations/integration-tests/tests/nested_modules_tests.rs +++ /dev/null @@ -1,160 +0,0 @@ -use sov_modules_api::default_context::{DefaultContext, ZkDefaultContext}; -use sov_modules_api::{Context, ModuleInfo, Prefix}; -use sov_rollup_interface::stf::Event; -use sov_state::{ProverStorage, StateMap, StateValue, Storage, WorkingSet, ZkStorage}; - -pub mod module_a { - use super::*; - - #[derive(ModuleInfo)] - pub(crate) struct ModuleA { - #[address] - pub address_module_a: C::Address, - - #[state] - pub(crate) state_1_a: StateMap, - - #[state] - pub(crate) state_2_a: StateValue, - } - - impl ModuleA { - pub fn update(&mut self, key: &str, value: &str, working_set: &mut WorkingSet) { - working_set.add_event("module A", "update"); - self.state_1_a - .set(&key.to_owned(), &value.to_owned(), working_set); - self.state_2_a.set(&value.to_owned(), working_set) - } - } -} - -pub mod module_b { - use super::*; - - #[derive(ModuleInfo)] - pub(crate) struct ModuleB { - #[address] - pub address_module_b: C::Address, - - #[state] - state_1_b: StateMap, - - #[module] - pub(crate) mod_1_a: module_a::ModuleA, - } - - impl ModuleB { - pub fn update(&mut self, key: &str, value: &str, working_set: &mut WorkingSet) { - working_set.add_event("module B", "update"); - self.state_1_b - .set(&key.to_owned(), &value.to_owned(), working_set); - self.mod_1_a.update("key_from_b", value, working_set); - } - } -} - -mod module_c { - use super::*; - - #[derive(ModuleInfo)] - pub(crate) struct ModuleC { - #[address] - pub address: C::Address, - - #[module] - pub(crate) mod_1_a: module_a::ModuleA, - - #[module] - mod_1_b: module_b::ModuleB, - } - - impl ModuleC { - pub fn execute( - &mut self, - key: &str, - value: &str, - working_set: &mut WorkingSet, - ) { - working_set.add_event("module C", "execute"); - self.mod_1_a.update(key, value, working_set); - self.mod_1_b.update(key, value, working_set); - self.mod_1_a.update(key, value, working_set); - } - } -} - -#[test] -fn nested_module_call_test() { - let tmpdir = tempfile::tempdir().unwrap(); - let native_storage = ProverStorage::with_path(tmpdir.path()).unwrap(); - let mut working_set = WorkingSet::new(native_storage.clone()); - - // Test the `native` execution. - { - execute_module_logic::(&mut working_set); - test_state_update::(&mut working_set); - } - assert_eq!( - working_set.events(), - &vec![ - Event::new("module C", "execute"), - Event::new("module A", "update"), - Event::new("module B", "update"), - Event::new("module A", "update"), - Event::new("module A", "update"), - ] - ); - - let (log, witness) = working_set.checkpoint().freeze(); - native_storage - .validate_and_commit(log, &witness) - .expect("State update is valid"); - - // Test the `zk` execution. 
- { - let zk_storage = ZkStorage::new([0u8; 32]); - let working_set = &mut WorkingSet::with_witness(zk_storage, witness); - execute_module_logic::(working_set); - test_state_update::(working_set); - } -} - -fn execute_module_logic(working_set: &mut WorkingSet) { - let module = &mut module_c::ModuleC::::default(); - module.execute("some_key", "some_value", working_set); -} - -fn test_state_update(working_set: &mut WorkingSet) { - let module = as Default>::default(); - - let expected_value = "some_value".to_owned(); - - { - let prefix = Prefix::new_storage("nested_modules_tests::module_a", "ModuleA", "state_1_a"); - let state_map = StateMap::::new(prefix.into()); - let value = state_map.get(&"some_key".to_owned(), working_set).unwrap(); - - assert_eq!(expected_value, value); - } - - { - let prefix = Prefix::new_storage("nested_modules_tests::module_b", "ModuleB", "state_1_b"); - let state_map = StateMap::::new(prefix.into()); - let value = state_map.get(&"some_key".to_owned(), working_set).unwrap(); - - assert_eq!(expected_value, value); - } - - { - let prefix = Prefix::new_storage("nested_modules_tests::module_a", "ModuleA", "state_1_a"); - let state_map = StateMap::::new(prefix.into()); - let value = state_map.get(&"some_key".to_owned(), working_set).unwrap(); - - assert_eq!(expected_value, value); - } - - { - let value = module.mod_1_a.state_2_a.get(working_set).unwrap(); - assert_eq!(expected_value, value); - } -} diff --git a/module-system/module-implementations/module-template/Cargo.toml b/module-system/module-implementations/module-template/Cargo.toml index ce00b2733..c6ca48549 100644 --- a/module-system/module-implementations/module-template/Cargo.toml +++ b/module-system/module-implementations/module-template/Cargo.toml @@ -13,21 +13,22 @@ resolver = "2" [dependencies] anyhow = { workspace = true } -sov-bank = { path = "../sov-bank", default-features = false } -sov-modules-api = { path = "../../sov-modules-api", default-features = false, features = ["macros"] } -sov-state = { path = "../../sov-state", default-features = false } -sov-rollup-interface = { path = "../../../rollup-interface" } +borsh = { workspace = true, features = ["rc"] } +thiserror = { workspace = true } schemars = { workspace = true, optional = true } serde = { workspace = true, optional = true } serde_json = { workspace = true, optional = true } -thiserror = { workspace = true } -borsh = { workspace = true, features = ["rc"] } -[dev-dependencies] +sov-bank = { path = "../sov-bank" } sov-modules-api = { path = "../../sov-modules-api" } +sov-state = { path = "../../sov-state" } +sov-rollup-interface = { path = "../../../rollup-interface" } + + +[dev-dependencies] tempfile = { workspace = true } +module-template = { path = ".", version = "*", features = ["native"] } [features] -default = ["native"] -serde = ["dep:serde", "dep:serde_json"] -native = ["serde", "sov-modules-api/native", "dep:schemars"] +default = [] +native = ["serde", "serde_json", "schemars", "sov-modules-api/native"] diff --git a/module-system/module-implementations/module-template/src/lib.rs b/module-system/module-implementations/module-template/src/lib.rs index ede00b279..df28481d0 100644 --- a/module-system/module-implementations/module-template/src/lib.rs +++ b/module-system/module-implementations/module-template/src/lib.rs @@ -1,11 +1,7 @@ mod call; mod genesis; - -#[cfg(test)] -mod tests; - #[cfg(feature = "native")] -mod query; +pub mod query; pub use call::CallMessage; #[cfg(feature = "native")] diff --git 
a/module-system/module-implementations/module-template/src/tests.rs b/module-system/module-implementations/module-template/tests/value_setter.rs similarity index 68% rename from module-system/module-implementations/module-template/src/tests.rs rename to module-system/module-implementations/module-template/tests/value_setter.rs index e58241478..415258c6f 100644 --- a/module-system/module-implementations/module-template/src/tests.rs +++ b/module-system/module-implementations/module-template/tests/value_setter.rs @@ -1,20 +1,25 @@ -use sov_modules_api::default_context::{DefaultContext, ZkDefaultContext}; +use module_template::{CallMessage, ExampleModule, ExampleModuleConfig, Response}; +#[cfg(feature = "native")] +use sov_modules_api::default_context::DefaultContext; +use sov_modules_api::default_context::ZkDefaultContext; use sov_modules_api::{Address, Context, Module}; use sov_rollup_interface::stf::Event; -use sov_state::{ProverStorage, WorkingSet, ZkStorage}; - -use super::ExampleModule; -use crate::{call, query, ExampleModuleConfig}; +use sov_state::{DefaultStorageSpec, ProverStorage, WorkingSet, ZkStorage}; #[test] fn test_value_setter() { let tmpdir = tempfile::tempdir().unwrap(); - let mut working_set = WorkingSet::new(ProverStorage::with_path(tmpdir.path()).unwrap()); + + #[cfg(feature = "native")] + let mut working_set = + WorkingSet::new(ProverStorage::::with_path(tmpdir.path()).unwrap()); + let admin = Address::from([1; 32]); // Test Native-Context + #[cfg(feature = "native")] { let config = ExampleModuleConfig {}; - let context = DefaultContext::new(admin.clone()); + let context = DefaultContext::new(admin); test_value_setter_helper(context, &config, &mut working_set); } @@ -38,7 +43,7 @@ fn test_value_setter_helper( module.genesis(config, working_set).unwrap(); let new_value = 99; - let call_msg = call::CallMessage::SetValue(new_value); + let call_msg = CallMessage::SetValue(new_value); // Test events { @@ -48,10 +53,11 @@ fn test_value_setter_helper( } // Test query + #[cfg(feature = "native")] { let query_response = module.query_value(working_set); assert_eq!( - query::Response { + Response { value: Some(new_value) }, query_response diff --git a/module-system/module-implementations/sov-accounts/Cargo.toml b/module-system/module-implementations/sov-accounts/Cargo.toml index e6bf60a73..d0ab3e840 100644 --- a/module-system/module-implementations/sov-accounts/Cargo.toml +++ b/module-system/module-implementations/sov-accounts/Cargo.toml @@ -13,6 +13,7 @@ resolver = "2" [dependencies] anyhow = { workspace = true } +arbitrary = { workspace = true, optional = true } borsh = { workspace = true, features = ["rc"] } schemars = { workspace = true, optional = true } serde = { workspace = true, optional = true } @@ -21,15 +22,14 @@ thiserror = { workspace = true } clap = { workspace = true, optional = true } jsonrpsee = { workspace = true, features = ["macros", "client-core", "server"], optional = true } -sov-modules-api = { path = "../../sov-modules-api", version = "0.1", default-features = false, features = ["macros"] } -sov-state = { path = "../../sov-state", version = "0.1", default-features = false } +sov-modules-api = { path = "../../sov-modules-api", version = "0.1" } +sov-state = { path = "../../sov-state", version = "0.1" } [dev-dependencies] -sov-modules-api = { path = "../../sov-modules-api", version = "0.1" } tempfile = { workspace = true } [features] -default = ["native"] -serde = ["dep:serde", "dep:serde_json"] -native = ["serde", "sov-state/native", "sov-modules-api/native", 
"dep:jsonrpsee", "dep:schemars", "dep:clap"] +default = [] +arbitrary = ["dep:arbitrary", "sov-state/arbitrary"] +native = ["serde", "serde_json", "jsonrpsee", "schemars", "clap", "sov-state/native", "sov-modules-api/native"] diff --git a/module-system/module-implementations/sov-accounts/src/call.rs b/module-system/module-implementations/sov-accounts/src/call.rs index 1b716284c..23f9b7c1d 100644 --- a/module-system/module-implementations/sov-accounts/src/call.rs +++ b/module-system/module-implementations/sov-accounts/src/call.rs @@ -1,5 +1,5 @@ use anyhow::{ensure, Result}; -use sov_modules_api::{CallResponse, Signature}; +use sov_modules_api::{CallResponse, Context, Signature}; use sov_state::WorkingSet; use crate::Accounts; @@ -20,7 +20,7 @@ pub const UPDATE_ACCOUNT_MSG: [u8; 32] = [1; 32]; ) )] #[derive(borsh::BorshDeserialize, borsh::BorshSerialize, Debug, PartialEq, Clone)] -pub enum CallMessage { +pub enum CallMessage { /// Updates a public key for the corresponding Account. /// The sender must be in possession of the new key. UpdatePublicKey( @@ -31,7 +31,7 @@ pub enum CallMessage { ), } -impl Accounts { +impl Accounts { pub(crate) fn update_public_key( &self, new_pub_key: C::PublicKey, @@ -72,3 +72,23 @@ impl Accounts { Ok(()) } } + +#[cfg(all(feature = "arbitrary", feature = "native"))] +impl<'a, C> arbitrary::Arbitrary<'a> for CallMessage +where + C: Context, + C::PrivateKey: arbitrary::Arbitrary<'a>, +{ + fn arbitrary(u: &mut arbitrary::Unstructured<'a>) -> arbitrary::Result { + use sov_modules_api::PrivateKey; + + let secret = C::PrivateKey::arbitrary(u)?; + let public = secret.pub_key(); + + let payload_len = u.arbitrary_len::()?; + let payload = u.bytes(payload_len)?; + let signature = secret.sign(payload); + + Ok(Self::UpdatePublicKey(public, signature)) + } +} diff --git a/module-system/module-implementations/sov-accounts/src/lib.rs b/module-system/module-implementations/sov-accounts/src/lib.rs index 8385a8693..f15b1fbbd 100644 --- a/module-system/module-implementations/sov-accounts/src/lib.rs +++ b/module-system/module-implementations/sov-accounts/src/lib.rs @@ -5,25 +5,32 @@ mod hooks; mod call; mod genesis; #[cfg(feature = "native")] -mod query; +pub mod query; #[cfg(test)] mod tests; pub use call::{CallMessage, UPDATE_ACCOUNT_MSG}; -#[cfg(feature = "native")] -pub use query::{AccountsRpcImpl, AccountsRpcServer, Response}; -use sov_modules_api::{Error, ModuleInfo}; +use sov_modules_api::{Context, Error, ModuleInfo}; use sov_state::WorkingSet; /// Initial configuration for sov-accounts module. -pub struct AccountConfig { +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct AccountConfig { /// Public keys to initialize the rollup. pub pub_keys: Vec, } +impl FromIterator for AccountConfig { + fn from_iter>(iter: T) -> Self { + Self { + pub_keys: iter.into_iter().collect(), + } + } +} + /// An account on the rollup. #[derive(borsh::BorshDeserialize, borsh::BorshSerialize, Debug, PartialEq, Copy, Clone)] -pub struct Account { +pub struct Account { /// The address of the account. pub addr: C::Address, /// The current nonce value associated with the account. @@ -33,7 +40,7 @@ pub struct Account { /// A module responsible for managing accounts on the rollup. #[cfg_attr(feature = "native", derive(sov_modules_api::ModuleCallJsonSchema))] #[derive(ModuleInfo, Clone)] -pub struct Accounts { +pub struct Accounts { /// The address of the sov-accounts module. 
#[address] pub address: C::Address, @@ -47,7 +54,7 @@ pub struct Accounts { pub(crate) accounts: sov_state::StateMap>, } -impl sov_modules_api::Module for Accounts { +impl sov_modules_api::Module for Accounts { type Context = C; type Config = AccountConfig; @@ -75,3 +82,56 @@ impl sov_modules_api::Module for Accounts { } } } + +#[cfg(feature = "arbitrary")] +impl<'a, C> arbitrary::Arbitrary<'a> for Account +where + C: Context, + C::Address: arbitrary::Arbitrary<'a>, +{ + fn arbitrary(u: &mut arbitrary::Unstructured<'a>) -> arbitrary::Result { + let addr = u.arbitrary()?; + let nonce = u.arbitrary()?; + Ok(Self { addr, nonce }) + } +} + +#[cfg(feature = "arbitrary")] +impl<'a, C> arbitrary::Arbitrary<'a> for AccountConfig +where + C: Context, + C::PublicKey: arbitrary::Arbitrary<'a>, +{ + fn arbitrary(u: &mut arbitrary::Unstructured<'a>) -> arbitrary::Result { + // TODO we might want a dedicated struct that will generate the private key counterpart so + // payloads can be signed and verified + Ok(Self { + pub_keys: u.arbitrary_iter()?.collect::>()?, + }) + } +} + +#[cfg(feature = "arbitrary")] +impl<'a, C> Accounts +where + C: Context, + C::Address: arbitrary::Arbitrary<'a>, + C::PublicKey: arbitrary::Arbitrary<'a>, +{ + /// Creates an arbitrary set of accounts and stores it under `working_set`. + pub fn arbitrary_workset( + u: &mut arbitrary::Unstructured<'a>, + working_set: &mut WorkingSet, + ) -> arbitrary::Result { + use sov_modules_api::Module; + + let config: AccountConfig = u.arbitrary()?; + let accounts = Accounts::default(); + + accounts + .genesis(&config, working_set) + .map_err(|_| arbitrary::Error::IncorrectFormat)?; + + Ok(accounts) + } +} diff --git a/module-system/module-implementations/sov-accounts/src/query.rs b/module-system/module-implementations/sov-accounts/src/query.rs index e89a5106d..28a854f76 100644 --- a/module-system/module-implementations/sov-accounts/src/query.rs +++ b/module-system/module-implementations/sov-accounts/src/query.rs @@ -1,4 +1,4 @@ -#![allow(missing_docs)] +//! Defines rpc queries exposed by the accounts module, along with the relevant types use jsonrpsee::core::RpcResult; use sov_modules_api::macros::rpc_gen; use sov_modules_api::AddressBech32; @@ -23,6 +23,7 @@ pub enum Response { #[rpc_gen(client, server, namespace = "accounts")] impl Accounts { #[rpc_method(name = "getAccount")] + /// Get the account corresponding to the given public key. 
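// Illustrative usage (not from the patched file): under the `accounts` namespace this is
// exposed as the `accounts_getAccount` JSON-RPC method. Assuming a local node on
// http://127.0.0.1:12345 (hypothetical endpoint) and with <PUB_KEY_JSON> standing in for
// the JSON encoding of a `C::PublicKey`:
//
//     curl -s -X POST -H "Content-Type: application/json" \
//       -d '{"jsonrpc":"2.0","method":"accounts_getAccount","params":[<PUB_KEY_JSON>],"id":1}' \
//       http://127.0.0.1:12345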
pub fn get_account( &self, pub_key: C::PublicKey, diff --git a/module-system/module-implementations/sov-accounts/src/tests.rs b/module-system/module-implementations/sov-accounts/src/tests.rs index 31471f096..c9b236b83 100644 --- a/module-system/module-implementations/sov-accounts/src/tests.rs +++ b/module-system/module-implementations/sov-accounts/src/tests.rs @@ -49,7 +49,7 @@ fn test_update_account() { let sender = priv_key.pub_key(); let sender_addr = sender.to_address::<::Address>(); - let sender_context = C::new(sender_addr.clone()); + let sender_context = C::new(sender_addr); // Test new account creation { @@ -142,7 +142,7 @@ fn test_get_account_after_pub_key_update() { let sender_1 = DefaultPrivateKey::generate().pub_key(); let sender_1_addr = sender_1.to_address::<::Address>(); - let sender_context_1 = C::new(sender_1_addr.clone()); + let sender_context_1 = C::new(sender_1_addr); accounts .create_default_account(sender_1, native_working_set) diff --git a/module-system/module-implementations/sov-attester-incentives/Cargo.toml b/module-system/module-implementations/sov-attester-incentives/Cargo.toml new file mode 100644 index 000000000..0dc126aab --- /dev/null +++ b/module-system/module-implementations/sov-attester-incentives/Cargo.toml @@ -0,0 +1,46 @@ +[package] +name = "sov-attester-incentives" +description = "A Sovereign SDK module for incentivizing provers" +authors = { workspace = true } +edition = { workspace = true } +homepage = { workspace = true } +license = { workspace = true } +repository = { workspace = true } +rust-version = { workspace = true } +version = { workspace = true } +readme = "README.md" +resolver = "2" + +[dev-dependencies] +sov-rollup-interface = { path = "../../../rollup-interface", version = "0.1", features = ["mocks"] } +sov-modules-api = { path = "../../sov-modules-api", version = "0.1" } +sov-attester-incentives = { path = ".", features = ["native"] } +tempfile = { workspace = true } + +[dependencies] +anyhow = { workspace = true } +borsh = { workspace = true, features = ["rc"] } +jmt = { workspace = true } +thiserror = { workspace = true } +serde = { workspace = true, optional = true } +serde_json = { workspace = true, optional = true } + +sov-bank = { path = "../sov-bank", version = "0.1" } +sov-chain-state = { path = "../sov-chain-state", version = "0.1" } +sov-modules-api = { path = "../../sov-modules-api", version = "0.1" } +sov-modules-macros = { path = "../../sov-modules-macros", version = "0.1" } +sov-state = { path = "../../sov-state", version = "0.1" } +sov-rollup-interface = { path = "../../../rollup-interface", version = "0.1" } + + +[features] +default = [] +native = [ + "serde", + "serde_json", + "sov-modules-api/native", + "sov-modules-macros/native", + "sov-bank/native", + "sov-chain-state/native", + "sov-state/native", +] diff --git a/module-system/module-implementations/sov-attester-incentives/README.md b/module-system/module-implementations/sov-attester-incentives/README.md new file mode 100644 index 000000000..925eeb735 --- /dev/null +++ b/module-system/module-implementations/sov-attester-incentives/README.md @@ -0,0 +1,8 @@ +# Attester Incentive module + +**_This module is a placeholder for the logic incentivizing attesters and challengers. This is the full node implementation of the optimistic rollup workflow_** + +This module implements the logic for processing optimistic rollup attestations and challenges. 
Such + logic is necessary if you want to reward attesters/challengers or do anything else that's "aware" of attestation and challenge generation inside your state transition function. + + This module now implements the complete attestation/challenge verification workflow, as well as the bonding and unbonding processes for attesters and challengers. diff --git a/module-system/module-implementations/sov-attester-incentives/src/call.rs b/module-system/module-implementations/sov-attester-incentives/src/call.rs new file mode 100644 index 000000000..b321df3e1 --- /dev/null +++ b/module-system/module-implementations/sov-attester-incentives/src/call.rs @@ -0,0 +1,777 @@ +use core::result::Result::Ok; +use std::fmt::Debug; + +use borsh::{BorshDeserialize, BorshSerialize}; +use sov_bank::{Amount, Coins}; +use sov_chain_state::TransitionHeight; +use sov_modules_api::{CallResponse, Spec}; +use sov_rollup_interface::da::DaSpec; +use sov_rollup_interface::optimistic::Attestation; +use sov_rollup_interface::zk::{StateTransition, ValidityConditionChecker, Zkvm}; +use sov_state::storage::StorageProof; +use sov_state::{Storage, WorkingSet}; +use thiserror::Error; + +use crate::{AttesterIncentives, UnbondingInfo}; + +/// This enumeration represents the available call messages for interacting with the `AttesterIncentives` module. +#[derive(BorshDeserialize, BorshSerialize, Debug)] +pub enum CallMessage { + /// Bonds an attester; the parameter is the bond amount + BondAttester(Amount), + /// Starts the first phase of the two-phase unbonding process + BeginUnbondingAttester, + /// Finishes the two-phase unbonding + EndUnbondingAttester, + /// Bonds a challenger; the parameter is the bond amount + BondChallenger(Amount), + /// Unbonds a challenger + UnbondChallenger, + /// Processes an attestation. + ProcessAttestation(Attestation::Storage as Storage>::Proof>>), + /// Processes a challenge. The challenge is encoded as a [`Vec`]. The second parameter is the transition number + ProcessChallenge(Vec, TransitionHeight), +} + +#[derive(Debug, Error, PartialEq, Eq)] +/// Error type that explains why a user is slashed +pub enum SlashingReason { + #[error("Transition isn't found")] + /// The specified transition does not exist + TransitionNotFound, + + #[error("The attestation does not contain the right block hash and post-state transition")] + /// The specified transition is invalid (block hash, post-root hash or validity condition) + TransitionInvalid, + + #[error("The initial hash of the transition is invalid")] + /// The initial hash of the transition is invalid. + InvalidInitialHash, + + #[error("The proof opening raised an error")] + /// The proof verification raised an error + InvalidProofOutputs, + + #[error("No invalid transition to challenge")] + /// No invalid transition to challenge. + NoInvalidTransition, +} + +/// Errors raised while processing attester incentives +#[derive(Debug, Error, PartialEq, Eq)] +pub enum AttesterIncentiveErrors { + #[error("Attester slashed")] + /// The user was slashed.
Reason specified by [`SlashingReason`] + UserSlashed(#[source] SlashingReason), + + #[error("Invalid bonding proof")] + /// The bonding proof was invalid + InvalidBondingProof, + + #[error("The sender key doesn't match the attester key provided in the proof")] + /// The sender key doesn't match the attester key provided in the proof + InvalidSender, + + #[error("Attester is unbonding")] + /// The attester is in the first unbonding phase + AttesterIsUnbonding, + + #[error("User is not trying to unbond at the time of the transaction")] + /// User is not trying to unbond at the time of the transaction + AttesterIsNotUnbonding, + + #[error("The first phase of unbonding has not been finalized")] + /// The attester is trying to finish the two-phase unbonding too soon + UnbondingNotFinalized, + + #[error("The bond is not a 64-bit number")] + /// The bond is not a 64-bit number + InvalidBondFormat, + + #[error("User is not bonded at the time of the transaction")] + /// User is not bonded at the time of the transaction + UserNotBonded, + + #[error("Transition invariant isn't respected")] + /// Transition invariant isn't respected + InvalidTransitionInvariant, + + #[error("Error occurred when transferring funds")] + /// An error occurred when transferring funds + TransferFailure, + + #[error("Error when trying to mint the reward token")] + /// An error occurred when trying to mint the reward token + MintFailure, +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +/// A role in the attestation process +pub enum Role { + /// A user who attests to new state transitions + Attester, + /// A user who challenges attestations + Challenger, +} + +impl AttesterIncentives +where + C: sov_modules_api::Context, + Vm: Zkvm, + Da: DaSpec, + Checker: ValidityConditionChecker, +{ + /// This returns the address of the reward token supply + pub fn get_reward_token_supply_address( + &self, + working_set: &mut WorkingSet, + ) -> C::Address { + self.reward_token_supply_address + .get(working_set) + .expect("The reward token supply address should be set at genesis") + } + + /// A helper function that simply slashes an attester and returns a reward value + fn slash_user( + &self, + user: &C::Address, + role: Role, + working_set: &mut WorkingSet, + ) -> u64 { + let bonded_set = match role { + Role::Attester => { + // We have to remove the attester from the unbonding set + // to prevent them from skipping the first unbonding phase + // if they bond themselves again.
+ self.unbonding_attesters.remove(user, working_set); + + &self.bonded_attesters + } + Role::Challenger => &self.bonded_challengers, + }; + + // We have to deplete the attester's bonded account, which amounts to removing the attester from the bonded set + let reward = bonded_set.get(user, working_set).unwrap_or_default(); + bonded_set.remove(user, working_set); + + // We raise an event + working_set.add_event("user_slashed", &format!("address {user:?}")); + + reward + } + + fn slash_burn_reward( + &self, + user: &C::Address, + role: Role, + reason: SlashingReason, + working_set: &mut WorkingSet, + ) -> AttesterIncentiveErrors { + self.slash_user(user, role, working_set); + AttesterIncentiveErrors::UserSlashed(reason) + } + + /// A helper function that is used to slash an attester, and put the associated attestation in the slashed pool + fn slash_and_invalidate_attestation( + &self, + attester: &C::Address, + height: TransitionHeight, + reason: SlashingReason, + working_set: &mut WorkingSet, + ) -> AttesterIncentiveErrors { + let reward = self.slash_user(attester, Role::Attester, working_set); + + let curr_reward_value = self + .bad_transition_pool + .get(&height, working_set) + .unwrap_or_default(); + + self.bad_transition_pool + .set(&height, &(curr_reward_value + reward), working_set); + + AttesterIncentiveErrors::UserSlashed(reason) + } + + fn reward_sender( + &self, + context: &C, + amount: u64, + working_set: &mut WorkingSet, + ) -> Result { + let reward_address = self + .reward_token_supply_address + .get(working_set) + .expect("The reward supply address must be set at genesis"); + + let coins = Coins { + token_address: self + .bonding_token_address + .get(working_set) + .expect("Bonding token address must be set"), + amount, + }; + + // Mint tokens and send them + self.bank + .mint_from_eoa( + &coins, + context.sender(), + &C::new(reward_address), + working_set, + ) + .map_err(|_err| AttesterIncentiveErrors::MintFailure)?; + + Ok(CallResponse::default()) + } + + /// A helper function for the `bond_challenger/attester` call. Also used to bond challengers/attesters + /// during genesis when no context is available. + pub(super) fn bond_user_helper( + &self, + bond_amount: u64, + user_address: &C::Address, + role: Role, + working_set: &mut WorkingSet, + ) -> Result { + // If the user is an attester, we have to check that they're not trying to unbond + if role == Role::Attester + && self + .unbonding_attesters + .get(user_address, working_set) + .is_some() + { + return Err(AttesterIncentiveErrors::AttesterIsUnbonding); + } + + // Transfer the bond amount from the sender to the module's address. + // On failure, no state is changed + let coins = Coins { + token_address: self + .bonding_token_address + .get(working_set) + .expect("Bonding token address must be set"), + amount: bond_amount, + }; + + self.bank + .transfer_from(user_address, &self.address, coins, working_set) + .map_err(|_err| AttesterIncentiveErrors::TransferFailure)?; + + let (balances, event_key) = match role { + Role::Attester => (&self.bonded_attesters, "bonded_attester"), + Role::Challenger => (&self.bonded_challengers, "bonded_challenger"), + }; + + // Update our record of the total bonded amount for the sender. + // This update is infallible, so no value can be destroyed.
+ let old_balance = balances.get(user_address, working_set).unwrap_or_default(); + let total_balance = old_balance + bond_amount; + balances.set(user_address, &total_balance, working_set); + + // Emit the bonding event + working_set.add_event( + event_key, + &format!("new_deposit: {bond_amount:?}. total_bond: {total_balance:?}"), + ); + + Ok(CallResponse::default()) + } + + /// Try to unbond the requested amount of coins with context.sender() as the beneficiary. + pub(crate) fn unbond_challenger( + &self, + context: &C, + working_set: &mut WorkingSet, + ) -> anyhow::Result { + // Get the user's old balance. + if let Some(old_balance) = self.bonded_challengers.get(context.sender(), working_set) { + // Transfer the bond amount from the sender to the module's address. + // On failure, no state is changed + self.reward_sender(context, old_balance, working_set)?; + + // Emit the unbonding event + working_set.add_event( + "unbonded_challenger", + &format!("amount_withdrawn: {old_balance:?}"), + ); + } + + Ok(CallResponse::default()) + } + + /// The attester starts the first phase of the two-phase unbonding. + /// We put the current max finalized height with the attester address + /// in the set of unbonding attesters if the attester + /// is already present in the unbonding set + pub(crate) fn begin_unbond_attester( + &self, + context: &C, + working_set: &mut WorkingSet, + ) -> anyhow::Result { + // First get the bonded attester + if let Some(bond) = self.bonded_attesters.get(context.sender(), working_set) { + let finalized_height = self + .light_client_finalized_height + .get(working_set) + .expect("Must be set at genesis"); + + // Remove the attester from the bonding set + self.bonded_attesters.remove(context.sender(), working_set); + + // Then add the bonded attester to the unbonding set, with the current finalized height + self.unbonding_attesters.set( + context.sender(), + &UnbondingInfo { + unbonding_initiated_height: finalized_height, + amount: bond, + }, + working_set, + ); + } + + Ok(CallResponse::default()) + } + + pub(crate) fn end_unbond_attester( + &self, + context: &C, + working_set: &mut WorkingSet, + ) -> anyhow::Result { + // We have to ensure that the attester is unbonding, and that the unbonding transaction + // occurred at least `finality_period` blocks ago to let the attester unbond + if let Some(unbonding_info) = self.unbonding_attesters.get(context.sender(), working_set) { + // These two constants should always be set beforehand, hence we can panic if they're not set + let curr_height = self + .light_client_finalized_height + .get(working_set) + .expect("Should be defined at genesis"); + let finality_period = self + .rollup_finality_period + .get(working_set) + .expect("Should be defined at genesis"); + + if unbonding_info + .unbonding_initiated_height + .saturating_add(finality_period) + > curr_height + { + return Err(AttesterIncentiveErrors::UnbondingNotFinalized); + } + + // Get the user's old balance. + // Transfer the bond amount from the sender to the module's address. + // On failure, no state is changed + self.reward_sender(context, unbonding_info.amount, working_set)?; + + // Update our internal tracking of the total bonded amount for the sender. 
+ self.bonded_attesters.remove(context.sender(), working_set); + self.unbonding_attesters + .remove(context.sender(), working_set); + + // Emit the unbonding event + working_set.add_event("unbonded_challenger", { + let amount = unbonding_info.amount; + &format!("amount_withdrawn: {:?}", amount) + }); + } else { + return Err(AttesterIncentiveErrors::AttesterIsNotUnbonding); + } + Ok(CallResponse::default()) + } + + /// The bonding proof is now a proof that an attester was bonded during the last `finality_period` range. + /// The proof must refer to a valid state of the rollup. The initial root hash must represent a state between + /// the bonding proof one and the current state. + fn check_bonding_proof( + &self, + context: &C, + attestation: &Attestation::Proof>>, + working_set: &mut WorkingSet, + ) -> anyhow::Result<(), AttesterIncentiveErrors> { + let bonding_root = { + // If we cannot get the transition before the current one, it means that we are trying + // to get the genesis state root + if let Some(transition) = self.chain_state.historical_transitions.get( + &(attestation.proof_of_bond.claimed_transition_num - 1), + working_set, + ) { + transition.post_state_root() + } else { + self.chain_state + .genesis_hash + .get(working_set) + .expect("The genesis hash should be set at genesis") + } + }; + + // This proof checks that the attester was bonded at the given transition num + let bond_opt = working_set + .backing() + .verify_proof( + bonding_root, + attestation.proof_of_bond.proof.clone(), + context.sender(), + &self.bonded_attesters, + ) + .map_err(|_err| AttesterIncentiveErrors::InvalidBondingProof)?; + + let bond = bond_opt.ok_or(AttesterIncentiveErrors::UserNotBonded)?; + let bond: u64 = BorshDeserialize::deserialize(&mut bond.value()) + .map_err(|_err| AttesterIncentiveErrors::InvalidBondFormat)?; + + let minimum_bond = self + .minimum_attester_bond + .get_or_err(working_set) + .expect("The minimum bond should be set at genesis"); + + // We then have to check that the bond was greater than the minimum bond + if bond < minimum_bond { + return Err(AttesterIncentiveErrors::UserNotBonded); + } + + Ok(()) + } + + fn check_transition( + &self, + claimed_transition_height: TransitionHeight, + attester: &C::Address, + attestation: &Attestation::Proof>>, + working_set: &mut WorkingSet, + ) -> anyhow::Result { + if let Some(curr_tx) = self + .chain_state + .historical_transitions + .get(&claimed_transition_height, working_set) + { + // We first need to compare the initial block hash to the previous post state root + if !curr_tx.compare_hashes(&attestation.da_block_hash, &attestation.post_state_root) { + // Check if the attestation has the same da_block_hash and post_state_root as the actual transition + // that we found in state. If not, slash the attester. + // If so, the attestation is valid, so return Ok + return Err(self.slash_and_invalidate_attestation( + attester, + claimed_transition_height, + SlashingReason::TransitionInvalid, + working_set, + )); + } + Ok(CallResponse::default()) + } else { + // Case where we cannot get the transition from the chain state historical transitions. 
+ Err(self.slash_burn_reward( + attester, + Role::Attester, + SlashingReason::TransitionNotFound, + working_set, + )) + } + } + + fn check_initial_hash( + &self, + claimed_transition_height: TransitionHeight, + attester: &C::Address, + attestation: &Attestation::Proof>>, + working_set: &mut WorkingSet, + ) -> anyhow::Result { + // Normal state + if let Some(transition) = self + .chain_state + .historical_transitions + .get(&claimed_transition_height.saturating_sub(1), working_set) + { + if transition.post_state_root() != attestation.initial_state_root { + // The initial root hashes don't match, just slash the attester + return Err(self.slash_burn_reward( + attester, + Role::Attester, + SlashingReason::InvalidInitialHash, + working_set, + )); + } + } else { + // Genesis state + // We can assume that the genesis hash is always set, otherwise we need to panic. + // We don't need to prove that the attester was bonded, simply need to check that the current bond is higher than the + // minimal bond and that the attester is not unbonding + + // We add a check here that the claimed transition height is the same as the genesis height. + if self + .chain_state + .genesis_height + .get(working_set) + .expect("Must be set at genesis") + != (claimed_transition_height - 1) + { + return Err(self.slash_burn_reward( + attester, + Role::Attester, + SlashingReason::TransitionNotFound, + working_set, + )); + } + + if self + .chain_state + .get_genesis_hash(working_set) + .expect("The initial hash should be set") + != attestation.initial_state_root + { + // Slash the attester, and burn the fees + return Err(self.slash_burn_reward( + attester, + Role::Attester, + SlashingReason::InvalidInitialHash, + working_set, + )); + } + + // Normal state + } + + Ok(CallResponse::default()) + } + + /// Try to process an attestation if the attester is bonded + pub(crate) fn process_attestation( + &self, + context: &C, + attestation: Attestation::Proof>>, + working_set: &mut WorkingSet, + ) -> anyhow::Result { + // We first need to check that the attester is still in the bonding set + if self + .bonded_attesters + .get(context.sender(), working_set) + .is_none() + { + return Err(AttesterIncentiveErrors::UserNotBonded); + } + + // If the bonding proof in the attestation is invalid, light clients will ignore the attestation. In that case, we should too. + self.check_bonding_proof(context, &attestation, working_set)?; + + // We suppose that these values are always defined, otherwise we panic + let last_attested_height = self + .maximum_attested_height + .get(working_set) + .expect("The maximum attested height should be set at genesis"); + let current_finalized_height = self + .light_client_finalized_height + .get(working_set) + .expect("The light client finalized height should be set at genesis"); + let finality = self + .rollup_finality_period + .get(working_set) + .expect("The rollup finality period should be set at genesis"); + + assert!( + current_finalized_height <= last_attested_height, + "The last attested height should always be below the current finalized height." 
+ ); + + // Update the max_attested_height in case the blocks have already been finalized + let new_height_to_attest = last_attested_height + 1; + + // Minimum height at which the proof of bond can be valid + let min_height = new_height_to_attest.saturating_sub(finality); + + // We have to check the following order invariant is respected: + // (height to attest - finality) <= bonding_proof.transition_num <= height to attest + // + // Which with our variable gives: + // min_height <= bonding_proof.transition_num <= new_height_to_attest + // If this invariant is respected, we can be sure that the attester was bonded at new_height_to_attest. + if !(min_height <= attestation.proof_of_bond.claimed_transition_num + && attestation.proof_of_bond.claimed_transition_num <= new_height_to_attest) + { + return Err(AttesterIncentiveErrors::InvalidTransitionInvariant); + } + + // First compare the initial hashes + self.check_initial_hash( + attestation.proof_of_bond.claimed_transition_num, + context.sender(), + &attestation, + working_set, + )?; + + // Then compare the transition + self.check_transition( + attestation.proof_of_bond.claimed_transition_num, + context.sender(), + &attestation, + working_set, + )?; + + working_set.add_event( + "processed_valid_attestation", + &format!("attester: {:?}", context.sender()), + ); + + // Now we have to check whether the claimed_transition_num is the max_attested_height. + // If so, update the maximum attested height and reward the sender + if attestation.proof_of_bond.claimed_transition_num == new_height_to_attest { + // Update the maximum attested height + self.maximum_attested_height + .set(&(new_height_to_attest), working_set); + + // Reward the sender + self.reward_sender( + context, + self.minimum_attester_bond + .get(working_set) + .expect("Should be defined at genesis"), + working_set, + )?; + } + + // Then we can optimistically process the transaction + Ok(CallResponse::default()) + } + + fn check_challenge_outputs_against_transition( + &self, + public_outputs: StateTransition, + height: &TransitionHeight, + condition_checker: &mut impl ValidityConditionChecker, + working_set: &mut WorkingSet, + ) -> anyhow::Result<(), SlashingReason> { + let transition = self + .chain_state + .historical_transitions + .get_or_err(height, working_set) + .map_err(|_| SlashingReason::TransitionInvalid)?; + + let initial_hash = { + if let Some(prev_transition) = self + .chain_state + .historical_transitions + .get(&height.saturating_sub(1), working_set) + { + prev_transition.post_state_root() + } else { + self.chain_state + .genesis_hash + .get(working_set) + .expect("The genesis hash should be set") + } + }; + + if public_outputs.initial_state_root != initial_hash { + return Err(SlashingReason::InvalidInitialHash); + } + + if public_outputs.slot_hash != transition.da_block_hash() { + return Err(SlashingReason::TransitionInvalid); + } + + if public_outputs.validity_condition != *transition.validity_condition() { + return Err(SlashingReason::TransitionInvalid); + } + + // TODO: Should we compare the validity conditions of the public outputs with the ones of the recorded transition? + condition_checker + .check(&public_outputs.validity_condition) + .map_err(|_err| SlashingReason::TransitionInvalid)?; + + Ok(()) + } + + /// Try to process a zk proof if the challenger is bonded. 
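The order invariant spelled out in the comments of `process_attestation` above is worth isolating: a bonding proof is accepted only if it is anchored at most `finality` transitions behind the height being attested, and never ahead of it. A minimal, runnable sketch of that window check, treating `TransitionHeight` as a plain `u64` (an assumption about the alias, not something stated in this diff):

```rust
/// The order invariant enforced in `process_attestation`:
/// (height_to_attest - finality) <= claimed_transition_num <= height_to_attest.
fn proof_of_bond_in_window(claimed_transition_num: u64, height_to_attest: u64, finality: u64) -> bool {
    // Saturating subtraction covers the early blocks where the height is below the finality period.
    let min_height = height_to_attest.saturating_sub(finality);
    min_height <= claimed_transition_num && claimed_transition_num <= height_to_attest
}

fn main() {
    // With finality = 3 and height 10 to attest, proofs anchored at 7..=10 pass;
    // 6 is too old (the light client may already treat it as final) and 11 is in the future.
    assert!(proof_of_bond_in_window(7, 10, 3));
    assert!(proof_of_bond_in_window(10, 10, 3));
    assert!(!proof_of_bond_in_window(6, 10, 3));
    assert!(!proof_of_bond_in_window(11, 10, 3));
}
```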
+ pub(crate) fn process_challenge( + &self, + context: &C, + proof: &[u8], + transition_num: &TransitionHeight, + working_set: &mut WorkingSet, + ) -> anyhow::Result { + // Get the challenger's old balance. + // Revert if they aren't bonded + let old_balance = self + .bonded_challengers + .get_or_err(context.sender(), working_set) + .map_err(|_| AttesterIncentiveErrors::UserNotBonded)?; + + // Check that the challenger has enough balance to process the proof. + let minimum_bond = self + .minimum_challenger_bond + .get(working_set) + .expect("Should be set at genesis"); + + if old_balance < minimum_bond { + return Err(AttesterIncentiveErrors::UserNotBonded); + } + + let code_commitment = self + .commitment_to_allowed_challenge_method + .get(working_set) + .expect("Should be set at genesis") + .commitment; + + // Find the faulty attestation pool and get the associated reward + let attestation_reward: u64 = self + .bad_transition_pool + .get_or_err(transition_num, working_set) + .map_err(|_| { + self.slash_burn_reward( + context.sender(), + Role::Challenger, + SlashingReason::NoInvalidTransition, + working_set, + ) + })?; + + let public_outputs_opt: anyhow::Result> = + Vm::verify_and_extract_output::( + proof, + &code_commitment, + ) + .map_err(|e| anyhow::format_err!("{:?}", e)); + + // Don't return an error for invalid proofs - those are expected and shouldn't cause reverts. + match public_outputs_opt { + Ok(public_output) => { + // We get the validity condition checker from the state + let mut validity_checker = self + .validity_cond_checker + .get(working_set) + .expect("Should be defined at genesis"); + + // We have to perform the checks to ensure that the challenge is valid while the attestation isn't. + self.check_challenge_outputs_against_transition( + public_output, + transition_num, + &mut validity_checker, + working_set, + ) + .map_err(|err| { + self.slash_burn_reward(context.sender(), Role::Challenger, err, working_set) + })?; + + // Reward the challenger with half of the attestation reward (avoid DOS) + self.reward_sender(context, attestation_reward / 2, working_set)?; + + // Now remove the bad transition from the pool + self.bad_transition_pool.remove(transition_num, working_set); + + working_set.add_event( + "processed_valid_proof", + &format!("challenger: {:?}", context.sender()), + ); + } + Err(_err) => { + // Slash the challenger + return Err(self.slash_burn_reward( + context.sender(), + Role::Challenger, + SlashingReason::InvalidProofOutputs, + working_set, + )); + } + } + + Ok(CallResponse::default()) + } +} diff --git a/module-system/module-implementations/sov-attester-incentives/src/genesis.rs b/module-system/module-implementations/sov-attester-incentives/src/genesis.rs new file mode 100644 index 000000000..a845abb8c --- /dev/null +++ b/module-system/module-implementations/sov-attester-incentives/src/genesis.rs @@ -0,0 +1,65 @@ +use anyhow::Result; +use borsh::{BorshDeserialize, BorshSerialize}; +use sov_rollup_interface::da::DaSpec; +use sov_rollup_interface::zk::{ValidityConditionChecker, Zkvm}; +use sov_state::{Storage, WorkingSet}; + +use crate::call::Role; +use crate::AttesterIncentives; + +impl AttesterIncentives +where + C: sov_modules_api::Context, + Vm: Zkvm, + S: Storage, + P: BorshDeserialize + BorshSerialize, + Da: DaSpec, + Checker: ValidityConditionChecker, +{ + pub(crate) fn init_module( + &self, + config: &::Config, + working_set: &mut WorkingSet, + ) -> Result<()> { + anyhow::ensure!( + !config.initial_attesters.is_empty(), + "At least one prover must be 
set at genesis!" + ); + + self.minimum_attester_bond + .set(&config.minimum_attester_bond, working_set); + self.minimum_challenger_bond + .set(&config.minimum_challenger_bond, working_set); + + self.commitment_to_allowed_challenge_method.set( + &crate::StoredCodeCommitment { + commitment: config.commitment_to_allowed_challenge_method.clone(), + }, + working_set, + ); + + self.rollup_finality_period + .set(&config.rollup_finality_period, working_set); + + self.bonding_token_address + .set(&config.bonding_token_address, working_set); + + self.reward_token_supply_address + .set(&config.reward_token_supply_address, working_set); + + for (attester, bond) in config.initial_attesters.iter() { + self.bond_user_helper(*bond, attester, Role::Attester, working_set)?; + } + + self.maximum_attested_height + .set(&config.maximum_attested_height, working_set); + + self.light_client_finalized_height + .set(&config.light_client_finalized_height, working_set); + + self.validity_cond_checker + .set(&config.validity_condition_checker, working_set); + + Ok(()) + } +} diff --git a/module-system/module-implementations/sov-attester-incentives/src/lib.rs b/module-system/module-implementations/sov-attester-incentives/src/lib.rs new file mode 100644 index 000000000..a8360e4d7 --- /dev/null +++ b/module-system/module-implementations/sov-attester-incentives/src/lib.rs @@ -0,0 +1,208 @@ +#![deny(missing_docs)] +#![doc = include_str!("../README.md")] + +/// Call methods for the module +pub mod call; + +/// Methods used to instantiate the module +pub mod genesis; + +#[cfg(test)] +mod tests; + +#[cfg(feature = "native")] +pub mod query; + +use std::marker::PhantomData; + +use borsh::{BorshDeserialize, BorshSerialize}; +use call::Role; +use sov_bank::Amount; +use sov_chain_state::TransitionHeight; +use sov_modules_api::{Context, Error}; +use sov_modules_macros::ModuleInfo; +use sov_rollup_interface::da::DaSpec; +use sov_rollup_interface::zk::{StoredCodeCommitment, ValidityConditionChecker, Zkvm}; +use sov_state::{Storage, WorkingSet}; + +/// Configuration of the attester incentives module +pub struct AttesterIncentivesConfig +where + C: Context, + Vm: Zkvm, + Da: DaSpec, + Checker: ValidityConditionChecker, +{ + /// The address of the token to be used for bonding. + pub bonding_token_address: C::Address, + /// The address of the account holding the reward token supply + pub reward_token_supply_address: C::Address, + /// The minimum bond for an attester. + pub minimum_attester_bond: Amount, + /// The minimum bond for a challenger. + pub minimum_challenger_bond: Amount, + /// A code commitment to be used for verifying proofs + pub commitment_to_allowed_challenge_method: Vm::CodeCommitment, + /// A list of initial provers and their bonded amount. + pub initial_attesters: Vec<(C::Address, Amount)>, + /// The finality period of the rollup (constant) in the number of DA layer slots processed. 
+ pub rollup_finality_period: TransitionHeight,
+ /// The current maximum attested height
+ pub maximum_attested_height: TransitionHeight,
+ /// The light client finalized height
+ pub light_client_finalized_height: TransitionHeight,
+ /// The validity condition checker used to check validity conditions
+ pub validity_condition_checker: Checker,
+ /// Phantom data that contains the validity condition
+ phantom_data: PhantomData,
+}
+
+/// The information about an attester's unbonding
+#[derive(BorshDeserialize, BorshSerialize, Clone, Debug)]
+pub struct UnbondingInfo {
+ /// The height at which an attester started unbonding
+ pub unbonding_initiated_height: TransitionHeight,
+ /// The number of tokens that the attester may withdraw
+ pub amount: Amount,
+}
+
+/// A new module:
+/// - Must derive `ModuleInfo`
+/// - Must contain an `#[address]` field
+/// - Can contain any number of `#[state]` or `#[module]` fields
+#[derive(ModuleInfo)]
+pub struct AttesterIncentives<C, Vm, Da, Checker>
+where
+ C: Context,
+ Vm: Zkvm,
+ Da: DaSpec,
+ Checker: ValidityConditionChecker<Da::ValidityCondition>,
+{
+ /// Address of the module.
+ #[address]
+ pub address: C::Address,
+
+ /// The amount of time it takes for a light client to be confident
+ /// that an attested state transition won't be challenged. Measured in
+ /// number of slots.
+ #[state]
+ pub rollup_finality_period: sov_state::StateValue<TransitionHeight>,
+
+ /// The address of the token used for bonding attesters and challengers
+ #[state]
+ pub bonding_token_address: sov_state::StateValue<C::Address>,
+
+ /// The address of the account holding the reward token supply
+ /// TODO: maybe mint the token before transferring it? The mint method is private in bank
+ /// so we need a reward address that contains the supply.
+ #[state]
+ pub reward_token_supply_address: sov_state::StateValue<C::Address>,
+
+ /// The code commitment to be used for verifying proofs
+ #[state]
+ pub commitment_to_allowed_challenge_method: sov_state::StateValue<StoredCodeCommitment<Vm>>,
+
+ /// Constant validity condition checker for the module.
+ #[state]
+ pub validity_cond_checker: sov_state::StateValue<Checker>,
+
+ /// The set of bonded attesters and their bonded amount.
+ #[state]
+ pub bonded_attesters: sov_state::StateMap<C::Address, Amount>,
+
+ /// The set of unbonding attesters, and the unbonding information (i.e. the
+ /// height of the chain where they started the unbonding and their associated bond).
+ #[state]
+ pub unbonding_attesters: sov_state::StateMap<C::Address, UnbondingInfo>,
+
+ /// The current maximum attestation height
+ #[state]
+ pub maximum_attested_height: sov_state::StateValue<TransitionHeight>,
+
+ /// Challengers now challenge a transition and not a specific attestation
+ /// Mapping from a transition number to the associated reward value.
+ /// This mapping is populated when the attestations are processed by the rollup
+ #[state]
+ pub bad_transition_pool: sov_state::StateMap<TransitionHeight, Amount>,
+
+ /// The set of bonded challengers and their bonded amount.
+ #[state]
+ pub bonded_challengers: sov_state::StateMap<C::Address, Amount>,
+
+ /// The minimum bond for an attester to be eligible
+ #[state]
+ pub minimum_attester_bond: sov_state::StateValue<Amount>,
+
+ /// The minimum bond for a challenger to be eligible
+ #[state]
+ pub minimum_challenger_bond: sov_state::StateValue<Amount>,
+
+ /// The height of the most recent block which light clients know to be finalized
+ #[state]
+ pub light_client_finalized_height: sov_state::StateValue<TransitionHeight>,
+
+ /// Reference to the Bank module.
+ #[module]
+ pub(crate) bank: sov_bank::Bank<C>,
+
+ /// Reference to the chain state module, used to check the initial hashes of the state transition.
+ #[module] + pub(crate) chain_state: sov_chain_state::ChainState, +} + +impl sov_modules_api::Module for AttesterIncentives +where + C: sov_modules_api::Context, + Vm: Zkvm, + S: Storage, + P: BorshDeserialize + BorshSerialize, + Da: DaSpec, + Checker: ValidityConditionChecker, +{ + type Context = C; + + type Config = AttesterIncentivesConfig; + + type CallMessage = call::CallMessage; + + fn genesis( + &self, + config: &Self::Config, + working_set: &mut WorkingSet, + ) -> Result<(), Error> { + // The initialization logic + Ok(self.init_module(config, working_set)?) + } + + fn call( + &self, + msg: Self::CallMessage, + context: &Self::Context, + working_set: &mut WorkingSet, + ) -> Result { + match msg { + call::CallMessage::BondAttester(bond_amount) => self + .bond_user_helper(bond_amount, context.sender(), Role::Attester, working_set) + .map_err(|err| err.into()), + call::CallMessage::BeginUnbondingAttester => self + .begin_unbond_attester(context, working_set) + .map_err(|error| error.into()), + + call::CallMessage::EndUnbondingAttester => self + .end_unbond_attester(context, working_set) + .map_err(|error| error.into()), + call::CallMessage::BondChallenger(bond_amount) => self + .bond_user_helper(bond_amount, context.sender(), Role::Challenger, working_set) + .map_err(|err| err.into()), + call::CallMessage::UnbondChallenger => self.unbond_challenger(context, working_set), + call::CallMessage::ProcessAttestation(attestation) => self + .process_attestation(context, attestation, working_set) + .map_err(|error| error.into()), + + call::CallMessage::ProcessChallenge(proof, transition) => self + .process_challenge(context, &proof, &transition, working_set) + .map_err(|error| error.into()), + } + .map_err(|e| e.into()) + } +} diff --git a/module-system/module-implementations/sov-attester-incentives/src/query.rs b/module-system/module-implementations/sov-attester-incentives/src/query.rs new file mode 100644 index 000000000..61297774c --- /dev/null +++ b/module-system/module-implementations/sov-attester-incentives/src/query.rs @@ -0,0 +1,78 @@ +//! Defines the query methods for the attester incentives module +use serde::{Deserialize, Serialize}; +use sov_modules_api::Spec; +use sov_rollup_interface::da::DaSpec; +use sov_rollup_interface::zk::{ValidityConditionChecker, Zkvm}; +use sov_state::storage::{NativeStorage, StorageProof}; +use sov_state::{Storage, WorkingSet}; + +use super::AttesterIncentives; +use crate::call::Role; + +/// The response type to the `getBondAmount` query. +#[derive(Serialize, Deserialize, Debug, Eq, PartialEq)] +pub struct BondAmountResponse { + /// The value of the bond + pub value: u64, +} + +// TODO: implement rpc_gen macro +impl AttesterIncentives +where + C: sov_modules_api::Context, + Vm: Zkvm, + Da: DaSpec, + Checker: ValidityConditionChecker, +{ + /// Queries the state of the module. + pub fn get_bond_amount( + &self, + address: C::Address, + role: Role, + working_set: &mut WorkingSet, + ) -> BondAmountResponse { + match role { + Role::Attester => BondAmountResponse { + value: self + .bonded_attesters + .get(&address, working_set) + .unwrap_or_default(), + }, + Role::Challenger => BondAmountResponse { + value: self + .bonded_challengers + .get(&address, working_set) + .unwrap_or_default(), + }, + } + } + + /// Used by attesters to get a proof that they were bonded before starting to produce attestations. + /// A bonding proof is valid for `max_finality_period` blocks, the attester can only produce transition + /// attestations for this specific amount of time. 
+ pub fn get_bond_proof( + &self, + address: C::Address, + witness: &<::Storage as Storage>::Witness, + working_set: &mut WorkingSet, + ) -> StorageProof<::Proof> + where + C::Storage: NativeStorage, + { + working_set.backing().get_with_proof_from_state_map( + &address, + &self.bonded_attesters, + witness, + ) + } + + /// TODO: Make the unbonding amount queryable: + pub fn get_unbonding_amount( + &self, + _address: C::Address, + _witness: &<::Storage as Storage>::Witness, + _working_set: &mut WorkingSet, + ) -> u64 { + todo!("Make the unbonding amount queryable: https://github.com/Sovereign-Labs/sovereign-sdk/issues/675") + } +} diff --git a/module-system/module-implementations/sov-attester-incentives/src/tests/attestation_proccessing.rs b/module-system/module-implementations/sov-attester-incentives/src/tests/attestation_proccessing.rs new file mode 100644 index 000000000..7be107a73 --- /dev/null +++ b/module-system/module-implementations/sov-attester-incentives/src/tests/attestation_proccessing.rs @@ -0,0 +1,282 @@ +use sov_modules_api::default_context::DefaultContext; +use sov_rollup_interface::optimistic::Attestation; +use sov_state::{ProverStorage, WorkingSet}; + +use crate::call::AttesterIncentiveErrors; +use crate::tests::helpers::{ + execution_simulation, setup, BOND_AMOUNT, INITIAL_BOND_AMOUNT, INIT_HEIGHT, +}; + +/// Start by testing the positive case where the attestations are valid +#[test] +fn test_process_valid_attestation() { + let tmpdir = tempfile::tempdir().unwrap(); + let storage = ProverStorage::with_path(tmpdir.path()).unwrap(); + let mut working_set = WorkingSet::new(storage.clone()); + let (module, token_address, attester_address, _) = setup(&mut working_set); + + // Assert that the attester has the correct bond amount before processing the proof + assert_eq!( + module + .get_bond_amount( + attester_address, + crate::call::Role::Attester, + &mut working_set + ) + .value, + BOND_AMOUNT + ); + + // Simulate the execution of a chain, with the genesis hash and two transitions after. 
+ // Update the chain_state module and the optimistic module accordingly + let (mut exec_vars, mut working_set) = + execution_simulation(3, &module, &storage, attester_address, working_set); + + let context = DefaultContext { + sender: attester_address, + }; + + let transition_2 = exec_vars.pop().unwrap(); + let transition_1 = exec_vars.pop().unwrap(); + let initial_transition = exec_vars.pop().unwrap(); + + // Process a valid attestation for the first transition + { + let attestation = Attestation { + initial_state_root: initial_transition.state_root, + da_block_hash: [1; 32], + post_state_root: transition_1.state_root, + proof_of_bond: sov_rollup_interface::optimistic::ProofOfBond { + claimed_transition_num: INIT_HEIGHT + 1, + proof: initial_transition.state_proof, + }, + }; + + module + .process_attestation(&context, attestation, &mut working_set) + .expect("An invalid proof is an error"); + } + + // We can now proceed with the next attestation + { + let attestation = Attestation { + initial_state_root: transition_1.state_root, + da_block_hash: [2; 32], + post_state_root: transition_2.state_root, + proof_of_bond: sov_rollup_interface::optimistic::ProofOfBond { + claimed_transition_num: INIT_HEIGHT + 2, + proof: transition_1.state_proof, + }, + }; + + module + .process_attestation(&context, attestation, &mut working_set) + .expect("An invalid proof is an error"); + } + + // Assert that the attester's bond amount has not been burned + assert_eq!( + module + .get_bond_amount( + attester_address, + crate::call::Role::Attester, + &mut working_set + ) + .value, + BOND_AMOUNT + ); + + // Assert that the attester has been awarded the tokens + assert_eq!( + module + .bank + .get_balance_of(attester_address, token_address, &mut working_set) + .unwrap(), + // The attester is bonded at the beginning so he loses BOND_AMOUNT + INITIAL_BOND_AMOUNT - BOND_AMOUNT + 2 * BOND_AMOUNT + ); +} + +#[test] +fn test_burn_on_invalid_attestation() { + let tmpdir = tempfile::tempdir().unwrap(); + let storage = ProverStorage::with_path(tmpdir.path()).unwrap(); + let mut working_set = WorkingSet::new(storage.clone()); + let (module, _token_address, attester_address, _) = setup(&mut working_set); + + // Assert that the prover has the correct bond amount before processing the proof + assert_eq!( + module + .get_bond_amount( + attester_address, + crate::call::Role::Attester, + &mut working_set + ) + .value, + BOND_AMOUNT + ); + + // Simulate the execution of a chain, with the genesis hash and two transitions after. + // Update the chain_state module and the optimistic module accordingly + let (mut exec_vars, mut working_set) = + execution_simulation(3, &module, &storage, attester_address, working_set); + + let transition_2 = exec_vars.pop().unwrap(); + let transition_1 = exec_vars.pop().unwrap(); + let initial_transition = exec_vars.pop().unwrap(); + + let context = DefaultContext { + sender: attester_address, + }; + + // Process an invalid proof for genesis: everything is correct except the storage proof. + // Must simply return an error. Cannot burn the token at this point because we don't know if the + // sender is bonded or not. 
+ { + let attestation = Attestation { + initial_state_root: initial_transition.state_root, + da_block_hash: [1; 32], + post_state_root: transition_1.state_root, + proof_of_bond: sov_rollup_interface::optimistic::ProofOfBond { + claimed_transition_num: INIT_HEIGHT + 1, + proof: transition_1.state_proof.clone(), + }, + }; + + let attestation_error = module + .process_attestation(&context, attestation, &mut working_set) + .unwrap_err(); + + assert_eq!( + attestation_error, + AttesterIncentiveErrors::InvalidBondingProof, + "The bonding proof should fail" + ); + } + + // Assert that the prover's bond amount has not been burned + assert_eq!( + module + .get_bond_amount( + attester_address, + crate::call::Role::Attester, + &mut working_set + ) + .value, + BOND_AMOUNT + ); + + // Now proccess a valid attestation for genesis. + { + let attestation = Attestation { + initial_state_root: initial_transition.state_root, + da_block_hash: [1; 32], + post_state_root: transition_1.state_root, + proof_of_bond: sov_rollup_interface::optimistic::ProofOfBond { + claimed_transition_num: INIT_HEIGHT + 1, + proof: initial_transition.state_proof, + }, + }; + + module + .process_attestation(&context, attestation, &mut working_set) + .expect("An invalid proof is an error"); + } + + // Then process a new attestation having the wrong initial state root. The attester must be slashed, and the fees burnt + { + let attestation = Attestation { + initial_state_root: initial_transition.state_root, + da_block_hash: [2; 32], + post_state_root: transition_2.state_root, + proof_of_bond: sov_rollup_interface::optimistic::ProofOfBond { + claimed_transition_num: INIT_HEIGHT + 2, + proof: transition_1.state_proof.clone(), + }, + }; + + let attestation_error = module + .process_attestation(&context, attestation, &mut working_set) + .unwrap_err(); + + assert_eq!( + attestation_error, + AttesterIncentiveErrors::UserSlashed(crate::call::SlashingReason::InvalidInitialHash) + ) + } + + // Check that the attester's bond has been burnt + assert_eq!( + module + .get_bond_amount( + attester_address, + crate::call::Role::Attester, + &mut working_set + ) + .value, + 0 + ); + + // Check that the attestation is not part of the challengeable set + assert!( + module + .bad_transition_pool + .get(&(INIT_HEIGHT + 2), &mut working_set) + .is_none(), + "The transition should not exist in the pool" + ); + + // Bond the attester once more + module + .bond_user_helper( + BOND_AMOUNT, + &attester_address, + crate::call::Role::Attester, + &mut working_set, + ) + .unwrap(); + + // Process an attestation that has the right bonding proof and initial hash but has a faulty post transition hash. 
+ { + let attestation = Attestation { + initial_state_root: transition_1.state_root, + da_block_hash: [2; 32], + post_state_root: transition_1.state_root, + proof_of_bond: sov_rollup_interface::optimistic::ProofOfBond { + claimed_transition_num: INIT_HEIGHT + 2, + proof: transition_1.state_proof, + }, + }; + + let attestation_error = module + .process_attestation(&context, attestation, &mut working_set) + .unwrap_err(); + + assert_eq!( + attestation_error, + AttesterIncentiveErrors::UserSlashed(crate::call::SlashingReason::TransitionInvalid) + ) + } + + // Check that the attester's bond has been burnt + assert_eq!( + module + .get_bond_amount( + attester_address, + crate::call::Role::Attester, + &mut working_set + ) + .value, + 0 + ); + + // The attestation should be part of the challengeable set and its associated value should be the BOND_AMOUNT + assert_eq!( + module + .bad_transition_pool + .get(&(INIT_HEIGHT + 2), &mut working_set) + .unwrap(), + BOND_AMOUNT, + "The transition should not exist in the pool" + ); +} diff --git a/module-system/module-implementations/sov-attester-incentives/src/tests/challenger.rs b/module-system/module-implementations/sov-attester-incentives/src/tests/challenger.rs new file mode 100644 index 000000000..01e5a7303 --- /dev/null +++ b/module-system/module-implementations/sov-attester-incentives/src/tests/challenger.rs @@ -0,0 +1,344 @@ +use borsh::BorshSerialize; +use sov_modules_api::default_context::DefaultContext; +use sov_rollup_interface::mocks::{ + MockCodeCommitment, MockDaSpec, MockProof, MockValidityCond, MockValidityCondChecker, +}; +use sov_rollup_interface::zk::StateTransition; +use sov_state::{ProverStorage, WorkingSet}; + +use crate::call::{AttesterIncentiveErrors, SlashingReason}; +use crate::tests::helpers::{ + commit_get_new_working_set, execution_simulation, setup, BOND_AMOUNT, INITIAL_BOND_AMOUNT, + INIT_HEIGHT, +}; + +/// Test that given an invalid transition, a challenger can successfully challenge it and get rewarded +#[test] +fn test_valid_challenge() { + let tmpdir = tempfile::tempdir().unwrap(); + let storage = ProverStorage::with_path(tmpdir.path()).unwrap(); + let mut working_set = WorkingSet::new(storage.clone()); + let (module, token_address, attester_address, challenger_address) = setup(&mut working_set); + + let working_set = commit_get_new_working_set(&storage, working_set); + + // Simulate the execution of a chain, with the genesis hash and two transitions after. 
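The challenger tests that follow exercise every branch of `check_challenge_outputs_against_transition`: a wrong proof, a wrong DA block hash, a failing validity condition, and a wrong initial root each lead to slashing. An illustrative recap with stand-in types (the validity-condition equality check and the condition checker are collapsed into a single flag here):

```rust
/// Stand-in for the transition recorded in the bad-transition pool.
struct Recorded {
    prev_post_root: [u8; 32],
    da_block_hash: [u8; 32],
}

/// Stand-in for the public outputs extracted from a challenge proof.
struct ChallengeOutput {
    initial_state_root: [u8; 32],
    slot_hash: [u8; 32],
    condition_holds: bool,
}

/// A challenge is only rewarded if its public outputs line up with the
/// recorded transition; any mismatch slashes the challenger instead.
fn challenge_is_valid(out: &ChallengeOutput, rec: &Recorded) -> bool {
    // 1. The proof must start from the post-state root of the previous transition.
    out.initial_state_root == rec.prev_post_root
        // 2. It must refer to the same DA block as the recorded transition.
        && out.slot_hash == rec.da_block_hash
        // 3. Its validity condition must match the recorded one and satisfy the checker.
        && out.condition_holds
}
```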
+ // Update the chain_state module and the optimistic module accordingly + let (mut exec_vars, mut working_set) = + execution_simulation(3, &module, &storage, attester_address, working_set); + + let _ = exec_vars.pop().unwrap(); + let transition_1 = exec_vars.pop().unwrap(); + let initial_transition = exec_vars.pop().unwrap(); + + module + .bond_user_helper( + BOND_AMOUNT, + &challenger_address, + crate::call::Role::Challenger, + &mut working_set, + ) + .unwrap(); + + // Assert that the challenger has the correct bond amount before processing the proof + assert_eq!( + module + .get_bond_amount( + challenger_address, + crate::call::Role::Challenger, + &mut working_set + ) + .value, + BOND_AMOUNT + ); + + // Set a bad transition to get a reward from + module + .bad_transition_pool + .set(&(INIT_HEIGHT + 1), &BOND_AMOUNT, &mut working_set); + + // Process a correct challenge + let context = DefaultContext { + sender: challenger_address, + }; + + { + let transition = StateTransition { + initial_state_root: initial_transition.state_root, + slot_hash: [1; 32], + final_state_root: transition_1.state_root, + rewarded_address: challenger_address, + validity_condition: MockValidityCond { is_valid: true }, + }; + + let serialized_transition = transition.try_to_vec().unwrap(); + + let commitment = module + .commitment_to_allowed_challenge_method + .get(&mut working_set) + .expect("Should be set at genesis") + .commitment; + + let proof = &MockProof { + program_id: commitment, + is_valid: true, + log: serialized_transition.as_slice(), + } + .encode_to_vec(); + + module + .process_challenge( + &context, + proof.as_slice(), + &(INIT_HEIGHT + 1), + &mut working_set, + ) + .expect("Should not fail"); + + // Check that the challenger was rewarded + assert_eq!( + module + .bank + .get_balance_of(challenger_address, token_address, &mut working_set) + .unwrap(), + INITIAL_BOND_AMOUNT - BOND_AMOUNT + BOND_AMOUNT / 2, + "The challenger should have been rewarded" + ); + + // Check that the challenge set is empty + assert_eq!( + module + .bad_transition_pool + .get(&(INIT_HEIGHT + 1), &mut working_set), + None, + "The transition should have disappeared" + ) + } + + { + // Now try to unbond the challenger + module + .unbond_challenger(&context, &mut working_set) + .expect("The challenger should be able to unbond"); + + // Check the final balance of the challenger + assert_eq!( + module + .bank + .get_balance_of(challenger_address, token_address, &mut working_set) + .unwrap(), + INITIAL_BOND_AMOUNT + BOND_AMOUNT / 2, + "The challenger should have been unbonded" + ) + } +} + +fn invalid_proof_helper( + context: &DefaultContext, + proof: &Vec, + reason: SlashingReason, + challenger_address: sov_modules_api::Address, + module: &crate::AttesterIncentives< + DefaultContext, + sov_rollup_interface::mocks::MockZkvm, + MockDaSpec, + MockValidityCondChecker, + >, + working_set: &mut WorkingSet>, +) { + // Let's bond the challenger and try to publish a false challenge + module + .bond_user_helper( + BOND_AMOUNT, + &challenger_address, + crate::call::Role::Challenger, + working_set, + ) + .expect("Should be able to bond"); + + let err = module + .process_challenge(context, proof.as_slice(), &(INIT_HEIGHT + 1), working_set) + .unwrap_err(); + + // Check the error raised + assert_eq!( + err, + AttesterIncentiveErrors::UserSlashed(reason), + "The challenge processing should fail with an invalid proof error" + ) +} + +#[test] +fn test_invalid_challenge() { + let tmpdir = tempfile::tempdir().unwrap(); + let storage = 
ProverStorage::with_path(tmpdir.path()).unwrap(); + let mut working_set = WorkingSet::new(storage.clone()); + let (module, _token_address, attester_address, challenger_address) = setup(&mut working_set); + + let working_set = commit_get_new_working_set(&storage, working_set); + + // Simulate the execution of a chain, with the genesis hash and two transitions after. + // Update the chain_state module and the optimistic module accordingly + let (mut exec_vars, mut working_set) = + execution_simulation(3, &module, &storage, attester_address, working_set); + + let _ = exec_vars.pop().unwrap(); + let transition_1 = exec_vars.pop().unwrap(); + let initial_transition = exec_vars.pop().unwrap(); + + // Set a bad transition to get a reward from + module + .bad_transition_pool + .set(&(INIT_HEIGHT + 1), &BOND_AMOUNT, &mut working_set); + + // Process a correct challenge but without a bonded attester + let context = DefaultContext { + sender: challenger_address, + }; + + let transition = StateTransition { + initial_state_root: initial_transition.state_root, + slot_hash: [1; 32], + final_state_root: transition_1.state_root, + rewarded_address: challenger_address, + validity_condition: MockValidityCond { is_valid: true }, + }; + + let serialized_transition = transition.try_to_vec().unwrap(); + + let commitment = module + .commitment_to_allowed_challenge_method + .get(&mut working_set) + .expect("Should be set at genesis") + .commitment; + + { + // A valid proof + let proof = &MockProof { + program_id: commitment.clone(), + is_valid: true, + log: serialized_transition.as_slice(), + } + .encode_to_vec(); + + let err = module + .process_challenge( + &context, + proof.as_slice(), + &(INIT_HEIGHT + 1), + &mut working_set, + ) + .unwrap_err(); + + // Check the error raised + assert_eq!( + err, + AttesterIncentiveErrors::UserNotBonded, + "The challenge processing should fail with an unbonded error" + ) + } + + // Invalid proofs + { + // An invalid proof + let proof = &MockProof { + program_id: commitment.clone(), + is_valid: false, + log: serialized_transition.as_slice(), + } + .encode_to_vec(); + + invalid_proof_helper( + &context, + proof, + SlashingReason::InvalidProofOutputs, + challenger_address, + &module, + &mut working_set, + ); + + // Bad slot hash + let bad_transition = StateTransition { + initial_state_root: initial_transition.state_root, + slot_hash: [2; 32], + final_state_root: transition_1.state_root, + rewarded_address: challenger_address, + validity_condition: MockValidityCond { is_valid: true }, + } + .try_to_vec() + .unwrap(); + + // An invalid proof + let proof = &MockProof { + program_id: commitment, + is_valid: true, + log: bad_transition.as_slice(), + } + .encode_to_vec(); + + invalid_proof_helper( + &context, + proof, + SlashingReason::TransitionInvalid, + challenger_address, + &module, + &mut working_set, + ); + + // Bad validity condition + let bad_transition = StateTransition { + initial_state_root: initial_transition.state_root, + slot_hash: [1; 32], + final_state_root: transition_1.state_root, + rewarded_address: challenger_address, + validity_condition: MockValidityCond { is_valid: false }, + } + .try_to_vec() + .unwrap(); + + // An invalid proof + let proof = &MockProof { + program_id: MockCodeCommitment([0; 32]), + is_valid: true, + log: bad_transition.as_slice(), + } + .encode_to_vec(); + + invalid_proof_helper( + &context, + proof, + SlashingReason::TransitionInvalid, + challenger_address, + &module, + &mut working_set, + ); + + // Bad initial root + let bad_transition = 
StateTransition { + initial_state_root: transition_1.state_root, + slot_hash: [1; 32], + final_state_root: transition_1.state_root, + rewarded_address: challenger_address, + validity_condition: MockValidityCond { is_valid: true }, + } + .try_to_vec() + .unwrap(); + + // An invalid proof + let proof = &MockProof { + program_id: MockCodeCommitment([0; 32]), + is_valid: true, + log: bad_transition.as_slice(), + } + .encode_to_vec(); + + invalid_proof_helper( + &context, + proof, + SlashingReason::InvalidInitialHash, + challenger_address, + &module, + &mut working_set, + ); + } +} diff --git a/module-system/module-implementations/sov-attester-incentives/src/tests/helpers.rs b/module-system/module-implementations/sov-attester-incentives/src/tests/helpers.rs new file mode 100644 index 000000000..c36ae0bf8 --- /dev/null +++ b/module-system/module-implementations/sov-attester-incentives/src/tests/helpers.rs @@ -0,0 +1,183 @@ +use jmt::proof::SparseMerkleProof; +use sov_bank::{BankConfig, TokenConfig}; +use sov_modules_api::default_context::DefaultContext; +use sov_modules_api::hooks::SlotHooks; +use sov_modules_api::utils::generate_address; +use sov_modules_api::{Address, Genesis, Spec}; +use sov_rollup_interface::mocks::{ + MockBlock, MockBlockHeader, MockCodeCommitment, MockDaSpec, MockHash, MockValidityCond, + MockValidityCondChecker, MockZkvm, +}; +use sov_rollup_interface::zk::ValidityConditionChecker; +use sov_state::storage::StorageProof; +use sov_state::{DefaultStorageSpec, ProverStorage, Storage, WorkingSet}; + +use crate::AttesterIncentives; + +type C = DefaultContext; + +pub const TOKEN_NAME: &str = "TEST_TOKEN"; +pub const BOND_AMOUNT: u64 = 1000; +pub const INITIAL_BOND_AMOUNT: u64 = 5 * BOND_AMOUNT; +pub const SALT: u64 = 5; +pub const DEFAULT_ROLLUP_FINALITY: u64 = 3; +pub const INIT_HEIGHT: u64 = 0; + +/// Consumes and commit the existing working set on the underlying storage +/// `storage` must be the underlying storage defined on the working set for this method to work. +pub(crate) fn commit_get_new_working_set( + storage: &ProverStorage, + working_set: WorkingSet<::Storage>, +) -> WorkingSet<::Storage> { + let (reads_writes, witness) = working_set.checkpoint().freeze(); + + storage + .validate_and_commit(reads_writes, &witness) + .expect("Should be able to commit"); + + WorkingSet::new(storage.clone()) +} + +pub(crate) fn create_bank_config_with_token( + token_name: String, + salt: u64, + addresses_count: usize, + initial_balance: u64, +) -> (BankConfig, Vec
) { + let address_and_balances: Vec<(Address, u64)> = (0..addresses_count) + .map(|i| { + let key = format!("key_{}", i); + let addr = generate_address::(&key); + (addr, initial_balance) + }) + .collect(); + + let token_config = TokenConfig { + token_name, + address_and_balances: address_and_balances.clone(), + authorized_minters: vec![address_and_balances.first().unwrap().0], + salt, + }; + + ( + BankConfig { + tokens: vec![token_config], + }, + address_and_balances + .into_iter() + .map(|(addr, _)| addr) + .collect(), + ) +} + +/// Creates a bank config with a token, and a prover incentives module. +/// Returns the prover incentives module and the attester and challenger's addresses. +pub(crate) fn setup( + working_set: &mut WorkingSet<::Storage>, +) -> ( + AttesterIncentives>, + Address, + Address, + Address, +) { + // Initialize bank + let (bank_config, mut addresses) = + create_bank_config_with_token(TOKEN_NAME.to_string(), SALT, 3, INITIAL_BOND_AMOUNT); + let bank = sov_bank::Bank::::default(); + bank.genesis(&bank_config, working_set) + .expect("bank genesis must succeed"); + + let attester_address = addresses.pop().unwrap(); + let challenger_address = addresses.pop().unwrap(); + let reward_supply = addresses.pop().unwrap(); + + let token_address = sov_bank::get_genesis_token_address::(TOKEN_NAME, SALT); + + // Initialize chain state + let chain_state_config = sov_chain_state::ChainStateConfig { + initial_slot_height: INIT_HEIGHT, + }; + + let chain_state = sov_chain_state::ChainState::::default(); + chain_state + .genesis(&chain_state_config, working_set) + .expect("Chain state genesis must succeed"); + + // initialize prover incentives + let module = AttesterIncentives::< + C, + MockZkvm, + MockDaSpec, + MockValidityCondChecker, + >::default(); + let config = crate::AttesterIncentivesConfig { + bonding_token_address: token_address, + reward_token_supply_address: reward_supply, + minimum_attester_bond: BOND_AMOUNT, + minimum_challenger_bond: BOND_AMOUNT, + commitment_to_allowed_challenge_method: MockCodeCommitment([0u8; 32]), + initial_attesters: vec![(attester_address, BOND_AMOUNT)], + rollup_finality_period: DEFAULT_ROLLUP_FINALITY, + maximum_attested_height: INIT_HEIGHT, + light_client_finalized_height: INIT_HEIGHT, + validity_condition_checker: MockValidityCondChecker::::new(), + phantom_data: Default::default(), + }; + + module + .genesis(&config, working_set) + .expect("prover incentives genesis must succeed"); + + (module, token_address, attester_address, challenger_address) +} + +pub(crate) struct ExecutionSimulationVars { + pub state_root: [u8; 32], + pub state_proof: StorageProof::Hasher>>, +} + +/// Generate an execution simulation for a given number of rounds. 
Returns a list of the successive state roots +/// with associated bonding proofs, as long as the last state root +pub(crate) fn execution_simulation>( + rounds: u8, + module: &AttesterIncentives, + storage: &ProverStorage, + attester_address: ::Address, + mut working_set: WorkingSet<::Storage>, +) -> ( + // Vector of the successive state roots with associated bonding proofs + Vec, + WorkingSet<::Storage>, +) { + let mut ret_exec_vars = Vec::::new(); + + for i in 0..rounds { + // Commit the working set + working_set = commit_get_new_working_set(storage, working_set); + + ret_exec_vars.push(ExecutionSimulationVars { + state_root: storage.get_state_root(&Default::default()).unwrap(), + state_proof: module.get_bond_proof( + attester_address, + &Default::default(), + &mut working_set, + ), + }); + + // Then process the first transaction. Only sets the genesis hash and a transition in progress. + let slot_data = MockBlock { + curr_hash: [i + 1; 32], + header: MockBlockHeader { + prev_hash: MockHash([i; 32]), + }, + height: INIT_HEIGHT + u64::from(i + 1), + validity_cond: MockValidityCond { is_valid: true }, + blobs: Default::default(), + }; + module + .chain_state + .begin_slot_hook(&slot_data, &mut working_set); + } + + (ret_exec_vars, working_set) +} diff --git a/module-system/module-implementations/sov-attester-incentives/src/tests/invariant.rs b/module-system/module-implementations/sov-attester-incentives/src/tests/invariant.rs new file mode 100644 index 000000000..2a0ed75a3 --- /dev/null +++ b/module-system/module-implementations/sov-attester-incentives/src/tests/invariant.rs @@ -0,0 +1,218 @@ +use sov_modules_api::default_context::DefaultContext; +use sov_rollup_interface::optimistic::Attestation; +use sov_state::{ProverStorage, WorkingSet}; + +use crate::call::AttesterIncentiveErrors; +use crate::tests::helpers::{ + execution_simulation, setup, BOND_AMOUNT, DEFAULT_ROLLUP_FINALITY, INIT_HEIGHT, +}; + +// Test the transition invariant +#[test] +fn test_transition_invariant() { + let tmpdir = tempfile::tempdir().unwrap(); + let storage = ProverStorage::with_path(tmpdir.path()).unwrap(); + let mut working_set = WorkingSet::new(storage.clone()); + let (module, _token_address, attester_address, _) = setup(&mut working_set); + + // Assert that the attester has the correct bond amount before processing the proof + assert_eq!( + module + .get_bond_amount( + attester_address, + crate::call::Role::Attester, + &mut working_set + ) + .value, + BOND_AMOUNT + ); + + // Simulate the execution of a chain, with the genesis hash and two transitions after. 
+ // Update the chain_state module and the optimistic module accordingly + let (exec_vars, mut working_set) = + execution_simulation(20, &module, &storage, attester_address, working_set); + + let context = DefaultContext { + sender: attester_address, + }; + + const NEW_LIGHT_CLIENT_FINALIZED_HEIGHT: u64 = DEFAULT_ROLLUP_FINALITY + INIT_HEIGHT + 1; + + // Update the finalized height and try to prove the INIT_HEIGHT: should fail + module + .light_client_finalized_height + .set(&NEW_LIGHT_CLIENT_FINALIZED_HEIGHT, &mut working_set); + + // Update the initial height + module + .maximum_attested_height + .set(&NEW_LIGHT_CLIENT_FINALIZED_HEIGHT, &mut working_set); + + // Process a valid attestation for the first transition *should fail* + { + let init_height_usize = usize::try_from(INIT_HEIGHT).unwrap(); + let attestation = Attestation { + initial_state_root: exec_vars[init_height_usize].state_root, + da_block_hash: [(init_height_usize + 1).try_into().unwrap(); 32], + post_state_root: exec_vars[init_height_usize + 1].state_root, + proof_of_bond: sov_rollup_interface::optimistic::ProofOfBond { + claimed_transition_num: INIT_HEIGHT + 1, + proof: exec_vars[init_height_usize].state_proof.clone(), + }, + }; + + let err = module + .process_attestation(&context, attestation, &mut working_set) + .unwrap_err(); + + assert_eq!( + err, + AttesterIncentiveErrors::InvalidTransitionInvariant, + "Incorrect error raised" + ); + + // The attester should not be slashed + assert_eq!( + module + .get_bond_amount( + attester_address, + crate::call::Role::Attester, + &mut working_set + ) + .value, + BOND_AMOUNT + ); + } + + let new_height = usize::try_from(NEW_LIGHT_CLIENT_FINALIZED_HEIGHT).unwrap(); + + // The attester should be able to process multiple attestations with the same bonding proof + for i in 0..usize::try_from(DEFAULT_ROLLUP_FINALITY + 1).unwrap() { + let old_attestation = Attestation { + initial_state_root: exec_vars[new_height - 1].state_root, + da_block_hash: [(new_height).try_into().unwrap(); 32], + post_state_root: exec_vars[new_height].state_root, + proof_of_bond: sov_rollup_interface::optimistic::ProofOfBond { + claimed_transition_num: new_height.try_into().unwrap(), + proof: exec_vars[new_height - 1].state_proof.clone(), + }, + }; + + let new_attestation = Attestation { + initial_state_root: exec_vars[new_height + i - 1].state_root, + da_block_hash: [(new_height + i).try_into().unwrap(); 32], + post_state_root: exec_vars[new_height + i].state_root, + proof_of_bond: sov_rollup_interface::optimistic::ProofOfBond { + claimed_transition_num: (new_height + i).try_into().unwrap(), + proof: exec_vars[new_height + i - 1].state_proof.clone(), + }, + }; + + // Testing the transition invariant + // We suppose that these values are always defined, otherwise we panic + let last_height_attested = module + .maximum_attested_height + .get(&mut working_set) + .expect("The maximum attested height should be set at genesis"); + + // Update the max_attested_height in case the blocks have already been finalized + let new_height_to_attest = last_height_attested + 1; + + let min_height = new_height_to_attest.saturating_sub(DEFAULT_ROLLUP_FINALITY); + + // We have to check the following order invariant is respected: + // min_height <= bonding_proof.transition_num <= new_height_to_attest + // If this invariant is respected, we can be sure that the attester was bonded at new_height_to_attest. 
+ let transition_num = old_attestation.proof_of_bond.claimed_transition_num; + + assert!( + min_height <= transition_num, + "The transition number {transition_num} should be above the minimum height {min_height}" + ); + + assert!( + transition_num <= new_height_to_attest, + "The transition number {transition_num} should be below the new max attested height {new_height_to_attest}" + ); + + module + .process_attestation(&context, old_attestation, &mut working_set) + .expect("Should succeed"); + + module + .process_attestation(&context, new_attestation, &mut working_set) + .expect("Should succeed"); + } + + let finality_usize = usize::try_from(DEFAULT_ROLLUP_FINALITY).unwrap(); + + // Now the transition invariant is no longer respected: the transition number is below the minimum height or above the max height + let old_attestation = Attestation { + initial_state_root: exec_vars[new_height].state_root, + da_block_hash: [(new_height + finality_usize + 1).try_into().unwrap(); 32], + post_state_root: exec_vars[new_height + 1].state_root, + proof_of_bond: sov_rollup_interface::optimistic::ProofOfBond { + claimed_transition_num: new_height.try_into().unwrap(), + proof: exec_vars[new_height - 1].state_proof.clone(), + }, + }; + + // Testing the transition invariant + // We suppose that these values are always defined, otherwise we panic + let last_height_attested = module + .maximum_attested_height + .get(&mut working_set) + .expect("The maximum attested height should be set at genesis"); + + // Update the max_attested_height in case the blocks have already been finalized + let new_height_to_attest = last_height_attested + 1; + + let min_height = new_height_to_attest.saturating_sub(DEFAULT_ROLLUP_FINALITY); + + let transition_num = old_attestation.proof_of_bond.claimed_transition_num; + + assert!( + min_height > transition_num, + "The transition number {transition_num} should now be below the minimum height {min_height}" + ); + + let err = module + .process_attestation(&context, old_attestation, &mut working_set) + .unwrap_err(); + + assert_eq!( + err, + AttesterIncentiveErrors::InvalidTransitionInvariant, + "The transition invariant is not respected anymore" + ); + + // Now we do the same, except that the proof of bond refers to a transition above the transition to prove + let attestation = Attestation { + initial_state_root: exec_vars[new_height + finality_usize].state_root, + da_block_hash: [(new_height + finality_usize + 1).try_into().unwrap(); 32], + post_state_root: exec_vars[new_height + finality_usize + 1].state_root, + proof_of_bond: sov_rollup_interface::optimistic::ProofOfBond { + claimed_transition_num: (new_height + finality_usize + 2).try_into().unwrap(), + proof: exec_vars[new_height + finality_usize + 1] + .state_proof + .clone(), + }, + }; + + let transition_num = attestation.proof_of_bond.claimed_transition_num; + + assert!( + transition_num > new_height_to_attest, + "The transition number {transition_num} should now be below the new height to attest {new_height_to_attest}" + ); + + let err = module + .process_attestation(&context, attestation, &mut working_set) + .unwrap_err(); + + assert_eq!( + err, + AttesterIncentiveErrors::InvalidTransitionInvariant, + "The transition invariant is not respected anymore" + ); +} diff --git a/module-system/module-implementations/sov-attester-incentives/src/tests/mod.rs b/module-system/module-implementations/sov-attester-incentives/src/tests/mod.rs new file mode 100644 index 000000000..a0a9435eb --- /dev/null +++ 
b/module-system/module-implementations/sov-attester-incentives/src/tests/mod.rs @@ -0,0 +1,6 @@ +pub(crate) mod helpers; + +mod attestation_proccessing; +mod challenger; +mod invariant; +mod unbonding; diff --git a/module-system/module-implementations/sov-attester-incentives/src/tests/unbonding.rs b/module-system/module-implementations/sov-attester-incentives/src/tests/unbonding.rs new file mode 100644 index 000000000..c5c7c13d2 --- /dev/null +++ b/module-system/module-implementations/sov-attester-incentives/src/tests/unbonding.rs @@ -0,0 +1,165 @@ +use sov_modules_api::default_context::DefaultContext; +use sov_rollup_interface::optimistic::Attestation; +use sov_state::{ProverStorage, WorkingSet}; + +use crate::call::AttesterIncentiveErrors; +use crate::tests::helpers::{ + execution_simulation, setup, BOND_AMOUNT, DEFAULT_ROLLUP_FINALITY, INIT_HEIGHT, +}; + +#[test] +fn test_two_phase_unbonding() { + let tmpdir = tempfile::tempdir().unwrap(); + let storage = ProverStorage::with_path(tmpdir.path()).unwrap(); + let mut working_set = WorkingSet::new(storage.clone()); + let (module, token_address, attester_address, _) = setup(&mut working_set); + + // Assert that the attester has the correct bond amount before processing the proof + assert_eq!( + module + .get_bond_amount( + attester_address, + crate::call::Role::Attester, + &mut working_set + ) + .value, + BOND_AMOUNT + ); + + let context = DefaultContext { + sender: attester_address, + }; + + // Try to skip the first phase of the two phase unbonding. Should fail + { + // Should fail + let err = module + .end_unbond_attester(&context, &mut working_set) + .unwrap_err(); + assert_eq!(err, AttesterIncentiveErrors::AttesterIsNotUnbonding); + } + + // Simulate the execution of a chain, with the genesis hash and two transitions after. + // Update the chain_state module and the optimistic module accordingly + let (mut exec_vars, mut working_set) = + execution_simulation(3, &module, &storage, attester_address, working_set); + + // Start unbonding and then try to prove a transition. User slashed + module + .begin_unbond_attester(&context, &mut working_set) + .expect("Should succeed"); + + let _transition_2 = exec_vars.pop().unwrap(); + let transition_1 = exec_vars.pop().unwrap(); + let initial_transition = exec_vars.pop().unwrap(); + + // Process a valid attestation but get slashed because the attester was trying to unbond. 
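The remainder of this test walks the same gate that `begin_unbond_attester` and `end_unbond_attester` implement: funds only become withdrawable once the light-client finalized height has caught up with the unbonding height plus the finality period. A toy model of that gate with illustrative names (the real module stores an `UnbondingInfo` record and reads both heights from state):

```rust
/// Toy model of the two-phase unbonding gate.
enum AttesterStatus {
    Bonded { amount: u64 },
    Unbonding { amount: u64, initiated_at: u64 },
}

/// Returns the withdrawable amount once the first phase is finalized.
fn try_end_unbonding(
    status: &AttesterStatus,
    light_client_finalized_height: u64,
    finality_period: u64,
) -> Result<u64, &'static str> {
    match status {
        // Phase two only succeeds once the unbonding transaction itself is finalized.
        AttesterStatus::Unbonding { amount, initiated_at }
            if initiated_at + finality_period <= light_client_finalized_height =>
        {
            Ok(*amount)
        }
        AttesterStatus::Unbonding { .. } => Err("unbonding not finalized"),
        AttesterStatus::Bonded { .. } => Err("attester is not unbonding"),
    }
}
```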
+ { + let attestation = Attestation { + initial_state_root: initial_transition.state_root, + da_block_hash: [1; 32], + post_state_root: transition_1.state_root, + proof_of_bond: sov_rollup_interface::optimistic::ProofOfBond { + claimed_transition_num: INIT_HEIGHT + 1, + proof: initial_transition.state_proof, + }, + }; + + let err = module + .process_attestation(&context, attestation, &mut working_set) + .unwrap_err(); + + assert_eq!( + err, + AttesterIncentiveErrors::UserNotBonded, + "The attester should not be bonded" + ); + + // We cannot try to bond either + let err = module + .bond_user_helper( + BOND_AMOUNT, + &attester_address, + crate::call::Role::Attester, + &mut working_set, + ) + .unwrap_err(); + + assert_eq!( + err, + AttesterIncentiveErrors::AttesterIsUnbonding, + "Should raise an AttesterIsUnbonding error" + ); + } + + // Cannot bond again while unbonding + { + let err = module + .bond_user_helper( + BOND_AMOUNT, + &attester_address, + crate::call::Role::Attester, + &mut working_set, + ) + .unwrap_err(); + + assert_eq!( + err, + AttesterIncentiveErrors::AttesterIsUnbonding, + "Should raise that error" + ); + } + + // Now try to complete the two phase unbonding immediately: the second phase should fail because the + // first phase cannot get finalized + { + // Should fail + let err = module + .end_unbond_attester(&context, &mut working_set) + .unwrap_err(); + assert_eq!(err, AttesterIncentiveErrors::UnbondingNotFinalized); + } + + // Now unbond the right way. + { + let initial_account_balance = module + .bank + .get_balance_of(attester_address, token_address, &mut working_set) + .unwrap(); + + // Start unbonding the user: should succeed + module + .begin_unbond_attester(&context, &mut working_set) + .unwrap(); + + let unbonding_info = module + .unbonding_attesters + .get(&attester_address, &mut working_set) + .unwrap(); + + assert_eq!( + unbonding_info.unbonding_initiated_height, INIT_HEIGHT, + "Invalid beginning unbonding height" + ); + + // Wait for the light client to finalize + module + .light_client_finalized_height + .set(&(INIT_HEIGHT + DEFAULT_ROLLUP_FINALITY), &mut working_set); + + // Finish the unbonding: should succeed + module + .end_unbond_attester(&context, &mut working_set) + .unwrap(); + + // Check that the final balance is the same as the initial balance + assert_eq!( + initial_account_balance + BOND_AMOUNT, + module + .bank + .get_balance_of(attester_address, token_address, &mut working_set) + .unwrap(), + "The initial and final account balance don't match" + ); + } +} diff --git a/module-system/module-implementations/sov-bank/Cargo.toml b/module-system/module-implementations/sov-bank/Cargo.toml index 1b0226b6b..4d5a71fd2 100644 --- a/module-system/module-implementations/sov-bank/Cargo.toml +++ b/module-system/module-implementations/sov-bank/Cargo.toml @@ -19,19 +19,18 @@ jsonrpsee = { workspace = true, features = ["macros", "client-core", "server"], schemars = { workspace = true, optional = true } serde = { workspace = true, optional = true } serde_json = { workspace = true, optional = true } - -sov-modules-api = { path = "../../sov-modules-api", version = "0.1", default-features = false, features = ["macros"] } -sov-state = { path = "../../sov-state", version = "0.1", default-features = false } -sov-rollup-interface = { path = "../../../rollup-interface", version = "0.1" } thiserror = { workspace = true } hex = { workspace = true } -[dev-dependencies] sov-modules-api = { path = "../../sov-modules-api", version = "0.1" } +sov-state = { path = 
"../../sov-state", version = "0.1" } +sov-rollup-interface = { path = "../../../rollup-interface", version = "0.1" } + + +[dev-dependencies] tempfile = { workspace = true } [features] -default = ["native"] -serde = ["dep:serde", "dep:serde_json"] -native = ["serde", "sov-state/native", "dep:jsonrpsee", "sov-modules-api/native", "dep:clap", "dep:schemars"] +default = [] +native = ["serde", "serde_json", "jsonrpsee", "clap", "schemars", "sov-state/native", "sov-modules-api/native", ] cli = ["native"] diff --git a/module-system/module-implementations/sov-bank/src/call.rs b/module-system/module-implementations/sov-bank/src/call.rs index f0f96d800..fca0b8f32 100644 --- a/module-system/module-implementations/sov-bank/src/call.rs +++ b/module-system/module-implementations/sov-bank/src/call.rs @@ -53,7 +53,7 @@ pub enum CallMessage { minter_address: C::Address, }, - /// Freeze a token so that the supply is frozen + /// Freezes a token so that the supply is frozen Freeze { /// Address of the token to be frozen token_address: C::Address, @@ -61,8 +61,10 @@ pub enum CallMessage { } impl Bank { + /// Creates a token from a set of configuration parameters. + /// Checks if a token already exists at that address. If so return an error. #[allow(clippy::too_many_arguments)] - pub(crate) fn create_token( + pub fn create_token( &self, token_name: String, salt: u64, @@ -71,7 +73,7 @@ impl Bank { authorized_minters: Vec, context: &C, working_set: &mut WorkingSet, - ) -> Result { + ) -> Result { let (token_address, token) = Token::::create( &token_name, &[(minter_address, initial_balance)], @@ -91,9 +93,11 @@ impl Bank { } self.tokens.set(&token_address, &token, working_set); - Ok(CallResponse::default()) + Ok(token_address) } + /// Transfers the set of `coins` to the address specified by `to`. + /// Helper function that calls the [`transfer_from`] method from the bank module pub fn transfer( &self, to: C::Address, @@ -104,45 +108,68 @@ impl Bank { self.transfer_from(context.sender(), &to, coins, working_set) } - pub(crate) fn burn( + /// Burns the set of `coins`. If there is no token at the address specified in the + /// `Coins` structure, return an error. + /// Calls the [`Token::burn`] function and updates the total supply of tokens. + pub fn burn( &self, coins: Coins, - context: &C, + owner: &C::Address, working_set: &mut WorkingSet, - ) -> Result { - let context_logger = || { - format!( - "Failed burn coins({}) by sender {}", - coins, - context.sender() - ) - }; + ) -> Result<()> { + let context_logger = || format!("Failed to burn coins({}) from owner {}", coins, owner,); let mut token = self .tokens .get_or_err(&coins.token_address, working_set) .with_context(context_logger)?; token - .burn(context.sender(), coins.amount, working_set) + .burn(owner, coins.amount, working_set) .with_context(context_logger)?; token.total_supply -= coins.amount; self.tokens.set(&coins.token_address, &token, working_set); - Ok(CallResponse::default()) + Ok(()) } - pub(crate) fn mint( + /// Burns coins from an externally owned address ("EOA") + pub(crate) fn burn_from_eoa( &self, coins: Coins, - minter_address: C::Address, context: &C, working_set: &mut WorkingSet, ) -> Result { + self.burn(coins, context.sender(), working_set)?; + Ok(CallResponse::default()) + } + + /// Mints the `coins`to the address `mint_to_address` using the externally owned account ("EOA") supplied by + /// `context.sender()` as the authorizer. 
+ /// Returns an error if the token address doesn't exist or `context.sender()` is not authorized to mint tokens. + /// Calls the [`Token::mint`] function and updates the `self.tokens` set to store the new balance. + pub fn mint_from_eoa( + &self, + coins: &Coins, + mint_to_address: &C::Address, + context: &C, + working_set: &mut WorkingSet, + ) -> Result<()> { + self.mint(coins, mint_to_address, context.sender(), working_set) + } + + /// Mints the `coins` to the address `mint_to_address` if `authorizer` is an allowed minter. + /// Returns an error if the token address doesn't exist or `authorizer` is not authorized to mint tokens. + /// Calls the [`Token::mint`] function and updates the `self.tokens` set to store the new balance. + pub fn mint( + &self, + coins: &Coins, + mint_to_address: &C::Address, + authorizer: &C::Address, + working_set: &mut WorkingSet, + ) -> Result<()> { let context_logger = || { format!( - "Failed mint coins({}) to {} by minter {}", - coins, - minter_address, - context.sender() + "Failed mint coins({}) to {} by authorizer {}", + coins, mint_to_address, authorizer ) }; let mut token = self @@ -150,13 +177,16 @@ impl Bank { .get_or_err(&coins.token_address, working_set) .with_context(context_logger)?; token - .mint(context.sender(), &minter_address, coins.amount, working_set) + .mint(authorizer, mint_to_address, coins.amount, working_set) .with_context(context_logger)?; self.tokens.set(&coins.token_address, &token, working_set); - Ok(CallResponse::default()) + Ok(()) } + /// Tries to freeze the token address `token_address`. + /// Returns an error if the token address doesn't exist, + /// otherwise calls the [`Token::freeze`] function and updates the token set upon success. pub(crate) fn freeze( &self, token_address: C::Address, @@ -184,6 +214,8 @@ impl Bank { } impl Bank { + /// Transfers the set of `coins` from the address `from` to the address `to`. + /// Returns an error if the token address doesn't exist. Otherwise, calls the [`Token::transfer`] function. pub fn transfer_from( &self, from: &C::Address, @@ -206,8 +238,24 @@ impl Bank { .with_context(context_logger)?; Ok(CallResponse::default()) } + + /// Helper function used by the rpc method [`balance_of`] to return the balance of the token stored at `token_address` + /// for the user having the address `user_address` from the underlying storage. If the token address doesn't exist, or + /// if the user doesn't have tokens of that type, returns `None`. Otherwise, wraps the resulting balance in `Some`. + pub fn get_balance_of( + &self, + user_address: C::Address, + token_address: C::Address, + working_set: &mut WorkingSet, + ) -> Option { + self.tokens + .get(&token_address, working_set) + .and_then(|token| token.balances.get(&user_address, working_set)) + } } +/// Creates a new prefix from an already existing prefix `parent_prefix` and a `token_address` +/// by extending the parent prefix. pub(crate) fn prefix_from_address_with_parent( parent_prefix: &sov_state::Prefix, token_address: &C::Address, diff --git a/module-system/module-implementations/sov-bank/src/genesis.rs b/module-system/module-implementations/sov-bank/src/genesis.rs index 8d8b8a092..a214670d2 100644 --- a/module-system/module-implementations/sov-bank/src/genesis.rs +++ b/module-system/module-implementations/sov-bank/src/genesis.rs @@ -4,9 +4,13 @@ use sov_state::WorkingSet; use crate::token::Token; use crate::Bank; +/// The address of the deployment node. 
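The call-path refactor above makes `create_token`, `mint`, `burn`, and `get_balance_of` public methods that other modules can drive directly, with `create_token` now returning the token address instead of a bare `CallResponse`. The sketch below shows how such a module-to-module call might look. It is illustrative only: the rendered diff strips generic parameters, so the `C: Context` bound, `WorkingSet<C::Storage>` type, the `Result<C::Address>` return of `create_token`, and the `treasury` address are assumptions, not code from this PR.

```rust
// Illustrative sketch of driving the now-public Bank API from another
// module's logic. Bounds and concrete types are assumed (see lead-in).
use anyhow::Result;
use sov_bank::{Bank, Coins};
use sov_modules_api::Context;
use sov_state::WorkingSet;

fn issue_then_burn<C: Context>(
    bank: &Bank<C>,
    treasury: C::Address, // hypothetical address controlled by the caller
    context: &C,
    working_set: &mut WorkingSet<C::Storage>,
) -> Result<()> {
    // `create_token` now hands back the token address, so the caller can
    // keep referring to the token it just created.
    let token_address = bank.create_token(
        "example-token".to_string(),
        0,                      // salt
        1_000,                  // initial balance credited to the minter
        treasury.clone(),       // minter_address
        vec![treasury.clone()], // authorized_minters
        context,
        working_set,
    )?;

    // `mint` takes an explicit `authorizer`, so it no longer needs a `Context`.
    bank.mint(
        &Coins { amount: 50, token_address: token_address.clone() },
        &treasury, // mint_to_address
        &treasury, // authorizer (must be in authorized_minters)
        working_set,
    )?;

    // `burn` likewise takes the owner explicitly instead of `context.sender()`.
    bank.burn(Coins { amount: 50, token_address }, &treasury, working_set)?;
    Ok(())
}
```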
For now, set to [0; 32] pub(crate) const DEPLOYER: [u8; 32] = [0; 32]; impl Bank { + /// Init an instance of the bank module from the configuration `config`. + /// For each token in the `config`, calls the [`Token::create`] function to create + /// the token. Upon success, updates the token set if the token address doesn't already exist. pub(crate) fn init_module( &self, config: &::Config, diff --git a/module-system/module-implementations/sov-bank/src/lib.rs b/module-system/module-implementations/sov-bank/src/lib.rs index a90d052b9..6d81a201d 100644 --- a/module-system/module-implementations/sov-bank/src/lib.rs +++ b/module-system/module-implementations/sov-bank/src/lib.rs @@ -1,28 +1,38 @@ +#![deny(missing_docs)] +#![doc = include_str!("../README.md")] mod call; mod genesis; #[cfg(feature = "native")] -mod query; +pub mod query; mod token; mod utils; +/// Specifies the call methods using in that module. pub use call::CallMessage; -#[cfg(feature = "native")] -pub use query::{BalanceResponse, BankRpcImpl, BankRpcServer, TotalSupplyResponse}; -use sov_modules_api::{Error, ModuleInfo}; +use sov_modules_api::{CallResponse, Error, ModuleInfo}; use sov_state::WorkingSet; use token::Token; +/// Specifies an interface to interact with tokens. pub use token::{Amount, Coins}; +/// Methods to get a token address. pub use utils::{get_genesis_token_address, get_token_address}; +/// [`TokenConfig`] specifies a configuration used when generating a token for the bank +/// module. pub struct TokenConfig { + /// The name of the token. pub token_name: String, + /// A vector of tuples containing the initial addresses and balances (as u64) pub address_and_balances: Vec<(C::Address, u64)>, + /// The addresses that are authorized to mint the token. pub authorized_minters: Vec, + /// A salt used to encrypt the token address. pub salt: u64, } /// Initial configuration for sov-bank module. pub struct BankConfig { + /// A list of configurations for the initial tokens. pub tokens: Vec>, } @@ -70,26 +80,34 @@ impl sov_modules_api::Module for Bank { initial_balance, minter_address, authorized_minters, - } => Ok(self.create_token( - token_name, - salt, - initial_balance, - minter_address, - authorized_minters, - context, - working_set, - )?), + } => { + self.create_token( + token_name, + salt, + initial_balance, + minter_address, + authorized_minters, + context, + working_set, + )?; + Ok(CallResponse::default()) + } call::CallMessage::Transfer { to, coins } => { Ok(self.transfer(to, coins, context, working_set)?) } - call::CallMessage::Burn { coins } => Ok(self.burn(coins, context, working_set)?), + call::CallMessage::Burn { coins } => { + Ok(self.burn_from_eoa(coins, context, working_set)?) + } call::CallMessage::Mint { coins, minter_address, - } => Ok(self.mint(coins, minter_address, context, working_set)?), + } => { + self.mint_from_eoa(&coins, &minter_address, context, working_set)?; + Ok(CallResponse::default()) + } call::CallMessage::Freeze { token_address } => { Ok(self.freeze(token_address, context, working_set)?) diff --git a/module-system/module-implementations/sov-bank/src/query.rs b/module-system/module-implementations/sov-bank/src/query.rs index 6fba96dcc..c683d6dc3 100644 --- a/module-system/module-implementations/sov-bank/src/query.rs +++ b/module-system/module-implementations/sov-bank/src/query.rs @@ -1,22 +1,29 @@ +//! 
Defines rpc queries exposed by the bank module, along with the relevant types use jsonrpsee::core::RpcResult; use sov_modules_api::macros::rpc_gen; use sov_state::WorkingSet; use crate::{Amount, Bank}; +/// Structure returned by the `balance_of` rpc method. #[derive(Debug, Eq, PartialEq, serde::Deserialize, serde::Serialize, Clone)] pub struct BalanceResponse { + /// The balance amount of a given user for a given token. Equivalent to u64. pub amount: Option, } +/// Structure returned by the `supply_of` rpc method. #[derive(Debug, Eq, PartialEq, serde::Deserialize, serde::Serialize, Clone)] pub struct TotalSupplyResponse { + /// The amount of token supply for a given token address. Equivalent to u64. pub amount: Option, } #[rpc_gen(client, server, namespace = "bank")] impl Bank { #[rpc_method(name = "balanceOf")] + /// Rpc method that returns the balance of the user at the address `user_address` for the token + /// stored at the address `token_address`. pub fn balance_of( &self, user_address: C::Address, @@ -29,6 +36,7 @@ impl Bank { } #[rpc_method(name = "supplyOf")] + /// Rpc method that returns the total supply of the token stored at the address `token_address`. pub fn supply_of( &self, token_address: C::Address, @@ -42,16 +50,3 @@ impl Bank { }) } } - -impl Bank { - pub fn get_balance_of( - &self, - user_address: C::Address, - token_address: C::Address, - working_set: &mut WorkingSet, - ) -> Option { - self.tokens - .get(&token_address, working_set) - .and_then(|token| token.balances.get(&user_address, working_set)) - } -} diff --git a/module-system/module-implementations/sov-bank/src/token.rs b/module-system/module-implementations/sov-bank/src/token.rs index 1c1b87425..543a6c228 100644 --- a/module-system/module-implementations/sov-bank/src/token.rs +++ b/module-system/module-implementations/sov-bank/src/token.rs @@ -12,8 +12,11 @@ use thiserror::Error; use crate::call::prefix_from_address_with_parent; +/// Type alias to store an amount of token. pub type Amount = u64; +/// Structure that stores information specifying +/// a given `amount` (type [`Amount`]) of coins stored at a `token_address` (type [`Context::Address`]). #[cfg_attr( feature = "native", derive(serde::Serialize), @@ -24,7 +27,9 @@ pub type Amount = u64; )] #[derive(borsh::BorshDeserialize, borsh::BorshSerialize, Debug, PartialEq, Clone)] pub struct Coins { + /// An `amount` of coins stored. pub amount: Amount, + /// The address where the tokens are stored. pub token_address: C::Address, } @@ -108,6 +113,9 @@ pub(crate) struct Token { } impl Token { + /// Transfers the amount `amount` of tokens from the address `from` to the address `to`. + /// First checks that there is enough token of that type stored in `from`. If so, updates + /// the balances of the `from` and `to` accounts. pub(crate) fn transfer( &self, from: &C::Address, @@ -130,7 +138,8 @@ impl Token { Ok(()) } - + /// Burns a specified `amount` of tokens from the address `from`. First checks that the address has enough tokens to burn; + /// if not, returns an error. Otherwise, updates the balance by subtracting the burnt amount. pub(crate) fn burn( &mut self, from: &C::Address, @@ -155,27 +164,32 @@ impl Token { Ok(()) } + /// Mints a given `amount` of tokens to the specified `mint_to_address`, as authorized by `authorizer`. + /// Checks that the `authorized_minters` set is not empty for the token and that the `authorizer` + /// is an authorized minter. If so, updates the token balance of `mint_to_address` by + /// adding the minted tokens. 
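Since the query module is now public, it is worth spelling out how the RPC attributes above surface on the wire: `#[rpc_gen(..., namespace = "bank")]` combined with `#[rpc_method(name = "supplyOf")]` is expected to expose a JSON-RPC method named `bank_supplyOf`, whose result deserializes into `TotalSupplyResponse`. The helpers below are a hypothetical sketch (only `TotalSupplyResponse` comes from this diff; `serde_json` and `anyhow` are assumed to be available):

```rust
// Sketch of the wire-level view of the `supplyOf` RPC defined above.
use sov_bank::query::TotalSupplyResponse;

fn supply_of_request(token_address: &str) -> serde_json::Value {
    serde_json::json!({
        "jsonrpc": "2.0",
        // namespace "bank" + method name "supplyOf" => "bank_supplyOf"
        "method": "bank_supplyOf",
        "params": [token_address],
        "id": 1
    })
}

fn parse_supply(result: serde_json::Value) -> anyhow::Result<Option<u64>> {
    // The `result` field of the JSON-RPC response carries the serialized
    // `TotalSupplyResponse { amount: Option<Amount> }` returned by `supply_of`.
    let response: TotalSupplyResponse = serde_json::from_value(result)?;
    Ok(response.amount)
}
```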
Updates the `total_supply` of that token. pub(crate) fn mint( &mut self, - sender: &C::Address, - minter_address: &C::Address, + authorizer: &C::Address, + mint_to_address: &C::Address, amount: Amount, working_set: &mut WorkingSet, ) -> Result<()> { if self.authorized_minters.is_empty() { bail!("Attempt to mint frozen token {}", self.name) } - self.is_authorized_minter(sender)?; + + self.is_authorized_minter(authorizer)?; let to_balance: Amount = self .balances - .get(minter_address, working_set) + .get(mint_to_address, working_set) .unwrap_or_default() .checked_add(amount) .ok_or(anyhow::Error::msg( "Account balance overflow in the mint method of bank module", ))?; - self.balances.set(minter_address, &to_balance, working_set); + self.balances.set(mint_to_address, &to_balance, working_set); self.total_supply = self .total_supply .checked_add(amount) @@ -212,6 +226,11 @@ impl Token { Ok(new_balance) } + /// Creates a token from a given set of parameters. + /// The `token_name`, `sender` address (as a `u8` slice), and the `salt` (`u64` number) are used as an input + /// to an hash function that computes the token address. Then the initial accounts and balances are populated + /// from the `address_and_balances` slice and the `total_supply` of tokens is updated each time. + /// Returns a tuple containing the computed `token_address` and the created `token` object. pub(crate) fn create( token_name: &str, address_and_balances: &[(C::Address, u64)], diff --git a/module-system/module-implementations/sov-bank/src/utils.rs b/module-system/module-implementations/sov-bank/src/utils.rs index 1097a3a50..d8c401e68 100644 --- a/module-system/module-implementations/sov-bank/src/utils.rs +++ b/module-system/module-implementations/sov-bank/src/utils.rs @@ -17,6 +17,7 @@ pub fn get_token_address( C::Address::from(hash) } +/// Gets the token address for the genesis block using the `DEPLOYER` address as the sender. 
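The `Token::mint` body above guards both the recipient balance and the total supply with `checked_add`, so a mint that would wrap `u64` fails and leaves state untouched. A self-contained model of that guard, with illustrative names rather than module code:

```rust
// Standalone model of the overflow guard used in `Token::mint` above.
fn checked_mint(balance: u64, total_supply: u64, amount: u64) -> anyhow::Result<(u64, u64)> {
    let new_balance = balance
        .checked_add(amount)
        .ok_or_else(|| anyhow::anyhow!("Account balance overflow"))?;
    let new_supply = total_supply
        .checked_add(amount)
        .ok_or_else(|| anyhow::anyhow!("Total supply overflow"))?;
    Ok((new_balance, new_supply))
}

#[cfg(test)]
mod overflow_guard_example {
    use super::checked_mint;

    #[test]
    fn overflowing_mint_is_rejected() {
        // Mirrors the overflow tests later in this diff: a huge mint fails,
        // so the observable supply keeps its previous value.
        assert!(checked_mint(10, 120, u64::MAX).is_err());
        assert_eq!((30, 140), checked_mint(10, 120, 20).unwrap());
    }
}
```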
pub fn get_genesis_token_address( token_name: &str, salt: u64, diff --git a/module-system/module-implementations/sov-bank/tests/burn_test.rs b/module-system/module-implementations/sov-bank/tests/burn_test.rs index 9f13d9373..91e2e6144 100644 --- a/module-system/module-implementations/sov-bank/tests/burn_test.rs +++ b/module-system/module-implementations/sov-bank/tests/burn_test.rs @@ -1,7 +1,7 @@ use helpers::{generate_address, C}; +use sov_bank::query::TotalSupplyResponse; use sov_bank::{ get_genesis_token_address, get_token_address, Bank, BankConfig, CallMessage, Coins, - TotalSupplyResponse, }; use sov_modules_api::{Address, Context, Error, Module}; use sov_state::{DefaultStorageSpec, ProverStorage, WorkingSet}; @@ -21,9 +21,9 @@ fn burn_deployed_tokens() { bank.genesis(&empty_bank_config, &mut working_set).unwrap(); let sender_address = generate_address("just_sender"); - let sender_context = C::new(sender_address.clone()); + let sender_context = C::new(sender_address); let minter_address = generate_address("minter"); - let minter_context = C::new(minter_address.clone()); + let minter_context = C::new(minter_address); let salt = 0; let token_name = "Token1".to_owned(); @@ -36,8 +36,8 @@ fn burn_deployed_tokens() { salt, token_name, initial_balance, - minter_address: minter_address.clone(), - authorized_minters: vec![minter_address.clone()], + minter_address, + authorized_minters: vec![minter_address], }; bank.call(mint_message, &minter_context, &mut working_set) .expect("Failed to mint token"); @@ -45,14 +45,13 @@ fn burn_deployed_tokens() { assert!(working_set.events().is_empty()); let query_total_supply = |working_set: &mut WorkingSet| -> Option { - let total_supply: TotalSupplyResponse = - bank.supply_of(token_address.clone(), working_set).unwrap(); + let total_supply: TotalSupplyResponse = bank.supply_of(token_address, working_set).unwrap(); total_supply.amount }; let query_user_balance = |user_address: Address, working_set: &mut WorkingSet| -> Option { - bank.get_balance_of(user_address, token_address.clone(), working_set) + bank.get_balance_of(user_address, token_address, working_set) }; let previous_total_supply = query_total_supply(&mut working_set); @@ -64,7 +63,7 @@ fn burn_deployed_tokens() { let burn_message = CallMessage::Burn { coins: Coins { amount: burn_amount, - token_address: token_address.clone(), + token_address, }, }; @@ -74,7 +73,7 @@ fn burn_deployed_tokens() { let current_total_supply = query_total_supply(&mut working_set); assert_eq!(Some(initial_balance - burn_amount), current_total_supply); - let minter_balance = query_user_balance(minter_address.clone(), &mut working_set); + let minter_balance = query_user_balance(minter_address, &mut working_set); assert_eq!(Some(initial_balance - burn_amount), minter_balance); let previous_total_supply = current_total_supply; @@ -89,7 +88,7 @@ fn burn_deployed_tokens() { assert!(chain.next().is_none()); assert_eq!( format!( - "Failed burn coins(token_address={} amount={}) by sender {}", + "Failed to burn coins(token_address={} amount={}) from owner {}", token_address, burn_amount, sender_address ), message_1 @@ -110,14 +109,14 @@ fn burn_deployed_tokens() { let burn_zero_message = CallMessage::Burn { coins: Coins { amount: 0, - token_address: token_address.clone(), + token_address, }, }; bank.call(burn_zero_message, &minter_context, &mut working_set) .expect("Failed to burn token"); assert!(working_set.events().is_empty()); - let minter_balance_after = query_user_balance(minter_address.clone(), &mut working_set); + let 
minter_balance_after = query_user_balance(minter_address, &mut working_set); assert_eq!(minter_balance, minter_balance_after); // --- @@ -125,7 +124,7 @@ fn burn_deployed_tokens() { let burn_message = CallMessage::Burn { coins: Coins { amount: initial_balance + 10, - token_address: token_address.clone(), + token_address, }, }; @@ -138,7 +137,7 @@ fn burn_deployed_tokens() { assert!(chain.next().is_none()); assert_eq!( format!( - "Failed burn coins(token_address={} amount={}) by sender {}", + "Failed to burn coins(token_address={} amount={}) from owner {}", token_address, initial_balance + 10, minter_address @@ -156,7 +155,7 @@ fn burn_deployed_tokens() { let burn_message = CallMessage::Burn { coins: Coins { amount: 1, - token_address: token_address.clone(), + token_address, }, }; @@ -169,7 +168,7 @@ fn burn_deployed_tokens() { assert!(chain.next().is_none()); assert_eq!( format!( - "Failed burn coins(token_address={} amount={}) by sender {}", + "Failed to burn coins(token_address={} amount={}) from owner {}", token_address, 1, minter_address ), message_1 @@ -193,25 +192,25 @@ fn burn_initial_tokens() { &bank_config.tokens[0].token_name, bank_config.tokens[0].salt, ); - let sender_address = bank_config.tokens[0].address_and_balances[0].0.clone(); + let sender_address = bank_config.tokens[0].address_and_balances[0].0; let query_user_balance = |user_address: Address, working_set: &mut WorkingSet| -> Option { - bank.get_balance_of(user_address, token_address.clone(), working_set) + bank.get_balance_of(user_address, token_address, working_set) }; - let balance_before = query_user_balance(sender_address.clone(), &mut working_set); + let balance_before = query_user_balance(sender_address, &mut working_set); assert_eq!(Some(initial_balance), balance_before); let burn_amount = 10; let burn_message = CallMessage::Burn { coins: Coins { amount: burn_amount, - token_address: token_address.clone(), + token_address, }, }; - let context = C::new(sender_address.clone()); + let context = C::new(sender_address); bank.call(burn_message, &context, &mut working_set) .expect("Failed to burn token"); assert!(working_set.events().is_empty()); diff --git a/module-system/module-implementations/sov-bank/tests/create_token_test.rs b/module-system/module-implementations/sov-bank/tests/create_token_test.rs index 0f7838595..b75abcf54 100644 --- a/module-system/module-implementations/sov-bank/tests/create_token_test.rs +++ b/module-system/module-implementations/sov-bank/tests/create_token_test.rs @@ -1,5 +1,5 @@ use sov_bank::{get_token_address, Bank, CallMessage}; -use sov_modules_api::test_utils::generate_address; +use sov_modules_api::utils::generate_address; use sov_modules_api::{Context, Module}; use sov_state::{ProverStorage, WorkingSet}; @@ -16,7 +16,7 @@ fn initial_and_deployed_token() { bank.genesis(&bank_config, &mut working_set).unwrap(); let sender_address = generate_address::("sender"); - let sender_context = C::new(sender_address.clone()); + let sender_context = C::new(sender_address); let minter_address = generate_address::("minter"); let initial_balance = 500; let token_name = "Token1".to_owned(); @@ -26,8 +26,8 @@ fn initial_and_deployed_token() { salt, token_name, initial_balance, - minter_address: minter_address.clone(), - authorized_minters: vec![minter_address.clone()], + minter_address, + authorized_minters: vec![minter_address], }; bank.call(create_token_message, &sender_context, &mut working_set) @@ -35,8 +35,7 @@ fn initial_and_deployed_token() { assert!(working_set.events().is_empty()); - let 
sender_balance = - bank.get_balance_of(sender_address, token_address.clone(), &mut working_set); + let sender_balance = bank.get_balance_of(sender_address, token_address, &mut working_set); assert!(sender_balance.is_none()); let minter_balance = bank.get_balance_of(minter_address, token_address, &mut working_set); diff --git a/module-system/module-implementations/sov-bank/tests/freeze_test.rs b/module-system/module-implementations/sov-bank/tests/freeze_test.rs index 030b5b433..dc8c11776 100644 --- a/module-system/module-implementations/sov-bank/tests/freeze_test.rs +++ b/module-system/module-implementations/sov-bank/tests/freeze_test.rs @@ -1,7 +1,8 @@ use helpers::C; -use sov_bank::{get_token_address, Bank, BankConfig, CallMessage, Coins, TotalSupplyResponse}; +use sov_bank::query::TotalSupplyResponse; +use sov_bank::{get_token_address, Bank, BankConfig, CallMessage, Coins}; use sov_modules_api::default_context::DefaultContext; -use sov_modules_api::test_utils::generate_address; +use sov_modules_api::utils::generate_address; use sov_modules_api::{Address, Context, Error, Module}; use sov_state::{DefaultStorageSpec, ProverStorage, WorkingSet}; @@ -18,7 +19,7 @@ fn freeze_token() { bank.genesis(&empty_bank_config, &mut working_set).unwrap(); let minter_address = generate_address::("minter"); - let minter_context = C::new(minter_address.clone()); + let minter_context = C::new(minter_address); let salt = 0; let token_name = "Token1".to_owned(); @@ -31,8 +32,8 @@ fn freeze_token() { salt, token_name: token_name.clone(), initial_balance, - minter_address: minter_address.clone(), - authorized_minters: vec![minter_address.clone()], + minter_address, + authorized_minters: vec![minter_address], }; let _minted = bank .call(mint_message, &minter_context, &mut working_set) @@ -42,9 +43,7 @@ fn freeze_token() { // ----- // Freeze - let freeze_message = CallMessage::Freeze { - token_address: token_address.clone(), - }; + let freeze_message = CallMessage::Freeze { token_address }; let _freeze = bank .call(freeze_message, &minter_context, &mut working_set) @@ -53,9 +52,7 @@ fn freeze_token() { // ---- // Try to freeze an already frozen token - let freeze_message = CallMessage::Freeze { - token_address: token_address.clone(), - }; + let freeze_message = CallMessage::Freeze { token_address }; let freeze = bank.call(freeze_message, &minter_context, &mut working_set); assert!(freeze.is_err()); @@ -84,8 +81,8 @@ fn freeze_token() { salt, token_name: token_name_2.clone(), initial_balance, - minter_address: minter_address.clone(), - authorized_minters: vec![minter_address.clone()], + minter_address, + authorized_minters: vec![minter_address], }; let _minted = bank .call(mint_message, &minter_context, &mut working_set) @@ -95,9 +92,9 @@ fn freeze_token() { // Try to freeze with a non authorized minter let unauthorized_address = generate_address::("unauthorized_address"); - let unauthorized_context = C::new(unauthorized_address.clone()); + let unauthorized_context = C::new(unauthorized_address); let freeze_message = CallMessage::Freeze { - token_address: token_address_2.clone(), + token_address: token_address_2, }; let freeze = bank.call(freeze_message, &unauthorized_context, &mut working_set); @@ -128,9 +125,9 @@ fn freeze_token() { let mint_message = CallMessage::Mint { coins: Coins { amount: mint_amount, - token_address: token_address.clone(), + token_address, }, - minter_address: new_holder.clone(), + minter_address: new_holder, }; let query_total_supply = |token_address: Address, @@ -150,7 +147,7 @@ fn 
freeze_token() { assert!(chain.next().is_none()); assert_eq!( format!( - "Failed mint coins(token_address={} amount={}) to {} by minter {}", + "Failed mint coins(token_address={} amount={}) to {} by authorizer {}", token_address, mint_amount, new_holder, minter_address ), message_1 @@ -166,9 +163,9 @@ fn freeze_token() { let mint_message = CallMessage::Mint { coins: Coins { amount: mint_amount, - token_address: token_address_2.clone(), + token_address: token_address_2, }, - minter_address: minter_address.clone(), + minter_address, }; let _minted = bank @@ -176,7 +173,7 @@ fn freeze_token() { .expect("Failed to mint token"); assert!(working_set.events().is_empty()); - let total_supply = query_total_supply(token_address_2.clone(), &mut working_set); + let total_supply = query_total_supply(token_address_2, &mut working_set); assert_eq!(Some(initial_balance + mint_amount), total_supply); let query_user_balance = diff --git a/module-system/module-implementations/sov-bank/tests/helpers/mod.rs b/module-system/module-implementations/sov-bank/tests/helpers/mod.rs index 485fc9138..d6de1abb4 100644 --- a/module-system/module-implementations/sov-bank/tests/helpers/mod.rs +++ b/module-system/module-implementations/sov-bank/tests/helpers/mod.rs @@ -1,6 +1,6 @@ use sov_bank::{BankConfig, TokenConfig}; use sov_modules_api::default_context::DefaultContext; -use sov_modules_api::test_utils::generate_address as gen_address_generic; +use sov_modules_api::utils::generate_address as gen_address_generic; use sov_modules_api::Address; pub type C = DefaultContext; diff --git a/module-system/module-implementations/sov-bank/tests/mint_test.rs b/module-system/module-implementations/sov-bank/tests/mint_test.rs index 7a38e1d58..8ebe0875c 100644 --- a/module-system/module-implementations/sov-bank/tests/mint_test.rs +++ b/module-system/module-implementations/sov-bank/tests/mint_test.rs @@ -1,6 +1,7 @@ use helpers::C; -use sov_bank::{get_token_address, Bank, BankConfig, CallMessage, Coins, TotalSupplyResponse}; -use sov_modules_api::test_utils::generate_address; +use sov_bank::query::TotalSupplyResponse; +use sov_bank::{get_token_address, Bank, BankConfig, CallMessage, Coins}; +use sov_modules_api::utils::generate_address; use sov_modules_api::{Address, Context, Error, Module}; use sov_state::{DefaultStorageSpec, ProverStorage, WorkingSet}; @@ -17,7 +18,7 @@ fn mint_token() { bank.genesis(&empty_bank_config, &mut working_set).unwrap(); let minter_address = generate_address::("minter"); - let minter_context = C::new(minter_address.clone()); + let minter_context = C::new(minter_address); let salt = 0; let token_name = "Token1".to_owned(); @@ -30,8 +31,8 @@ fn mint_token() { salt, token_name: token_name.clone(), initial_balance, - minter_address: minter_address.clone(), - authorized_minters: vec![minter_address.clone()], + minter_address, + authorized_minters: vec![minter_address], }; let _minted = bank .call(mint_message, &minter_context, &mut working_set) @@ -48,10 +49,10 @@ fn mint_token() { let query_user_balance = |user_address: Address, working_set: &mut WorkingSet| -> Option { - bank.get_balance_of(user_address, token_address.clone(), working_set) + bank.get_balance_of(user_address, token_address, working_set) }; - let previous_total_supply = query_total_supply(token_address.clone(), &mut working_set); + let previous_total_supply = query_total_supply(token_address, &mut working_set); assert_eq!(Some(initial_balance), previous_total_supply); // ----- @@ -61,9 +62,9 @@ fn mint_token() { let mint_message = 
CallMessage::Mint { coins: Coins { amount: mint_amount, - token_address: token_address.clone(), + token_address, }, - minter_address: new_holder.clone(), + minter_address: new_holder, }; let _minted = bank @@ -71,20 +72,20 @@ fn mint_token() { .expect("Failed to mint token"); assert!(working_set.events().is_empty()); - let total_supply = query_total_supply(token_address.clone(), &mut working_set); + let total_supply = query_total_supply(token_address, &mut working_set); assert_eq!(Some(initial_balance + mint_amount), total_supply); // check user balance after minting - let balance = query_user_balance(new_holder.clone(), &mut working_set); + let balance = query_user_balance(new_holder, &mut working_set); assert_eq!(Some(10), balance); // check original token creation balance - let bal = query_user_balance(minter_address.clone(), &mut working_set); + let bal = query_user_balance(minter_address, &mut working_set); assert_eq!(Some(100), bal); // Mint with an un-authorized user let unauthorized_address = generate_address::("unauthorized_address"); - let unauthorized_context = C::new(unauthorized_address.clone()); + let unauthorized_context = C::new(unauthorized_address); let unauthorized_mint = bank.call(mint_message, &unauthorized_context, &mut working_set); assert!(unauthorized_mint.is_err()); @@ -98,7 +99,7 @@ fn mint_token() { assert_eq!( format!( - "Failed mint coins(token_address={} amount={}) to {} by minter {}", + "Failed mint coins(token_address={} amount={}) to {} by authorizer {}", token_address, mint_amount, new_holder, unauthorized_address ), message_1 @@ -124,11 +125,8 @@ fn mint_token() { salt, token_name: token_name.clone(), initial_balance, - minter_address: minter_address.clone(), - authorized_minters: vec![ - authorized_minter_address_1.clone(), - authorized_minter_address_2.clone(), - ], + minter_address, + authorized_minters: vec![authorized_minter_address_1, authorized_minter_address_2], }; let _minted = bank .call(mint_message, &minter_context, &mut working_set) @@ -142,9 +140,9 @@ fn mint_token() { let mint_message = CallMessage::Mint { coins: Coins { amount: mint_amount, - token_address: token_address.clone(), + token_address, }, - minter_address: new_holder.clone(), + minter_address: new_holder, }; let minted = bank.call(mint_message, &minter_context, &mut working_set); @@ -157,7 +155,7 @@ fn mint_token() { assert!(chain.next().is_none()); assert_eq!( format!( - "Failed mint coins(token_address={} amount={}) to {} by minter {}", + "Failed mint coins(token_address={} amount={}) to {} by authorizer {}", token_address, mint_amount, new_holder, minter_address, ), message_1 @@ -174,32 +172,32 @@ fn mint_token() { let mint_message = CallMessage::Mint { coins: Coins { amount: mint_amount, - token_address: token_address.clone(), + token_address, }, - minter_address: new_holder.clone(), + minter_address: new_holder, }; let _minted = bank .call(mint_message, &authorized_minter_2_context, &mut working_set) .expect("Failed to mint token"); - let supply = query_total_supply(token_address.clone(), &mut working_set); + let supply = query_total_supply(token_address, &mut working_set); assert!(working_set.events().is_empty()); assert_eq!(Some(110), supply); // Try to mint new token with authorized sender 1 - let authorized_minter_1_context = C::new(authorized_minter_address_1.clone()); + let authorized_minter_1_context = C::new(authorized_minter_address_1); let mint_message = CallMessage::Mint { coins: Coins { amount: mint_amount, - token_address: token_address.clone(), + 
token_address, }, - minter_address: new_holder.clone(), + minter_address: new_holder, }; let _minted = bank .call(mint_message, &authorized_minter_1_context, &mut working_set) .expect("Failed to mint token"); - let supply = query_total_supply(token_address.clone(), &mut working_set); + let supply = query_total_supply(token_address, &mut working_set); assert!(working_set.events().is_empty()); assert_eq!(Some(120), supply); @@ -207,9 +205,9 @@ fn mint_token() { let overflow_mint_message = CallMessage::Mint { coins: Coins { amount: u64::MAX, - token_address: token_address.clone(), + token_address, }, - minter_address: new_holder.clone(), + minter_address: new_holder, }; let minted = bank.call( @@ -225,7 +223,7 @@ fn mint_token() { assert!(chain.next().is_none()); assert_eq!( format!( - "Failed mint coins(token_address={} amount={}) to {} by minter {}", + "Failed mint coins(token_address={} amount={}) to {} by authorizer {}", token_address, u64::MAX, new_holder, @@ -238,7 +236,7 @@ fn mint_token() { message_2, ); // assert that the supply is unchanged after the overflow mint - let supply = query_total_supply(token_address.clone(), &mut working_set); + let supply = query_total_supply(token_address, &mut working_set); assert_eq!(Some(120), supply); // Overflow test 2 - total supply @@ -246,9 +244,9 @@ fn mint_token() { let overflow_mint_message = CallMessage::Mint { coins: Coins { amount: u64::MAX - 1, - token_address: token_address.clone(), + token_address, }, - minter_address: new_holder.clone(), + minter_address: new_holder, }; let minted = bank.call( @@ -264,7 +262,7 @@ fn mint_token() { assert!(chain.next().is_none()); assert_eq!( format!( - "Failed mint coins(token_address={} amount={}) to {} by minter {}", + "Failed mint coins(token_address={} amount={}) to {} by authorizer {}", token_address, u64::MAX - 1, new_holder, diff --git a/module-system/module-implementations/sov-bank/tests/transfer_test.rs b/module-system/module-implementations/sov-bank/tests/transfer_test.rs index 662804e6e..1439adea1 100644 --- a/module-system/module-implementations/sov-bank/tests/transfer_test.rs +++ b/module-system/module-implementations/sov-bank/tests/transfer_test.rs @@ -1,11 +1,11 @@ mod helpers; use helpers::*; +use sov_bank::query::TotalSupplyResponse; use sov_bank::{ get_genesis_token_address, get_token_address, Bank, BankConfig, CallMessage, Coins, - TotalSupplyResponse, }; -use sov_modules_api::test_utils::generate_address; +use sov_modules_api::utils::generate_address; use sov_modules_api::{Address, Context, Error, Module}; use sov_state::{DefaultStorageSpec, ProverStorage, WorkingSet}; @@ -26,38 +26,37 @@ fn transfer_initial_token() { &bank_config.tokens[0].token_name, bank_config.tokens[0].salt, ); - let sender_address = bank_config.tokens[0].address_and_balances[0].0.clone(); - let receiver_address = bank_config.tokens[0].address_and_balances[1].0.clone(); + let sender_address = bank_config.tokens[0].address_and_balances[0].0; + let receiver_address = bank_config.tokens[0].address_and_balances[1].0; assert_ne!(sender_address, receiver_address); // Preparation let query_user_balance = |user_address: Address, working_set: &mut WorkingSet| -> Option { - bank.get_balance_of(user_address, token_address.clone(), working_set) + bank.get_balance_of(user_address, token_address, working_set) }; let query_total_supply = |working_set: &mut WorkingSet| -> Option { - let total_supply: TotalSupplyResponse = - bank.supply_of(token_address.clone(), working_set).unwrap(); + let total_supply: TotalSupplyResponse = 
bank.supply_of(token_address, working_set).unwrap(); total_supply.amount }; - let sender_balance_before = query_user_balance(sender_address.clone(), &mut working_set); - let receiver_balance_before = query_user_balance(receiver_address.clone(), &mut working_set); + let sender_balance_before = query_user_balance(sender_address, &mut working_set); + let receiver_balance_before = query_user_balance(receiver_address, &mut working_set); let total_supply_before = query_total_supply(&mut working_set); assert!(total_supply_before.is_some()); assert_eq!(Some(initial_balance), sender_balance_before); assert_eq!(sender_balance_before, receiver_balance_before); - let sender_context = C::new(sender_address.clone()); + let sender_context = C::new(sender_address); // Transfer happy test { let transfer_message = CallMessage::Transfer { - to: receiver_address.clone(), + to: receiver_address, coins: Coins { amount: transfer_amount, - token_address: token_address.clone(), + token_address, }, }; @@ -65,8 +64,8 @@ fn transfer_initial_token() { .expect("Transfer call failed"); assert!(working_set.events().is_empty()); - let sender_balance_after = query_user_balance(sender_address.clone(), &mut working_set); - let receiver_balance_after = query_user_balance(receiver_address.clone(), &mut working_set); + let sender_balance_after = query_user_balance(sender_address, &mut working_set); + let receiver_balance_after = query_user_balance(receiver_address, &mut working_set); assert_eq!( Some(initial_balance - transfer_amount), @@ -83,10 +82,10 @@ fn transfer_initial_token() { // Not enough balance { let transfer_message = CallMessage::Transfer { - to: receiver_address.clone(), + to: receiver_address, coins: Coins { amount: initial_balance + 1, - token_address: token_address.clone(), + token_address, }, }; @@ -128,10 +127,10 @@ fn transfer_initial_token() { let token_address = get_token_address::(&token_name, sender_address.as_ref(), salt); let transfer_message = CallMessage::Transfer { - to: receiver_address.clone(), + to: receiver_address, coins: Coins { amount: 1, - token_address: token_address.clone(), + token_address, }, }; @@ -156,19 +155,18 @@ fn transfer_initial_token() { // Sender does not exist { let unknown_sender = generate_address::("non_existing_sender"); - let unknown_sender_context = C::new(unknown_sender.clone()); + let unknown_sender_context = C::new(unknown_sender); - let sender_balance = query_user_balance(unknown_sender.clone(), &mut working_set); + let sender_balance = query_user_balance(unknown_sender, &mut working_set); assert!(sender_balance.is_none()); - let receiver_balance_before = - query_user_balance(receiver_address.clone(), &mut working_set); + let receiver_balance_before = query_user_balance(receiver_address, &mut working_set); let transfer_message = CallMessage::Transfer { - to: receiver_address.clone(), + to: receiver_address, coins: Coins { amount: 1, - token_address: token_address.clone(), + token_address, }, }; @@ -211,15 +209,14 @@ fn transfer_initial_token() { { let unknown_receiver = generate_address::("non_existing_receiver"); - let receiver_balance_before = - query_user_balance(unknown_receiver.clone(), &mut working_set); + let receiver_balance_before = query_user_balance(unknown_receiver, &mut working_set); assert!(receiver_balance_before.is_none()); let transfer_message = CallMessage::Transfer { - to: unknown_receiver.clone(), + to: unknown_receiver, coins: Coins { amount: 1, - token_address: token_address.clone(), + token_address, }, }; @@ -234,14 +231,14 @@ fn 
transfer_initial_token() { // Sender equals receiver { let total_supply_before = query_total_supply(&mut working_set); - let sender_balance_before = query_user_balance(sender_address.clone(), &mut working_set); + let sender_balance_before = query_user_balance(sender_address, &mut working_set); assert!(sender_balance_before.is_some()); let transfer_message = CallMessage::Transfer { - to: sender_address.clone(), + to: sender_address, coins: Coins { amount: 1, - token_address: token_address.clone(), + token_address, }, }; bank.call(transfer_message, &sender_context, &mut working_set) @@ -276,30 +273,29 @@ fn transfer_deployed_token() { // Preparation let query_user_balance = |user_address: Address, working_set: &mut WorkingSet| -> Option { - bank.get_balance_of(user_address, token_address.clone(), working_set) + bank.get_balance_of(user_address, token_address, working_set) }; let query_total_supply = |working_set: &mut WorkingSet| -> Option { - let total_supply: TotalSupplyResponse = - bank.supply_of(token_address.clone(), working_set).unwrap(); + let total_supply: TotalSupplyResponse = bank.supply_of(token_address, working_set).unwrap(); total_supply.amount }; - let sender_balance_before = query_user_balance(sender_address.clone(), &mut working_set); - let receiver_balance_before = query_user_balance(receiver_address.clone(), &mut working_set); + let sender_balance_before = query_user_balance(sender_address, &mut working_set); + let receiver_balance_before = query_user_balance(receiver_address, &mut working_set); let total_supply_before = query_total_supply(&mut working_set); assert!(total_supply_before.is_none()); assert!(sender_balance_before.is_none()); assert!(receiver_balance_before.is_none()); - let sender_context = C::new(sender_address.clone()); + let sender_context = C::new(sender_address); let mint_message = CallMessage::CreateToken { salt, token_name, initial_balance, - minter_address: sender_address.clone(), - authorized_minters: vec![sender_address.clone()], + minter_address: sender_address, + authorized_minters: vec![sender_address], }; bank.call(mint_message, &sender_context, &mut working_set) .expect("Failed to mint token"); @@ -308,18 +304,18 @@ fn transfer_deployed_token() { let total_supply_before = query_total_supply(&mut working_set); assert!(total_supply_before.is_some()); - let sender_balance_before = query_user_balance(sender_address.clone(), &mut working_set); - let receiver_balance_before = query_user_balance(receiver_address.clone(), &mut working_set); + let sender_balance_before = query_user_balance(sender_address, &mut working_set); + let receiver_balance_before = query_user_balance(receiver_address, &mut working_set); assert_eq!(Some(initial_balance), sender_balance_before); assert!(receiver_balance_before.is_none()); let transfer_amount = 15; let transfer_message = CallMessage::Transfer { - to: receiver_address.clone(), + to: receiver_address, coins: Coins { amount: transfer_amount, - token_address: token_address.clone(), + token_address, }, }; diff --git a/module-system/module-implementations/sov-blob-storage/Cargo.toml b/module-system/module-implementations/sov-blob-storage/Cargo.toml index 8ba5a0dc8..648db23b6 100644 --- a/module-system/module-implementations/sov-blob-storage/Cargo.toml +++ b/module-system/module-implementations/sov-blob-storage/Cargo.toml @@ -15,16 +15,38 @@ resolver = "2" [dependencies] anyhow = { workspace = true } +borsh = { workspace = true } bincode = { workspace = true } -sov-modules-api = { path = "../../sov-modules-api", version = 
"0.1", default-features = false, features = ["macros"] } -sov-modules-macros = { path = "../../sov-modules-macros", version = "0.1" } -sov-state = { path = "../../sov-state", version = "0.1", default-features = false } +tracing = { workspace = true } +hex = { workspace = true } + sov-rollup-interface = { path = "../../../rollup-interface", version = "0.1" } +sov-modules-api = { path = "../../sov-modules-api", version = "0.1" } +sov-modules-macros = { path = "../../sov-modules-macros", version = "0.1" } +sov-state = { path = "../../sov-state", version = "0.1" } +sov-sequencer-registry = { path = "../sov-sequencer-registry", version = "0.1" } + + +schemars = { workspace = true, optional = true } +serde = { workspace = true, optional = true } +serde_json = { workspace = true, optional = true } +jsonrpsee = { workspace = true, optional = true } +clap = { workspace = true, optional = true } [dev-dependencies] -sov-modules-api = { path = "../../sov-modules-api", version = "0.1" } +sov-rollup-interface = { path = "../../../rollup-interface", features = ["mocks"] } +sov-bank = { path = "../sov-bank", version = "0.1" } tempfile = { workspace = true } [features] -default = ["native"] -native = ["sov-modules-api/native", "sov-state/native"] +default = [] +native = [ + "jsonrpsee", + "schemars", + "serde", + "serde_json", + "sov-modules-api/native", + "sov-state/native", + "sov-modules-macros/native", + "sov-sequencer-registry/native" +] diff --git a/module-system/module-implementations/sov-blob-storage/README.md b/module-system/module-implementations/sov-blob-storage/README.md new file mode 100644 index 000000000..ef932131e --- /dev/null +++ b/module-system/module-implementations/sov-blob-storage/README.md @@ -0,0 +1,7 @@ +# Blob Storage Module + +This module provides a blob storage for a blob that have been deferred from their original slot. + +Main purpose of this module is to implement `BlobSelector` rollup capability. + +It has no RPC calls and only single RPC query to get module address. 
\ No newline at end of file diff --git a/module-system/module-implementations/sov-blob-storage/src/capabilities.rs b/module-system/module-implementations/sov-blob-storage/src/capabilities.rs new file mode 100644 index 000000000..7cd266a36 --- /dev/null +++ b/module-system/module-implementations/sov-blob-storage/src/capabilities.rs @@ -0,0 +1,91 @@ +use sov_modules_api::capabilities::{BlobRefOrOwned, BlobSelector}; +use sov_modules_api::{Context, Spec}; +use sov_rollup_interface::da::{BlobReaderTrait, DaSpec}; +use sov_state::WorkingSet; +use tracing::info; + +use crate::BlobStorage; + +impl BlobSelector for BlobStorage { + type Context = C; + + fn get_blobs_for_this_slot<'a, I>( + &self, + current_blobs: I, + working_set: &mut WorkingSet<::Storage>, + ) -> anyhow::Result>> + where + I: IntoIterator, + { + // TODO: Chain-state module: https://github.com/Sovereign-Labs/sovereign-sdk/pull/598/ + let current_slot: u64 = self.get_current_slot_number(working_set); + let past_deferred: Vec = current_slot + .checked_sub(self.get_deferred_slots_count(working_set)) + .map(|pull_from_slot| self.take_blobs_for_block_number(pull_from_slot, working_set)) + .unwrap_or_default(); + let preferred_sequencer = self.get_preferred_sequencer(working_set); + + let preferred_sequencer = if let Some(sequencer) = preferred_sequencer { + sequencer + } else { + // TODO: https://github.com/Sovereign-Labs/sovereign-sdk/issues/654 + // Prevent double number of blobs being executed + return Ok(past_deferred + .into_iter() + .map(Into::into) + .chain(current_blobs.into_iter().map(Into::into)) + .collect()); + }; + + let mut priority_blobs = Vec::new(); + let mut to_defer: Vec<&mut Da::BlobTransaction> = Vec::new(); + + for blob in current_blobs { + if blob.sender().as_ref() == &preferred_sequencer[..] { + priority_blobs.push(blob); + } else { + to_defer.push(blob); + } + } + + // TODO: chain state module: https://github.com/Sovereign-Labs/sovereign-sdk/pull/598/ + self.slot_number.set(&(current_slot + 1), working_set); + + if !to_defer.is_empty() { + // TODO: https://github.com/Sovereign-Labs/sovereign-sdk/issues/655 + // Gas metering suppose to prevent saving blobs from not allowed senders if they exit mid-slot + let to_defer: Vec<&Da::BlobTransaction> = to_defer + .iter() + .filter(|b| { + let is_allowed = self + .sequencer_registry + .is_sender_allowed(&b.sender(), working_set); + // This is the best effort approach for making sure, + // that blobs do not disappear silently + // TODO: Add issue for that + if !is_allowed { + info!( + "Blob hash=0x{} from sender {} is going to be discarded", + hex::encode(b.hash()), + b.sender() + ); + } + is_allowed + }) + .map(|b| &**b) + .collect(); + self.store_blobs(current_slot, &to_defer, working_set)? + } + + if !priority_blobs.is_empty() { + Ok(priority_blobs + .into_iter() + .map(Into::into) + .chain(past_deferred.into_iter().map(Into::into)) + .collect()) + } else { + // No blobs from preferred sequencer, nothing to save, older blobs have priority + Ok(past_deferred.into_iter().map(Into::into).collect()) + } + } +} diff --git a/module-system/module-implementations/sov-blob-storage/src/lib.rs b/module-system/module-implementations/sov-blob-storage/src/lib.rs index 1cd1cc6ea..19a3ae77f 100644 --- a/module-system/module-implementations/sov-blob-storage/src/lib.rs +++ b/module-system/module-implementations/sov-blob-storage/src/lib.rs @@ -1,13 +1,21 @@ #![deny(missing_docs)] +#![doc = include_str!("../README.md")] -//! 
Blob storage module allows to save DA blobs in the state - +mod capabilities; +#[cfg(feature = "native")] +mod query; +#[cfg(feature = "native")] +pub use query::{BlobStorageRpcImpl, BlobStorageRpcServer, Response}; use sov_modules_api::{Module, ModuleInfo}; use sov_rollup_interface::da::BlobReaderTrait; -use sov_state::{StateMap, WorkingSet}; +use sov_state::{StateMap, StateValue, WorkingSet}; + +/// For how many slots deferred blobs are stored before being executed +const DEFERRED_SLOTS_COUNT: u64 = 1; /// Blob storage contains only address and vector of blobs -#[derive(ModuleInfo, Clone)] +#[cfg_attr(feature = "native", derive(sov_modules_api::ModuleCallJsonSchema))] +#[derive(Clone, ModuleInfo)] pub struct BlobStorage { /// The address of blob storage module /// Note: this is address is generated by the module framework and the corresponding private key is unknown. @@ -19,6 +27,12 @@ pub struct BlobStorage { /// Caller controls the order of blobs in the vector #[state] pub(crate) blobs: StateMap>>, + + #[module] + pub(crate) sequencer_registry: sov_sequencer_registry::SequencerRegistry, + + #[state] + pub(crate) slot_number: StateValue, } /// Non standard methods for blob storage @@ -27,7 +41,7 @@ impl BlobStorage { pub fn store_blobs( &self, block_number: u64, - blobs: &[B], + blobs: &[&B], working_set: &mut WorkingSet, ) -> anyhow::Result<()> { let mut raw_blobs: Vec> = Vec::with_capacity(blobs.len()); @@ -52,10 +66,42 @@ impl BlobStorage { .map(|b| bincode::deserialize(b).expect("malformed blob was stored previously")) .collect() } + + // TODO: Migrate to generic: https://github.com/Sovereign-Labs/sovereign-sdk/issues/622 + pub(crate) fn get_preferred_sequencer( + &self, + working_set: &mut WorkingSet, + ) -> Option> { + self.sequencer_registry.get_preferred_sequencer(working_set) + } + + pub(crate) fn get_current_slot_number(&self, working_set: &mut WorkingSet) -> u64 { + self.slot_number + .get(working_set) + .expect("slot number is not set in genesis") + } + + pub(crate) fn get_deferred_slots_count( + &self, + _working_set: &mut WorkingSet, + ) -> u64 { + DEFERRED_SLOTS_COUNT + } } /// Empty module implementation impl Module for BlobStorage { type Context = C; type Config = (); + type CallMessage = sov_modules_api::NonInstantiable; + + /// TODO: Remove this when chain-state is available https://github.com/Sovereign-Labs/sovereign-sdk/pull/598 + fn genesis( + &self, + _config: &Self::Config, + working_set: &mut WorkingSet<::Storage>, + ) -> Result<(), sov_modules_api::Error> { + self.slot_number.set(&0, working_set); + Ok(()) + } } diff --git a/module-system/module-implementations/sov-blob-storage/src/query.rs b/module-system/module-implementations/sov-blob-storage/src/query.rs new file mode 100644 index 000000000..b0f3483f2 --- /dev/null +++ b/module-system/module-implementations/sov-blob-storage/src/query.rs @@ -0,0 +1,25 @@ +use jsonrpsee::core::RpcResult; +use sov_modules_api::macros::rpc_gen; +use sov_modules_api::ModuleInfo; +use sov_state::WorkingSet; + +use super::BlobStorage; + +/// Response returned from the blobStorage_getModuleAddress endpoint. +#[derive(serde::Serialize, serde::Deserialize, Debug, Eq, PartialEq, Clone)] +pub struct Response { + /// Address of the module. + pub address: String, +} + +/// TODO: https://github.com/Sovereign-Labs/sovereign-sdk/issues/626 +#[rpc_gen(client, server, namespace = "blobStorage")] +impl BlobStorage { + /// Queries the address of the module. 
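The `BlobSelector` implementation above (capabilities.rs) boils down to a small ordering policy: blobs from the preferred sequencer execute immediately, every other blob is deferred by `DEFERRED_SLOTS_COUNT` slots (and silently dropped if its sender is not a registered sequencer), and previously deferred blobs are replayed right after the preferred sequencer's blobs; without a preferred sequencer, nothing is deferred. The standalone sketch below models only that ordering; the `Blob` type and `select_blobs` function are illustrative, not SDK code.

```rust
// Standalone model of the selection policy in `get_blobs_for_this_slot`.
#[derive(Clone, Debug, PartialEq)]
struct Blob {
    sender: [u8; 32],
    data: Vec<u8>,
}

const DEFERRED_SLOTS_COUNT: u64 = 1;

/// Returns (blobs to execute now, blobs to defer for `DEFERRED_SLOTS_COUNT` slots).
fn select_blobs(
    preferred_sequencer: Option<[u8; 32]>,
    past_deferred: Vec<Blob>, // blobs stored DEFERRED_SLOTS_COUNT slots ago
    current_blobs: Vec<Blob>,
    is_registered: impl Fn(&[u8; 32]) -> bool,
) -> (Vec<Blob>, Vec<Blob>) {
    let Some(preferred) = preferred_sequencer else {
        // No preferred sequencer: run everything in arrival order, defer nothing.
        let mut run_now = past_deferred;
        run_now.extend(current_blobs);
        return (run_now, Vec::new());
    };

    // Split the current slot into preferred-sequencer blobs and the rest.
    let (priority, rest): (Vec<Blob>, Vec<Blob>) = current_blobs
        .into_iter()
        .partition(|b| b.sender == preferred);

    // Blobs from unregistered senders are dropped instead of being deferred.
    let to_defer: Vec<Blob> = rest
        .into_iter()
        .filter(|b| is_registered(&b.sender))
        .collect();

    // Preferred-sequencer blobs run first, then blobs whose deferral expired.
    let mut run_now = priority;
    run_now.extend(past_deferred);
    (run_now, to_defer)
}
```

This mirrors the behaviour exercised by the capability tests later in this diff: slot 1 executes only the preferred sequencer's blob, and earlier deferred blobs resurface one slot later, after that slot's preferred blobs.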
+ #[rpc_method(name = "getModuleAddress")] + fn get_module_address(&self, _working_set: &mut WorkingSet) -> RpcResult { + Ok(Response { + address: self.address().to_string(), + }) + } +} diff --git a/module-system/module-implementations/sov-blob-storage/tests/blob_storage_tests.rs b/module-system/module-implementations/sov-blob-storage/tests/blob_storage_tests.rs index 6d8509de4..824267a64 100644 --- a/module-system/module-implementations/sov-blob-storage/tests/blob_storage_tests.rs +++ b/module-system/module-implementations/sov-blob-storage/tests/blob_storage_tests.rs @@ -1,11 +1,11 @@ use sov_blob_storage::BlobStorage; use sov_modules_api::default_context::DefaultContext; use sov_modules_api::Genesis; -use sov_rollup_interface::mocks::{MockAddress, TestBlob}; +use sov_rollup_interface::mocks::{MockAddress, MockBlob}; use sov_state::{ProverStorage, WorkingSet}; type C = DefaultContext; -type B = TestBlob; +type B = MockBlob; #[test] fn empty_test() { @@ -50,22 +50,25 @@ fn store_and_retrieve_standard() { let blob_4 = B::new(vec![9, 9, 9], sender, dummy_hash); let blob_5 = B::new(vec![0, 1, 0], sender, dummy_hash); - let block_2_blobs = vec![blob_1, blob_2, blob_3]; - let block_3_blobs = vec![blob_4]; - let block_4_blobs = vec![blob_5]; + let slot_2_blobs = vec![blob_1, blob_2, blob_3]; + let slot_2_blob_refs: Vec<&MockBlob> = slot_2_blobs.iter().collect(); + let slot_3_blobs = vec![blob_4]; + let slot_3_blob_refs: Vec<&MockBlob> = slot_3_blobs.iter().collect(); + let slot_4_blobs = vec![blob_5]; + let slot_4_blob_refs: Vec<&MockBlob> = slot_4_blobs.iter().collect(); blob_storage - .store_blobs(2, &block_2_blobs, &mut working_set) + .store_blobs(2, &slot_2_blob_refs, &mut working_set) .unwrap(); blob_storage - .store_blobs(3, &block_3_blobs, &mut working_set) + .store_blobs(3, &slot_3_blob_refs, &mut working_set) .unwrap(); blob_storage - .store_blobs(4, &block_4_blobs, &mut working_set) + .store_blobs(4, &slot_4_blob_refs, &mut working_set) .unwrap(); assert_eq!( - block_2_blobs, + slot_2_blobs, blob_storage.take_blobs_for_block_number(2, &mut working_set) ); assert!(blob_storage @@ -73,7 +76,7 @@ fn store_and_retrieve_standard() { .is_empty()); assert_eq!( - block_3_blobs, + slot_3_blobs, blob_storage.take_blobs_for_block_number(3, &mut working_set) ); assert!(blob_storage @@ -81,7 +84,7 @@ fn store_and_retrieve_standard() { .is_empty()); assert_eq!( - block_4_blobs, + slot_4_blobs, blob_storage.take_blobs_for_block_number(4, &mut working_set) ); assert!(blob_storage diff --git a/module-system/module-implementations/sov-blob-storage/tests/capability_tests.rs b/module-system/module-implementations/sov-blob-storage/tests/capability_tests.rs new file mode 100644 index 000000000..5f741d9a6 --- /dev/null +++ b/module-system/module-implementations/sov-blob-storage/tests/capability_tests.rs @@ -0,0 +1,441 @@ +use std::io::Read; + +use sov_bank::TokenConfig; +use sov_blob_storage::BlobStorage; +use sov_modules_api::capabilities::{BlobRefOrOwned, BlobSelector}; +use sov_modules_api::default_context::DefaultContext; +use sov_modules_api::digest::Digest; +use sov_modules_api::{Address, Context, Module, Spec}; +use sov_rollup_interface::da::BlobReaderTrait; +use sov_rollup_interface::mocks::{MockAddress, MockBlob, MockDaSpec}; +use sov_sequencer_registry::{SequencerConfig, SequencerRegistry}; +use sov_state::{ProverStorage, WorkingSet}; + +type C = DefaultContext; +type B = MockBlob; +type Da = MockDaSpec; + +const PREFERRED_SEQUENCER_KEY: &str = "preferred"; +const REGULAR_SEQUENCER_KEY: &str = 
"regular"; +const LOCKED_AMOUNT: u64 = 200; + +fn generate_address(key: &str) -> ::Address { + let hash: [u8; 32] = ::Hasher::digest(key.as_bytes()).into(); + Address::from(hash) +} + +fn get_bank_config( + preferred_sequencer: ::Address, + regular_sequencer: ::Address, +) -> sov_bank::BankConfig { + let token_config: TokenConfig = TokenConfig { + token_name: "InitialToken".to_owned(), + address_and_balances: vec![ + (preferred_sequencer, LOCKED_AMOUNT * 3), + (regular_sequencer, LOCKED_AMOUNT * 3), + ], + authorized_minters: vec![], + salt: 9, + }; + + sov_bank::BankConfig { + tokens: vec![token_config], + } +} + +#[test] +fn priority_sequencer_flow() { + let tmpdir = tempfile::tempdir().unwrap(); + let mut working_set = WorkingSet::new(ProverStorage::with_path(tmpdir.path()).unwrap()); + + let preferred_sequencer_da = MockAddress::from([10u8; 32]); + let preferred_sequencer_rollup = generate_address(PREFERRED_SEQUENCER_KEY); + let regular_sequencer_da = MockAddress::from([30u8; 32]); + let regular_sequencer_rollup = generate_address(REGULAR_SEQUENCER_KEY); + + let bank_config = get_bank_config(preferred_sequencer_rollup, regular_sequencer_rollup); + + let token_address = sov_bank::get_genesis_token_address::( + &bank_config.tokens[0].token_name, + bank_config.tokens[0].salt, + ); + + let sequencer_registry_config = SequencerConfig { + seq_rollup_address: preferred_sequencer_rollup, + seq_da_address: preferred_sequencer_da.as_ref().to_vec(), + coins_to_lock: sov_bank::Coins { + amount: LOCKED_AMOUNT, + token_address, + }, + is_preferred_sequencer: true, + }; + + let bank = sov_bank::Bank::::default(); + let sequencer_registry = SequencerRegistry::::default(); + let blob_storage = BlobStorage::::default(); + + bank.genesis(&bank_config, &mut working_set).unwrap(); + sequencer_registry + .genesis(&sequencer_registry_config, &mut working_set) + .unwrap(); + blob_storage.genesis(&(), &mut working_set).unwrap(); + + let register_message = sov_sequencer_registry::CallMessage::Register { + da_address: regular_sequencer_da.as_ref().to_vec(), + }; + sequencer_registry + .call( + register_message, + &C::new(regular_sequencer_rollup), + &mut working_set, + ) + .unwrap(); + + let blob_1 = B::new(vec![1], regular_sequencer_da, [1u8; 32]); + let blob_2 = B::new(vec![2, 2], regular_sequencer_da, [2u8; 32]); + let blob_3 = B::new(vec![3, 3, 3], preferred_sequencer_da, [3u8; 32]); + let blob_4 = B::new(vec![4, 4, 4, 4], regular_sequencer_da, [4u8; 32]); + let blob_5 = B::new(vec![5, 5, 5, 5, 5], preferred_sequencer_da, [5u8; 32]); + let blob_6 = B::new(vec![6, 6, 6, 6, 6, 6], regular_sequencer_da, [6u8; 32]); + let blob_7 = B::new(vec![7, 7, 7, 7, 7, 7, 7], regular_sequencer_da, [7u8; 32]); + let blob_8 = B::new( + vec![8, 8, 8, 8, 8, 8, 8, 8], + regular_sequencer_da, + [8u8; 32], + ); + + let mut slot_1 = vec![blob_1.clone(), blob_2.clone(), blob_3.clone()]; + let mut slot_2 = vec![blob_4.clone(), blob_5.clone(), blob_6.clone()]; + let mut slot_3 = vec![blob_7.clone(), blob_8.clone()]; + let mut slot_4 = vec![]; + + // Slot 1: 3rd blob is from preferred sequencer, only it should be executed + let mut execute_in_slot_1 = as BlobSelector>::get_blobs_for_this_slot( + &blob_storage, + &mut slot_1, + &mut working_set, + ) + .unwrap(); + assert_eq!(1, execute_in_slot_1.len()); + blobs_are_equal(blob_3, execute_in_slot_1.remove(0), "slot 1"); + + // Slot 2: 5th blob is from preferred sequencer + 2nd and 3rd that were deferred previously + let mut execute_in_slot_2 = as BlobSelector>::get_blobs_for_this_slot( + 
&blob_storage, + &mut slot_2, + &mut working_set, + ) + .unwrap(); + assert_eq!(3, execute_in_slot_2.len()); + blobs_are_equal(blob_5, execute_in_slot_2.remove(0), "slot 2"); + blobs_are_equal(blob_1, execute_in_slot_2.remove(0), "slot 2"); + blobs_are_equal(blob_2, execute_in_slot_2.remove(0), "slot 2"); + + // Slot 3: no blobs from preferred sequencer, so deferred executed first and then current + let mut execute_in_slot_3 = as BlobSelector>::get_blobs_for_this_slot( + &blob_storage, + &mut slot_3, + &mut working_set, + ) + .unwrap(); + assert_eq!(2, execute_in_slot_3.len()); + blobs_are_equal(blob_4, execute_in_slot_3.remove(0), "slot 3"); + blobs_are_equal(blob_6, execute_in_slot_3.remove(0), "slot 3"); + + let mut execute_in_slot_4 = as BlobSelector>::get_blobs_for_this_slot( + &blob_storage, + &mut slot_4, + &mut working_set, + ) + .unwrap(); + + assert_eq!(2, execute_in_slot_4.len()); + blobs_are_equal(blob_7, execute_in_slot_4.remove(0), "slot 4"); + blobs_are_equal(blob_8, execute_in_slot_4.remove(0), "slot 4"); +} + +#[test] +fn test_blobs_from_non_registered_sequencers_are_not_saved() { + let tmpdir = tempfile::tempdir().unwrap(); + let mut working_set = WorkingSet::new(ProverStorage::with_path(tmpdir.path()).unwrap()); + + let preferred_sequencer_da = MockAddress::from([10u8; 32]); + let preferred_sequencer_rollup = generate_address(PREFERRED_SEQUENCER_KEY); + let regular_sequencer_da = MockAddress::from([30u8; 32]); + let regular_sequencer_rollup = generate_address(REGULAR_SEQUENCER_KEY); + let some_sequencer = MockAddress::from([40u8; 32]); + + let bank_config = get_bank_config(preferred_sequencer_rollup, regular_sequencer_rollup); + + let token_address = sov_bank::get_genesis_token_address::( + &bank_config.tokens[0].token_name, + bank_config.tokens[0].salt, + ); + + let sequencer_registry_config = SequencerConfig { + seq_rollup_address: preferred_sequencer_rollup, + seq_da_address: preferred_sequencer_da.as_ref().to_vec(), + coins_to_lock: sov_bank::Coins { + amount: LOCKED_AMOUNT, + token_address, + }, + is_preferred_sequencer: true, + }; + + let bank = sov_bank::Bank::::default(); + let sequencer_registry = SequencerRegistry::::default(); + let blob_storage = BlobStorage::::default(); + + bank.genesis(&bank_config, &mut working_set).unwrap(); + sequencer_registry + .genesis(&sequencer_registry_config, &mut working_set) + .unwrap(); + blob_storage.genesis(&(), &mut working_set).unwrap(); + + let register_message = sov_sequencer_registry::CallMessage::Register { + da_address: regular_sequencer_da.as_ref().to_vec(), + }; + sequencer_registry + .call( + register_message, + &C::new(regular_sequencer_rollup), + &mut working_set, + ) + .unwrap(); + + let blob_1 = B::new(vec![1], regular_sequencer_da, [1u8; 32]); + let blob_2 = B::new(vec![2, 2], some_sequencer, [2u8; 32]); + let blob_3 = B::new(vec![3, 3, 3], preferred_sequencer_da, [3u8; 32]); + + let mut slot_1 = vec![blob_1.clone(), blob_2, blob_3.clone()]; + let mut slot_2 = vec![]; + + let mut execute_in_slot_1 = as BlobSelector>::get_blobs_for_this_slot( + &blob_storage, + &mut slot_1, + &mut working_set, + ) + .unwrap(); + assert_eq!(1, execute_in_slot_1.len()); + blobs_are_equal(blob_3, execute_in_slot_1.remove(0), "slot 1"); + + let mut execute_in_slot_2 = as BlobSelector>::get_blobs_for_this_slot( + &blob_storage, + &mut slot_2, + &mut working_set, + ) + .unwrap(); + assert_eq!(1, execute_in_slot_2.len()); + blobs_are_equal(blob_1, execute_in_slot_2.remove(0), "slot 2"); +} + +#[test] +fn 
test_blobs_no_deferred_without_preferred_sequencer() { + let tmpdir = tempfile::tempdir().unwrap(); + let mut working_set = WorkingSet::new(ProverStorage::with_path(tmpdir.path()).unwrap()); + + let preferred_sequencer_da = MockAddress::from([10u8; 32]); + let preferred_sequencer_rollup = generate_address(PREFERRED_SEQUENCER_KEY); + let regular_sequencer_da = MockAddress::from([30u8; 32]); + let regular_sequencer_rollup = generate_address(REGULAR_SEQUENCER_KEY); + + let bank_config = get_bank_config(preferred_sequencer_rollup, regular_sequencer_rollup); + + let token_address = sov_bank::get_genesis_token_address::( + &bank_config.tokens[0].token_name, + bank_config.tokens[0].salt, + ); + + let sequencer_registry_config = SequencerConfig { + seq_rollup_address: preferred_sequencer_rollup, + seq_da_address: preferred_sequencer_da.as_ref().to_vec(), + coins_to_lock: sov_bank::Coins { + amount: LOCKED_AMOUNT, + token_address, + }, + is_preferred_sequencer: false, + }; + + let bank = sov_bank::Bank::::default(); + let sequencer_registry = SequencerRegistry::::default(); + let blob_storage = BlobStorage::::default(); + + bank.genesis(&bank_config, &mut working_set).unwrap(); + sequencer_registry + .genesis(&sequencer_registry_config, &mut working_set) + .unwrap(); + blob_storage.genesis(&(), &mut working_set).unwrap(); + + let register_message = sov_sequencer_registry::CallMessage::Register { + da_address: regular_sequencer_da.as_ref().to_vec(), + }; + sequencer_registry + .call( + register_message, + &C::new(regular_sequencer_rollup), + &mut working_set, + ) + .unwrap(); + + let blob_1 = B::new(vec![1], regular_sequencer_da, [1u8; 32]); + let blob_2 = B::new(vec![2, 2], regular_sequencer_da, [2u8; 32]); + let blob_3 = B::new(vec![3, 3, 3], preferred_sequencer_da, [3u8; 32]); + + let mut slot_1 = vec![blob_1.clone(), blob_2.clone(), blob_3.clone()]; + let mut slot_2: Vec<_> = vec![]; + + let mut execute_in_slot_1 = as BlobSelector>::get_blobs_for_this_slot( + &blob_storage, + &mut slot_1, + &mut working_set, + ) + .unwrap(); + assert_eq!(3, execute_in_slot_1.len()); + blobs_are_equal(blob_1, execute_in_slot_1.remove(0), "slot 1"); + blobs_are_equal(blob_2, execute_in_slot_1.remove(0), "slot 1"); + blobs_are_equal(blob_3, execute_in_slot_1.remove(0), "slot 1"); + + let execute_in_slot_2: Vec> = + as BlobSelector>::get_blobs_for_this_slot( + &blob_storage, + &mut slot_2, + &mut working_set, + ) + .unwrap(); + assert!(execute_in_slot_2.is_empty()); +} + +#[test] +fn deferred_blobs_are_first_after_preferred_sequencer_exit() { + let tmpdir = tempfile::tempdir().unwrap(); + let mut working_set = WorkingSet::new(ProverStorage::with_path(tmpdir.path()).unwrap()); + + let preferred_sequencer_da = MockAddress::from([10u8; 32]); + let preferred_sequencer_rollup = generate_address(PREFERRED_SEQUENCER_KEY); + let regular_sequencer_da = MockAddress::from([30u8; 32]); + let regular_sequencer_rollup = generate_address(REGULAR_SEQUENCER_KEY); + + let bank_config = get_bank_config(preferred_sequencer_rollup, regular_sequencer_rollup); + + let token_address = sov_bank::get_genesis_token_address::( + &bank_config.tokens[0].token_name, + bank_config.tokens[0].salt, + ); + + let sequencer_registry_config = SequencerConfig { + seq_rollup_address: preferred_sequencer_rollup, + seq_da_address: preferred_sequencer_da.as_ref().to_vec(), + coins_to_lock: sov_bank::Coins { + amount: LOCKED_AMOUNT, + token_address, + }, + is_preferred_sequencer: true, + }; + + let bank = sov_bank::Bank::::default(); + let sequencer_registry 
= SequencerRegistry::::default(); + let blob_storage = BlobStorage::::default(); + + bank.genesis(&bank_config, &mut working_set).unwrap(); + sequencer_registry + .genesis(&sequencer_registry_config, &mut working_set) + .unwrap(); + blob_storage.genesis(&(), &mut working_set).unwrap(); + + let register_message = sov_sequencer_registry::CallMessage::Register { + da_address: regular_sequencer_da.as_ref().to_vec(), + }; + sequencer_registry + .call( + register_message, + &C::new(regular_sequencer_rollup), + &mut working_set, + ) + .unwrap(); + + let blob_1 = B::new(vec![1], regular_sequencer_da, [1u8; 32]); + let blob_2 = B::new(vec![2, 2], regular_sequencer_da, [2u8; 32]); + let blob_3 = B::new(vec![3, 3, 3], preferred_sequencer_da, [3u8; 32]); + let blob_4 = B::new(vec![4, 4, 4, 4], regular_sequencer_da, [4u8; 32]); + let blob_5 = B::new(vec![5, 5, 5, 5, 5], regular_sequencer_da, [5u8; 32]); + + let mut slot_1 = vec![blob_1.clone(), blob_2.clone(), blob_3.clone()]; + let mut slot_2 = vec![blob_4.clone(), blob_5.clone()]; + let mut slot_3: Vec<_> = vec![]; + + let mut execute_in_slot_1 = as BlobSelector>::get_blobs_for_this_slot( + &blob_storage, + &mut slot_1, + &mut working_set, + ) + .unwrap(); + + assert_eq!(1, execute_in_slot_1.len()); + blobs_are_equal(blob_3, execute_in_slot_1.remove(0), "slot 1"); + + let exit_message = sov_sequencer_registry::CallMessage::Exit { + da_address: preferred_sequencer_da.as_ref().to_vec(), + }; + + sequencer_registry + .call( + exit_message, + &C::new(preferred_sequencer_rollup), + &mut working_set, + ) + .unwrap(); + + assert!(sequencer_registry + .get_preferred_sequencer(&mut working_set) + .is_none()); + + let mut execute_in_slot_2 = as BlobSelector>::get_blobs_for_this_slot( + &blob_storage, + &mut slot_2, + &mut working_set, + ) + .unwrap(); + assert_eq!(4, execute_in_slot_2.len()); + blobs_are_equal(blob_1, execute_in_slot_2.remove(0), "slot 2"); + blobs_are_equal(blob_2, execute_in_slot_2.remove(0), "slot 2"); + blobs_are_equal(blob_4, execute_in_slot_2.remove(0), "slot 2"); + blobs_are_equal(blob_5, execute_in_slot_2.remove(0), "slot 2"); + + let execute_in_slot_3: Vec> = + as BlobSelector>::get_blobs_for_this_slot( + &blob_storage, + &mut slot_3, + &mut working_set, + ) + .unwrap(); + assert!(execute_in_slot_3.is_empty()); +} + +/// Check hashes and data of two blobs. 
+fn blobs_are_equal( + mut expected: B, + mut actual: BlobRefOrOwned, + slot_hint: &str, +) { + let actual_inner = actual.as_mut_ref(); + assert_eq!( + expected.hash(), + actual_inner.hash(), + "incorrect hashes in {}", + slot_hint + ); + + let mut read_actual = vec![]; + actual_inner + .data_mut() + .read_to_end(&mut read_actual) + .unwrap(); + + let mut read_expected = vec![]; + expected.data_mut().read_to_end(&mut read_expected).unwrap(); + assert_eq!( + read_expected, read_actual, + "incorrect data read in {}", + slot_hint + ); +} diff --git a/module-system/module-implementations/sov-chain-state/Cargo.toml b/module-system/module-implementations/sov-chain-state/Cargo.toml new file mode 100644 index 000000000..e7e6617b4 --- /dev/null +++ b/module-system/module-implementations/sov-chain-state/Cargo.toml @@ -0,0 +1,37 @@ +[package] +name = "sov-chain-state" +authors = { workspace = true } +edition = { workspace = true } +homepage = { workspace = true } +license = { workspace = true } +repository = { workspace = true } +rust-version = { workspace = true } +version = { workspace = true } +readme = "README.md" +publish = false +resolver = "2" + +[dependencies] +anyhow = { workspace = true } +borsh = { workspace = true, features = ["rc"] } +serde = { workspace = true, optional = true } +serde_json = { workspace = true, optional = true } +jsonrpsee = { workspace = true, features = ["macros", "client-core", "server"], optional = true } + +sov-modules-api = { path = "../../sov-modules-api", version = "0.1" } +sov-modules-macros = { path = "../../sov-modules-macros", version = "0.1" } +sov-state = { path = "../../sov-state", version = "0.1" } +sov-rollup-interface = { path = "../../../rollup-interface", version = "0.1" } + +[dev-dependencies] +tempfile = { workspace = true } +sov-bank = { path = "../sov-bank" } +sov-value-setter = { path = "../examples/sov-value-setter" } +sov-modules-stf-template = { path = "../../sov-modules-stf-template" } +sov-data-generators = { path = "../../utils/sov-data-generators" } +sov-chain-state = { path = ".", features = ["native"] } + + +[features] +default = [] +native = ["serde", "serde_json", "jsonrpsee", "sov-state/native", "sov-modules-api/native", ] diff --git a/module-system/module-implementations/sov-chain-state/README.md b/module-system/module-implementations/sov-chain-state/README.md new file mode 100644 index 000000000..a4cbbb2fb --- /dev/null +++ b/module-system/module-implementations/sov-chain-state/README.md @@ -0,0 +1,3 @@ +# Sov Chain State + +This module provides access to the current chain state (block height, block hash, etc.) 
diff --git a/module-system/module-implementations/sov-chain-state/src/call.rs b/module-system/module-implementations/sov-chain-state/src/call.rs new file mode 100644 index 000000000..5aea96899 --- /dev/null +++ b/module-system/module-implementations/sov-chain-state/src/call.rs @@ -0,0 +1,31 @@ +use sov_rollup_interface::da::DaSpec; +use sov_state::WorkingSet; + +use crate::{ChainState, StateTransitionId, TransitionHeight}; + +impl ChainState +where + C: sov_modules_api::Context, + Da: DaSpec, +{ + /// Increment the current slot height + pub(crate) fn increment_slot_height(&self, working_set: &mut WorkingSet) { + let current_height = self + .slot_height + .get(working_set) + .expect("Block height must be initialized"); + self.slot_height + .set(&(current_height.saturating_add(1)), working_set); + } + + /// Store the previous state transition + pub(crate) fn store_state_transition( + &self, + height: TransitionHeight, + transition: StateTransitionId, + working_set: &mut WorkingSet, + ) { + self.historical_transitions + .set(&height, &transition, working_set); + } +} diff --git a/module-system/module-implementations/sov-chain-state/src/genesis.rs b/module-system/module-implementations/sov-chain-state/src/genesis.rs new file mode 100644 index 000000000..54a5ee02a --- /dev/null +++ b/module-system/module-implementations/sov-chain-state/src/genesis.rs @@ -0,0 +1,20 @@ +use anyhow::Result; +use sov_rollup_interface::da::DaSpec; +use sov_state::WorkingSet; + +use crate::ChainState; + +impl ChainState { + pub(crate) fn init_module( + &self, + config: &::Config, + working_set: &mut WorkingSet, + ) -> Result<()> { + self.genesis_height + .set(&config.initial_slot_height, working_set); + + self.slot_height + .set(&config.initial_slot_height, working_set); + Ok(()) + } +} diff --git a/module-system/module-implementations/sov-chain-state/src/hooks.rs b/module-system/module-implementations/sov-chain-state/src/hooks.rs new file mode 100644 index 000000000..62ad9b935 --- /dev/null +++ b/module-system/module-implementations/sov-chain-state/src/hooks.rs @@ -0,0 +1,72 @@ +use sov_modules_api::hooks::SlotHooks; +use sov_modules_api::{Context, Spec}; +use sov_rollup_interface::da::DaSpec; +use sov_rollup_interface::services::da::SlotData; +use sov_state::{Storage, WorkingSet}; + +use super::ChainState; +use crate::{StateTransitionId, TransitionInProgress}; + +impl SlotHooks for ChainState { + type Context = C; + + fn begin_slot_hook( + &self, + slot: &impl SlotData, + working_set: &mut WorkingSet<::Storage>, + ) { + if self.genesis_hash.get(working_set).is_none() { + // The genesis hash is not set, hence this is the + // first transition right after the genesis block + self.genesis_hash.set( + &working_set + .backing() + .get_state_root(&Default::default()) + .expect("Should have a state root"), + working_set, + ) + } else { + let transition: StateTransitionId = { + let last_transition_in_progress = self + .in_progress_transition + .get(working_set) + .expect("There should always be a transition in progress"); + + StateTransitionId { + da_block_hash: last_transition_in_progress.da_block_hash, + post_state_root: working_set + .backing() + .get_state_root(&Default::default()) + .expect("Should have a state root"), + validity_condition: last_transition_in_progress.validity_condition, + } + }; + + self.store_state_transition( + self.slot_height + .get(working_set) + .expect("Block height must be set"), + transition, + working_set, + ); + } + + self.increment_slot_height(working_set); + let validity_condition = 
slot.validity_condition(); + + self.in_progress_transition.set( + &TransitionInProgress { + da_block_hash: slot.hash(), + validity_condition, + }, + working_set, + ); + } + + fn end_slot_hook( + &self, + _root_hash: [u8; 32], + _working_set: &mut WorkingSet<::Storage>, + ) { + } +} diff --git a/module-system/module-implementations/sov-chain-state/src/lib.rs b/module-system/module-implementations/sov-chain-state/src/lib.rs new file mode 100644 index 000000000..2531fb72d --- /dev/null +++ b/module-system/module-implementations/sov-chain-state/src/lib.rs @@ -0,0 +1,190 @@ +#![deny(missing_docs)] +#![doc = include_str!("../README.md")] + +/// Contains the call methods used by the module +pub mod call; + +/// Genesis state configuration +pub mod genesis; + +/// Hook implementation for the module +pub mod hooks; + +/// The query interface with the module +#[cfg(feature = "native")] +pub mod query; +use borsh::{BorshDeserialize, BorshSerialize}; +#[cfg(feature = "native")] +pub use query::{ChainStateRpcImpl, ChainStateRpcServer}; +use sov_modules_api::Error; +use sov_modules_macros::ModuleInfo; +use sov_rollup_interface::da::DaSpec; +use sov_rollup_interface::zk::{ValidityCondition, ValidityConditionChecker}; +use sov_state::WorkingSet; + +/// Type alias that contains the height of a given transition +pub type TransitionHeight = u64; + +#[derive(BorshDeserialize, BorshSerialize, Clone, Debug, PartialEq, Eq)] +/// Structure that contains the information needed to represent a single state transition. +pub struct StateTransitionId { + da_block_hash: [u8; 32], + post_state_root: [u8; 32], + validity_condition: Cond, +} + +impl StateTransitionId { + /// Creates a new state transition. Only available for testing as we only want to create + /// new state transitions from existing [`TransitionInProgress`]. + pub fn new( + da_block_hash: [u8; 32], + post_state_root: [u8; 32], + validity_condition: Cond, + ) -> Self { + Self { + da_block_hash, + post_state_root, + validity_condition, + } + } +} + +impl StateTransitionId { + /// Compare the transition block hash and state root with the provided input couple. If + /// the pairs are equal, return [`true`]. + pub fn compare_hashes(&self, da_block_hash: &[u8; 32], post_state_root: &[u8; 32]) -> bool { + self.da_block_hash == *da_block_hash && self.post_state_root == *post_state_root + } + + /// Returns the post state root of a state transition + pub fn post_state_root(&self) -> [u8; 32] { + self.post_state_root + } + + /// Returns the da block hash of a state transition + pub fn da_block_hash(&self) -> [u8; 32] { + self.da_block_hash + } + + /// Returns the validity condition associated with the transition + pub fn validity_condition(&self) -> &Cond { + &self.validity_condition + } + + /// Checks the validity condition of a state transition + pub fn validity_condition_check>( + &self, + checker: &mut Checker, + ) -> Result<(), >::Error> { + checker.check(&self.validity_condition) + } +} + +#[derive(BorshDeserialize, BorshSerialize, Clone, Debug, PartialEq, Eq)] +/// Represents a transition in progress for the rollup. 
+pub struct TransitionInProgress { + da_block_hash: [u8; 32], + validity_condition: Cond, +} + +impl TransitionInProgress { + /// Creates a new transition in progress + pub fn new(da_block_hash: [u8; 32], validity_condition: Cond) -> Self { + Self { + da_block_hash, + validity_condition, + } + } +} + +/// A new module: +/// - Must derive `ModuleInfo` +/// - Must contain `[address]` field +/// - Can contain any number of ` #[state]` or `[module]` fields +#[derive(ModuleInfo)] +pub struct ChainState { + /// Address of the module. + #[address] + pub address: C::Address, + + /// The current block height + #[state] + pub slot_height: sov_state::StateValue, + + /// A record of all previous state transitions which are available to the VM. + /// Currently, this includes *all* historical state transitions, but that may change in the future. + /// This state map is delayed by one transition. In other words - the transition that happens in time i + /// is stored during transition i+1. This is mainly due to the fact that this structure depends on the + /// rollup's root hash which is only stored once the transition has completed. + #[state] + pub historical_transitions: + sov_state::StateMap>, + + /// The transition that is currently processed + #[state] + pub in_progress_transition: sov_state::StateValue>, + + /// The genesis root hash. + /// Set after the first transaction of the rollup is executed, using the `begin_slot` hook. + #[state] + pub genesis_hash: sov_state::StateValue<[u8; 32]>, + + /// The height of genesis + #[state] + pub genesis_height: sov_state::StateValue, +} + +/// Initial configuration of the chain state +pub struct ChainStateConfig { + /// Initial slot height + pub initial_slot_height: TransitionHeight, +} + +impl ChainState { + /// Returns transition height in the current slot + pub fn get_slot_height(&self, working_set: &mut WorkingSet) -> TransitionHeight { + self.slot_height + .get(working_set) + .expect("Slot height should be set at initialization") + } + + /// Return the genesis hash of the module. + pub fn get_genesis_hash(&self, working_set: &mut WorkingSet) -> Option<[u8; 32]> { + self.genesis_hash.get(working_set) + } + + /// Returns the transition in progress of the module. + pub fn get_in_progress_transition( + &self, + working_set: &mut WorkingSet, + ) -> Option> { + self.in_progress_transition.get(working_set) + } + + /// Returns the completed transition associated with the provided `transition_num`. + pub fn get_historical_transitions( + &self, + transition_num: TransitionHeight, + working_set: &mut WorkingSet, + ) -> Option> { + self.historical_transitions + .get(&transition_num, working_set) + } +} + +impl sov_modules_api::Module for ChainState { + type Context = C; + + type Config = ChainStateConfig; + + type CallMessage = sov_modules_api::NonInstantiable; + + fn genesis( + &self, + config: &Self::Config, + working_set: &mut WorkingSet, + ) -> Result<(), Error> { + // The initialization logic + Ok(self.init_module(config, working_set)?) 
+ } +} diff --git a/module-system/module-implementations/sov-chain-state/src/query.rs b/module-system/module-implementations/sov-chain-state/src/query.rs new file mode 100644 index 000000000..ccbea2b28 --- /dev/null +++ b/module-system/module-implementations/sov-chain-state/src/query.rs @@ -0,0 +1,19 @@ +use jsonrpsee::core::RpcResult; +use sov_modules_api::macros::rpc_gen; +use sov_rollup_interface::da::DaSpec; +use sov_state::WorkingSet; + +use crate::{ChainState, TransitionHeight}; + +#[rpc_gen(client, server, namespace = "chainState")] +impl ChainState { + /// Get the height of the current slot. + /// Panics if the slot height is not set + #[rpc_method(name = "getSlotHeight")] + pub fn get_slot_height_rpc( + &self, + working_set: &mut WorkingSet, + ) -> RpcResult { + Ok(self.get_slot_height(working_set)) + } +} diff --git a/module-system/module-implementations/sov-chain-state/tests/all_tests.rs b/module-system/module-implementations/sov-chain-state/tests/all_tests.rs new file mode 100644 index 000000000..b8755d0bf --- /dev/null +++ b/module-system/module-implementations/sov-chain-state/tests/all_tests.rs @@ -0,0 +1,140 @@ +use sov_chain_state::{ChainState, ChainStateConfig, StateTransitionId, TransitionInProgress}; +use sov_modules_api::default_context::DefaultContext; +use sov_modules_api::hooks::SlotHooks; +use sov_modules_api::Genesis; +use sov_rollup_interface::mocks::{ + MockBlock, MockBlockHeader, MockDaSpec, MockHash, MockValidityCond, +}; +use sov_state::{ProverStorage, Storage, WorkingSet}; + +/// This simply tests that the chain_state reacts properly with the invocation of the `begin_slot` +/// hook. For more complete integration tests, feel free to have a look at the integration tests folder. +#[test] +fn test_simple_chain_state() { + // The initial height can be any value. + const INIT_HEIGHT: u64 = 10; + // Initialize the module. + let tmpdir = tempfile::tempdir().unwrap(); + + let storage: ProverStorage = + ProverStorage::with_path(tmpdir.path()).unwrap(); + + let mut working_set = WorkingSet::new(storage.clone()); + + let chain_state = ChainState::::default(); + let config = ChainStateConfig { + initial_slot_height: INIT_HEIGHT, + }; + + // Genesis, initialize and then commit the state + chain_state.genesis(&config, &mut working_set).unwrap(); + let (reads_writes, witness) = working_set.checkpoint().freeze(); + storage.validate_and_commit(reads_writes, &witness).unwrap(); + + // Computes the initial, post genesis, working set + let mut working_set = WorkingSet::new(storage.clone()); + + // Check the slot height before any changes to the state. + let initial_height = chain_state.get_slot_height(&mut working_set); + + assert_eq!( + initial_height, INIT_HEIGHT, + "The initial height was not computed" + ); + + // Then simulate a transaction execution: call the begin_slot hook on a mock slot_data. 
+ let slot_data = MockBlock { + curr_hash: [1; 32], + header: MockBlockHeader { + prev_hash: MockHash([0; 32]), + }, + height: INIT_HEIGHT, + validity_cond: MockValidityCond { is_valid: true }, + blobs: Default::default(), + }; + + chain_state.begin_slot_hook(&slot_data, &mut working_set); + + // Check that the root hash has been stored correctly + let stored_root: [u8; 32] = chain_state.get_genesis_hash(&mut working_set).unwrap(); + let init_root_hash = storage.get_state_root(&Default::default()).unwrap(); + + assert_eq!(stored_root, init_root_hash, "Genesis hashes don't match"); + + // Check that the slot height has been updated + let new_height_storage = chain_state.get_slot_height(&mut working_set); + + assert_eq!( + new_height_storage, + INIT_HEIGHT + 1, + "The new height did not update" + ); + + // Check that the new state transition is being stored + let new_tx_in_progress: TransitionInProgress = chain_state + .get_in_progress_transition(&mut working_set) + .unwrap(); + + assert_eq!( + new_tx_in_progress, + TransitionInProgress::::new([1; 32], MockValidityCond { is_valid: true }), + "The new transition has not been correctly stored" + ); + + // We now commit the new state (which updates the root hash) + let (reads_writes, witness) = working_set.checkpoint().freeze(); + storage.validate_and_commit(reads_writes, &witness).unwrap(); + let new_root_hash = storage.get_state_root(&Default::default()); + + // Computes the new working set + let mut working_set = WorkingSet::new(storage); + + // And we simulate a new slot application by calling the `begin_slot` hook. + let new_slot_data = MockBlock { + curr_hash: [2; 32], + header: MockBlockHeader { + prev_hash: MockHash([1; 32]), + }, + height: INIT_HEIGHT, + validity_cond: MockValidityCond { is_valid: false }, + blobs: Default::default(), + }; + + chain_state.begin_slot_hook(&new_slot_data, &mut working_set); + + // Check that the slot height has been updated correctly + let new_height_storage = chain_state.get_slot_height(&mut working_set); + assert_eq!( + new_height_storage, + INIT_HEIGHT + 2, + "The new height did not update" + ); + + // Check the transition in progress + let new_tx_in_progress: TransitionInProgress = chain_state + .get_in_progress_transition(&mut working_set) + .unwrap(); + + assert_eq!( + new_tx_in_progress, + TransitionInProgress::::new( + [2; 32], + MockValidityCond { is_valid: false } + ), + "The new transition has not been correctly stored" + ); + + // Check the transition stored + let last_tx_stored: StateTransitionId = chain_state + .get_historical_transitions(INIT_HEIGHT + 1, &mut working_set) + .unwrap(); + + assert_eq!( + last_tx_stored, + StateTransitionId::new( + [1; 32], + new_root_hash.unwrap(), + MockValidityCond { is_valid: true } + ) + ); +} diff --git a/module-system/module-implementations/sov-evm/Cargo.toml b/module-system/module-implementations/sov-evm/Cargo.toml index 37dab7f5a..1e1ab703f 100644 --- a/module-system/module-implementations/sov-evm/Cargo.toml +++ b/module-system/module-implementations/sov-evm/Cargo.toml @@ -14,8 +14,8 @@ resolver = "2" [dependencies] -sov-modules-api = { path = "../../sov-modules-api", version = "0.1", default-features = false, features = ["macros"] } -sov-state = { path = "../../sov-state", version = "0.1", default-features = false } +sov-modules-api = { path = "../../sov-modules-api", version = "0.1" } +sov-state = { path = "../../sov-state", version = "0.1" } anyhow = { workspace = true } thiserror = { workspace = true } @@ -30,16 +30,12 @@ jsonrpsee = { workspace 
= true, features = ["macros", "client-core", "server"], tracing = { workspace = true } derive_more = { workspace = true } -# TODO: move these dependencies to the workspace when the EVM module is no longer in the experimental stage + ethereum-types = "0.14.1" ethers-core = { workspace = true } ethers-contract = { workspace = true } ethers-middleware = { workspace = true } -ethers-providers = { workspace = true } ethers-signers = { workspace = true } - - - ethers = { workspace = true } revm = { workspace = true } @@ -48,18 +44,19 @@ reth-rpc-types = { workspace = true } reth-rpc = { workspace = true } reth-revm = { workspace = true } -[dev-dependencies] +[dev-dependencies] +sov-evm = { path = ".", features = ["smart_contracts"] } primitive-types = "0.12.1" tokio = { workspace = true } tempfile = { workspace = true } bytes = { workspace = true } +secp256k1 = { workspace = true } -sov-modules-api = { path = "../../sov-modules-api", version = "0.1", features = ["macros"] } [features] -default = ["native"] -serde = ["dep:serde", "dep:serde_json"] -native = ["serde", "sov-state/native", "dep:jsonrpsee", "dep:schemars", "sov-modules-api/native", "dep:clap"] +default = [] +native = ["serde", "serde_json", "jsonrpsee", "schemars", "clap", "sov-state/native", "sov-modules-api/native"] experimental = ["native"] +smart_contracts = ["experimental"] diff --git a/module-system/module-implementations/sov-evm/src/call.rs b/module-system/module-implementations/sov-evm/src/call.rs index 27f27992d..06729fab8 100644 --- a/module-system/module-implementations/sov-evm/src/call.rs +++ b/module-system/module-implementations/sov-evm/src/call.rs @@ -1,14 +1,15 @@ use anyhow::Result; +use ethers_core::types::{OtherFields, TransactionReceipt}; use revm::primitives::{CfgEnv, U256}; use sov_modules_api::CallResponse; use sov_state::WorkingSet; use crate::evm::db::EvmDb; use crate::evm::executor::{self}; -use crate::evm::transaction::{BlockEnv, EvmTransaction}; -use crate::evm::{contract_address, EvmChainCfg}; +use crate::evm::transaction::{BlockEnv, EvmTransactionSignedEcRecovered}; +use crate::evm::{contract_address, EvmChainCfg, RawEvmTransaction}; use crate::experimental::SpecIdWrapper; -use crate::{Evm, TransactionReceipt}; +use crate::Evm; #[cfg_attr( feature = "native", @@ -18,49 +19,58 @@ use crate::{Evm, TransactionReceipt}; )] #[derive(borsh::BorshDeserialize, borsh::BorshSerialize, Debug, PartialEq, Clone)] pub struct CallMessage { - pub tx: EvmTransaction, + pub tx: RawEvmTransaction, } impl Evm { pub(crate) fn execute_call( &self, - tx: EvmTransaction, + tx: RawEvmTransaction, _context: &C, working_set: &mut WorkingSet, ) -> Result { - // TODO https://github.com/Sovereign-Labs/sovereign-sdk/issues/515 + let evm_tx_recovered: EvmTransactionSignedEcRecovered = tx.clone().try_into()?; let block_env = self.block_env.get(working_set).unwrap_or_default(); let cfg = self.cfg.get(working_set).unwrap_or_default(); let cfg_env = get_cfg_env(&block_env, cfg, None); - self.transactions.set(&tx.hash, &tx, working_set); + + let hash = evm_tx_recovered.hash(); + self.transactions + .set(hash.as_fixed_bytes(), &tx, working_set); let evm_db: EvmDb<'_, C> = self.get_db(working_set); // TODO https://github.com/Sovereign-Labs/sovereign-sdk/issues/505 - let result = executor::execute_tx(evm_db, block_env, tx.clone(), cfg_env).unwrap(); + let result = executor::execute_tx(evm_db, block_env, &evm_tx_recovered, cfg_env).unwrap(); let receipt = TransactionReceipt { - transaction_hash: tx.hash, + transaction_hash: hash.into(), // TODO 
https://github.com/Sovereign-Labs/sovereign-sdk/issues/504 - transaction_index: 0, + transaction_index: 0u64.into(), // TODO https://github.com/Sovereign-Labs/sovereign-sdk/issues/504 block_hash: Default::default(), // TODO https://github.com/Sovereign-Labs/sovereign-sdk/issues/504 - block_number: Some(0), - from: tx.sender, - to: tx.to, + block_number: Some(0u64.into()), + from: evm_tx_recovered.signer().into(), + to: evm_tx_recovered.to().map(|to| to.into()), // TODO https://github.com/Sovereign-Labs/sovereign-sdk/issues/504 cumulative_gas_used: Default::default(), // TODO https://github.com/Sovereign-Labs/sovereign-sdk/issues/504 gas_used: Default::default(), contract_address: contract_address(result).map(|addr| addr.into()), // TODO https://github.com/Sovereign-Labs/sovereign-sdk/issues/504 - status: Some(1), + status: Some(1u64.into()), root: Default::default(), // TODO https://github.com/Sovereign-Labs/sovereign-sdk/issues/504 - transaction_type: Some(1), + transaction_type: Some(1u64.into()), effective_gas_price: Default::default(), + // TODO https://github.com/Sovereign-Labs/sovereign-sdk/issues/504 + logs_bloom: Default::default(), + // TODO https://github.com/Sovereign-Labs/sovereign-sdk/issues/504 + other: OtherFields::default(), + // TODO https://github.com/Sovereign-Labs/sovereign-sdk/issues/504 + logs: Default::default(), }; self.receipts diff --git a/module-system/module-implementations/sov-evm/src/evm/conversions.rs b/module-system/module-implementations/sov-evm/src/evm/conversions.rs index 5277565ed..acfce1988 100644 --- a/module-system/module-implementations/sov-evm/src/evm/conversions.rs +++ b/module-system/module-implementations/sov-evm/src/evm/conversions.rs @@ -1,13 +1,20 @@ use bytes::Bytes; -use ethers_core::types::{OtherFields, Transaction}; -use reth_rpc::eth::error::{EthApiError, RpcInvalidTransactionError}; +use ethereum_types::U64; +use ethers_core::types::{Bytes as EthBytes, OtherFields, Transaction}; +use reth_primitives::{ + Bytes as RethBytes, TransactionSigned as RethTransactionSigned, + TransactionSignedEcRecovered as RethTransactionSignedEcRecovered, + TransactionSignedNoHash as RethTransactionSignedNoHash, +}; +use reth_rpc::eth::error::EthApiError; use reth_rpc_types::CallRequest; use revm::primitives::{ AccountInfo as ReVmAccountInfo, BlockEnv as ReVmBlockEnv, Bytecode, CreateScheme, TransactTo, TxEnv, B160, B256, U256, }; +use thiserror::Error; -use super::transaction::{AccessListItem, BlockEnv, EvmTransaction}; +use super::transaction::{BlockEnv, EvmTransactionSignedEcRecovered, RawEvmTransaction}; use super::AccountInfo; impl From for ReVmAccountInfo { @@ -47,65 +54,56 @@ impl From for ReVmBlockEnv { } } -impl From for (B160, Vec) { - fn from(item: AccessListItem) -> Self { - ( - B160::from_slice(&item.address), - item.storage_keys - .into_iter() - .map(U256::from_le_bytes) - .collect(), - ) - } -} +impl From<&EvmTransactionSignedEcRecovered> for TxEnv { + fn from(tx: &EvmTransactionSignedEcRecovered) -> Self { + let tx: &RethTransactionSignedEcRecovered = tx.as_ref(); -impl From for TxEnv { - fn from(tx: EvmTransaction) -> Self { - let to = match tx.to { - Some(addr) => TransactTo::Call(B160::from_slice(&addr)), + let to = match tx.to() { + Some(addr) => TransactTo::Call(addr), None => TransactTo::Create(CreateScheme::Create), }; - let access_list = tx - .access_lists - .into_iter() - .map(|item| item.into()) - .collect(); - Self { - caller: B160::from_slice(&tx.sender), - data: Bytes::from(tx.data), - gas_limit: tx.gas_limit, - gas_price: 
U256::from(tx.gas_price), - gas_priority_fee: Some(U256::from(tx.max_priority_fee_per_gas)), + caller: tx.signer(), + gas_limit: tx.gas_limit(), + gas_price: U256::from(tx.effective_gas_price(None)), + gas_priority_fee: tx.max_priority_fee_per_gas().map(U256::from), transact_to: to, - value: U256::from(tx.value), - nonce: Some(tx.nonce), - chain_id: Some(tx.chain_id), - access_list, + value: U256::from(tx.value()), + data: Bytes::from(tx.input().to_vec()), + chain_id: tx.chain_id(), + nonce: Some(tx.nonce()), + // TODO handle access list + access_list: vec![], } } } -impl From for Transaction { - fn from(evm_tx: EvmTransaction) -> Self { - Self { - hash: evm_tx.hash.into(), - nonce: evm_tx.nonce.into(), - from: evm_tx.sender.into(), - to: evm_tx.to.map(|addr| addr.into()), - value: evm_tx.value.into(), - // https://github.com/foundry-rs/foundry/blob/master/anvil/core/src/eth/transaction/mod.rs#L1251 - gas_price: Some(evm_tx.max_fee_per_gas.into()), - input: evm_tx.data.into(), - v: (evm_tx.odd_y_parity as u8).into(), - r: evm_tx.r.into(), - s: evm_tx.s.into(), - transaction_type: Some(2.into()), +impl TryFrom for Transaction { + type Error = RawEvmTxConversionError; + fn try_from(evm_tx: RawEvmTransaction) -> Result { + let tx: EvmTransactionSignedEcRecovered = evm_tx.try_into()?; + let tx: &RethTransactionSignedEcRecovered = tx.as_ref(); + + Ok(Self { + hash: tx.hash().into(), + nonce: tx.nonce().into(), + + from: tx.signer().into(), + to: tx.to().map(|addr| addr.into()), + value: tx.value().into(), + gas_price: Some(tx.effective_gas_price(None).into()), + + input: EthBytes::from(tx.input().to_vec()), + v: tx.signature().v(tx.chain_id()).into(), + r: tx.signature().r.into(), + s: tx.signature().s.into(), + transaction_type: Some(U64::from(tx.tx_type() as u8)), + // TODO handle access list access_list: None, - max_priority_fee_per_gas: Some(evm_tx.max_priority_fee_per_gas.into()), - max_fee_per_gas: Some(evm_tx.max_fee_per_gas.into()), - chain_id: Some(evm_tx.chain_id.into()), + max_priority_fee_per_gas: tx.max_priority_fee_per_gas().map(From::from), + max_fee_per_gas: Some(tx.max_fee_per_gas().into()), + chain_id: tx.chain_id().map(|id| id.into()), // TODO https://github.com/Sovereign-Labs/sovereign-sdk/issues/503 block_hash: Some([0; 32].into()), // TODO https://github.com/Sovereign-Labs/sovereign-sdk/issues/503 @@ -116,64 +114,58 @@ impl From for Transaction { gas: Default::default(), // TODO https://github.com/Sovereign-Labs/sovereign-sdk/issues/503 other: OtherFields::default(), - } + }) } } -use reth_primitives::{Bytes as RethBytes, TransactionSigned}; +#[derive(Error, Debug)] +pub enum RawEvmTxConversionError { + #[error("Empty raw transaction data")] + EmptyRawTransactionData, + #[error("Failed to decode signed transaction")] + FailedToDecodeSignedTransaction, +} + +impl From for EthApiError { + fn from(e: RawEvmTxConversionError) -> Self { + match e { + RawEvmTxConversionError::EmptyRawTransactionData => { + EthApiError::EmptyRawTransactionData + } + RawEvmTxConversionError::FailedToDecodeSignedTransaction => { + EthApiError::FailedToDecodeSignedTransaction + } + } + } +} -impl TryFrom for EvmTransaction { - type Error = EthApiError; +impl TryFrom for RethTransactionSignedNoHash { + type Error = RawEvmTxConversionError; - fn try_from(data: RethBytes) -> Result { + fn try_from(data: RawEvmTransaction) -> Result { + let data = RethBytes::from(data.rlp); if data.is_empty() { - return Err(EthApiError::EmptyRawTransactionData); + return 
Err(RawEvmTxConversionError::EmptyRawTransactionData); } - let transaction = TransactionSigned::decode_enveloped(data) - .map_err(|_| EthApiError::FailedToDecodeSignedTransaction)?; + let transaction = RethTransactionSigned::decode_enveloped(data) + .map_err(|_| RawEvmTxConversionError::FailedToDecodeSignedTransaction)?; - let transaction = transaction - .into_ecrecovered() - .ok_or(EthApiError::InvalidTransactionSignature)?; + Ok(transaction.into()) + } +} - let (signed_transaction, signer) = transaction.to_components(); +impl TryFrom for EvmTransactionSignedEcRecovered { + type Error = RawEvmTxConversionError; - let tx_hash = signed_transaction.hash(); - let tx_eip_1559 = match signed_transaction.transaction { - reth_primitives::Transaction::Legacy(_) => { - return Err(EthApiError::InvalidTransaction( - RpcInvalidTransactionError::TxTypeNotSupported, - )) - } - reth_primitives::Transaction::Eip2930(_) => { - return Err(EthApiError::InvalidTransaction( - RpcInvalidTransactionError::TxTypeNotSupported, - )) - } - reth_primitives::Transaction::Eip1559(tx_eip_1559) => tx_eip_1559, - }; + fn try_from(evm_tx: RawEvmTransaction) -> Result { + let tx = RethTransactionSignedNoHash::try_from(evm_tx)?; + let tx: RethTransactionSigned = tx.into(); + let tx = tx + .into_ecrecovered() + .ok_or(RawEvmTxConversionError::FailedToDecodeSignedTransaction)?; - Ok(Self { - sender: signer.into(), - data: tx_eip_1559.input.to_vec(), - gas_limit: tx_eip_1559.gas_limit, - // https://github.com/foundry-rs/foundry/blob/master/anvil/core/src/eth/transaction/mod.rs#L1251C20-L1251C20 - gas_price: tx_eip_1559.max_fee_per_gas, - max_priority_fee_per_gas: tx_eip_1559.max_priority_fee_per_gas, - max_fee_per_gas: tx_eip_1559.max_fee_per_gas, - to: tx_eip_1559.to.to().map(|addr| addr.into()), - value: tx_eip_1559.value, - nonce: tx_eip_1559.nonce, - // TODO https://github.com/Sovereign-Labs/sovereign-sdk/issues/503 - access_lists: vec![], - chain_id: tx_eip_1559.chain_id, - // TODO https://github.com/Sovereign-Labs/sovereign-sdk/issues/503 - hash: tx_hash.into(), - odd_y_parity: Default::default(), - r: Default::default(), - s: Default::default(), - }) + Ok(EvmTransactionSignedEcRecovered::new(tx)) } } @@ -198,7 +190,7 @@ pub fn prepare_call_env(request: CallRequest) -> TxEnv { .unwrap_or_default(), chain_id: request.chain_id.map(|c| c.as_u64()), nonce: request.nonce.map(|n| TryInto::::try_into(n).unwrap()), - + // TODO handle access list access_list: Default::default(), } } diff --git a/module-system/module-implementations/sov-evm/src/evm/executor.rs b/module-system/module-implementations/sov-evm/src/evm/executor.rs index f96101654..d7e56d96c 100644 --- a/module-system/module-implementations/sov-evm/src/evm/executor.rs +++ b/module-system/module-implementations/sov-evm/src/evm/executor.rs @@ -4,12 +4,12 @@ use reth_revm::tracing::{TracingInspector, TracingInspectorConfig}; use revm::primitives::{CfgEnv, EVMError, Env, ExecutionResult, ResultAndState, TxEnv}; use revm::{self, Database, DatabaseCommit}; -use super::transaction::{BlockEnv, EvmTransaction}; +use super::transaction::{BlockEnv, EvmTransactionSignedEcRecovered}; pub(crate) fn execute_tx + DatabaseCommit>( db: DB, block_env: BlockEnv, - tx: EvmTransaction, + tx: &EvmTransactionSignedEcRecovered, config_env: CfgEnv, ) -> Result> { let mut evm = revm::new(); diff --git a/module-system/module-implementations/sov-evm/src/evm/mod.rs b/module-system/module-implementations/sov-evm/src/evm/mod.rs index ee03fd513..962940b5c 100644 --- 
a/module-system/module-implementations/sov-evm/src/evm/mod.rs +++ b/module-system/module-implementations/sov-evm/src/evm/mod.rs @@ -10,8 +10,6 @@ pub(crate) mod db_init; pub(crate) mod executor; mod serialize; #[cfg(test)] -pub(crate) mod test_helpers; -#[cfg(test)] mod tests; pub(crate) mod transaction; @@ -19,9 +17,10 @@ pub type EthAddress = [u8; 20]; pub(crate) type Bytes32 = [u8; 32]; pub use conversions::prepare_call_env; -pub use transaction::EvmTransaction; +pub use transaction::RawEvmTransaction; use crate::experimental::SpecIdWrapper; + // Stores information about an EVM account #[derive(borsh::BorshDeserialize, borsh::BorshSerialize, Debug, PartialEq, Clone, Default)] pub(crate) struct AccountInfo { diff --git a/module-system/module-implementations/sov-evm/src/evm/tests.rs b/module-system/module-implementations/sov-evm/src/evm/tests.rs index b30ff0c12..24b93dd31 100644 --- a/module-system/module-implementations/sov-evm/src/evm/tests.rs +++ b/module-system/module-implementations/sov-evm/src/evm/tests.rs @@ -1,20 +1,31 @@ use std::convert::Infallible; +use reth_primitives::TransactionKind; use revm::db::CacheDB; -use revm::primitives::{CfgEnv, KECCAK_EMPTY, U256}; +use revm::primitives::{CfgEnv, ExecutionResult, Output, KECCAK_EMPTY, U256}; use revm::{Database, DatabaseCommit}; use sov_state::{ProverStorage, WorkingSet}; use super::db::EvmDb; use super::db_init::InitEvmDb; use super::executor; -use crate::evm::test_helpers::{output, SimpleStorageContract}; -use crate::evm::transaction::{BlockEnv, EvmTransaction}; +use crate::evm::transaction::BlockEnv; use crate::evm::{contract_address, AccountInfo}; +use crate::smart_contracts::SimpleStorageContract; +use crate::tests::dev_signer::DevSigner; use crate::Evm; - type C = sov_modules_api::default_context::DefaultContext; +pub(crate) fn output(result: ExecutionResult) -> bytes::Bytes { + match result { + ExecutionResult::Success { output, .. 
} => match output { + Output::Call(out) => out, + Output::Create(out, _) => out, + }, + _ => panic!("Expected successful ExecutionResult"), + } +} + #[test] fn simple_contract_execution_sov_state() { let tmpdir = tempfile::tempdir().unwrap(); @@ -36,7 +47,8 @@ fn simple_contract_execution_in_memory_state() { fn simple_contract_execution + DatabaseCommit + InitEvmDb>( mut evm_db: DB, ) { - let caller: [u8; 20] = [11; 20]; + let dev_signer = DevSigner::new_random(); + let caller = dev_signer.address; evm_db.insert_account_info( caller, AccountInfo { @@ -47,15 +59,14 @@ fn simple_contract_execution + DatabaseCommit + }, ); - let contract = SimpleStorageContract::new(); + let contract = SimpleStorageContract::default(); let contract_address = { - let tx = EvmTransaction { - to: None, - data: contract.byte_code().to_vec(), - ..Default::default() - }; + let tx = dev_signer + .sign_default_transaction(TransactionKind::Create, contract.byte_code().to_vec(), 1) + .unwrap(); + let tx = &tx.try_into().unwrap(); let result = executor::execute_tx(&mut evm_db, BlockEnv::default(), tx, CfgEnv::default()).unwrap(); contract_address(result).expect("Expected successful contract creation") @@ -66,26 +77,30 @@ fn simple_contract_execution + DatabaseCommit + { let call_data = contract.set_call_data(set_arg); - let tx = EvmTransaction { - to: Some(*contract_address.as_fixed_bytes()), - data: hex::decode(hex::encode(&call_data)).unwrap(), - nonce: 1, - ..Default::default() - }; + let tx = dev_signer + .sign_default_transaction( + TransactionKind::Call(contract_address.as_fixed_bytes().into()), + hex::decode(hex::encode(&call_data)).unwrap(), + 2, + ) + .unwrap(); + let tx = &tx.try_into().unwrap(); executor::execute_tx(&mut evm_db, BlockEnv::default(), tx, CfgEnv::default()).unwrap(); } let get_res = { let call_data = contract.get_call_data(); - let tx = EvmTransaction { - to: Some(*contract_address.as_fixed_bytes()), - data: hex::decode(hex::encode(&call_data)).unwrap(), - nonce: 2, - ..Default::default() - }; + let tx = dev_signer + .sign_default_transaction( + TransactionKind::Call(contract_address.as_fixed_bytes().into()), + hex::decode(hex::encode(&call_data)).unwrap(), + 3, + ) + .unwrap(); + let tx = &tx.try_into().unwrap(); let result = executor::execute_tx(&mut evm_db, BlockEnv::default(), tx, CfgEnv::default()).unwrap(); diff --git a/module-system/module-implementations/sov-evm/src/evm/transaction.rs b/module-system/module-implementations/sov-evm/src/evm/transaction.rs index 4ae947f50..6f0810e4f 100644 --- a/module-system/module-implementations/sov-evm/src/evm/transaction.rs +++ b/module-system/module-implementations/sov-evm/src/evm/transaction.rs @@ -1,3 +1,7 @@ +use reth_primitives::{ + TransactionSignedEcRecovered as RethTransactionSignedEcRecovered, H160, H256, +}; + use super::{Bytes32, EthAddress}; #[derive(borsh::BorshDeserialize, borsh::BorshSerialize, Debug, PartialEq, Clone)] @@ -25,6 +29,7 @@ impl Default for BlockEnv { } } +/// Rlp encoded evm transaction. #[cfg_attr( feature = "native", derive(serde::Serialize), @@ -32,55 +37,46 @@ impl Default for BlockEnv { derive(schemars::JsonSchema) )] #[derive(borsh::BorshDeserialize, borsh::BorshSerialize, Debug, PartialEq, Clone)] -pub struct AccessListItem { - pub address: EthAddress, - pub storage_keys: Vec, +pub struct RawEvmTransaction { + /// Rlp data. 
+ pub rlp: Vec, } -#[cfg_attr( - feature = "native", - derive(serde::Serialize), - derive(serde::Deserialize), - derive(schemars::JsonSchema) -)] -#[derive(borsh::BorshDeserialize, borsh::BorshSerialize, Debug, PartialEq, Clone)] -pub struct EvmTransaction { - pub sender: EthAddress, - pub data: Vec, - pub gas_limit: u64, - pub gas_price: u128, - pub max_priority_fee_per_gas: u128, - pub max_fee_per_gas: u128, - pub to: Option, - pub value: u128, - pub nonce: u64, - pub access_lists: Vec, - pub chain_id: u64, - pub odd_y_parity: bool, - pub r: Bytes32, - pub s: Bytes32, - // todo remove it - pub hash: Bytes32, +/// EC recovered evm transaction. +pub struct EvmTransactionSignedEcRecovered { + tx: RethTransactionSignedEcRecovered, } -impl Default for EvmTransaction { - fn default() -> Self { - Self { - sender: Default::default(), - data: Default::default(), - gas_limit: u64::MAX, - gas_price: Default::default(), - max_priority_fee_per_gas: Default::default(), - max_fee_per_gas: Default::default(), - to: Default::default(), - value: Default::default(), - nonce: Default::default(), - access_lists: Default::default(), - chain_id: 1, - hash: Default::default(), - odd_y_parity: Default::default(), - r: Default::default(), - s: Default::default(), - } +impl EvmTransactionSignedEcRecovered { + /// Creates a new EvmTransactionSignedEcRecovered. + pub fn new(tx: RethTransactionSignedEcRecovered) -> Self { + Self { tx } + } + + /// Transaction hash. Used to identify transaction. + pub fn hash(&self) -> H256 { + self.tx.hash() + } + + /// Signer of transaction recovered from signature. + pub fn signer(&self) -> H160 { + self.tx.signer() + } + + /// Receiver of the transaction. + pub fn to(&self) -> Option { + self.tx.to().map(|to| to.into()) + } +} + +impl AsRef for EvmTransactionSignedEcRecovered { + fn as_ref(&self) -> &RethTransactionSignedEcRecovered { + &self.tx + } +} + +impl From for RethTransactionSignedEcRecovered { + fn from(tx: EvmTransactionSignedEcRecovered) -> Self { + tx.tx } } diff --git a/module-system/module-implementations/sov-evm/src/hooks.rs b/module-system/module-implementations/sov-evm/src/hooks.rs new file mode 100644 index 000000000..722d456bd --- /dev/null +++ b/module-system/module-implementations/sov-evm/src/hooks.rs @@ -0,0 +1,9 @@ +use sov_state::WorkingSet; + +use crate::Evm; + +impl Evm { + pub fn end_slot_hook(&self, _root_hash: [u8; 32], _working_set: &mut WorkingSet) { + // TODO implement block creation logic. 
+ } +} diff --git a/module-system/module-implementations/sov-evm/src/lib.rs b/module-system/module-implementations/sov-evm/src/lib.rs index 226c97647..726ab689a 100644 --- a/module-system/module-implementations/sov-evm/src/lib.rs +++ b/module-system/module-implementations/sov-evm/src/lib.rs @@ -4,19 +4,19 @@ pub mod call; pub mod evm; #[cfg(feature = "experimental")] pub mod genesis; +#[cfg(feature = "experimental")] +pub mod hooks; #[cfg(feature = "native")] #[cfg(feature = "experimental")] pub mod query; -#[cfg(feature = "experimental")] -mod receipt; +#[cfg(feature = "smart_contracts")] +pub mod smart_contracts; #[cfg(feature = "experimental")] #[cfg(test)] mod tests; #[cfg(feature = "experimental")] pub use experimental::{AccountData, Evm, EvmConfig, SpecIdWrapper}; #[cfg(feature = "experimental")] -pub use receipt::TransactionReceipt; -#[cfg(feature = "experimental")] pub use revm::primitives::SpecId; #[cfg(feature = "experimental")] @@ -24,6 +24,7 @@ mod experimental { use std::collections::HashMap; use derive_more::{From, Into}; + use ethers::types::TransactionReceipt; use revm::primitives::{SpecId, KECCAK_EMPTY, U256}; use sov_modules_api::{Error, ModuleInfo}; use sov_state::WorkingSet; @@ -31,10 +32,8 @@ mod experimental { use super::evm::db::EvmDb; use super::evm::transaction::BlockEnv; use super::evm::{DbAccount, EthAddress}; - use crate::evm::{Bytes32, EvmChainCfg, EvmTransaction}; - use crate::TransactionReceipt; - - #[derive(Clone)] + use crate::evm::{Bytes32, EvmChainCfg, RawEvmTransaction}; + #[derive(Clone, Debug)] pub struct AccountData { pub address: EthAddress, pub balance: Bytes32, @@ -53,7 +52,7 @@ mod experimental { } } - #[derive(Clone)] + #[derive(Clone, Debug)] pub struct EvmConfig { pub data: Vec, pub chain_id: u64, @@ -89,10 +88,14 @@ mod experimental { pub(crate) block_env: sov_state::StateValue, #[state] - pub(crate) transactions: sov_state::StateMap, + pub(crate) transactions: sov_state::StateMap, #[state] - pub(crate) receipts: sov_state::StateMap, + pub(crate) receipts: sov_state::StateMap< + ethereum_types::H256, + TransactionReceipt, + sov_state::codec::BcsCodec, + >, } impl sov_modules_api::Module for Evm { diff --git a/module-system/module-implementations/sov-evm/src/query.rs b/module-system/module-implementations/sov-evm/src/query.rs index 8380b05bd..cc0112d11 100644 --- a/module-system/module-implementations/sov-evm/src/query.rs +++ b/module-system/module-implementations/sov-evm/src/query.rs @@ -6,6 +6,7 @@ use reth_rpc_types::state::StateOverride; use reth_rpc_types::{BlockOverrides, CallRequest, TransactionRequest}; use revm::primitives::{CfgEnv, ExecutionResult}; use sov_modules_api::macros::rpc_gen; +use sov_modules_api::utils::to_jsonrpsee_error_object; use sov_state::WorkingSet; use tracing::info; @@ -61,8 +62,9 @@ impl Evm { working_set: &mut WorkingSet, ) -> RpcResult> { info!("evm module: eth_getTransactionByHash"); - let evm_transaction = self.transactions.get(&hash.into(), working_set); - Ok(evm_transaction.map(|tx| tx.into())) + let evm_transaction = self.transactions.get(hash.as_fixed_bytes(), working_set); + let result = evm_transaction.map(Transaction::try_from).transpose(); + result.map_err(|e| to_jsonrpsee_error_object(e, "ETH_RPC_ERROR")) } // TODO https://github.com/Sovereign-Labs/sovereign-sdk/issues/502 @@ -73,8 +75,9 @@ impl Evm { working_set: &mut WorkingSet, ) -> RpcResult> { info!("evm module: eth_getTransactionReceipt"); - let receipt = self.receipts.get(&hash.into(), working_set); - Ok(receipt.map(|r| r.into())) + + let 
receipt = self.receipts.get(&hash, working_set); + Ok(receipt) } //https://github.com/paradigmxyz/reth/blob/f577e147807a783438a3f16aad968b4396274483/crates/rpc/rpc/src/eth/api/transactions.rs#L502 diff --git a/module-system/module-implementations/sov-evm/src/receipt.rs b/module-system/module-implementations/sov-evm/src/receipt.rs deleted file mode 100644 index 67d1a48e5..000000000 --- a/module-system/module-implementations/sov-evm/src/receipt.rs +++ /dev/null @@ -1,64 +0,0 @@ -use ethers_core::types::transaction::response; -use ethers_core::types::OtherFields; - -use crate::evm::{Bytes32, EthAddress}; - -#[derive(borsh::BorshDeserialize, borsh::BorshSerialize, Debug, PartialEq, Clone)] -pub struct TransactionReceipt { - /// Transaction hash. - pub transaction_hash: Bytes32, - /// Index within the block. - pub transaction_index: u64, - /// Hash of the block this transaction was included within. - pub block_hash: Option, - /// Number of the block this transaction was included within. - pub block_number: Option, - /// address of the sender. - pub from: EthAddress, - // address of the receiver. null when its a contract creation transaction. - pub to: Option, - /// Cumulative gas used within the block after this was executed. - pub cumulative_gas_used: Bytes32, - pub gas_used: Bytes32, - pub contract_address: Option, - // TODO https://github.com/Sovereign-Labs/sovereign-sdk/issues/504 - // pub logs: Vec, - // Status: either 1 (success) or 0 (failure). Only present after activation of [EIP-658](https://eips.ethereum.org/EIPS/eip-658) - pub status: Option, - /// State root. Only present before activation of [EIP-658](https://eips.ethereum.org/EIPS/eip-658) - pub root: Option, - // TODO https://github.com/Sovereign-Labs/sovereign-sdk/issues/504 - // Logs bloom - // pub logs_bloom: Bloom, - /// Transaction type, Some(1) for AccessList transaction, None for Legacy - pub transaction_type: Option, - /// The price paid post-execution by the transaction (i.e. base fee + priority fee). 
- /// Both fields in 1559-style transactions are *maximums* (max fee + max priority fee), the - /// amount that's actually paid by users can only be determined post-execution - pub effective_gas_price: Option, -} - -impl From for response::TransactionReceipt { - fn from(receipt: TransactionReceipt) -> Self { - Self { - transaction_hash: receipt.transaction_hash.into(), - transaction_index: receipt.transaction_index.into(), - block_hash: receipt.block_hash.map(|hash| hash.into()), - block_number: receipt.block_number.map(|bn| bn.into()), - from: receipt.from.into(), - to: receipt.to.map(|to| to.into()), - cumulative_gas_used: receipt.cumulative_gas_used.into(), - gas_used: Some(receipt.gas_used.into()), - contract_address: receipt.contract_address.map(|addr| addr.into()), - // TODO https://github.com/Sovereign-Labs/sovereign-sdk/issues/504 - logs: Default::default(), - status: receipt.status.map(|s| s.into()), - root: receipt.root.map(|r| r.into()), - // TODO https://github.com/Sovereign-Labs/sovereign-sdk/issues/504 - logs_bloom: Default::default(), - transaction_type: receipt.transaction_type.map(|t| t.into()), - effective_gas_price: receipt.effective_gas_price.map(|p| p.into()), - other: OtherFields::default(), - } - } -} diff --git a/module-system/module-implementations/sov-evm/src/smart_contracts/mod.rs b/module-system/module-implementations/sov-evm/src/smart_contracts/mod.rs new file mode 100644 index 000000000..1a3949c5e --- /dev/null +++ b/module-system/module-implementations/sov-evm/src/smart_contracts/mod.rs @@ -0,0 +1,2 @@ +mod simple_storage_contract; +pub use simple_storage_contract::SimpleStorageContract; diff --git a/module-system/module-implementations/sov-evm/src/evm/test_helpers.rs b/module-system/module-implementations/sov-evm/src/smart_contracts/simple_storage_contract.rs similarity index 69% rename from module-system/module-implementations/sov-evm/src/evm/test_helpers.rs rename to module-system/module-implementations/sov-evm/src/smart_contracts/simple_storage_contract.rs index 5bd218385..aba5f290e 100644 --- a/module-system/module-implementations/sov-evm/src/evm/test_helpers.rs +++ b/module-system/module-implementations/sov-evm/src/smart_contracts/simple_storage_contract.rs @@ -3,17 +3,6 @@ use std::path::PathBuf; use ethers_contract::BaseContract; use ethers_core::abi::Abi; use ethers_core::types::Bytes; -use revm::primitives::{ExecutionResult, Output}; - -pub(crate) fn output(result: ExecutionResult) -> bytes::Bytes { - match result { - ExecutionResult::Success { output, .. } => match output { - Output::Call(out) => out, - Output::Create(out, _) => out, - }, - _ => panic!("Expected successful ExecutionResult"), - } -} fn test_data_path() -> PathBuf { let mut path = PathBuf::from(env!("CARGO_MANIFEST_DIR")); @@ -29,13 +18,14 @@ fn make_contract_from_abi(path: PathBuf) -> BaseContract { BaseContract::from(abi) } -pub(crate) struct SimpleStorageContract { +/// SimpleStorageContract wrapper. +pub struct SimpleStorageContract { bytecode: Bytes, base_contract: BaseContract, } -impl SimpleStorageContract { - pub(crate) fn new() -> Self { +impl Default for SimpleStorageContract { + fn default() -> Self { let contract_data = { let mut path = test_data_path(); path.push("SimpleStorage.bin"); @@ -56,17 +46,22 @@ impl SimpleStorageContract { base_contract: contract, } } +} - pub(crate) fn byte_code(&self) -> Bytes { +impl SimpleStorageContract { + /// SimpleStorage bytecode. 
+ pub fn byte_code(&self) -> Bytes { self.bytecode.clone() } - pub(crate) fn set_call_data(&self, set_arg: u32) -> Bytes { + /// Setter for the smart contract. + pub fn set_call_data(&self, set_arg: u32) -> Bytes { let set_arg = ethereum_types::U256::from(set_arg); self.base_contract.encode("set", set_arg).unwrap() } - pub(crate) fn get_call_data(&self) -> Bytes { + /// Getter for the smart contract. + pub fn get_call_data(&self) -> Bytes { self.base_contract.encode("get", ()).unwrap() } } diff --git a/module-system/module-implementations/sov-evm/src/tests/call_tests.rs b/module-system/module-implementations/sov-evm/src/tests/call_tests.rs index d39e218a8..09df2e886 100644 --- a/module-system/module-implementations/sov-evm/src/tests/call_tests.rs +++ b/module-system/module-implementations/sov-evm/src/tests/call_tests.rs @@ -1,3 +1,4 @@ +use reth_primitives::TransactionKind; use revm::primitives::{SpecId, KECCAK_EMPTY, U256}; use sov_modules_api::default_context::DefaultContext; use sov_modules_api::default_signature::private_key::DefaultPrivateKey; @@ -5,38 +6,40 @@ use sov_modules_api::{Context, Module, PrivateKey, Spec}; use sov_state::{ProverStorage, WorkingSet}; use crate::call::CallMessage; -use crate::evm::test_helpers::SimpleStorageContract; -use crate::evm::transaction::EvmTransaction; use crate::evm::EthAddress; +use crate::smart_contracts::SimpleStorageContract; +use crate::tests::dev_signer::DevSigner; use crate::{AccountData, Evm, EvmConfig}; - type C = DefaultContext; -fn create_messages(contract_addr: EthAddress, set_arg: u32) -> Vec { +fn create_messages( + contract_addr: EthAddress, + set_arg: u32, + dev_signer: DevSigner, +) -> Vec { let mut transactions = Vec::default(); - let contract = SimpleStorageContract::new(); + let contract = SimpleStorageContract::default(); // Contract creation. { - transactions.push(CallMessage { - tx: EvmTransaction { - to: None, - data: contract.byte_code().to_vec(), - ..Default::default() - }, - }); + let signed_tx = dev_signer + .sign_default_transaction(TransactionKind::Create, contract.byte_code().to_vec(), 0) + .unwrap(); + + transactions.push(CallMessage { tx: signed_tx }); } // Update contract state. 
{ - transactions.push(CallMessage { - tx: EvmTransaction { - to: Some(contract_addr), - data: hex::decode(hex::encode(&contract.set_call_data(set_arg))).unwrap(), - nonce: 1, - ..Default::default() - }, - }); + let signed_tx = dev_signer + .sign_default_transaction( + TransactionKind::Call(contract_addr.into()), + hex::decode(hex::encode(&contract.set_call_data(set_arg))).unwrap(), + 1, + ) + .unwrap(); + + transactions.push(CallMessage { tx: signed_tx }); } transactions @@ -53,7 +56,9 @@ fn evm_test() { let sender = priv_key.pub_key(); let sender_addr = sender.to_address::<::Address>(); let sender_context = C::new(sender_addr); - let caller = [0; 20]; + + let dev_signer: DevSigner = DevSigner::new_random(); + let caller = dev_signer.address; let evm = Evm::::default(); @@ -73,14 +78,14 @@ fn evm_test() { evm.genesis(&config, working_set).unwrap(); - let contract_addr = hex::decode("bd770416a3345f91e4b34576cb804a576fa48eb1") + let contract_addr = hex::decode("819c5497b157177315e1204f52e588b393771719") .unwrap() .try_into() .unwrap(); let set_arg = 999; - for tx in create_messages(contract_addr, set_arg) { + for tx in create_messages(contract_addr, set_arg, dev_signer) { evm.call(tx, &sender_context, working_set).unwrap(); } diff --git a/module-system/module-implementations/sov-evm/src/tests/dev_signer.rs b/module-system/module-implementations/sov-evm/src/tests/dev_signer.rs new file mode 100644 index 000000000..c7e83c359 --- /dev/null +++ b/module-system/module-implementations/sov-evm/src/tests/dev_signer.rs @@ -0,0 +1,79 @@ +use ethers_core::rand::rngs::StdRng; +use ethers_core::rand::SeedableRng; +use reth_primitives::{ + public_key_to_address, sign_message, Bytes as RethBytes, Transaction as RethTransaction, + TransactionKind, TransactionSigned, TxEip1559 as RethTxEip1559, H256, +}; +use reth_rpc::eth::error::SignError; +use secp256k1::{PublicKey, SecretKey}; + +use crate::evm::{EthAddress, RawEvmTransaction}; + +/// ETH transactions signer used in tests. +pub(crate) struct DevSigner { + secret_key: SecretKey, + pub(crate) address: EthAddress, +} + +impl DevSigner { + /// Creates a new signer. + pub(crate) fn new(secret_key: SecretKey) -> Self { + let public_key = PublicKey::from_secret_key(secp256k1::SECP256K1, &secret_key); + let addr = public_key_to_address(public_key); + Self { + secret_key, + address: addr.into(), + } + } + + /// Creates a new signer with random private key. + pub(crate) fn new_random() -> Self { + let mut rng = StdRng::seed_from_u64(22); + let secret_key = SecretKey::new(&mut rng); + Self::new(secret_key) + } + + /// Signs Eip1559 transaction. + pub(crate) fn sign_transaction( + &self, + transaction: RethTxEip1559, + ) -> Result { + let transaction = RethTransaction::Eip1559(transaction); + + let tx_signature_hash = transaction.signature_hash(); + + let signature = sign_message( + H256::from_slice(self.secret_key.as_ref()), + tx_signature_hash, + ) + .map_err(|_| SignError::CouldNotSign)?; + + Ok(TransactionSigned::from_transaction_and_signature( + transaction, + signature, + )) + } + + /// Signs default Eip1559 transaction with to, data and nonce overridden. 
+ pub(crate) fn sign_default_transaction( + &self, + to: TransactionKind, + data: Vec, + nonce: u64, + ) -> Result { + let reth_tx = RethTxEip1559 { + to, + input: RethBytes::from(data), + nonce, + chain_id: 1, + gas_limit: u64::MAX, + ..Default::default() + }; + + let signed = self.sign_transaction(reth_tx)?; + + Ok(RawEvmTransaction { + rlp: signed.envelope_encoded().to_vec(), + }) + } +} diff --git a/module-system/module-implementations/sov-evm/src/tests/mod.rs b/module-system/module-implementations/sov-evm/src/tests/mod.rs index 30f5b5e42..d5b55080e 100644 --- a/module-system/module-implementations/sov-evm/src/tests/mod.rs +++ b/module-system/module-implementations/sov-evm/src/tests/mod.rs @@ -1,3 +1,4 @@ mod call_tests; mod cfg_tests; +pub(crate) mod dev_signer; mod tx_tests; diff --git a/module-system/module-implementations/sov-evm/src/tests/tx_tests.rs b/module-system/module-implementations/sov-evm/src/tests/tx_tests.rs index e8221dc5b..f17389244 100644 --- a/module-system/module-implementations/sov-evm/src/tests/tx_tests.rs +++ b/module-system/module-implementations/sov-evm/src/tests/tx_tests.rs @@ -1,17 +1,10 @@ use std::str::FromStr; use ethers_core::abi::Address; -use ethers_core::k256::ecdsa::SigningKey; use ethers_core::types::transaction::eip2718::TypedTransaction; use ethers_core::types::{Bytes, Eip1559TransactionRequest}; use ethers_core::utils::rlp::Rlp; -use ethers_middleware::SignerMiddleware; -use ethers_providers::{Http, Middleware, Provider}; -use ethers_signers::{LocalWallet, Signer, Wallet}; - -use crate::evm::test_helpers::SimpleStorageContract; - -const MAX_FEE_PER_GAS: u64 = 100000001; +use ethers_signers::{LocalWallet, Signer}; #[tokio::test] async fn tx_rlp_encoding_test() -> Result<(), Box> { @@ -48,123 +41,3 @@ async fn tx_rlp_encoding_test() -> Result<(), Box> { assert_eq!(tx, decoded_tx); Ok(()) } - -struct TestClient { - chain_id: u64, - from_addr: Address, - contract: SimpleStorageContract, - client: SignerMiddleware, Wallet>, -} - -impl TestClient { - #[allow(dead_code)] - async fn new_demo_rollup_client( - chain_id: u64, - key: Wallet, - from_addr: Address, - contract: SimpleStorageContract, - ) -> Self { - let endpoint = format!("http://localhost:{}", 12345); - let provider = Provider::try_from(endpoint).unwrap(); - - let client = SignerMiddleware::new_with_provider_chain(provider, key) - .await - .unwrap(); - - Self { - chain_id, - from_addr, - contract, - client, - } - } - - async fn execute(self) -> Result<(), Box> { - // Deploy contract - - let contract_address = { - let request = Eip1559TransactionRequest::new() - .from(self.from_addr) - .chain_id(self.chain_id) - .nonce(0u64) - .max_priority_fee_per_gas(10u64) - .max_fee_per_gas(MAX_FEE_PER_GAS) - .gas(900000u64) - .data(self.contract.byte_code()); - - let typed_transaction = TypedTransaction::Eip1559(request); - - let receipt = self - .client - .send_transaction(typed_transaction, None) - .await? 
- .await?; - - receipt.unwrap().contract_address.unwrap() - }; - - // Call contract - let set_arg = 923; - { - let request = Eip1559TransactionRequest::new() - .from(self.from_addr) - .to(contract_address) - .chain_id(self.chain_id) - .nonce(1u64) - .data(self.contract.set_call_data(set_arg)) - .max_priority_fee_per_gas(10u64) - .max_fee_per_gas(MAX_FEE_PER_GAS) - .gas(900000u64); - - let typed_transaction = TypedTransaction::Eip1559(request); - - let _ = self - .client - .send_transaction(typed_transaction, None) - .await - .unwrap() - .await; - } - - // Query contract - { - let request = Eip1559TransactionRequest::new() - .from(self.from_addr) - .to(contract_address) - .chain_id(self.chain_id) - .nonce(2u64) - .data(self.contract.get_call_data()) - .gas(900000u64); - - let typed_transaction = TypedTransaction::Eip1559(request); - - let response = self.client.call(&typed_transaction, None).await?; - - let resp_array: [u8; 32] = response.to_vec().try_into().unwrap(); - let get_arg = ethereum_types::U256::from(resp_array); - - assert_eq!(set_arg, get_arg.as_u32()) - } - - Ok(()) - } -} - -// TODO enable in CI -#[ignore] -#[tokio::test] -async fn send_tx_test_to_eth() -> Result<(), Box> { - let chain_id: u64 = 1; - let key = "0xac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80" - .parse::() - .unwrap() - .with_chain_id(chain_id); - - let contract = SimpleStorageContract::new(); - - let from_addr = Address::from_str("0xf39Fd6e51aad88F6F4ce6aB8827279cffFb92266").unwrap(); - - //let test_client = TestClient::new_anvil_client(chain_id, key, from_addr, contract).await; - let test_client = TestClient::new_demo_rollup_client(chain_id, key, from_addr, contract).await; - test_client.execute().await -} diff --git a/module-system/module-implementations/sov-prover-incentives/Cargo.toml b/module-system/module-implementations/sov-prover-incentives/Cargo.toml index ce1950742..38f823325 100644 --- a/module-system/module-implementations/sov-prover-incentives/Cargo.toml +++ b/module-system/module-implementations/sov-prover-incentives/Cargo.toml @@ -13,22 +13,23 @@ resolver = "2" [dev-dependencies] sov-rollup-interface = { path = "../../../rollup-interface", version = "0.1", features = ["mocks"] } -sov-modules-api = { path = "../../sov-modules-api", version = "0.1", features = ["macros"] } +sov-modules-api = { path = "../../sov-modules-api", version = "0.1", features = ["native"] } tempfile = { workspace = true } +sov-prover-incentives = { version = "*", features = ["native"], path = "." 
} [dependencies] anyhow = { workspace = true } -sov-bank = { path = "../sov-bank", version = "0.1", default-features = false } -sov-modules-api = { path = "../../sov-modules-api", version = "0.1", default-features = false, features = ["macros"] } -sov-state = { path = "../../sov-state", version = "0.1", default-features = false } +sov-bank = { path = "../sov-bank", version = "0.1" } +sov-modules-api = { path = "../../sov-modules-api", version = "0.1" } +sov-state = { path = "../../sov-state", version = "0.1" } sov-rollup-interface = { path = "../../../rollup-interface", version = "0.1" } schemars = { workspace = true, optional = true } -serde = { workspace = true } +serde = { workspace = true, optional = true } serde_json = { workspace = true, optional = true } borsh = { workspace = true, features = ["rc"] } bincode = { workspace = true } [features] -default = ["native"] -native = ["sov-modules-api/native", "dep:schemars", "dep:serde_json"] +default = [] +native = ["serde", "serde_json", "schemars", "sov-state/native", "sov-modules-api/native"] \ No newline at end of file diff --git a/module-system/module-implementations/sov-prover-incentives/src/call.rs b/module-system/module-implementations/sov-prover-incentives/src/call.rs index 55068c0a1..2f194dc87 100644 --- a/module-system/module-implementations/sov-prover-incentives/src/call.rs +++ b/module-system/module-implementations/sov-prover-incentives/src/call.rs @@ -15,8 +15,11 @@ use crate::ProverIncentives; // TODO: allow call messages to borrow data // https://github.com/Sovereign-Labs/sovereign-sdk/issues/274 pub enum CallMessage { + /// Bonds the prover with provided bond. BondProver(u64), + /// Unbonds the prover. UnbondProver, + /// Verifies the provided proof (of format `Vec`) VerifyProof(Vec), } diff --git a/module-system/module-implementations/sov-prover-incentives/src/genesis.rs b/module-system/module-implementations/sov-prover-incentives/src/genesis.rs index c83af2606..bbb1734d4 100644 --- a/module-system/module-implementations/sov-prover-incentives/src/genesis.rs +++ b/module-system/module-implementations/sov-prover-incentives/src/genesis.rs @@ -5,6 +5,9 @@ use sov_state::WorkingSet; use crate::ProverIncentives; impl ProverIncentives { + /// Init the [`ProverIncentives`] module using the provided `config`. + /// Sets the minimum amount necessary to bond, the commitment to the verifier circuit + /// the bonding token address and builds the set of initial provers. pub(crate) fn init_module( &self, config: &::Config, diff --git a/module-system/module-implementations/sov-prover-incentives/src/lib.rs b/module-system/module-implementations/sov-prover-incentives/src/lib.rs index d1898f9f3..598b65240 100644 --- a/module-system/module-implementations/sov-prover-incentives/src/lib.rs +++ b/module-system/module-implementations/sov-prover-incentives/src/lib.rs @@ -1,3 +1,5 @@ +#![deny(missing_docs)] +#![doc = include_str!("../README.md")] mod call; mod genesis; @@ -7,14 +9,19 @@ mod tests; #[cfg(feature = "native")] mod query; -use borsh::{BorshDeserialize, BorshSerialize}; +/// The call methods specified in this module pub use call::CallMessage; +/// The response type used by RPC queries. #[cfg(feature = "native")] pub use query::Response; use sov_modules_api::{Context, Error, ModuleInfo}; -use sov_rollup_interface::zk::Zkvm; +use sov_rollup_interface::zk::{StoredCodeCommitment, Zkvm}; use sov_state::WorkingSet; +/// Configuration of the prover incentives module. 
Specifies the +/// address of the bonding token, the minimum bond, the commitment to +/// the allowed verifier method and a set of initial provers with their +/// bonding amount. pub struct ProverIncentivesConfig { /// The address of the token to be used for bonding. bonding_token_address: C::Address, @@ -26,28 +33,6 @@ pub struct ProverIncentivesConfig { initial_provers: Vec<(C::Address, u64)>, } -/// A wrapper around a code commitment which implements borsh -#[derive(Clone, Debug)] -pub struct StoredCodeCommitment { - commitment: Vm::CodeCommitment, -} - -impl BorshSerialize for StoredCodeCommitment { - fn serialize(&self, writer: &mut W) -> std::io::Result<()> { - bincode::serialize_into(writer, &self.commitment) - .expect("Serialization to vec is infallible"); - Ok(()) - } -} - -impl BorshDeserialize for StoredCodeCommitment { - fn deserialize_reader(reader: &mut R) -> std::io::Result { - let commitment: Vm::CodeCommitment = bincode::deserialize_from(reader) - .map_err(|e| std::io::Error::new(std::io::ErrorKind::Other, e))?; - Ok(Self { commitment }) - } -} - /// A new module: /// - Must derive `ModuleInfo` /// - Must contain `[address]` field diff --git a/module-system/module-implementations/sov-prover-incentives/src/query.rs b/module-system/module-implementations/sov-prover-incentives/src/query.rs index e8ab02ba8..ceaf23f0a 100644 --- a/module-system/module-implementations/sov-prover-incentives/src/query.rs +++ b/module-system/module-implementations/sov-prover-incentives/src/query.rs @@ -4,13 +4,16 @@ use sov_state::WorkingSet; use super::ProverIncentives; +/// The structure containing the response returned by the `get_bond_amount` query. #[derive(Serialize, Deserialize, Debug, Eq, PartialEq)] pub struct Response { + /// The bond value stored as a `u64`. pub value: u64, } impl ProverIncentives { - /// Queries the state of the module. + /// Queries the state of the module and returns the bond amount of the address `address`. + /// If the `address` is not bonded, returns a default value. pub fn get_bond_amount( &self, address: C::Address, diff --git a/module-system/module-implementations/sov-prover-incentives/src/tests.rs b/module-system/module-implementations/sov-prover-incentives/src/tests.rs index 8e68cf74f..9996fb669 100644 --- a/module-system/module-implementations/sov-prover-incentives/src/tests.rs +++ b/module-system/module-implementations/sov-prover-incentives/src/tests.rs @@ -11,6 +11,7 @@ type C = DefaultContext; const BOND_AMOUNT: u64 = 1000; const MOCK_CODE_COMMITMENT: MockCodeCommitment = MockCodeCommitment([0u8; 32]); +/// Generates an address by hashing the provided `key`. 
pub fn generate_address(key: &str) -> ::Address { let hash: [u8; 32] = ::Hasher::digest(key.as_bytes()).into(); Address::from(hash) @@ -21,8 +22,8 @@ fn create_bank_config() -> (sov_bank::BankConfig, ::Address) { let token_config = sov_bank::TokenConfig { token_name: "InitialToken".to_owned(), - address_and_balances: vec![(prover_address.clone(), BOND_AMOUNT * 5)], - authorized_minters: vec![prover_address.clone()], + address_and_balances: vec![(prover_address, BOND_AMOUNT * 5)], + authorized_minters: vec![prover_address], salt: 2, }; @@ -54,7 +55,7 @@ fn setup( bonding_token_address: token_address, minimum_bond: BOND_AMOUNT, commitment_of_allowed_verifier_method: MockCodeCommitment([0u8; 32]), - initial_provers: vec![(prover_address.clone(), BOND_AMOUNT)], + initial_provers: vec![(prover_address, BOND_AMOUNT)], }; module @@ -72,7 +73,7 @@ fn test_burn_on_invalid_proof() { // Assert that the prover has the correct bond amount before processing the proof assert_eq!( module - .get_bond_amount(prover_address.clone(), &mut working_set) + .get_bond_amount(prover_address, &mut working_set) .value, BOND_AMOUNT ); @@ -80,7 +81,7 @@ fn test_burn_on_invalid_proof() { // Process an invalid proof { let context = DefaultContext { - sender: prover_address.clone(), + sender: prover_address, }; let proof = MockProof { program_id: MOCK_CODE_COMMITMENT, @@ -110,7 +111,7 @@ fn test_valid_proof() { // Assert that the prover has the correct bond amount before processing the proof assert_eq!( module - .get_bond_amount(prover_address.clone(), &mut working_set) + .get_bond_amount(prover_address, &mut working_set) .value, BOND_AMOUNT ); @@ -118,7 +119,7 @@ fn test_valid_proof() { // Process a valid proof { let context = DefaultContext { - sender: prover_address.clone(), + sender: prover_address, }; let proof = MockProof { program_id: MOCK_CODE_COMMITMENT, @@ -145,7 +146,7 @@ fn test_unbonding() { let mut working_set = WorkingSet::new(ProverStorage::with_path(tmpdir.path()).unwrap()); let (module, prover_address) = setup(&mut working_set); let context = DefaultContext { - sender: prover_address.clone(), + sender: prover_address, }; let token_address = module .bonding_token_address @@ -155,7 +156,7 @@ fn test_unbonding() { // Assert that the prover has bonded tokens assert_eq!( module - .get_bond_amount(prover_address.clone(), &mut working_set) + .get_bond_amount(prover_address, &mut working_set) .value, BOND_AMOUNT ); @@ -164,11 +165,7 @@ fn test_unbonding() { let initial_unlocked_balance = { module .bank - .get_balance_of( - prover_address.clone(), - token_address.clone(), - &mut working_set, - ) + .get_balance_of(prover_address, token_address, &mut working_set) .unwrap_or_default() }; @@ -180,7 +177,7 @@ fn test_unbonding() { // Assert that the prover no longer has bonded tokens assert_eq!( module - .get_bond_amount(prover_address.clone(), &mut working_set) + .get_bond_amount(prover_address, &mut working_set) .value, 0 ); @@ -202,7 +199,7 @@ fn test_prover_not_bonded() { let mut working_set = WorkingSet::new(ProverStorage::with_path(tmpdir.path()).unwrap()); let (module, prover_address) = setup(&mut working_set); let context = DefaultContext { - sender: prover_address.clone(), + sender: prover_address, }; // Unbond the prover diff --git a/module-system/module-implementations/sov-sequencer-registry/Cargo.toml b/module-system/module-implementations/sov-sequencer-registry/Cargo.toml index b0e111f4e..bc143bf1f 100644 --- a/module-system/module-implementations/sov-sequencer-registry/Cargo.toml +++ 
b/module-system/module-implementations/sov-sequencer-registry/Cargo.toml @@ -13,24 +13,38 @@ readme = "README.md" resolver = "2" [dev-dependencies] -sov-modules-api = { path = "../../sov-modules-api", version = "0.1" } +sov-sequencer-registry = { path = ".", features = ["native"] } sov-rollup-interface = { path = "../../../rollup-interface", version = "0.1", features = ["mocks"] } tempfile = { workspace = true } [dependencies] anyhow = { workspace = true } clap = { workspace = true, optional = true } -sov-bank = { path = "../sov-bank", version = "0.1", default-features = false } -sov-modules-api = { path = "../../sov-modules-api", version = "0.1", default-features = false, features = ["macros"] } -sov-state = { path = "../../sov-state", version = "0.1", default-features = false } +sov-bank = { path = "../sov-bank", version = "0.1" } +sov-modules-api = { path = "../../sov-modules-api", version = "0.1" } +sov-state = { path = "../../sov-state", version = "0.1" } sov-rollup-interface = { path = "../../../rollup-interface", version = "0.1" } schemars = { workspace = true, optional = true } serde = { workspace = true, optional = true } serde_json = { workspace = true, optional = true } borsh = { workspace = true, features = ["rc"] } jsonrpsee = { workspace = true, features = ["macros", "client-core", "server"], optional = true } +zk-cycle-macros = { path = "../../../utils/zk-cycle-macros", optional = true } +risc0-zkvm = { version = "0.16", default-features = false, features = ["std"], optional = true } +risc0-zkvm-platform = { version = "0.16", optional = true } +zk-cycle-utils = { path = "../../../utils/zk-cycle-utils", optional = true } [features] -default = ["native"] -serde = ["dep:serde", "dep:serde_json"] -native = ["serde", "sov-modules-api/native", "sov-state/native", "sov-bank/native", "dep:jsonrpsee", "dep:schemars", "dep:clap"] +bench = ["zk-cycle-macros/bench", "risc0-zkvm", "risc0-zkvm-platform", "zk-cycle-utils"] +default = [] +native = [ + "serde", + "serde_json", + "jsonrpsee", + "schemars", + "clap", + "sov-state/native", + "sov-modules-api/native", + "sov-bank/native", +] + diff --git a/module-system/module-implementations/sov-sequencer-registry/README.md b/module-system/module-implementations/sov-sequencer-registry/README.md index 9ead6f07d..16ebbdee5 100644 --- a/module-system/module-implementations/sov-sequencer-registry/README.md +++ b/module-system/module-implementations/sov-sequencer-registry/README.md @@ -1,9 +1,3 @@ # `sov-sequencer-registry` module The `sov-sequencer-registry` module is responsible for sequencer registration, slashing, and rewards. At the moment, only a centralized sequencer is supported. The sequencer's address and bond are registered during the rollup deployment. - -### The `sov-sequencer-registry` module offers the following functionality: - -Hooks: - -The `sov-sequencer-registry` module does not expose any call messages, and rollup users cannot directly modify the state of the sequencer. Instead, the module implements `ApplyBlobHooks` trait. 
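The hunks that follow rework the sequencer registry's call messages: `Register` and `Exit` now carry a documented `da_address` field typed via the `DaAddress` alias instead of a bare byte vector. As a minimal sketch of how a registration is dispatched through the module, assuming a `DefaultContext` setup like the one in this crate's tests (bank already at genesis, sender funded to cover the bond); the `register_sequencer` helper name below is illustrative and not part of the crate:

use sov_modules_api::default_context::DefaultContext;
use sov_modules_api::{CallResponse, Module, Spec};
use sov_sequencer_registry::{CallMessage, SequencerRegistry};
use sov_state::WorkingSet;

type C = DefaultContext;

/// Illustrative helper (not part of the crate): registers `da_address` as an
/// allowed sequencer on behalf of the sender in `context`.
fn register_sequencer(
    registry: &SequencerRegistry<C>,
    da_address: Vec<u8>,
    context: &C,
    working_set: &mut WorkingSet<<C as Spec>::Storage>,
) -> anyhow::Result<CallResponse> {
    let msg = CallMessage::Register { da_address };
    // The module moves the configured bond from the sender into the registry
    // and adds `da_address` to the `allowed_sequencers` map.
    Ok(registry.call(msg, context, working_set)?)
}
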
diff --git a/module-system/module-implementations/sov-sequencer-registry/src/call.rs b/module-system/module-implementations/sov-sequencer-registry/src/call.rs index dea63bd01..29954b44d 100644 --- a/module-system/module-implementations/sov-sequencer-registry/src/call.rs +++ b/module-system/module-implementations/sov-sequencer-registry/src/call.rs @@ -4,21 +4,29 @@ use sov_modules_api::macros::CliWalletArg; use sov_modules_api::CallResponse; use sov_state::WorkingSet; -use crate::SequencerRegistry; +use crate::{DaAddress, SequencerRegistry}; -/// This enumeration represents the available call messages for interacting with the sov-sequencer-registry. +/// This enumeration represents the available call messages for interacting with +/// the `sov-sequencer-registry` module. #[cfg_attr( feature = "native", derive(serde::Serialize), derive(serde::Deserialize), - derive(CliWalletArg), - derive(schemars::JsonSchema) + derive(schemars::JsonSchema), + derive(CliWalletArg) )] #[derive(borsh::BorshDeserialize, borsh::BorshSerialize, Debug, PartialEq, Clone)] -// TODO: Replace with DA address generic, when AddressTrait is split pub enum CallMessage { - Register { da_address: Vec }, - Exit { da_address: Vec }, + /// Add a new sequencer to the sequencer registry. + Register { + /// The DA address of the sequencer you're registering. + da_address: DaAddress, + }, + /// Remove a sequencer from the sequencer registry. + Exit { + /// The DA address of the sequencer you're removing. + da_address: DaAddress, + }, } impl SequencerRegistry { diff --git a/module-system/module-implementations/sov-sequencer-registry/src/genesis.rs b/module-system/module-implementations/sov-sequencer-registry/src/genesis.rs index 2e4a9a2de..ec58c8e48 100644 --- a/module-system/module-implementations/sov-sequencer-registry/src/genesis.rs +++ b/module-system/module-implementations/sov-sequencer-registry/src/genesis.rs @@ -15,12 +15,9 @@ impl SequencerRegistry { &config.seq_rollup_address, working_set, )?; - if let Some(preferred_sequencer) = &config.preferred_sequencer { - if &config.seq_da_address != preferred_sequencer { - anyhow::bail!("Preferred sequencer is not in list of allowed sequencers"); - } + if config.is_preferred_sequencer { self.preferred_sequencer - .set(preferred_sequencer, working_set); + .set(&config.seq_da_address, working_set); } Ok(()) diff --git a/module-system/module-implementations/sov-sequencer-registry/src/hooks.rs b/module-system/module-implementations/sov-sequencer-registry/src/hooks.rs index 0a14fe146..cef202eae 100644 --- a/module-system/module-implementations/sov-sequencer-registry/src/hooks.rs +++ b/module-system/module-implementations/sov-sequencer-registry/src/hooks.rs @@ -2,22 +2,30 @@ use sov_modules_api::hooks::ApplyBlobHooks; use sov_modules_api::Context; use sov_rollup_interface::da::BlobReaderTrait; use sov_state::WorkingSet; +#[cfg(all(target_os = "zkvm", feature = "bench"))] +use zk_cycle_macros::cycle_tracker; +#[cfg(all(target_os = "zkvm", feature = "bench"))] +use zk_cycle_utils::print_cycle_count; use crate::{SequencerOutcome, SequencerRegistry}; -impl ApplyBlobHooks for SequencerRegistry { +impl ApplyBlobHooks for SequencerRegistry { type Context = C; type BlobResult = SequencerOutcome; + #[cfg_attr(all(target_os = "zkvm", feature = "bench"), cycle_tracker)] fn begin_blob_hook( &self, - blob: &mut impl BlobReaderTrait, + blob: &mut B, working_set: &mut WorkingSet<::Storage>, ) -> anyhow::Result<()> { - // Clone to satisfy StateMap API - // TODO: can be fixed after 
https://github.com/Sovereign-Labs/sovereign-sdk/issues/427 - let sender = blob.sender().as_ref().to_vec(); - self.allowed_sequencers.get_or_err(&sender, working_set)?; + #[cfg(all(target_os = "zkvm", feature = "bench"))] + print_cycle_count(); + if !self.is_sender_allowed(&blob.sender(), working_set) { + anyhow::bail!("sender {} is not allowed to submit blobs", blob.sender()); + } + #[cfg(all(target_os = "zkvm", feature = "bench"))] + print_cycle_count(); Ok(()) } diff --git a/module-system/module-implementations/sov-sequencer-registry/src/lib.rs b/module-system/module-implementations/sov-sequencer-registry/src/lib.rs index eeda0eb77..0321bec39 100644 --- a/module-system/module-implementations/sov-sequencer-registry/src/lib.rs +++ b/module-system/module-implementations/sov-sequencer-registry/src/lib.rs @@ -1,31 +1,69 @@ +//! The `sov-sequencer-registry` module is responsible for sequencer +//! registration, slashing, and rewards. At the moment, only a centralized +//! sequencer is supported. The sequencer's address and bond are registered +//! during the rollup deployment. +//! +//! The module implements the [`sov_modules_api::hooks::ApplyBlobHooks`] trait. + +#![deny(missing_docs)] + mod call; mod genesis; mod hooks; #[cfg(feature = "native")] -mod query; +pub mod query; pub use call::CallMessage; #[cfg(feature = "native")] -pub use query::{SequencerAddressResponse, SequencerRegistryRpcImpl, SequencerRegistryRpcServer}; +pub use query::{ + SequencerAddressResponse, SequencerRegistryRpcClient, SequencerRegistryRpcImpl, + SequencerRegistryRpcServer, +}; use sov_modules_api::{CallResponse, Error, ModuleInfo, Spec}; use sov_state::{StateMap, StateValue, WorkingSet}; -/// Initial configuration for the sov_sequencer_registry module. -/// TODO: Should we allow multiple sequencers in genesis? +/// A type alias for DA addresses. +/// +/// TODO: All usages of this type ought to be replaced with a DA address generic, +/// once is fixed. +type DaAddress = Vec; + +/// Genesis configuration for the [`SequencerRegistry`] module. +/// +/// This `struct` must be passed as an argument to +/// [`Module::genesis`](sov_modules_api::Module::genesis). +/// +// TODO: Should we allow multiple sequencers in genesis? pub struct SequencerConfig { + /// The rollup address of the sequencer. pub seq_rollup_address: C::Address, - // TODO: Replace with DA address generic, when AddressTrait is split - pub seq_da_address: Vec, + /// The Data Availability (DA) address of the sequencer. + pub seq_da_address: DaAddress, + /// Coins that will be slashed if the sequencer is malicious. + /// + /// The coins will be transferred from + /// [`SequencerConfig::seq_rollup_address`] to this module's address + /// ([`ModuleInfo::address`]) and locked away until the sequencer + /// decides to exit (unregister). + /// + /// Only sequencers in the [`SequencerRegistry::allowed_sequencers`] list are + /// allowed to exit. pub coins_to_lock: sov_bank::Coins, - // TODO: Replace with DA address generic, when AddressTrait is split - pub preferred_sequencer: Option>, + /// Determines whether this sequencer is *regular* or *preferred*. + /// + /// Batches from the preferred sequencer are always processed first in + /// block, which means the preferred sequencer can guarantee soft + /// confirmation time for transactions. + pub is_preferred_sequencer: bool, } +/// The `sov-sequencer-registry` module `struct`. 
#[cfg_attr(feature = "native", derive(sov_modules_api::ModuleCallJsonSchema))] -#[derive(ModuleInfo)] +#[derive(Clone, ModuleInfo)] pub struct SequencerRegistry { - /// The address of the sov_sequencer_registry module - /// Note: this is address is generated by the module framework and the corresponding private key is unknown. + /// The address of the `sov_sequencer_registry` module. + /// Note: this is address is generated by the module framework and the + /// corresponding private key is unknown. #[address] pub(crate) address: C::Address, @@ -33,29 +71,38 @@ pub struct SequencerRegistry { #[module] pub(crate) bank: sov_bank::Bank, - /// Only batches from sequencers from this list are going to be processed - /// + /// Only batches from sequencers from this list are going to be processed. #[state] - pub(crate) allowed_sequencers: StateMap, C::Address>, + pub(crate) allowed_sequencers: StateMap, - /// Optional preferred sequencer + /// Optional preferred sequencer. /// If set, batches from this sequencer will be processed first in block, /// So this sequencer can guarantee soft confirmation time for transactions #[state] - pub(crate) preferred_sequencer: StateValue>, + pub(crate) preferred_sequencer: StateValue, /// Coin's that will be slashed if the sequencer is malicious. - /// The coins will be transferred from `self.seq_rollup_address` to `self.address` - /// and locked forever, until sequencer decides to exit - /// Only sequencers in `allowed_sequencers` list are allowed to exit. + /// The coins will be transferred from + /// [`SequencerConfig::seq_rollup_address`] to + /// [`SequencerRegistry::address`] and locked forever, until sequencer + /// decides to exit (unregister). + /// + /// Only sequencers in the [`SequencerRegistry::allowed_sequencers`] list are + /// allowed to exit. #[state] pub(crate) coins_to_lock: StateValue>, } -/// Result of applying blob, from sequencer point of view. +/// Result of applying a blob, from sequencer's point of view. pub enum SequencerOutcome { + /// The blob was applied successfully and the operation is concluded. Completed, - Slashed { sequencer: Vec }, + /// The blob was *not* applied successfully. The sequencer has been slashed + /// as a result of the invalid blob. + Slashed { + /// The address of the sequencer that was slashed. + sequencer: DaAddress, + }, } impl sov_modules_api::Module for SequencerRegistry { @@ -63,7 +110,7 @@ impl sov_modules_api::Module for SequencerRegistry< type Config = SequencerConfig; - type CallMessage = call::CallMessage; + type CallMessage = CallMessage; fn genesis( &self, @@ -80,17 +127,16 @@ impl sov_modules_api::Module for SequencerRegistry< working_set: &mut WorkingSet<::Storage>, ) -> Result { Ok(match message { - call::CallMessage::Register { da_address } => { + CallMessage::Register { da_address } => { self.register(da_address, context, working_set)? } - call::CallMessage::Exit { da_address } => { - self.exit(da_address, context, working_set)? - } + CallMessage::Exit { da_address } => self.exit(da_address, context, working_set)?, }) } } impl SequencerRegistry { + /// Returns the configured amount of [`Coins`](sov_bank::Coins) to lock. 
pub fn get_coins_to_lock( &self, working_set: &mut WorkingSet, @@ -100,7 +146,7 @@ impl SequencerRegistry { pub(crate) fn register_sequencer( &self, - da_address: Vec, + da_address: DaAddress, rollup_address: &C::Address, working_set: &mut WorkingSet, ) -> anyhow::Result<()> { @@ -122,12 +168,27 @@ impl SequencerRegistry { Ok(()) } - /// Return preferred sequencer if it was set - /// TODO: Replace with DA address generic, when AddressTrait is split + /// Returns the preferred sequencer, or [`None`] if it wasn't set. + /// + /// Read about [`SequencerConfig::is_preferred_sequencer`] to learn about + /// preferred sequencers. pub fn get_preferred_sequencer( &self, working_set: &mut WorkingSet, - ) -> Option> { + ) -> Option { self.preferred_sequencer.get(working_set) } + + /// Checks whether `sender` is a registered sequencer. + pub fn is_sender_allowed( + &self, + sender: &T, + working_set: &mut WorkingSet, + ) -> bool { + // Clone to satisfy StateMap API + // TODO: can be fixed after https://github.com/Sovereign-Labs/sovereign-sdk/issues/427 + self.allowed_sequencers + .get(&sender.as_ref().to_vec(), working_set) + .is_some() + } } diff --git a/module-system/module-implementations/sov-sequencer-registry/src/query.rs b/module-system/module-implementations/sov-sequencer-registry/src/query.rs index 903574037..c316d45a6 100644 --- a/module-system/module-implementations/sov-sequencer-registry/src/query.rs +++ b/module-system/module-implementations/sov-sequencer-registry/src/query.rs @@ -1,27 +1,31 @@ +//! Defines RPC queries exposed by the sequencer registry module, along with the relevant types use jsonrpsee::core::RpcResult; use sov_modules_api::macros::rpc_gen; use sov_modules_api::Context; use sov_state::WorkingSet; -use crate::SequencerRegistry; +use crate::{DaAddress, SequencerRegistry}; +/// The response type to the `getSequencerAddress` RPC method. #[cfg_attr( feature = "native", derive(serde::Deserialize, serde::Serialize, Clone) )] #[derive(Debug, Eq, PartialEq)] pub struct SequencerAddressResponse { + /// The rollup address of the requested sequencer. pub address: Option, } #[rpc_gen(client, server, namespace = "sequencer")] impl SequencerRegistry { - /// Returns sequencer rollup address for given DA address - /// Contains any data only if sequencer is allowed to produce batches + /// Returns the rollup address of the sequencer with the given DA address. + /// + /// The response only contains data if the sequencer is registered.
#[rpc_method(name = "getSequencerAddress")] pub fn sequencer_address( &self, - da_address: Vec, + da_address: DaAddress, working_set: &mut WorkingSet, ) -> RpcResult> { Ok(SequencerAddressResponse { diff --git a/module-system/module-implementations/sov-sequencer-registry/tests/helpers/mod.rs b/module-system/module-implementations/sov-sequencer-registry/tests/helpers/mod.rs index 9072f9ace..5d0fbf6ab 100644 --- a/module-system/module-implementations/sov-sequencer-registry/tests/helpers/mod.rs +++ b/module-system/module-implementations/sov-sequencer-registry/tests/helpers/mod.rs @@ -40,10 +40,10 @@ impl TestSequencer { pub fn query_balance_via_bank( &mut self, working_set: &mut WorkingSet<::Storage>, - ) -> RpcResult { + ) -> RpcResult { self.bank.balance_of( - self.sequencer_config.seq_rollup_address.clone(), - self.sequencer_config.coins_to_lock.token_address.clone(), + self.sequencer_config.seq_rollup_address, + self.sequencer_config.coins_to_lock.token_address, working_set, ) } @@ -53,10 +53,10 @@ impl TestSequencer { &mut self, user_address: ::Address, working_set: &mut WorkingSet<::Storage>, - ) -> RpcResult { + ) -> RpcResult { self.bank.balance_of( user_address, - self.sequencer_config.coins_to_lock.token_address.clone(), + self.sequencer_config.coins_to_lock.token_address, working_set, ) } @@ -68,7 +68,7 @@ pub fn create_bank_config() -> (sov_bank::BankConfig, ::Address) { let token_config = sov_bank::TokenConfig { token_name: "InitialToken".to_owned(), address_and_balances: vec![ - (seq_address.clone(), INITIAL_BALANCE), + (seq_address, INITIAL_BALANCE), (generate_address(ANOTHER_SEQUENCER_KEY), INITIAL_BALANCE), (generate_address(UNKNOWN_SEQUENCER_KEY), INITIAL_BALANCE), (generate_address(LOW_FUND_KEY), 3), @@ -96,7 +96,7 @@ pub fn create_sequencer_config( amount: LOCKED_AMOUNT, token_address, }, - preferred_sequencer: None, + is_preferred_sequencer: false, } } diff --git a/module-system/module-implementations/sov-sequencer-registry/tests/hooks_test.rs b/module-system/module-implementations/sov-sequencer-registry/tests/hooks_test.rs index ec9dc469d..a47d2fc0f 100644 --- a/module-system/module-implementations/sov-sequencer-registry/tests/hooks_test.rs +++ b/module-system/module-implementations/sov-sequencer-registry/tests/hooks_test.rs @@ -4,8 +4,7 @@ use sov_state::{ProverStorage, WorkingSet}; mod helpers; use helpers::*; -use sov_modules_api::Address; -use sov_rollup_interface::mocks::TestBlob; +use sov_rollup_interface::mocks::{MockAddress, MockBlob}; use sov_sequencer_registry::{SequencerOutcome, SequencerRegistry}; #[test] @@ -21,9 +20,9 @@ fn begin_blob_hook_known_sequencer() { }; assert_eq!(INITIAL_BALANCE - LOCKED_AMOUNT, balance_after_genesis); - let mut test_blob = TestBlob::new( + let mut test_blob = MockBlob::new( Vec::new(), - Address::from(GENESIS_SEQUENCER_DA_ADDRESS), + MockAddress::from(GENESIS_SEQUENCER_DA_ADDRESS), [0_u8; 32], ); @@ -48,9 +47,9 @@ fn begin_blob_hook_unknown_sequencer() { let working_set = &mut WorkingSet::new(ProverStorage::with_path(tmpdir.path()).unwrap()); test_sequencer.genesis(working_set); - let mut test_blob = TestBlob::new( + let mut test_blob = MockBlob::new( Vec::new(), - Address::from(UNKNOWN_SEQUENCER_DA_ADDRESS), + MockAddress::from(UNKNOWN_SEQUENCER_DA_ADDRESS), [0_u8; 32], ); @@ -58,9 +57,12 @@ fn begin_blob_hook_unknown_sequencer() { .registry .begin_blob_hook(&mut test_blob, working_set); assert!(result.is_err()); - let expected_message_part = "Value not found for prefix: 
\"sov_sequencer_registry/SequencerRegistry/allowed_sequencers/\""; + let expected_message = format!( + "sender {} is not allowed to submit blobs", + MockAddress::from(UNKNOWN_SEQUENCER_DA_ADDRESS) + ); let actual_message = result.err().unwrap().to_string(); - assert!(actual_message.contains(expected_message_part)); + assert_eq!(expected_message, actual_message); } #[test] @@ -75,9 +77,9 @@ fn end_blob_hook_success() { }; assert_eq!(INITIAL_BALANCE - LOCKED_AMOUNT, balance_after_genesis); - let mut test_blob = TestBlob::new( + let mut test_blob = MockBlob::new( Vec::new(), - Address::from(GENESIS_SEQUENCER_DA_ADDRESS), + MockAddress::from(GENESIS_SEQUENCER_DA_ADDRESS), [0_u8; 32], ); @@ -86,10 +88,12 @@ fn end_blob_hook_success() { .begin_blob_hook(&mut test_blob, working_set) .unwrap(); - test_sequencer - .registry - .end_blob_hook(SequencerOutcome::Completed, working_set) - .unwrap(); + as ApplyBlobHooks>::end_blob_hook( + &test_sequencer.registry, + SequencerOutcome::Completed, + working_set, + ) + .unwrap(); let resp = test_sequencer.query_balance_via_bank(working_set).unwrap(); assert_eq!(balance_after_genesis, resp.amount.unwrap()); let resp = test_sequencer @@ -111,9 +115,9 @@ fn end_blob_hook_slash() { }; assert_eq!(INITIAL_BALANCE - LOCKED_AMOUNT, balance_after_genesis); - let mut test_blob = TestBlob::new( + let mut test_blob = MockBlob::new( Vec::new(), - Address::from(GENESIS_SEQUENCER_DA_ADDRESS), + MockAddress::from(GENESIS_SEQUENCER_DA_ADDRESS), [0_u8; 32], ); @@ -125,10 +129,12 @@ fn end_blob_hook_slash() { let result = SequencerOutcome::Slashed { sequencer: GENESIS_SEQUENCER_DA_ADDRESS.to_vec(), }; - test_sequencer - .registry - .end_blob_hook(result, working_set) - .unwrap(); + as ApplyBlobHooks>::end_blob_hook( + &test_sequencer.registry, + result, + working_set, + ) + .unwrap(); let resp = test_sequencer.query_balance_via_bank(working_set).unwrap(); assert_eq!(balance_after_genesis, resp.amount.unwrap()); @@ -152,7 +158,7 @@ fn end_blob_hook_slash_preferred_sequencer() { let registry = SequencerRegistry::::default(); let mut sequencer_config = create_sequencer_config(seq_rollup_address, token_address); - sequencer_config.preferred_sequencer = Some(sequencer_config.seq_da_address.clone()); + sequencer_config.is_preferred_sequencer = true; let mut test_sequencer = TestSequencer { bank, @@ -170,9 +176,9 @@ fn end_blob_hook_slash_preferred_sequencer() { }; assert_eq!(INITIAL_BALANCE - LOCKED_AMOUNT, balance_after_genesis); - let mut test_blob = TestBlob::new( + let mut test_blob = MockBlob::new( Vec::new(), - Address::from(GENESIS_SEQUENCER_DA_ADDRESS), + MockAddress::from(GENESIS_SEQUENCER_DA_ADDRESS), [0_u8; 32], ); @@ -184,10 +190,12 @@ fn end_blob_hook_slash_preferred_sequencer() { let result = SequencerOutcome::Slashed { sequencer: GENESIS_SEQUENCER_DA_ADDRESS.to_vec(), }; - test_sequencer - .registry - .end_blob_hook(result, working_set) - .unwrap(); + as ApplyBlobHooks>::end_blob_hook( + &test_sequencer.registry, + result, + working_set, + ) + .unwrap(); let resp = test_sequencer.query_balance_via_bank(working_set).unwrap(); assert_eq!(balance_after_genesis, resp.amount.unwrap()); @@ -210,9 +218,9 @@ fn end_blob_hook_slash_unknown_sequencer() { let working_set = &mut WorkingSet::new(ProverStorage::with_path(tmpdir.path()).unwrap()); test_sequencer.genesis(working_set); - let mut test_blob = TestBlob::new( + let mut test_blob = MockBlob::new( Vec::new(), - Address::from(GENESIS_SEQUENCER_DA_ADDRESS), + MockAddress::from(GENESIS_SEQUENCER_DA_ADDRESS), [0_u8; 32], ); @@ 
-230,10 +238,12 @@ fn end_blob_hook_slash_unknown_sequencer() { let result = SequencerOutcome::Slashed { sequencer: UNKNOWN_SEQUENCER_DA_ADDRESS.to_vec(), }; - test_sequencer - .registry - .end_blob_hook(result, working_set) - .unwrap(); + as ApplyBlobHooks>::end_blob_hook( + &test_sequencer.registry, + result, + working_set, + ) + .unwrap(); let resp = test_sequencer .registry diff --git a/module-system/module-implementations/sov-sequencer-registry/tests/sequencer_registry_test.rs b/module-system/module-implementations/sov-sequencer-registry/tests/sequencer_registry_test.rs index 15662d088..c914da1d2 100644 --- a/module-system/module-implementations/sov-sequencer-registry/tests/sequencer_registry_test.rs +++ b/module-system/module-implementations/sov-sequencer-registry/tests/sequencer_registry_test.rs @@ -34,10 +34,10 @@ fn test_registration_lifecycle() { let da_address = ANOTHER_SEQUENCER_DA_ADDRESS.to_vec(); let sequencer_address = generate_address(ANOTHER_SEQUENCER_KEY); - let sender_context = C::new(sequencer_address.clone()); + let sender_context = C::new(sequencer_address); let balance_before = test_sequencer - .query_balance(sequencer_address.clone(), working_set) + .query_balance(sequencer_address, working_set) .unwrap() .amount .unwrap(); @@ -57,7 +57,7 @@ fn test_registration_lifecycle() { .expect("Sequencer registration has failed"); let balance_after_registration = test_sequencer - .query_balance(sequencer_address.clone(), working_set) + .query_balance(sequencer_address, working_set) .unwrap() .amount .unwrap(); @@ -68,7 +68,7 @@ fn test_registration_lifecycle() { .sequencer_address(da_address.clone(), working_set) .unwrap(); assert_eq!( - Some(sequencer_address.clone()), + Some(sequencer_address), registry_response_after_registration.address ); @@ -104,7 +104,7 @@ fn test_registration_not_enough_funds() { let da_address = ANOTHER_SEQUENCER_DA_ADDRESS.to_vec(); let sequencer_address = generate_address(LOW_FUND_KEY); - let sender_context = C::new(sequencer_address.clone()); + let sender_context = C::new(sequencer_address); let register_message = CallMessage::Register { da_address }; let response = test_sequencer @@ -155,7 +155,7 @@ fn test_registration_second_time() { let da_address = GENESIS_SEQUENCER_DA_ADDRESS.to_vec(); let sequencer_address = generate_address(GENESIS_SEQUENCER_KEY); - let sender_context = C::new(sequencer_address.clone()); + let sender_context = C::new(sequencer_address); let register_message = CallMessage::Register { da_address }; let response = test_sequencer @@ -236,7 +236,7 @@ fn test_preferred_sequencer_returned_and_removed() { let registry = SequencerRegistry::::default(); let mut sequencer_config = create_sequencer_config(seq_rollup_address, token_address); - sequencer_config.preferred_sequencer = Some(sequencer_config.seq_da_address.clone()); + sequencer_config.is_preferred_sequencer = true; let mut test_sequencer = TestSequencer { bank, @@ -250,7 +250,7 @@ fn test_preferred_sequencer_returned_and_removed() { test_sequencer.genesis(working_set); assert_eq!( - test_sequencer.sequencer_config.preferred_sequencer, + Some(test_sequencer.sequencer_config.seq_da_address), test_sequencer.registry.get_preferred_sequencer(working_set) ); @@ -270,34 +270,3 @@ fn test_preferred_sequencer_returned_and_removed() { .get_preferred_sequencer(working_set) .is_none()); } - -#[test] -fn test_preferred_sequencer_not_allowed_sequencers() { - let bank = sov_bank::Bank::::default(); - let (bank_config, seq_rollup_address) = create_bank_config(); - - let token_address = 
sov_bank::get_genesis_token_address::( - &bank_config.tokens[0].token_name, - bank_config.tokens[0].salt, - ); - - let some_da_address = UNKNOWN_SEQUENCER_DA_ADDRESS.to_vec(); - - let registry = SequencerRegistry::::default(); - let mut sequencer_config = create_sequencer_config(seq_rollup_address, token_address); - - sequencer_config.preferred_sequencer = Some(some_da_address); - - let tmpdir = tempfile::tempdir().unwrap(); - let working_set = &mut WorkingSet::new(ProverStorage::with_path(tmpdir.path()).unwrap()); - - bank.genesis(&bank_config, working_set).unwrap(); - let genesis_result = registry.genesis(&sequencer_config, working_set); - assert!(genesis_result.is_err()); - - let message = genesis_result.err().unwrap().to_string(); - assert_eq!( - "Preferred sequencer is not in list of allowed sequencers", - message - ); -} diff --git a/module-system/module-schemas/Cargo.toml b/module-system/module-schemas/Cargo.toml index c73ad573b..b315a34a2 100644 --- a/module-system/module-schemas/Cargo.toml +++ b/module-system/module-schemas/Cargo.toml @@ -16,10 +16,9 @@ sov-modules-api = { path = "../sov-modules-api" } sov-rollup-interface = { path = "../../rollup-interface", features = ["mocks"] } # Modules -sov-accounts = { path = "../module-implementations/sov-accounts" } -sov-bank = { path = "../module-implementations/sov-bank" } -sov-prover-incentives = { path = "../module-implementations/sov-prover-incentives" } -sov-sequencer-registry = { path = "../module-implementations/sov-sequencer-registry" } +sov-accounts = { path = "../module-implementations/sov-accounts", features = ["native"] } +sov-bank = { path = "../module-implementations/sov-bank", features = ["native"] } +sov-prover-incentives = { path = "../module-implementations/sov-prover-incentives", features = ["native"] } +sov-sequencer-registry = { path = "../module-implementations/sov-sequencer-registry", features = ["native"] } sov-evm = { path = "../module-implementations/sov-evm", features = ["experimental"] } -sov-election = { path = "../module-implementations/examples/sov-election" } -sov-value-setter = { path = "../module-implementations/examples/sov-value-setter" } +sov-value-setter = { path = "../module-implementations/examples/sov-value-setter", features = ["native"] } diff --git a/module-system/module-schemas/build.rs b/module-system/module-schemas/build.rs index b8bbef026..5fb771f9a 100644 --- a/module-system/module-schemas/build.rs +++ b/module-system/module-schemas/build.rs @@ -9,7 +9,6 @@ fn main() -> io::Result<()> { store_json_schema::>("sov-bank.json")?; store_json_schema::>("sov-accounts.json")?; store_json_schema::>("sov-evm.json")?; - store_json_schema::>("sov-election.json")?; store_json_schema::>("sov-value-setter.json")?; store_json_schema::>( "sov-prover-incentives.json", diff --git a/module-system/module-schemas/schemas/sov-bank.json b/module-system/module-schemas/schemas/sov-bank.json index 2579415b4..57b32c95c 100644 --- a/module-system/module-schemas/schemas/sov-bank.json +++ b/module-system/module-schemas/schemas/sov-bank.json @@ -153,7 +153,7 @@ "additionalProperties": false }, { - "description": "Freeze a token so that the supply is frozen", + "description": "Freezes a token so that the supply is frozen", "type": "object", "required": [ "Freeze" @@ -199,6 +199,7 @@ } }, "Coins": { + "description": "Structure that stores information specifying a given `amount` (type [`Amount`]) of coins stored at a `token_address` (type [`Context::Address`]).", "type": "object", "required": [ "amount", @@ -206,12 +207,18 
@@ ], "properties": { "amount": { + "description": "An `amount` of coins stored.", "type": "integer", "format": "uint64", "minimum": 0.0 }, "token_address": { - "$ref": "#/definitions/Address" + "description": "The address where the tokens are stored.", + "allOf": [ + { + "$ref": "#/definitions/Address" + } + ] } } } diff --git a/module-system/module-schemas/schemas/sov-evm.json b/module-system/module-schemas/schemas/sov-evm.json index 28f23f5e3..00b39d865 100644 --- a/module-system/module-schemas/schemas/sov-evm.json +++ b/module-system/module-schemas/schemas/sov-evm.json @@ -7,165 +7,25 @@ ], "properties": { "tx": { - "$ref": "#/definitions/EvmTransaction" + "$ref": "#/definitions/RawEvmTransaction" } }, "definitions": { - "AccessListItem": { + "RawEvmTransaction": { + "description": "Rlp encoded evm transaction.", "type": "object", "required": [ - "address", - "storage_keys" + "rlp" ], "properties": { - "address": { + "rlp": { + "description": "Rlp data.", "type": "array", "items": { "type": "integer", "format": "uint8", "minimum": 0.0 - }, - "maxItems": 20, - "minItems": 20 - }, - "storage_keys": { - "type": "array", - "items": { - "type": "array", - "items": { - "type": "integer", - "format": "uint8", - "minimum": 0.0 - }, - "maxItems": 32, - "minItems": 32 - } - } - } - }, - "EvmTransaction": { - "type": "object", - "required": [ - "access_lists", - "chain_id", - "data", - "gas_limit", - "gas_price", - "hash", - "max_fee_per_gas", - "max_priority_fee_per_gas", - "nonce", - "odd_y_parity", - "r", - "s", - "sender", - "value" - ], - "properties": { - "access_lists": { - "type": "array", - "items": { - "$ref": "#/definitions/AccessListItem" } - }, - "chain_id": { - "type": "integer", - "format": "uint64", - "minimum": 0.0 - }, - "data": { - "type": "array", - "items": { - "type": "integer", - "format": "uint8", - "minimum": 0.0 - } - }, - "gas_limit": { - "type": "integer", - "format": "uint64", - "minimum": 0.0 - }, - "gas_price": { - "type": "integer", - "format": "uint128", - "minimum": 0.0 - }, - "hash": { - "type": "array", - "items": { - "type": "integer", - "format": "uint8", - "minimum": 0.0 - }, - "maxItems": 32, - "minItems": 32 - }, - "max_fee_per_gas": { - "type": "integer", - "format": "uint128", - "minimum": 0.0 - }, - "max_priority_fee_per_gas": { - "type": "integer", - "format": "uint128", - "minimum": 0.0 - }, - "nonce": { - "type": "integer", - "format": "uint64", - "minimum": 0.0 - }, - "odd_y_parity": { - "type": "boolean" - }, - "r": { - "type": "array", - "items": { - "type": "integer", - "format": "uint8", - "minimum": 0.0 - }, - "maxItems": 32, - "minItems": 32 - }, - "s": { - "type": "array", - "items": { - "type": "integer", - "format": "uint8", - "minimum": 0.0 - }, - "maxItems": 32, - "minItems": 32 - }, - "sender": { - "type": "array", - "items": { - "type": "integer", - "format": "uint8", - "minimum": 0.0 - }, - "maxItems": 20, - "minItems": 20 - }, - "to": { - "type": [ - "array", - "null" - ], - "items": { - "type": "integer", - "format": "uint8", - "minimum": 0.0 - }, - "maxItems": 20, - "minItems": 20 - }, - "value": { - "type": "integer", - "format": "uint128", - "minimum": 0.0 } } } diff --git a/module-system/module-schemas/schemas/sov-prover-incentives.json b/module-system/module-schemas/schemas/sov-prover-incentives.json index c7d910862..38d6e504a 100644 --- a/module-system/module-schemas/schemas/sov-prover-incentives.json +++ b/module-system/module-schemas/schemas/sov-prover-incentives.json @@ -4,12 +4,7 @@ "description": "This enumeration 
represents the available call messages for interacting with the `ExampleModule` module.", "oneOf": [ { - "type": "string", - "enum": [ - "UnbondProver" - ] - }, - { + "description": "Bonds the prover with provided bond.", "type": "object", "required": [ "BondProver" @@ -24,6 +19,14 @@ "additionalProperties": false }, { + "description": "Unbonds the prover.", + "type": "string", + "enum": [ + "UnbondProver" + ] + }, + { + "description": "Verifies the provided proof (of format `Vec`)", "type": "object", "required": [ "VerifyProof" diff --git a/module-system/module-schemas/schemas/sov-sequencer-registry.json b/module-system/module-schemas/schemas/sov-sequencer-registry.json index 464667392..cb82076ee 100644 --- a/module-system/module-schemas/schemas/sov-sequencer-registry.json +++ b/module-system/module-schemas/schemas/sov-sequencer-registry.json @@ -1,9 +1,10 @@ { "$schema": "http://json-schema.org/draft-07/schema#", "title": "CallMessage", - "description": "This enumeration represents the available call messages for interacting with the sov-sequencer-registry.", + "description": "This enumeration represents the available call messages for interacting with the `sov-sequencer-registry` module.", "oneOf": [ { + "description": "Add a new sequencer to the sequencer registry.", "type": "object", "required": [ "Register" @@ -16,6 +17,7 @@ ], "properties": { "da_address": { + "description": "The DA address of the sequencer you're registering.", "type": "array", "items": { "type": "integer", @@ -29,6 +31,7 @@ "additionalProperties": false }, { + "description": "Remove a sequencer from the sequencer registry.", "type": "object", "required": [ "Exit" @@ -41,6 +44,7 @@ ], "properties": { "da_address": { + "description": "The DA address of the sequencer you're removing.", "type": "array", "items": { "type": "integer", diff --git a/module-system/sov-cli/Cargo.toml b/module-system/sov-cli/Cargo.toml index 8843a465c..4db709b3a 100644 --- a/module-system/sov-cli/Cargo.toml +++ b/module-system/sov-cli/Cargo.toml @@ -17,10 +17,19 @@ path = "src/lib.rs" [dependencies] -demo-stf = { path = "../../examples/demo-stf" } sov-modules-api = { path = "../sov-modules-api", version = "0.1", features = ["native"] } +sov-bank = { path = "../module-implementations/sov-bank", version = "0.1", features = ["native"] } +sov-accounts = { path = "../module-implementations/sov-accounts", version = "0.1", features = ["native"] } directories = "5.0.1" anyhow = { workspace = true } +hex = { workspace = true, features = ["serde"] } borsh = { workspace = true } serde = { workspace = true, features = ["derive"] } serde_json = { workspace = true } +jsonrpsee = { workspace = true, features = ["client"] } +tokio = { workspace = true } + +[dev-dependencies] +tempfile = { workspace = true } +demo-stf = { path = "../../examples/demo-stf", features = ["native"] } +sov-rollup-interface = { path = "../../rollup-interface", features = ["mocks"] } \ No newline at end of file diff --git a/module-system/sov-cli/README.md b/module-system/sov-cli/README.md index 90e2bf2ba..d82b2384c 100644 --- a/module-system/sov-cli/README.md +++ b/module-system/sov-cli/README.md @@ -3,7 +3,7 @@ This package defines a CLI wallet to be used with the Sovereign SDK ## Storage -By default, this wallet persists data in your a directory called `.sov_cli_wallet`, under your home directory. Home is defined as follows: +By default, this wallet persists data in a directory called `.sov_cli_wallet`, under your home directory. 
Home is defined as follows: - Linux: `/home/alice/` - Windows: `C:\Users\Alice\AppData\Roaming` - macOS: `/Users/Alice/Library/Application Support` diff --git a/module-system/sov-cli/src/bin/main.rs b/module-system/sov-cli/src/bin/main.rs deleted file mode 100644 index 5e41098c9..000000000 --- a/module-system/sov-cli/src/bin/main.rs +++ /dev/null @@ -1,42 +0,0 @@ -use demo_stf::runtime::{Runtime, RuntimeCall}; -use sov_cli::wallet_state::WalletState; -use sov_cli::workflows::keys::KeyWorkflow; -use sov_cli::workflows::transactions::TransactionWorkflow; -use sov_cli::{clap, wallet_dir}; -use sov_modules_api::clap::Parser; - -type Ctx = sov_modules_api::default_context::DefaultContext; - -#[derive(clap::Subcommand)] -#[command(author, version, about, long_about = None)] -pub enum Workflows { - #[clap(subcommand)] - Transactions(TransactionWorkflow>), - #[clap(subcommand)] - Keys(KeyWorkflow), -} - -#[derive(clap::Parser)] -#[command(author, version, about, long_about = None)] -pub struct App { - #[clap(subcommand)] - workflow: Workflows, -} - -fn main() -> Result<(), anyhow::Error> { - let app_dir = wallet_dir()?; - std::fs::create_dir_all(app_dir.as_ref())?; - let wallet_state_path = app_dir.as_ref().join("wallet_state.json"); - let mut wallet_state: WalletState, Ctx> = - WalletState::load(&wallet_state_path)?; - - let invocation = App::parse(); - - match invocation.workflow { - Workflows::Transactions(tx) => tx.run(&mut wallet_state, app_dir)?, - Workflows::Keys(inner) => inner.run(&mut wallet_state, app_dir)?, - } - wallet_state.save(wallet_state_path)?; - - Ok(()) -} diff --git a/module-system/sov-cli/src/wallet_state.rs b/module-system/sov-cli/src/wallet_state.rs index 80387fe8b..08b8691ef 100644 --- a/module-system/sov-cli/src/wallet_state.rs +++ b/module-system/sov-cli/src/wallet_state.rs @@ -1,9 +1,10 @@ use std::fs; use std::path::{Path, PathBuf}; +use std::str::FromStr; use serde::de::DeserializeOwned; use serde::{Deserialize, Serialize}; -use sov_modules_api::clap; +use sov_modules_api::{clap, PrivateKey}; /// A struct representing the current state of the CLI wallet #[derive(Debug, Serialize, Deserialize)] @@ -13,6 +14,22 @@ pub struct WalletState { pub unsent_transactions: Vec, /// The addresses in the wallet pub addresses: AddressList, + /// The addresses in the wallet + pub rpc_url: Option, +} + +impl Default + for WalletState +{ + fn default() -> Self { + Self { + unsent_transactions: Vec::new(), + addresses: AddressList { + addresses: Vec::new(), + }, + rpc_url: None, + } + } } impl WalletState { @@ -24,13 +41,7 @@ impl WalletStat let state = serde_json::from_slice(data.as_slice())?; Ok(state) } else { - Ok(Self { - unsent_transactions: Vec::new(), - addresses: AddressList { - other_addresses: Vec::new(), - active_address: None, - }, - }) + Ok(Default::default()) } } @@ -42,33 +53,123 @@ impl WalletStat } } +/// A struct representing private key and associated address +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(bound = "Ctx::Address: Serialize + DeserializeOwned")] +pub struct PrivateKeyAndAddress { + /// Private key of the address + pub private_key: Ctx::PrivateKey, + /// Address associated from the private key + pub address: Ctx::Address, +} + +impl PrivateKeyAndAddress { + /// Returns boolean if the private key matches default address + pub fn is_matching_to_default(&self) -> bool { + self.private_key.to_address::() == self.address + } + + /// Randomly generates a new private key and address + pub fn generate() -> Self { + let private_key = 
Ctx::PrivateKey::generate(); + let address = private_key.to_address::(); + Self { + private_key, + address, + } + } + + /// Generates valid private key and address from given private key + pub fn from_key(private_key: Ctx::PrivateKey) -> Self { + let address = private_key.to_address::(); + Self { + private_key, + address, + } + } +} + +/// A simplified struct representing private key and associated address +/// where the private key is represented as a hex string and address as canonical string +/// TODO: Remove it https://github.com/Sovereign-Labs/sovereign-sdk/issues/766 +#[derive(Debug, serde::Serialize, serde::Deserialize)] +pub struct HexPrivateAndAddress { + /// Private key is hex encoded bytes, without leading 0x + pub hex_priv_key: String, + /// Address is in canonical string format + pub address: String, +} + +impl TryFrom for PrivateKeyAndAddress { + type Error = anyhow::Error; + + fn try_from(value: HexPrivateAndAddress) -> Result { + let private_key_bytes = hex::decode(value.hex_priv_key)?; + let private_key = Ctx::PrivateKey::try_from(&private_key_bytes)?; + let address = Ctx::Address::from_str(&value.address)?; + Ok(PrivateKeyAndAddress { + private_key, + address, + }) + } +} + /// A list of addresses associated with this wallet #[derive(Debug, Serialize, Deserialize)] #[serde(bound = "Ctx::Address: Serialize + DeserializeOwned")] pub struct AddressList { - /// Any addresses which are known by the wallet but not currently active - pub other_addresses: Vec>, - /// The address which is currently active - pub active_address: Option>, + /// All addresses which are known by the wallet. The active address is stored + /// first in this array + addresses: Vec>, } impl AddressList { /// Get the active address pub fn default_address(&self) -> Option<&AddressEntry> { - self.active_address.as_ref() + self.addresses.first() + } + + /// Get an address by identifier + pub fn get_address( + &mut self, + identifier: &KeyIdentifier, + ) -> Option<&mut AddressEntry> { + self.addresses + .iter_mut() + .find(|entry| entry.matches(identifier)) + } + + /// Activate a key by identifier + pub fn activate(&mut self, identifier: &KeyIdentifier) -> Option<&AddressEntry> { + let (idx, _) = self + .addresses + .iter() + .enumerate() + .find(|(_idx, entry)| entry.matches(identifier))?; + self.addresses.swap(0, idx); + self.default_address() } + + /// Remove an address from the wallet by identifier + pub fn remove(&mut self, identifier: &KeyIdentifier) { + self.addresses.retain(|entry| !entry.matches(identifier)); + } + /// Add an address to the wallet - pub fn add(&mut self, address: Ctx::Address, nickname: Option, location: PathBuf) { + pub fn add( + &mut self, + address: Ctx::Address, + nickname: Option, + public_key: Ctx::PublicKey, + location: PathBuf, + ) { let entry = AddressEntry { address, nickname, location, + pub_key: public_key, }; - if self.active_address.is_none() { - self.active_address = Some(entry); - } else { - self.other_addresses.push(entry); - } + self.addresses.push(entry); } } @@ -82,6 +183,9 @@ pub struct AddressEntry { pub nickname: Option, /// The location of the private key on disk pub location: PathBuf, + /// The public key associated with the address + #[serde(with = "pubkey_hex")] + pub pub_key: Ctx::PublicKey, } impl AddressEntry { @@ -121,3 +225,111 @@ impl std::fmt::Display for KeyIdentifier { } } } + +mod pubkey_hex { + use core::fmt; + use std::marker::PhantomData; + + use borsh::{BorshDeserialize, BorshSerialize}; + use hex::{FromHex, ToHex}; + use serde::de::{Error, 
Visitor}; + use serde::{Deserializer, Serializer}; + use sov_modules_api::PublicKey; + pub fn serialize( + data: &P, + serializer: S, + ) -> Result + where + S: Serializer, + { + let bytes = data + .try_to_vec() + .expect("serialization to vec is infallible"); + let formatted_string = format!("0x{}", bytes.encode_hex::()); + serializer.serialize_str(&formatted_string) + } + + /// Deserializes a hex string into raw bytes. + /// + /// Both, upper and lower case characters are valid in the input string and can + /// even be mixed (e.g. `f9b4ca`, `F9B4CA` and `f9B4Ca` are all valid strings). + pub fn deserialize<'de, D, C>(deserializer: D) -> Result + where + D: Deserializer<'de>, + C: PublicKey + BorshDeserialize, + { + struct HexPubkeyVisitor(PhantomData); + + impl<'de, C: PublicKey + BorshDeserialize> Visitor<'de> for HexPubkeyVisitor { + type Value = C; + + fn expecting(&self, f: &mut fmt::Formatter) -> fmt::Result { + write!(f, "a hex encoded string") + } + + fn visit_str(self, data: &str) -> Result + where + E: Error, + { + let data = data.trim_start_matches("0x"); + let bytes: Vec = FromHex::from_hex(data).map_err(Error::custom)?; + C::try_from_slice(&bytes).map_err(Error::custom) + } + + fn visit_borrowed_str(self, data: &'de str) -> Result + where + E: Error, + { + let data = data.trim_start_matches("0x"); + let bytes: Vec = FromHex::from_hex(data).map_err(Error::custom)?; + C::try_from_slice(&bytes).map_err(Error::custom) + } + } + + deserializer.deserialize_str(HexPubkeyVisitor(PhantomData::)) + } +} + +#[cfg(test)] +mod tests { + use sov_modules_api::default_context::DefaultContext; + + use super::*; + + type C = DefaultContext; + #[test] + fn test_private_key_and_address() { + let private_key_and_address = PrivateKeyAndAddress::::generate(); + + let json = serde_json::to_string_pretty(&private_key_and_address).unwrap(); + + let decoded: PrivateKeyAndAddress = serde_json::from_str(&json).unwrap(); + + assert_eq!( + private_key_and_address.private_key.pub_key(), + decoded.private_key.pub_key() + ); + assert_eq!(private_key_and_address.address, decoded.address); + } + + #[test] + fn test_hex_private_key_conversion() { + let private_key_and_address = PrivateKeyAndAddress::::generate(); + + let hex_private_key = private_key_and_address.private_key.as_hex(); + let address_string = private_key_and_address.address.to_string(); + + let hex_private_key_and_address = HexPrivateAndAddress { + hex_priv_key: hex_private_key, + address: address_string, + }; + + let converted = PrivateKeyAndAddress::::try_from(hex_private_key_and_address).unwrap(); + + assert_eq!( + private_key_and_address.private_key.pub_key(), + converted.private_key.pub_key() + ); + assert_eq!(private_key_and_address.address, converted.address); + } +} diff --git a/module-system/sov-cli/src/workflows/keys.rs b/module-system/sov-cli/src/workflows/keys.rs index e416ecd6f..cc5b67629 100644 --- a/module-system/sov-cli/src/workflows/keys.rs +++ b/module-system/sov-cli/src/workflows/keys.rs @@ -5,7 +5,7 @@ use serde::de::DeserializeOwned; use serde::Serialize; use sov_modules_api::{clap, PrivateKey, PublicKey, Spec}; -use crate::wallet_state::{KeyIdentifier, WalletState}; +use crate::wallet_state::{KeyIdentifier, PrivateKeyAndAddress, WalletState}; #[derive(clap::Subcommand)] /// View and manage keys associated with this wallet @@ -16,6 +16,12 @@ pub enum KeyWorkflow { /// A nickname for this key pair nickname: Option, }, + /// Generate a new key pair if none exist + GenerateIfMissing { + #[clap(short, long)] + /// A nickname for 
this key pair + nickname: Option, + }, /// Import an existing key pair Import { #[clap(short, long)] @@ -36,6 +42,12 @@ pub enum KeyWorkflow { #[clap(subcommand)] identifier: KeyIdentifier, }, + /// Unlink a key from the wallet + Remove { + /// The identifier of the key to activate + #[clap(subcommand)] + identifier: KeyIdentifier, + }, } impl KeyWorkflow { @@ -47,16 +59,7 @@ impl KeyWorkflow { ) -> Result<(), anyhow::Error> { match self { KeyWorkflow::Generate { nickname } => { - let keys = ::PrivateKey::generate(); - let address = keys.pub_key().to_address::<::Address>(); - let key_path = app_dir.as_ref().join(format!("{}.json", address)); - println!( - "Generated key pair with address: {}. Saving to {}", - address, - key_path.display() - ); - std::fs::write(&key_path, serde_json::to_string(&keys)?)?; - wallet_state.addresses.add(address, nickname, key_path); + generate_and_save_key(nickname, app_dir, wallet_state)?; } KeyWorkflow::Import { nickname, @@ -64,33 +67,41 @@ impl KeyWorkflow { path, } => { // Try to load the key as a sanity check. - let key = load_key::(&path)?; + let private_key = load_key::(&path)?; + let public_key = private_key.pub_key(); let address = - address_override.unwrap_or_else(|| key.pub_key().to_address::()); + address_override.unwrap_or_else(|| public_key.to_address::()); println!("Imported key pair. address: {}", address); - wallet_state.addresses.add(address, nickname, path); + wallet_state + .addresses + .add(address, nickname, public_key, path); } KeyWorkflow::List => { println!("{}", serde_json::to_string_pretty(&wallet_state.addresses)?) } KeyWorkflow::Activate { identifier } => { - if let Some(active) = wallet_state.addresses.active_address.as_mut() { + if let Some(active) = wallet_state.addresses.default_address() { if active.matches(&identifier) { println!("Key '{}' is already active", identifier); return Ok(()); } - let requested = wallet_state - .addresses - .other_addresses - .iter_mut() - .find(|entry| entry.matches(&identifier)) - .ok_or_else(|| { - anyhow::anyhow!("Could not find key with nickname {}", identifier) - })?; - std::mem::swap(active, requested); - println!("Activated key {}", identifier); + } + wallet_state + .addresses + .activate(&identifier) + .ok_or_else(|| { + anyhow::anyhow!("Could not find key with identifier {}", identifier) + })?; + println!("Activated key {}", identifier); + } + KeyWorkflow::GenerateIfMissing { nickname } => { + if wallet_state.addresses.default_address().is_none() { + generate_and_save_key(nickname, app_dir, wallet_state)?; } } + KeyWorkflow::Remove { identifier } => { + wallet_state.addresses.remove(&identifier); + } } Ok(()) } @@ -100,7 +111,29 @@ impl KeyWorkflow { pub fn load_key( path: impl AsRef, ) -> Result { - let data = std::fs::read(path)?; - let key = serde_json::from_slice(data.as_slice())?; - Ok(key) + let data = std::fs::read_to_string(path)?; + let key_and_address: PrivateKeyAndAddress = serde_json::from_str(&data)?; + Ok(key_and_address.private_key) +} + +/// Generate a new key pair and save it to the wallet +pub fn generate_and_save_key( + nickname: Option, + app_dir: impl AsRef, + wallet_state: &mut WalletState, +) -> Result<(), anyhow::Error> { + let keys = ::PrivateKey::generate(); + let public_key = keys.pub_key(); + let address = keys.pub_key().to_address::<::Address>(); + let key_path = app_dir.as_ref().join(format!("{}.json", address)); + println!( + "Generated key pair with address: {}. 
Saving to {}", + address, + key_path.display() + ); + std::fs::write(&key_path, serde_json::to_string(&keys)?)?; + wallet_state + .addresses + .add(address, nickname, public_key, key_path); + Ok(()) } diff --git a/module-system/sov-cli/src/workflows/mod.rs b/module-system/sov-cli/src/workflows/mod.rs index fb4d11f45..21fadad1a 100644 --- a/module-system/sov-cli/src/workflows/mod.rs +++ b/module-system/sov-cli/src/workflows/mod.rs @@ -1,3 +1,4 @@ //! Workflows for the CLI wallet pub mod keys; +pub mod rpc; pub mod transactions; diff --git a/module-system/sov-cli/src/workflows/rpc.rs b/module-system/sov-cli/src/workflows/rpc.rs new file mode 100644 index 000000000..903d5328b --- /dev/null +++ b/module-system/sov-cli/src/workflows/rpc.rs @@ -0,0 +1,189 @@ +//! Query the current state of the rollup and send transactions + +use std::path::Path; + +use anyhow::Context; +use borsh::BorshSerialize; +use jsonrpsee::core::client::ClientT; +use jsonrpsee::http_client::HttpClientBuilder; +use serde::de::DeserializeOwned; +use serde::Serialize; +use sov_accounts::query::AccountsRpcClient; +use sov_bank::query::{BalanceResponse, BankRpcClient}; +use sov_modules_api::clap; +use sov_modules_api::transaction::Transaction; + +use crate::wallet_state::{AddressEntry, KeyIdentifier, WalletState}; +use crate::workflows::keys::load_key; +const NO_ACCOUNTS_FOUND: &str = + "No accounts found. You can generate one with the `keys generate` subcommand"; +const BAD_RPC_URL: &str = "Unable to connect to provided rpc. You can change to a different rpc url with the `rpc set-url` subcommand "; + +/// Query the current state of the rollup and send transactions +#[derive(clap::Subcommand)] +pub enum RpcWorkflows { + /// Set the url of the rpc server to use + SetUrl { + /// A url like http://localhost:8545 + rpc_url: String, + }, + /// Query the rpc server for the nonce of the provided account. If no account is provided, the active account is used + GetNonce { + /// (Optional) The account to query the nonce for (default: the active account) + #[clap(subcommand)] + account: Option>, + }, + /// Query the rpc server for the token balance of an account + GetBalance { + /// (Optional) The account to query the balance of (default: the active account) + #[clap(subcommand)] + account: Option>, + /// The address of the token to query for + token_address: C::Address, + }, + /// Sign all transactions from the current batch and submit them to the rollup. + /// Nonces will be set automatically. + SubmitBatch { + /// (Optional) The account to sign transactions for this batch (default: the active account) + #[clap(subcommand)] + account: Option>, + /// (Optional) The nonce to use for the first transaction in the batch (default: the current nonce for the account). Any other transactions will + /// be signed with sequential nonces starting from this value. + nonce_override: Option, + }, +} + +impl RpcWorkflows { + fn resolve_account<'wallet, Tx: BorshSerialize>( + &self, + wallet_state: &'wallet mut WalletState, + ) -> Result<&'wallet AddressEntry, anyhow::Error> { + let account_id = match self { + RpcWorkflows::SetUrl { .. } => None, + RpcWorkflows::GetNonce { account } => account.as_ref(), + RpcWorkflows::GetBalance { account, .. } => account.as_ref(), + RpcWorkflows::SubmitBatch { account, .. } => account.as_ref(), + }; + + let account = if let Some(id) = account_id { + let addr = wallet_state.addresses.get_address(id); + + addr.ok_or_else(|| anyhow::format_err!("No account found matching identifier: {}", id))? 
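            // When the caller passes no identifier, the `else` branch below falls back to the
            // wallet's default address: the first entry in `AddressList`, which `activate`
            // moves to index 0.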
+ } else { + wallet_state + .addresses + .default_address() + .ok_or_else(|| anyhow::format_err!(NO_ACCOUNTS_FOUND))? + }; + Ok(account) + } +} + +impl RpcWorkflows { + /// Run the rpc workflow + pub async fn run( + &self, + wallet_state: &mut WalletState, + _app_dir: impl AsRef, + ) -> Result<(), anyhow::Error> { + // If the user is just setting the RPC url, we can skip the usual setup + if let RpcWorkflows::SetUrl { rpc_url } = self { + let _client = HttpClientBuilder::default() + .build(rpc_url) + .context("Invalid rpc url: ")?; + wallet_state.rpc_url = Some(rpc_url.clone()); + println!("Set rpc url to {}", rpc_url); + return Ok(()); + } + + // Otherwise, we need to initialize an RPC and resolve the active account + let rpc_url = wallet_state + .rpc_url + .as_ref() + .ok_or(anyhow::format_err!( + "No rpc url set. Use the `rpc set-url` subcommand to set one" + ))? + .clone(); + let client = HttpClientBuilder::default().build(rpc_url)?; + let account = self.resolve_account(wallet_state)?; + + // Finally, run the workflow + match self { + RpcWorkflows::SetUrl { .. } => { + unreachable!("This case was handled above") + } + RpcWorkflows::GetNonce { .. } => { + let nonce = get_nonce_for_account(&client, account).await?; + println!("Nonce for account {} is {}", account.address, nonce); + } + RpcWorkflows::GetBalance { + account: _, + token_address, + } => { + let BalanceResponse { amount } = BankRpcClient::::balance_of( + &client, + account.address.clone(), + token_address.clone(), + ) + .await + .context(BAD_RPC_URL)?; + + println!( + "Balance for account {} is {}", + account.address, + amount.unwrap_or_default() + ); + } + RpcWorkflows::SubmitBatch { nonce_override, .. } => { + let private_key = load_key::(&account.location)?; + + let nonce = match nonce_override { + Some(nonce) => *nonce, + None => get_nonce_for_account(&client, account).await?, + }; + let txs = std::mem::take(&mut wallet_state.unsent_transactions) + .into_iter() + .enumerate() + .map(|(offset, tx)| { + Transaction::::new_signed_tx( + &private_key, + tx.try_to_vec().unwrap(), + nonce + offset as u64, + ) + .try_to_vec() + .unwrap() + }) + .collect::>(); + + let response: String = client + .request("sequencer_publishBatch", txs) + .await + .context("Unable to publish batch")?; + + // Print the result + println!( + "Your batch was submitted to the sequencer for publication. Response: {:?}", + response + ); + } + } + Ok(()) + } +} + +async fn get_nonce_for_account( + client: &(impl ClientT + Send + Sync), + account: &AddressEntry, +) -> Result { + Ok(match AccountsRpcClient::::get_account( + client, + account.pub_key.clone(), + ) + .await + .context( + "Unable to connect to provided rpc. You can change to a different rpc url with the `rpc set-url` subcommand ", + )? { + sov_accounts::query::Response::AccountExists { addr: _, nonce } => nonce, + _ => 0, + }) +} diff --git a/module-system/sov-cli/src/workflows/transactions.rs b/module-system/sov-cli/src/workflows/transactions.rs index e9655656b..3b346e7db 100644 --- a/module-system/sov-cli/src/workflows/transactions.rs +++ b/module-system/sov-cli/src/workflows/transactions.rs @@ -1,107 +1,129 @@ //! 
Workflows for transaction management -use std::path::{Path, PathBuf}; +use std::path::Path; -use serde::de::DeserializeOwned; use serde::Serialize; -use sov_modules_api::clap::{self, Args, Subcommand}; +use sov_modules_api::clap::{self, Subcommand}; +use sov_modules_api::cli::CliFrontEnd; +use sov_modules_api::CliWallet; use crate::wallet_state::WalletState; #[derive(clap::Parser)] /// Generate, sign, and send transactions -pub enum TransactionWorkflow -where - RT::CliStringRepr: clap::Subcommand, -{ - /// Parse a transaction from the command line and add it to the current batch +pub enum TransactionWorkflow { + /// Import a transaction #[clap(subcommand)] - Generate(RT::CliStringRepr), - /// Import a transaction from a JSON file or as a JSON string - Import(TransactionSubcommand), + Import(ImportTransaction), + /// Delete the current batch of transactions + Clean, + /// Remove a single transaction from the current batch + Remove { + /// The index of the transaction to remove, starting from 0 + index: usize, + }, /// List the current batch of transactions List, - // TODO: Add `send` and `generate_schema` subcommands/ - // TODO: design and implement batch management (remove tx, drop batch, etc.) } -impl TransactionWorkflow -where - RT::Decodable: Serialize + DeserializeOwned, - RT::CliStringRepr: clap::Subcommand, -{ +impl TransactionWorkflow { /// Run the transaction workflow - pub fn run( + pub fn run( self, wallet_state: &mut WalletState, _app_dir: impl AsRef, - ) -> Result<(), anyhow::Error> { + ) -> Result<(), anyhow::Error> + where + File: CliFrontEnd, + Json: CliFrontEnd, + File: TryInto, Error = E1>, + Json: TryInto, Error = E2>, + RT::CliStringRepr: TryInto, + RT::Decodable: Serialize, + E1: Into + Send + Sync, + E2: Into + Send + Sync, + E3: Into + Send + Sync, + { match self { - TransactionWorkflow::Generate(subcommand) => { - // let TransactionSubcommand { args, inner } = subcommand; - let tx: RT::Decodable = subcommand.into(); - println!("Adding the following transaction to batch:"); - println!("{}", serde_json::to_string_pretty(&tx)?); - wallet_state.unsent_transactions.push(tx); - } - TransactionWorkflow::Import(subcommand) => { - let TransactionSubcommand { args: _, inner } = subcommand; - let tx = match inner { - ImportTransaction::FromFile { path } => { - let tx = std::fs::read_to_string(path)?; - serde_json::from_str(&tx)? - } - ImportTransaction::FromString { json } => serde_json::from_str(&json)?, - }; - println!("Adding the following transaction to batch:"); - println!("{}", serde_json::to_string_pretty(&tx)?); - wallet_state.unsent_transactions.push(tx); - } + TransactionWorkflow::Import(import_workflow) => import_workflow.run(wallet_state), TransactionWorkflow::List => { println!("Current batch:"); println!( "{}", serde_json::to_string_pretty(&wallet_state.unsent_transactions)? 
); + Ok(()) + } + TransactionWorkflow::Clean => { + wallet_state.unsent_transactions.clear(); + Ok(()) + } + TransactionWorkflow::Remove { index } => { + wallet_state.unsent_transactions.remove(index); + Ok(()) } } - - Ok(()) } } +/// An argument passed as path to a file +#[derive(clap::Parser)] +pub struct FileArg { + /// The path to the file + #[arg(long, short)] + pub path: String, +} #[derive(clap::Subcommand)] /// Import a pre-formatted transaction from a JSON file or as a JSON string -pub enum ImportTransaction { +pub enum ImportTransaction { /// Import a transaction from a JSON file at the provided path - #[command(arg_required_else_help(true))] - FromFile { - /// The expected format of the file contents is: {"module_name": {"call_name": {"field_name": "field_value"}}} - path: PathBuf, - }, + #[clap(subcommand)] + FromFile(Json), /// Provide a JSON serialized transaction directly as input - #[command(arg_required_else_help(true))] - FromString { + #[clap(subcommand)] + FromString( /// The JSON serialized transaction as a string. /// The expected format is: {"module_name": {"call_name": {"field_name": "field_value"}}} - json: String, - }, + File, + ), } -/// A wrapper around a subcommand that also includes the optional global arguments -#[derive(clap::Parser)] -pub struct TransactionSubcommand { - /// The optional arguments - #[clap(flatten)] - pub args: OptionalArgs, - /// The inner subcommand - #[clap(subcommand)] - pub inner: S, -} +impl ImportTransaction +where + Json: Subcommand, + File: Subcommand, +{ + /// Parse from a file or a json string + pub fn run( + self, + wallet_state: &mut WalletState, + ) -> Result<(), anyhow::Error> + where + Json: CliFrontEnd, + File: CliFrontEnd, + Json: TryInto, Error = E1>, + File: TryInto, Error = E2>, + RT::CliStringRepr: TryInto, + RT::Decodable: Serialize, + E1: Into + Send + Sync, + E2: Into + Send + Sync, + E3: Into + Send + Sync, + { + let intermediate_repr: RT::CliStringRepr = match self { + ImportTransaction::FromFile(file) => { + file.try_into().map_err(Into::::into)? + } + ImportTransaction::FromString(json) => { + json.try_into().map_err(Into::::into)? 
+ } + }; -/// The optional arguments for the transaction workflow -#[derive(Debug, Args)] -pub struct OptionalArgs { - #[clap(short, long, global = true, default_value_t = false)] - send_transactions: bool, + let tx = intermediate_repr + .try_into() + .map_err(Into::::into)?; + println!("Adding the following transaction to batch:"); + println!("{}", serde_json::to_string_pretty(&tx)?); + wallet_state.unsent_transactions.push(tx); + Ok(()) + } } diff --git a/module-system/sov-cli/test-data/requests/burn.json b/module-system/sov-cli/test-data/requests/burn.json new file mode 100644 index 000000000..e3e7f9f88 --- /dev/null +++ b/module-system/sov-cli/test-data/requests/burn.json @@ -0,0 +1,8 @@ +{ + "Burn": { + "coins": { + "amount": 300, + "token_address": "sov16m8fxq0x5wc5aw75fx9rus2p7g2l22zf4re72c3m058g77cdjemsavg2ft" + } + } +} diff --git a/module-system/sov-cli/test-data/requests/create_token.json b/module-system/sov-cli/test-data/requests/create_token.json new file mode 100644 index 000000000..16c27afd1 --- /dev/null +++ b/module-system/sov-cli/test-data/requests/create_token.json @@ -0,0 +1,13 @@ +{ + "CreateToken": { + "salt": 11, + "token_name": "sov-test-token", + "initial_balance": 1000, + "minter_address": "sov1x3jtvq0zwhj2ucsc4hqugskvralrulxvf53vwtkred93s2x9gmzs04jvyr", + "authorized_minters": [ + "sov1l6n2cku82yfqld30lanm2nfw43n2auc8clw7r5u5m6s7p8jrm4zqrr8r94", + "sov1x3jtvq0zwhj2ucsc4hqugskvralrulxvf53vwtkred93s2x9gmzs04jvyr", + "sov15vspj48hpttzyvxu8kzq5klhvaczcpyxn6z6k0hwpwtzs4a6wkvqwr57gc" + ] + } +} diff --git a/module-system/sov-cli/test-data/requests/mint.json b/module-system/sov-cli/test-data/requests/mint.json new file mode 100644 index 000000000..10436e602 --- /dev/null +++ b/module-system/sov-cli/test-data/requests/mint.json @@ -0,0 +1,9 @@ +{ + "Mint": { + "coins": { + "amount": 3000, + "token_address": "sov16m8fxq0x5wc5aw75fx9rus2p7g2l22zf4re72c3m058g77cdjemsavg2ft" + }, + "minter_address": "sov15vspj48hpttzyvxu8kzq5klhvaczcpyxn6z6k0hwpwtzs4a6wkvqwr57gc" + } +} diff --git a/module-system/sov-cli/test-data/requests/transfer.json b/module-system/sov-cli/test-data/requests/transfer.json new file mode 100644 index 000000000..92411d14c --- /dev/null +++ b/module-system/sov-cli/test-data/requests/transfer.json @@ -0,0 +1,9 @@ +{ + "Transfer": { + "to": "sov1l6n2cku82yfqld30lanm2nfw43n2auc8clw7r5u5m6s7p8jrm4zqklh0qh", + "coins": { + "amount": 200, + "token_address": "sov16m8fxq0x5wc5aw75fx9rus2p7g2l22zf4re72c3m058g77cdjemsavg2ft" + } + } +} diff --git a/module-system/sov-cli/tests/keys.rs b/module-system/sov-cli/tests/keys.rs new file mode 100644 index 000000000..ceed1a6fa --- /dev/null +++ b/module-system/sov-cli/tests/keys.rs @@ -0,0 +1,98 @@ +use demo_stf::runtime::RuntimeCall; +use sov_cli::wallet_state::{KeyIdentifier, PrivateKeyAndAddress, WalletState}; +use sov_cli::workflows::keys::KeyWorkflow; +use sov_modules_api::default_context::DefaultContext; +use sov_modules_api::{PrivateKey, PublicKey, Spec}; +use sov_rollup_interface::mocks::MockDaSpec; + +type Da = MockDaSpec; + +#[test] +fn test_key_gen() { + let app_dir = tempfile::tempdir().unwrap(); + let mut wallet_state = + WalletState::, DefaultContext>::default(); + let workflow = KeyWorkflow::Generate { nickname: None }; + workflow.run(&mut wallet_state, app_dir).unwrap(); + + assert!(wallet_state.addresses.default_address().is_some()); +} + +#[test] +fn test_key_import() { + let app_dir = tempfile::tempdir().unwrap(); + // Generate a key and write it to a file + let generated_key = ::PrivateKey::generate(); + 
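    // The file written below uses the `PrivateKeyAndAddress` JSON layout, which is the
    // on-disk format `load_key` now parses (returning only the private-key half).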
let key_path = app_dir.path().join("test_key"); + let key_and_address = PrivateKeyAndAddress::::from_key(generated_key.clone()); + std::fs::write(&key_path, serde_json::to_string(&key_and_address).unwrap()) + .expect("Failed to write key to tempdir"); + + // Initialize an empty wallet + let mut wallet_state = + WalletState::, DefaultContext>::default(); + let workflow = KeyWorkflow::Import { + nickname: Some("my-test-key".to_string()), + address_override: None, + path: key_path, + }; + // Import the key + workflow.run(&mut wallet_state, app_dir).unwrap(); + + // Ensure that the wallet has at least one key + let entry = wallet_state + .addresses + .default_address() + .expect("Key import must succeed"); + + assert_eq!(entry.nickname.as_ref().unwrap(), "my-test-key"); + assert_eq!( + entry.address, + generated_key + .pub_key() + .to_address::<::Address>() + ); +} + +#[test] +fn test_activate() { + // Setup a wallet with two keys + let app_dir = tempfile::tempdir().unwrap(); + let mut wallet_state = + WalletState::, DefaultContext>::default(); + let workflow = KeyWorkflow::Generate { + nickname: Some("key1".into()), + }; + workflow.run(&mut wallet_state, &app_dir).unwrap(); + let workflow = KeyWorkflow::Generate { + nickname: Some("key2".into()), + }; + workflow.run(&mut wallet_state, &app_dir).unwrap(); + + // Ensure that key1 is active + let current_active_wallet = wallet_state.addresses.default_address().unwrap(); + assert!(current_active_wallet.is_nicknamed("key1")); + let address_1 = current_active_wallet.address; + + // Activate key2 by nickname + let workflow = KeyWorkflow::Activate { + identifier: KeyIdentifier::ByNickname { + nickname: "key2".to_string(), + }, + }; + workflow.run(&mut wallet_state, &app_dir).unwrap(); + + // Ensure that key2 is active + let current_active_wallet = wallet_state.addresses.default_address().unwrap(); + assert!(current_active_wallet.is_nicknamed("key2")); + + // Activate key1 by address + let workflow = KeyWorkflow::Activate { + identifier: KeyIdentifier::ByAddress { address: address_1 }, + }; + workflow.run(&mut wallet_state, &app_dir).unwrap(); + + // Ensure that key1 is active + let current_active_wallet = wallet_state.addresses.default_address().unwrap(); + assert!(current_active_wallet.is_nicknamed("key1")); +} diff --git a/module-system/sov-cli/tests/transactions.rs b/module-system/sov-cli/tests/transactions.rs new file mode 100644 index 000000000..c8881ca7b --- /dev/null +++ b/module-system/sov-cli/tests/transactions.rs @@ -0,0 +1,67 @@ +use std::path::{Path, PathBuf}; + +use demo_stf::runtime::{Runtime, RuntimeCall, RuntimeSubcommand}; +use sov_cli::wallet_state::WalletState; +use sov_cli::workflows::transactions::{ImportTransaction, TransactionWorkflow}; +use sov_modules_api::cli::{FileNameArg, JsonStringArg}; +use sov_modules_api::default_context::DefaultContext; +use sov_rollup_interface::mocks::MockDaSpec; + +type Da = MockDaSpec; + +#[test] +fn test_import_transaction_from_string() { + let app_dir = tempfile::tempdir().unwrap(); + let mut wallet_state = + WalletState::, DefaultContext>::default(); + + let test_token_path = make_test_path("requests/create_token.json"); + let subcommand = RuntimeSubcommand::::bank { + contents: JsonStringArg { + json: std::fs::read_to_string(test_token_path).unwrap(), + }, + }; + + let workflow = TransactionWorkflow::Import(ImportTransaction::< + _, + RuntimeSubcommand, + >::FromFile(subcommand)); + workflow + .run::, _, _, _, _, _>(&mut wallet_state, app_dir) + .unwrap(); + + 
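    // A successful import should queue exactly one transaction, which the assertion below checks.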
assert_eq!(wallet_state.unsent_transactions.len(), 1); +} + +#[test] +fn test_import_transaction_from_file() { + let app_dir = tempfile::tempdir().unwrap(); + let mut wallet_state = + WalletState::, DefaultContext>::default(); + + let test_token_path = make_test_path("requests/create_token.json"); + let subcommand = RuntimeSubcommand::::bank { + contents: FileNameArg { + path: test_token_path.to_str().unwrap().into(), + }, + }; + + let workflow = TransactionWorkflow::Import(ImportTransaction::< + _, + RuntimeSubcommand, + >::FromFile(subcommand)); + workflow + .run::, _, _, _, _, _>(&mut wallet_state, app_dir) + .unwrap(); + + assert_eq!(wallet_state.unsent_transactions.len(), 1); +} + +fn make_test_path>(path: P) -> PathBuf { + let mut sender_path = PathBuf::from(env!("CARGO_MANIFEST_DIR")); + sender_path.push("test-data"); + + sender_path.push(path); + + sender_path +} diff --git a/module-system/sov-modules-api/Cargo.toml b/module-system/sov-modules-api/Cargo.toml index 3484e8af7..69fb4168b 100644 --- a/module-system/sov-modules-api/Cargo.toml +++ b/module-system/sov-modules-api/Cargo.toml @@ -12,31 +12,37 @@ readme = "README.md" resolver = "2" [dependencies] -jsonrpsee = { workspace = true, optional = true } +jsonrpsee = { workspace = true, optional = true } anyhow = { workspace = true } -sov-state = { path = "../sov-state", version = "0.1", default-features = false } +arbitrary = { workspace = true, optional = true } +sov-state = { path = "../sov-state", version = "0.1" } sov-rollup-interface = { path = "../../rollup-interface", version = "0.1" } sov-modules-macros = { path = "../sov-modules-macros", version = "0.1", optional = true } +sov-sequencer = { path = "../../full-node/sov-sequencer", optional = true } serde = { workspace = true } borsh = { workspace = true } thiserror = { workspace = true } sha2 = { workspace = true } bech32 = { workspace = true } derive_more = { workspace = true } -serde_json = { workspace = true } +serde_json = { workspace = true, optional = true } hex = { workspace = true, optional = true } clap = { workspace = true, optional = true } schemars = { workspace = true, optional = true, features = [] } -ed25519-dalek = { version = "1.0.1", default-features = false, features = ["alloc", "u64_backend"] } -rand = { version = "0.7", optional = true } +ed25519-dalek = { version = "2.0.0", default-features = false } +rand = { version = "0.8", optional = true } +zk-cycle-macros = { path = "../../utils/zk-cycle-macros", optional = true } +risc0-zkvm = { version = "0.16", default-features = false, features = ["std"], optional = true } +risc0-zkvm-platform = { version = "0.16", optional = true } [dev-dependencies] -serde_json = { workspace = true } bincode = { workspace = true } +sov-modules-api = { path = ".", features = ["native"] } [features] -default = ["native", "macros"] -native = ["sov-state/native", "rand", "hex", "schemars", "ed25519-dalek/default", "clap", "jsonrpsee", "macros", "sov-modules-macros/native"] +bench = ["zk-cycle-macros", "risc0-zkvm", "risc0-zkvm-platform"] +default = ["macros"] +native = ["serde_json", "rand", "hex", "schemars", "ed25519-dalek/default", "ed25519-dalek/serde", "ed25519-dalek/rand_core", "clap", "jsonrpsee", "macros", "sov-modules-macros/native", "sov-state/native", "sov-sequencer"] macros = ["sov-modules-macros"] diff --git a/module-system/sov-modules-api/src/capabilities.rs b/module-system/sov-modules-api/src/capabilities.rs new file mode 100644 index 000000000..ebcca888d --- /dev/null +++ 
b/module-system/sov-modules-api/src/capabilities.rs @@ -0,0 +1,61 @@ +#![deny(missing_docs)] + +//! The rollup capabilities module defines "capabilities" that rollup must +//! provide if they wish to use the standard app template. +//! If you don't want to provide these capabilities, +//! you can bypass the Sovereign module-system completely +//! and write a state transition function from scratch. +//! [See here for docs](https://github.com/Sovereign-Labs/sovereign-sdk/blob/nightly/examples/demo-stf/README.md) + +use sov_rollup_interface::da::{BlobReaderTrait, DaSpec}; +use sov_state::WorkingSet; + +use crate::{Context, Spec}; + +/// Container type for mixing borrowed and owned blobs. +pub enum BlobRefOrOwned<'a, B: BlobReaderTrait> { + /// Mutable reference + Ref(&'a mut B), + /// Owned blob + Owned(B), +} + +impl<'a, B: BlobReaderTrait> BlobRefOrOwned<'a, B> { + /// Convenience method to get mutable reference to the blob + pub fn as_mut_ref(&mut self) -> &mut B { + match self { + BlobRefOrOwned::Ref(r) => r, + BlobRefOrOwned::Owned(ref mut blob) => blob, + } + } +} + +impl<'a, B: BlobReaderTrait> From for BlobRefOrOwned<'a, B> { + fn from(value: B) -> Self { + BlobRefOrOwned::Owned(value) + } +} + +impl<'a, B: BlobReaderTrait> From<&'a mut B> for BlobRefOrOwned<'a, B> { + fn from(value: &'a mut B) -> Self { + BlobRefOrOwned::Ref(value) + } +} + +/// BlobSelector decides which blobs to process in a current slot. +pub trait BlobSelector { + /// Context type + type Context: Context; + + /// It takes two arguments. + /// 1. `current_blobs` - blobs that were received from the network for the current slot. + /// 2. `working_set` - the working to access storage. + /// It returns a vector containing a mix of borrowed and owned blobs. + fn get_blobs_for_this_slot<'a, I>( + &self, + current_blobs: I, + working_set: &mut WorkingSet<::Storage>, + ) -> anyhow::Result>> + where + I: IntoIterator; +} diff --git a/module-system/sov-modules-api/src/cli.rs b/module-system/sov-modules-api/src/cli.rs new file mode 100644 index 000000000..bdd5ef1cf --- /dev/null +++ b/module-system/sov-modules-api/src/cli.rs @@ -0,0 +1,32 @@ +use crate::CliWallet; + +/// An argument to the cli containing a json string +#[derive(clap::Args, PartialEq, core::fmt::Debug, Clone, PartialOrd, Ord, Eq, Hash)] +pub struct JsonStringArg { + /// The json formatted transaction data + #[arg(long, help = "The JSON formatted transaction")] + pub json: String, +} + +/// An argument to the cli containing a path to a file +#[derive(clap::Args, PartialEq, core::fmt::Debug, Clone, PartialOrd, Ord, Eq, Hash)] +pub struct FileNameArg { + /// The json formatted transaction data + #[arg(long, help = "The JSON formatted transaction")] + pub path: String, +} + +impl TryFrom for JsonStringArg { + type Error = std::io::Error; + fn try_from(arg: FileNameArg) -> Result { + let json = std::fs::read_to_string(arg.path)?; + Ok(JsonStringArg { json }) + } +} + +pub trait CliFrontEnd +where + RT: CliWallet, +{ + type CliIntermediateRepr; +} diff --git a/module-system/sov-modules-api/src/default_context.rs b/module-system/sov-modules-api/src/default_context.rs index 4296572a4..a07f6b735 100644 --- a/module-system/sov-modules-api/src/default_context.rs +++ b/module-system/sov-modules-api/src/default_context.rs @@ -1,7 +1,7 @@ #[cfg(feature = "native")] use serde::{Deserialize, Serialize}; use sha2::Digest; -use sov_rollup_interface::AddressTrait; +use sov_rollup_interface::RollupAddress; #[cfg(feature = "native")] use sov_state::ProverStorage; use 
sov_state::{ArrayWitness, DefaultStorageSpec, ZkStorage}; @@ -12,6 +12,7 @@ use crate::default_signature::{DefaultPublicKey, DefaultSignature}; use crate::{Address, Context, PublicKey, Spec}; #[cfg(feature = "native")] +#[cfg_attr(feature = "arbitrary", derive(arbitrary::Arbitrary))] #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct DefaultContext { pub sender: Address, @@ -21,8 +22,8 @@ pub struct DefaultContext { impl Spec for DefaultContext { type Address = Address; type Storage = ProverStorage; - type PublicKey = DefaultPublicKey; type PrivateKey = DefaultPrivateKey; + type PublicKey = DefaultPublicKey; type Hasher = sha2::Sha256; type Signature = DefaultSignature; type Witness = ArrayWitness; @@ -67,7 +68,7 @@ impl Context for ZkDefaultContext { } impl PublicKey for DefaultPublicKey { - fn to_address(&self) -> A { + fn to_address(&self) -> A { let pub_key_hash = { let mut hasher = ::Hasher::new(); hasher.update(self.pub_key); diff --git a/module-system/sov-modules-api/src/default_signature.rs b/module-system/sov-modules-api/src/default_signature.rs index edfb1fb69..ab7be5a2b 100644 --- a/module-system/sov-modules-api/src/default_signature.rs +++ b/module-system/sov-modules-api/src/default_signature.rs @@ -1,20 +1,17 @@ +use std::hash::Hash; #[cfg(feature = "native")] use std::str::FromStr; use borsh::{BorshDeserialize, BorshSerialize}; -use ed25519_dalek::ed25519::signature::Signature as DalekSignatureTrait; use ed25519_dalek::{ - PublicKey as DalekPublicKey, Signature as DalekSignature, PUBLIC_KEY_LENGTH, SIGNATURE_LENGTH, + Signature as DalekSignature, VerifyingKey as DalekPublicKey, PUBLIC_KEY_LENGTH, }; -use serde::de::Error; -use serde::{Deserialize, Deserializer, Serialize, Serializer}; use crate::{SigVerificationError, Signature}; #[cfg(feature = "native")] pub mod private_key { - - use ed25519_dalek::{Keypair, SignatureError, Signer}; + use ed25519_dalek::{Signer, SigningKey, KEYPAIR_LENGTH, SECRET_KEY_LENGTH}; use rand::rngs::OsRng; use thiserror::Error; @@ -22,40 +19,52 @@ pub mod private_key { use crate::{Address, PrivateKey, PublicKey}; #[derive(Error, Debug)] - pub enum DefaultPrivateKeyHexDeserializationError { + pub enum DefaultPrivateKeyDeserializationError { #[error("Hex deserialization error")] FromHexError(#[from] hex::FromHexError), - #[error("PrivateKey deserialization error")] - PrivateKeyError(#[from] SignatureError), + #[error("KeyPairError deserialization error")] + KeyPairError(#[from] ed25519_dalek::SignatureError), + #[error("Invalid private key length: {actual}, expected {expected_1} or {expected_2}")] + InvalidPrivateKeyLength { + expected_1: usize, + expected_2: usize, + actual: usize, + }, } /// A private key for the default signature scheme. /// This struct also stores the corresponding public key. 
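As background for the migration below from ed25519-dalek 1.x (`Keypair`, `PublicKey`) to 2.x (`SigningKey`, `VerifyingKey`), here is a small, self-contained sketch of the 2.0 API surface this code now targets. It assumes rand 0.8's `OsRng` and the `rand_core` feature of `ed25519-dalek`, as enabled in the updated `Cargo.toml`; it is an illustration, not code from this patch.

```rust
use ed25519_dalek::{Signature, Signer, SigningKey, Verifier, VerifyingKey};
use rand::rngs::OsRng;

// Generate a key pair, sign a message, and verify the signature with the
// renamed 2.0 types (`SigningKey` replaces `Keypair`, `VerifyingKey` replaces
// the old `PublicKey`).
fn dalek_v2_roundtrip(msg: &[u8]) -> bool {
    let mut csprng = OsRng;
    let signing_key: SigningKey = SigningKey::generate(&mut csprng);
    let verifying_key: VerifyingKey = signing_key.verifying_key();
    let signature: Signature = signing_key.sign(msg);
    verifying_key.verify(msg, &signature).is_ok()
}
```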
+ #[derive(Clone, serde::Serialize, serde::Deserialize)] pub struct DefaultPrivateKey { - key_pair: Keypair, + key_pair: SigningKey, } - impl core::fmt::Debug for DefaultPrivateKey { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - f.debug_struct("DefaultPrivateKey") - .field("public_key", &self.key_pair.public) - .field("private_key", &"***REDACTED***") - .finish() + impl DefaultPrivateKey { + // This is private method and panics if input slice has incorrect length + fn try_from_keypair(value: &[u8]) -> Result { + let value: [u8; KEYPAIR_LENGTH] = value + .try_into() + .expect("incorrect usage of `try_from_keypair`, check input length"); + let key_pair = SigningKey::from_keypair_bytes(&value)?; + Ok(Self { key_pair }) } - } - impl serde::Serialize for DefaultPrivateKey { - fn serialize(&self, serializer: S) -> Result { - serializer.serialize_bytes(&self.key_pair.to_bytes()) + // This is private method and panics if input slice has incorrect length + fn try_from_private_key(value: &[u8]) -> Self { + let value: [u8; SECRET_KEY_LENGTH] = value + .try_into() + .expect("incorrect usage of `try_from_private_key`, check input length"); + let key_pair = SigningKey::from_bytes(&value); + Self { key_pair } } } - impl<'de> serde::Deserialize<'de> for DefaultPrivateKey { - fn deserialize>(deserializer: D) -> Result { - use serde::de::Error; - let bytes = <&'de [u8] as serde::Deserialize>::deserialize(deserializer)?; - let key_pair = Keypair::from_bytes(bytes).map_err(D::Error::custom)?; - Ok(Self { key_pair }) + impl core::fmt::Debug for DefaultPrivateKey { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.debug_struct("DefaultPrivateKey") + .field("public_key", &self.key_pair.verifying_key()) + .field("private_key", &"***REDACTED***") + .finish() } } @@ -63,9 +72,20 @@ pub mod private_key { type Error = anyhow::Error; fn try_from(value: &[u8]) -> Result { - let key_pair = Keypair::from_bytes(value)?; - key_pair.secret.to_bytes(); - Ok(Self { key_pair }) + if value.len() == KEYPAIR_LENGTH { + Self::try_from_keypair(value).map_err(|e| e.into()) + } else if value.len() == SECRET_KEY_LENGTH { + Ok(Self::try_from_private_key(value)) + } else { + let err = Err( + DefaultPrivateKeyDeserializationError::InvalidPrivateKeyLength { + expected_1: SECRET_KEY_LENGTH, + expected_2: KEYPAIR_LENGTH, + actual: value.len(), + }, + ); + err.map_err(|e| e.into()) + } } } @@ -78,13 +98,13 @@ pub mod private_key { let mut csprng = OsRng; Self { - key_pair: Keypair::generate(&mut csprng), + key_pair: SigningKey::generate(&mut csprng), } } fn pub_key(&self) -> Self::PublicKey { DefaultPublicKey { - pub_key: self.key_pair.public, + pub_key: self.key_pair.verifying_key(), } } @@ -100,20 +120,56 @@ pub mod private_key { hex::encode(self.key_pair.to_bytes()) } - pub fn from_hex(hex: &str) -> Result { + pub fn from_hex(hex: &str) -> anyhow::Result { let bytes = hex::decode(hex)?; - Ok(Self { - key_pair: Keypair::from_bytes(&bytes)?, - }) + Self::try_from(&bytes[..]) } pub fn default_address(&self) -> Address { self.pub_key().to_address::
() } } + + #[cfg(feature = "arbitrary")] + impl<'a> arbitrary::Arbitrary<'a> for DefaultPrivateKey { + fn arbitrary(u: &mut arbitrary::Unstructured<'a>) -> arbitrary::Result { + use rand::rngs::StdRng; + use rand::SeedableRng; + + // it is important to generate the secret deterministically from the arbitrary argument + // so keys and signatures will be reproducible for a given seed. + // this unlocks fuzzy replay + let seed = <[u8; 32]>::arbitrary(u)?; + let rng = &mut StdRng::from_seed(seed); + let key_pair = SigningKey::generate(rng); + + Ok(Self { key_pair }) + } + } + + #[cfg(feature = "arbitrary")] + impl<'a> arbitrary::Arbitrary<'a> for DefaultPublicKey { + fn arbitrary(u: &mut arbitrary::Unstructured<'a>) -> arbitrary::Result { + DefaultPrivateKey::arbitrary(u).map(|p| p.pub_key()) + } + } + + #[cfg(feature = "arbitrary")] + impl<'a> arbitrary::Arbitrary<'a> for DefaultSignature { + fn arbitrary(u: &mut arbitrary::Unstructured<'a>) -> arbitrary::Result { + // the secret/public pair is lost; it is impossible to verify this signature + // to run a verification, generate the keys+payload individually + let payload_len = u.arbitrary_len::()?; + let payload = u.bytes(payload_len)?; + DefaultPrivateKey::arbitrary(u).map(|s| s.sign(payload)) + } + } } -#[cfg_attr(feature = "native", derive(schemars::JsonSchema))] +#[cfg_attr( + feature = "native", + derive(serde::Serialize, serde::Deserialize, schemars::JsonSchema) +)] #[derive(PartialEq, Eq, Clone, Debug)] pub struct DefaultPublicKey { #[cfg_attr( @@ -123,26 +179,9 @@ pub struct DefaultPublicKey { pub(crate) pub_key: DalekPublicKey, } -impl Serialize for DefaultPublicKey { - fn serialize(&self, serializer: S) -> Result - where - S: Serializer, - { - let s = self.pub_key.as_bytes(); - serializer.serialize_bytes(s) - } -} - -impl<'de> Deserialize<'de> for DefaultPublicKey { - fn deserialize(deserializer: D) -> Result - where - D: Deserializer<'de>, - { - let bytes = as serde::Deserialize>::deserialize(deserializer)?; - let dpk = DalekPublicKey::from_bytes(&bytes).or(Err(D::Error::custom( - "Couldn't convert bytes to ed25519 public key", - )))?; - Ok(DefaultPublicKey { pub_key: dpk }) +impl Hash for DefaultPublicKey { + fn hash(&self, state: &mut H) { + self.pub_key.as_bytes().hash(state); } } @@ -163,7 +202,10 @@ impl BorshSerialize for DefaultPublicKey { } } -#[cfg_attr(feature = "native", derive(schemars::JsonSchema))] +#[cfg_attr( + feature = "native", + derive(serde::Serialize, serde::Deserialize, schemars::JsonSchema) +)] #[derive(PartialEq, Eq, Debug, Clone)] pub struct DefaultSignature { #[cfg_attr( @@ -173,43 +215,20 @@ pub struct DefaultSignature { pub msg_sig: DalekSignature, } -impl Serialize for DefaultSignature { - fn serialize(&self, serializer: S) -> Result - where - S: Serializer, - { - let s = self.msg_sig.as_bytes(); - serializer.serialize_bytes(s) - } -} - -impl<'de> Deserialize<'de> for DefaultSignature { - fn deserialize(deserializer: D) -> Result - where - D: Deserializer<'de>, - { - let bytes = as serde::Deserialize>::deserialize(deserializer)?; - let dsig = DalekSignature::from_bytes(&bytes).or(Err(D::Error::custom( - "Couldn't convert bytes to ed25519 signature", - )))?; - Ok(DefaultSignature { msg_sig: dsig }) - } -} - impl BorshDeserialize for DefaultSignature { fn deserialize_reader(reader: &mut R) -> std::io::Result { - let mut buffer = [0; SIGNATURE_LENGTH]; + let mut buffer = [0; DalekSignature::BYTE_SIZE]; reader.read_exact(&mut buffer)?; - let msg_sig = 
DalekSignature::from_bytes(&buffer).map_err(map_error)?; - - Ok(Self { msg_sig }) + Ok(Self { + msg_sig: DalekSignature::from_bytes(&buffer), + }) } } impl BorshSerialize for DefaultSignature { fn serialize(&self, writer: &mut W) -> std::io::Result<()> { - writer.write_all(self.msg_sig.as_bytes()) + writer.write_all(&self.msg_sig.to_bytes()) } } @@ -239,6 +258,11 @@ impl FromStr for DefaultPublicKey { fn from_str(s: &str) -> Result { let bytes = hex::decode(s)?; + + let bytes: [u8; PUBLIC_KEY_LENGTH] = bytes + .try_into() + .map_err(|_| anyhow::anyhow!("Invalid public key size"))?; + let pub_key = DalekPublicKey::from_bytes(&bytes) .map_err(|_| anyhow::anyhow!("Invalid public key"))?; Ok(DefaultPublicKey { pub_key }) @@ -251,21 +275,40 @@ impl FromStr for DefaultSignature { fn from_str(s: &str) -> Result { let bytes = hex::decode(s)?; - let msg_sig = - DalekSignature::from_bytes(&bytes).map_err(|_| anyhow::anyhow!("Invalid signature"))?; - Ok(DefaultSignature { msg_sig }) + + let bytes: ed25519_dalek::ed25519::SignatureBytes = bytes + .try_into() + .map_err(|_| anyhow::anyhow!("Invalid signature"))?; + + Ok(DefaultSignature { + msg_sig: DalekSignature::from_bytes(&bytes), + }) } } #[test] #[cfg(feature = "native")] -fn test_privatekey_serde() { +fn test_privatekey_serde_bincode() { use self::private_key::DefaultPrivateKey; use crate::PrivateKey; let key_pair = DefaultPrivateKey::generate(); let serialized = bincode::serialize(&key_pair).expect("Serialization to vec is infallible"); let output = bincode::deserialize::(&serialized) + .expect("SigningKey is serialized correctly"); + + assert_eq!(key_pair.as_hex(), output.as_hex()); +} + +#[test] +#[cfg(feature = "native")] +fn test_privatekey_serde_json() { + use self::private_key::DefaultPrivateKey; + use crate::PrivateKey; + + let key_pair = DefaultPrivateKey::generate(); + let serialized = serde_json::to_vec(&key_pair).expect("Serialization to vec is infallible"); + let output = serde_json::from_slice::(&serialized) .expect("Keypair is serialized correctly"); assert_eq!(key_pair.as_hex(), output.as_hex()); diff --git a/module-system/sov-modules-api/src/dispatch.rs b/module-system/sov-modules-api/src/dispatch.rs index 8432615c8..2131f5a9e 100644 --- a/module-system/sov-modules-api/src/dispatch.rs +++ b/module-system/sov-modules-api/src/dispatch.rs @@ -1,6 +1,6 @@ use sov_state::WorkingSet; -use crate::{CallResponse, Context, Error, Spec}; +use crate::{CallResponse, Context, Error, Module, Spec}; /// Methods from this trait should be called only once during the rollup deployment. pub trait Genesis { @@ -37,10 +37,18 @@ pub trait DispatchCall { fn module_address(&self, message: &Self::Decodable) -> &::Address; } +/// A trait that specifies how a runtime should encode the data for each module +pub trait EncodeCall { + /// The encoding function + fn encode_call(data: M::CallMessage) -> Vec; +} + /// A trait that needs to be implemented for a *runtime* to be used with the CLI wallet #[cfg(feature = "native")] pub trait CliWallet: DispatchCall { /// The type that is used to represent this type in the CLI. Typically, - /// this type implements the clap::Subcommand trait. - type CliStringRepr: Into<::Decodable>; + /// this type implements the clap::Subcommand trait. 
This type is generic to + /// allow for different representations of the same type in the interface; a + /// typical end-usage will impl traits only in the case where `CliStringRepr: Into::RuntimeCall` + type CliStringRepr; } diff --git a/module-system/sov-modules-api/src/hooks.rs b/module-system/sov-modules-api/src/hooks.rs index 4e1e55695..aedf68106 100644 --- a/module-system/sov-modules-api/src/hooks.rs +++ b/module-system/sov-modules-api/src/hooks.rs @@ -1,4 +1,5 @@ -use sov_rollup_interface::da::BlobReaderTrait; +use sov_rollup_interface::da::{BlobReaderTrait, DaSpec}; +use sov_rollup_interface::services::da::SlotData; use sov_state::WorkingSet; use crate::transaction::Transaction; @@ -30,7 +31,7 @@ pub trait TxHooks { /// Hooks related to the Sequencer functionality. /// In essence, the sequencer locks a bond at the beginning of the `StateTransitionFunction::apply_blob`, /// and is rewarded once a blob of transactions is processed. -pub trait ApplyBlobHooks { +pub trait ApplyBlobHooks { type Context: Context; type BlobResult; @@ -38,7 +39,7 @@ pub trait ApplyBlobHooks { /// If this hook returns Err, batch is not applied fn begin_blob_hook( &self, - blob: &mut impl BlobReaderTrait, + blob: &mut B, working_set: &mut WorkingSet<::Storage>, ) -> anyhow::Result<()>; @@ -50,3 +51,20 @@ pub trait ApplyBlobHooks { working_set: &mut WorkingSet<::Storage>, ) -> anyhow::Result<()>; } + +/// Hooks that execute during the `StateTransitionFunction::begin_slot` and `end_slot` functions. +pub trait SlotHooks { + type Context: Context; + + fn begin_slot_hook( + &self, + slot_data: &impl SlotData, + working_set: &mut WorkingSet<::Storage>, + ); + + fn end_slot_hook( + &self, + root_hash: [u8; 32], + working_set: &mut WorkingSet<::Storage>, + ); +} diff --git a/module-system/sov-modules-api/src/lib.rs b/module-system/sov-modules-api/src/lib.rs index 806235ccf..a62a281da 100644 --- a/module-system/sov-modules-api/src/lib.rs +++ b/module-system/sov-modules-api/src/lib.rs @@ -1,6 +1,9 @@ -#![feature(associated_type_defaults)] +#![doc = include_str!("../README.md")] mod bech32; +pub mod capabilities; +#[cfg(feature = "native")] +pub mod cli; pub mod default_context; pub mod default_signature; mod dispatch; @@ -10,7 +13,6 @@ pub mod hooks; mod prefix; mod response; mod serde_address; -pub mod test_utils; #[cfg(test)] mod tests; pub mod transaction; @@ -39,6 +41,7 @@ pub mod macros { use core::fmt::{self, Debug, Display}; use std::collections::{HashMap, HashSet}; +use std::hash::Hash; use std::str::FromStr; use borsh::{BorshDeserialize, BorshSerialize}; @@ -46,12 +49,12 @@ use borsh::{BorshDeserialize, BorshSerialize}; pub use clap; #[cfg(feature = "native")] pub use dispatch::CliWallet; -pub use dispatch::{DispatchCall, Genesis}; +pub use dispatch::{DispatchCall, EncodeCall, Genesis}; pub use error::Error; pub use prefix::Prefix; pub use response::CallResponse; use serde::{Deserialize, Serialize}; -pub use sov_rollup_interface::{digest, AddressTrait}; +pub use sov_rollup_interface::{digest, BasicAddress, RollupAddress}; use sov_state::{Storage, Witness, WorkingSet}; use thiserror::Error; @@ -63,10 +66,12 @@ impl AsRef<[u8]> for Address { } } -impl AddressTrait for Address {} +impl BasicAddress for Address {} +impl RollupAddress for Address {} #[cfg_attr(feature = "native", derive(schemars::JsonSchema))] -#[derive(PartialEq, Clone, Eq, borsh::BorshDeserialize, borsh::BorshSerialize, Hash)] +#[cfg_attr(feature = "arbitrary", derive(arbitrary::Arbitrary))] +#[derive(PartialEq, Clone, Copy, Eq, 
borsh::BorshDeserialize, borsh::BorshSerialize, Hash)] pub struct Address { addr: [u8; 32], } @@ -135,11 +140,12 @@ pub trait Signature { /// A type that can't be instantiated. #[derive(Debug, PartialEq, Serialize, Deserialize)] +#[cfg_attr(feature = "native", derive(schemars::JsonSchema))] pub enum NonInstantiable {} /// PublicKey used in the Module System. pub trait PublicKey { - fn to_address(&self) -> A; + fn to_address(&self) -> A; } /// A PrivateKey used in the Module System. @@ -150,7 +156,7 @@ pub trait PrivateKey { fn generate() -> Self; fn pub_key(&self) -> Self::PublicKey; fn sign(&self, msg: &[u8]) -> Self::Signature; - fn to_address(&self) -> A { + fn to_address(&self) -> A { self.pub_key().to_address::() } } @@ -167,7 +173,7 @@ pub trait PrivateKey { pub trait Spec { /// The Address type used on the rollup. Typically calculated as the hash of a public key. #[cfg(feature = "native")] - type Address: AddressTrait + type Address: RollupAddress + BorshSerialize + BorshDeserialize + Sync @@ -180,7 +186,7 @@ pub trait Spec { /// The Address type used on the rollup. Typically calculated as the hash of a public key. #[cfg(not(feature = "native"))] - type Address: AddressTrait + BorshSerialize + BorshDeserialize; + type Address: RollupAddress + BorshSerialize + BorshDeserialize; /// Authenticated state storage used by the rollup. Typically some variant of a merkle-patricia trie. type Storage: Storage + Clone + Send + Sync; @@ -190,6 +196,7 @@ pub trait Spec { type PublicKey: borsh::BorshDeserialize + borsh::BorshSerialize + Eq + + Hash + Clone + Debug + PublicKey @@ -214,6 +221,7 @@ pub trait Spec { type PublicKey: borsh::BorshDeserialize + borsh::BorshSerialize + Eq + + Hash + Clone + Debug + Send @@ -295,7 +303,7 @@ pub trait Module { type Config; /// Module defined argument to the call method. - type CallMessage: Debug + BorshSerialize + BorshDeserialize = NonInstantiable; + type CallMessage: Debug + BorshSerialize + BorshDeserialize; /// Genesis is called when a rollup is deployed and can be used to set initial state values in the module. fn genesis( diff --git a/module-system/sov-modules-api/src/response.rs b/module-system/sov-modules-api/src/response.rs index 1780b9fbb..19c183784 100644 --- a/module-system/sov-modules-api/src/response.rs +++ b/module-system/sov-modules-api/src/response.rs @@ -1,3 +1,3 @@ /// Response type for the `Module::call` method. 
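To make the `PrivateKey`/`Spec` bounds above concrete, here is a short usage sketch with the default context types (requires the crate's `native` feature). It is illustrative only and not part of the patch.

```rust
use sov_modules_api::default_context::DefaultContext;
use sov_modules_api::default_signature::private_key::DefaultPrivateKey;
use sov_modules_api::{PrivateKey, Spec};

type Addr = <DefaultContext as Spec>::Address;
type Sig = <DefaultContext as Spec>::Signature;

// Generate a throwaway key, derive the rollup address from its public key,
// and sign an arbitrary payload.
fn sign_with_fresh_key(payload: &[u8]) -> (Addr, Sig) {
    let key = DefaultPrivateKey::generate();
    let address = key.to_address::<Addr>();
    let signature = key.sign(payload);
    (address, signature)
}
```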
-#[derive(Default)] +#[derive(Default, Debug)] pub struct CallResponse {} diff --git a/module-system/sov-modules-api/src/test_utils.rs b/module-system/sov-modules-api/src/test_utils.rs deleted file mode 100644 index 199ef0c5e..000000000 --- a/module-system/sov-modules-api/src/test_utils.rs +++ /dev/null @@ -1,6 +0,0 @@ -use crate::{Context, Digest, Spec}; - -pub fn generate_address(key: &str) -> ::Address { - let hash: [u8; 32] = ::Hasher::digest(key.as_bytes()).into(); - C::Address::from(hash) -} diff --git a/module-system/sov-modules-api/src/tests.rs b/module-system/sov-modules-api/src/tests.rs index 3b1517783..84085f5c0 100644 --- a/module-system/sov-modules-api/src/tests.rs +++ b/module-system/sov-modules-api/src/tests.rs @@ -71,11 +71,11 @@ fn test_sorting_modules() { }; let module_b = Module { address: Address::from([2; 32]), - dependencies: vec![module_a.address.clone()], + dependencies: vec![module_a.address], }; let module_c = Module { address: Address::from([3; 32]), - dependencies: vec![module_a.address.clone(), module_b.address.clone()], + dependencies: vec![module_a.address, module_b.address], }; let modules: Vec<(&dyn ModuleInfo, i32)> = @@ -91,11 +91,11 @@ fn test_sorting_modules_missing_module() { let module_a_address = Address::from([1; 32]); let module_b = Module { address: Address::from([2; 32]), - dependencies: vec![module_a_address.clone()], + dependencies: vec![module_a_address], }; let module_c = Module { address: Address::from([3; 32]), - dependencies: vec![module_a_address, module_b.address.clone()], + dependencies: vec![module_a_address, module_b.address], }; let modules: Vec<(&dyn ModuleInfo, i32)> = @@ -117,15 +117,15 @@ fn test_sorting_modules_cycle() { }; let module_b = Module { address: Address::from([2; 32]), - dependencies: vec![module_a.address.clone()], + dependencies: vec![module_a.address], }; let module_d = Module { address: Address::from([4; 32]), - dependencies: vec![module_e_address.clone()], + dependencies: vec![module_e_address], }; let module_e = Module { address: module_e_address, - dependencies: vec![module_a.address.clone(), module_d.address.clone()], + dependencies: vec![module_a.address, module_d.address], }; let modules: Vec<(&dyn ModuleInfo, i32)> = vec![ @@ -150,7 +150,7 @@ fn test_sorting_modules_duplicate() { }; let module_b = Module { address: Address::from([2; 32]), - dependencies: vec![module_a.address.clone()], + dependencies: vec![module_a.address], }; let module_a2 = Module { address: Address::from([1; 32]), diff --git a/module-system/sov-modules-api/src/transaction.rs b/module-system/sov-modules-api/src/transaction.rs index 011a0d501..b4999e2d1 100644 --- a/module-system/sov-modules-api/src/transaction.rs +++ b/module-system/sov-modules-api/src/transaction.rs @@ -1,10 +1,9 @@ +#[cfg(all(target_os = "zkvm", feature = "bench"))] +use zk_cycle_macros::cycle_tracker; + #[cfg(feature = "native")] -use crate::default_context::DefaultContext; -#[cfg(feature = "native")] -use crate::default_signature::private_key::DefaultPrivateKey; +use crate::PrivateKey; use crate::{Context, Signature}; -#[cfg(feature = "native")] -use crate::{PrivateKey, Spec}; /// A Transaction object that is compatible with the module-system/sov-default-stf. #[derive(Debug, PartialEq, Eq, Clone, borsh::BorshDeserialize, borsh::BorshSerialize)] @@ -33,6 +32,7 @@ impl Transaction { } /// Check whether the transaction has been signed correctly. 
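    // (The payload checked here is the runtime message with the little-endian nonce
    // appended, mirroring how `new_signed_tx` lays it out further below.)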
+ #[cfg_attr(all(target_os = "zkvm", feature = "bench"), cycle_tracker)] pub fn verify(&self) -> anyhow::Result<()> { let mut serialized_tx = Vec::with_capacity(self.runtime_msg().len() + std::mem::size_of::()); @@ -45,19 +45,19 @@ impl Transaction { } #[cfg(feature = "native")] -impl Transaction { +impl Transaction { /// New signed transaction. - pub fn new_signed_tx(priv_key: &DefaultPrivateKey, mut message: Vec, nonce: u64) -> Self { + pub fn new_signed_tx(priv_key: &C::PrivateKey, mut message: Vec, nonce: u64) -> Self { // Since we own the message already, try to add the serialized nonce in-place. // This lets us avoid a copy if the message vec has at least 8 bytes of extra capacity. - let orignal_length = message.len(); + let original_length = message.len(); message.extend_from_slice(&nonce.to_le_bytes()); let pub_key = priv_key.pub_key(); let signature = priv_key.sign(&message); // Don't forget to truncate the message back to its original length! - message.truncate(orignal_length); + message.truncate(original_length); Self { signature, @@ -69,9 +69,9 @@ impl Transaction { /// New transaction. pub fn new( - pub_key: ::PublicKey, + pub_key: C::PublicKey, message: Vec, - signature: ::Signature, + signature: C::Signature, nonce: u64, ) -> Self { Self { diff --git a/module-system/sov-modules-api/src/utils.rs b/module-system/sov-modules-api/src/utils.rs index bb5d969f2..003e4a858 100644 --- a/module-system/sov-modules-api/src/utils.rs +++ b/module-system/sov-modules-api/src/utils.rs @@ -1,10 +1,8 @@ -use jsonrpsee::types::ErrorObjectOwned; +pub use sov_sequencer::utils::to_jsonrpsee_error_object; -/// Creates an jsonrpsee ErrorObject -pub fn to_jsonrpsee_error_object(err: impl ToString, message: &str) -> ErrorObjectOwned { - ErrorObjectOwned::owned( - jsonrpsee::types::error::UNKNOWN_ERROR_CODE, - message, - Some(err.to_string()), - ) +use crate::{Context, Digest, Spec}; + +pub fn generate_address(key: &str) -> ::Address { + let hash: [u8; 32] = ::Hasher::digest(key.as_bytes()).into(); + C::Address::from(hash) } diff --git a/module-system/sov-modules-macros/Cargo.toml b/module-system/sov-modules-macros/Cargo.toml index f75d180ad..658c03c7e 100644 --- a/module-system/sov-modules-macros/Cargo.toml +++ b/module-system/sov-modules-macros/Cargo.toml @@ -20,15 +20,18 @@ name = "tests" path = "tests/all_tests.rs" [dev-dependencies] -serde_json = "1" -jsonrpsee = { workspace = true, features = ["macros", "http-client", "server"]} + +clap = { workspace = true } +jsonrpsee = { workspace = true, features = ["macros", "http-client", "server"] } +serde = { workspace = true } +serde_json = { workspace = true } +tempfile = { workspace = true } trybuild = "1.0" -sov-modules-api = { path = "../sov-modules-api", version = "0.1", default-features = false } -sov-state = { path = "../sov-state", version = "0.1", default-features = false } +sov-modules-api = { path = "../sov-modules-api", version = "0.1" } +sov-state = { path = "../sov-state", version = "0.1" } sov-bank = { path = "../module-implementations/sov-bank", version = "0.1", features = ["native"] } -serde = { workspace = true } -clap = { workspace = true } +sov-modules-macros = { path = ".", features = ["native"] } [dependencies] anyhow = { workspace = true } @@ -41,4 +44,4 @@ syn = { version = "1.0", features = ["full"] } [features] default = [] -native = ["dep:jsonrpsee"] +native = ["jsonrpsee"] diff --git a/module-system/sov-modules-macros/src/cli_parser.rs b/module-system/sov-modules-macros/src/cli_parser.rs index 320a86d10..100ca6a2c 100644 --- 
a/module-system/sov-modules-macros/src/cli_parser.rs +++ b/module-system/sov-modules-macros/src/cli_parser.rs @@ -1,7 +1,7 @@ use quote::{format_ident, quote}; -use syn::{Data, DataEnum, DeriveInput, Fields, Ident, Type}; +use syn::{Data, DataEnum, DeriveInput, Fields, Ident}; -use crate::common::{extract_ident, StructFieldExtractor}; +use crate::common::StructFieldExtractor; pub(crate) struct CliParserMacro { field_extractor: StructFieldExtractor, @@ -28,11 +28,11 @@ impl CliParserMacro { let (_, ty_generics, _) = generics.split_for_impl(); - let mut module_command_arms = vec![]; - // let mut module_args = vec![]; - let mut match_arms = vec![]; - let mut parse_match_arms = vec![]; - let mut convert_match_arms = vec![]; + let mut module_json_parser_arms = vec![]; + let mut module_message_arms = vec![]; + let mut try_from_subcommand_match_arms = vec![]; + let mut try_map_match_arms = vec![]; + let mut from_json_match_arms = vec![]; let mut deserialize_constraints: Vec = vec![]; // Loop over the fields @@ -50,43 +50,38 @@ impl CliParserMacro { let module_path = type_path.path.clone(); let field_name = field.ident.clone(); let doc_str = format!("Generates a transaction for the `{}` module", &field_name); - module_command_arms.push(quote! { - #[clap(subcommand)] - #[doc = #doc_str] - #field_name(<<#module_path as ::sov_modules_api::Module>::CallMessage as ::sov_modules_api::CliWalletArg>::CliStringRepr) - }); - let field_name_string = field_name.to_string(); - let encode_function_name = format_ident!("encode_{}_call", field_name_string); + module_json_parser_arms.push(quote! { + #[doc = #doc_str] + #field_name { + #[clap(flatten)] + contents: __Inner + } + }); - let type_name_string = match &field.ty { - Type::Path(type_path) => extract_ident(type_path).to_string(), - _ => { - return Err(syn::Error::new_spanned( - field.ident.clone(), - "expected a type path", - )) + module_message_arms.push(quote! { + #[doc = #doc_str] + #field_name { + contents: __Inner } - }; + }); - // Build the `match` arm for the CLI's `clap` parse function - parse_match_arms.push(quote! { - CliTransactionParser::#field_name(mod_args) => { - let command_as_call_message: <#module_path as ::sov_modules_api::Module>::CallMessage = mod_args.into(); - #ident:: #ty_generics ::#encode_function_name( - command_as_call_message + from_json_match_arms.push(quote! { + RuntimeMessage::#field_name{ contents } => { + ::serde_json::from_str::<<#module_path as ::sov_modules_api::Module>::CallMessage>(&contents.json).map( + // Use the enum variant as a constructor + <#ident #ty_generics as ::sov_modules_api::DispatchCall>::Decodable:: #field_name ) }, }); - convert_match_arms.push(quote! { - CliTransactionParser::#field_name(mod_args) => { - let command_as_call_message: <#module_path as ::sov_modules_api::Module>::CallMessage = mod_args.into(); - <#ident #ty_generics as ::sov_modules_api::DispatchCall>::Decodable:: #field_name( - command_as_call_message - ) - }, - }); + try_map_match_arms.push(quote! { + RuntimeMessage::#field_name { contents } => RuntimeMessage::#field_name { contents: contents.try_into()? }, + }); + + try_from_subcommand_match_arms.push(quote! { + RuntimeSubcommand::#field_name { contents } => RuntimeMessage::#field_name { contents: contents.try_into()? 
}, + }); // Build a constraint requiring that all call messages support serde deserialization let deserialization_constraint = { @@ -101,19 +96,11 @@ impl CliParserMacro { }) }; deserialize_constraints.push(deserialization_constraint); - - // Build the `match` arms for the CLI's json parser - match_arms.push(quote! { - #type_name_string => Ok({ - #ident:: #ty_generics ::#encode_function_name( - ::serde_json::from_str::<<#module_path as ::sov_modules_api::Module>::CallMessage>(&call_data)? - ) - }), - }); } } let (impl_generics, ty_generics, where_clause) = generics.split_for_impl(); + let where_clause_with_deserialize_bounds = match where_clause { Some(where_clause) => { let mut result = where_clause.clone(); @@ -126,46 +113,122 @@ impl CliParserMacro { where #(#deserialize_constraints),* }, }; + + // The generics from the `runtime`, with an additional `__Inner` generic + // which holds the clap arguments. + let generics_with_inner = { + let mut generics = generics.clone(); + generics.params.insert(0, syn::parse_quote! {__Inner }); + generics.where_clause = match generics.where_clause { + Some(where_clause) => { + let mut result = where_clause; + result + .predicates + .push(syn::parse_quote! { __Inner: ::clap::Args }); + Some(result) + } + None => syn::parse_quote! { + where __Inner: ::clap::Args + }, + }; + generics + }; + let (impl_generics_with_inner, ty_generics_with_inner, where_clause_with_inner_as_clap) = + generics_with_inner.split_for_impl(); + + // Generics identical to generics_with_inner, but with the `__Inner` type renamed to `__Dest`. + // This type is used in the the try_map conversion + let generics_for_dest = { + let mut generics = generics.clone(); + generics.params.insert(0, syn::parse_quote! {__Dest}); + generics + }; + let (_, ty_generics_for_dest, _) = generics_for_dest.split_for_impl(); + + let generics_with_inner_and_dest = { + let mut generics = generics_with_inner.clone(); + generics.params.insert(0, syn::parse_quote! {__Dest}); + if let Some(c) = generics.where_clause.as_mut() { + c.predicates + .push(syn::parse_quote! { __Dest: ::core::convert::TryFrom<__Inner> }) + } + generics + }; + let (impl_generics_with_inner_and_dest, _, where_clause_with_inner_clap_and_try_from) = + generics_with_inner_and_dest.split_for_impl(); + + // Generics identical to `generics_with_inner`, with the `__Inner` type bound to `JsonStringArg` + let generics_for_json = { + let mut generics = generics.clone(); + generics + .params + .insert(0, syn::parse_quote! {__JsonStringArg}); + generics + }; + let (_impl_generics_for_json, ty_generics_for_json, _) = generics_for_json.split_for_impl(); + // Merge and generate the new code let expanded = quote! { - /// Parse a transaction from command-line arguments + + + /// An enum expressing the subcommands available to this runtime. 
Contains + /// one subcommand for each module, except modules annotated with the #[cli_skip] attribute #[derive(::clap::Parser)] #[allow(non_camel_case_types)] - pub enum CliTransactionParser #impl_generics #where_clause { - #( #module_command_arms, )* + pub enum RuntimeSubcommand #impl_generics_with_inner #where_clause_with_inner_as_clap { + #( #module_json_parser_arms, )* + #[clap(skip)] + #[doc(hidden)] + ____phantom(::std::marker::PhantomData<#ident #ty_generics>) } - /// Borsh encode a transaction parsed from the CLI - pub fn borsh_encode_cli_tx #impl_generics (cmd: CliTransactionParser #ty_generics) -> ::std::vec::Vec - #where_clause { - use ::borsh::BorshSerialize as _; - match cmd { - #(#parse_match_arms)* - _ => panic!("unknown module name"), - } + impl #impl_generics_with_inner ::sov_modules_api::cli::CliFrontEnd<#ident #ty_generics> for RuntimeSubcommand #ty_generics_with_inner #where_clause_with_deserialize_bounds, __Inner: ::clap::Args { + type CliIntermediateRepr<__Dest> = RuntimeMessage #ty_generics_for_dest; } - impl #impl_generics From for <#ident #ty_generics as ::sov_modules_api::DispatchCall>::Decodable #where_clause { - fn from(cmd: CliTransactionParser #ty_generics) -> Self { - match cmd { - #(#convert_match_arms)* + /// An intermediate enum between the RuntimeSubcommand (which must implement `clap`) and the + /// final RT::Decodable type. Like the RuntimeSubcommand, this type contains one variant for each cli-enabled module. + #[allow(non_camel_case_types)] + pub enum RuntimeMessage #impl_generics_with_inner #where_clause { + #( #module_message_arms, )* + #[doc(hidden)] + ____phantom(::std::marker::PhantomData<#ident #ty_generics>) + } + + use ::sov_modules_api::cli::JsonStringArg as __JsonStringArg; + // Implement TryFrom> for the runtime's call message. Uses serde_json to deserialize the json string. + impl #impl_generics ::core::convert::TryFrom for <#ident #ty_generics as ::sov_modules_api::DispatchCall>::Decodable #where_clause_with_deserialize_bounds { + type Error = ::serde_json::Error; + fn try_from(item: RuntimeMessage #ty_generics_for_json ) -> Result { + match item { + #( #from_json_match_arms )* + RuntimeMessage::____phantom(_) => unreachable!(), } } } - impl #impl_generics sov_modules_api::CliWallet for #ident #ty_generics #where_clause { - type CliStringRepr = CliTransactionParser #ty_generics; + // Allow arbitrary conversions from the `clap`-enabled `RuntimeSubcommand` to the less constrained `RuntimeMessage` enum. + // This allows us to (for example), accept a `JsonStringArgs` or a `FileNameArgs` as a CLI argument, and then + // use fallible logic to convert it into the final JSON string to be parsed into a callmessage. + impl #impl_generics_with_inner_and_dest ::core::convert::TryFrom for RuntimeMessage #ty_generics_for_dest #where_clause_with_inner_clap_and_try_from { + type Error = <__Dest as ::core::convert::TryFrom<__Inner>>::Error; + /// Convert a `RuntimeSubcommand` to a `RuntimeSubcommand` with a different `__Inner` type using `try_from`. + /// + /// This method is called `try_map` instead of `try_from` to avoid conflicting with the `TryFrom` trait in + /// the corner case where the source and destination types are the same. + fn try_from(item: RuntimeSubcommand #ty_generics_with_inner ) -> Result + { + Ok(match item { + #( #try_from_subcommand_match_arms )* + RuntimeSubcommand::____phantom(_) => unreachable!(), + }) + } } - /// Attempts to parse the provided call data as a [`sov_modules_api::Module::CallMessage`] for the given module. 
- pub fn parse_call_message_json #impl_generics (module_name: &str, call_data: &str) -> ::anyhow::Result> - #where_clause_with_deserialize_bounds - { - match module_name { - #(#match_arms)* - _ => panic!("unknown module name"), - } + impl #impl_generics ::sov_modules_api::CliWallet for #ident #ty_generics #where_clause_with_deserialize_bounds { + type CliStringRepr<__Inner> = RuntimeMessage #ty_generics_with_inner; } + }; Ok(expanded.into()) } diff --git a/module-system/sov-modules-macros/src/common.rs b/module-system/sov-modules-macros/src/common.rs index 07223876c..f87b01cc6 100644 --- a/module-system/sov-modules-macros/src/common.rs +++ b/module-system/sov-modules-macros/src/common.rs @@ -1,13 +1,7 @@ -use std::collections::HashMap; - use proc_macro2::{Ident, Span, TokenStream}; use quote::{format_ident, ToTokens}; -use syn::punctuated::Punctuated; use syn::spanned::Spanned; -use syn::{ - DataStruct, Fields, GenericParam, Generics, ImplGenerics, Meta, PathArguments, PathSegment, - TypeGenerics, TypeParamBound, TypePath, WhereClause, WherePredicate, -}; +use syn::{DataStruct, Fields, GenericParam, ImplGenerics, Meta, TypeGenerics}; #[derive(Clone)] pub(crate) struct StructNamedField { @@ -144,7 +138,7 @@ pub(crate) struct StructDef<'a> { pub(crate) type_generics: TypeGenerics<'a>, pub(crate) generic_param: &'a Ident, pub(crate) fields: Vec, - pub(crate) where_clause: Option<&'a WhereClause>, + pub(crate) where_clause: Option<&'a syn::WhereClause>, } impl<'a> StructDef<'a> { @@ -154,7 +148,7 @@ impl<'a> StructDef<'a> { impl_generics: ImplGenerics<'a>, type_generics: TypeGenerics<'a>, generic_param: &'a Ident, - where_clause: Option<&'a WhereClause>, + where_clause: Option<&'a syn::WhereClause>, ) -> Self { Self { ident, @@ -192,9 +186,9 @@ impl<'a> StructDef<'a> { } } -/// Gets the type parameter's identifier from [`syn::Generics`]. +/// Gets the first type parameter's identifier from [`syn::Generics`]. pub(crate) fn get_generics_type_param( - generics: &Generics, + generics: &syn::Generics, error_span: Span, ) -> Result { let generic_param = match generics @@ -206,13 +200,13 @@ pub(crate) fn get_generics_type_param( GenericParam::Lifetime(lf) => { return Err(syn::Error::new_spanned( lf, - "Lifetime parameters not supported.", + "Lifetime parameters are not supported.", )) } GenericParam::Const(cnst) => { return Err(syn::Error::new_spanned( cnst, - "Const parameters not supported.", + "Const parameters are not supported.", )) } }; @@ -287,208 +281,40 @@ pub(crate) fn get_serialization_attrs( Ok(serialization_attrs) } -/// Extract a mapping from generic types to their associated trait bounds, including -/// the ones from the where clause. -/// -/// For example, given the following struct: -/// ```rust,ignore -/// use sov_modules_macros::common::GenericTypesWithBounds; -/// let test_struct: syn::ItemStruct = syn::parse_quote! { -/// struct TestStruct where T: SomeOtherTrait { -/// field: T -/// } -/// }; -/// // We want to extract both the inline bounds, and the bounds from the where clause... 
-/// // so that the generics from above definition are equivalent what we would have gotten -/// // from writing `T: SomeTrait + SomeOtherTrait` inline -/// let desired_bounds_for_t: syn::TypeParam = syn::parse_quote!(T: SomeTrait + SomeThirdTrait); -/// -/// // That is exactly what `GenericTypesWithBounds` does -/// let our_bounds = extract_generic_type_bounds(&test_struct.generics); -/// assert_eq!(our_bounds.get(T), Some(&desired_bounds_for_t.bounds)); -/// ``` -/// -#[cfg_attr(not(feature = "native"), allow(unused))] -pub(crate) fn extract_generic_type_bounds( - generics: &Generics, -) -> HashMap> { - let mut generics_with_bounds: HashMap<_, _> = Default::default(); - // Collect the inline bounds from each generic param - for param in generics.params.iter() { - if let GenericParam::Type(ty) = param { - let path_segment = PathSegment { - ident: ty.ident.clone(), - arguments: syn::PathArguments::None, - }; - let path = syn::Path { - leading_colon: None, - segments: Punctuated::from_iter(vec![path_segment]), - }; - let type_path = syn::TypePath { qself: None, path }; - generics_with_bounds.insert(type_path, ty.bounds.clone()); - } - } - - // Iterate over the bounds in the `where_clause` and add them to the map - if let Some(where_clause) = &generics.where_clause { - for predicate in &where_clause.predicates { - // We can ignore lifetimes and "Eq" predicates since they don't add any trait bounds - // so just match on `Type` predicates - if let WherePredicate::Type(predicate_type) = predicate { - // If the bounded type is a regular type path, we need to extract the bounds and add them to the map. - // For now, we ignore more exotic bounds `[T; N]: SomeTrait`. - if let syn::Type::Path(type_path) = &predicate_type.bounded_ty { - match generics_with_bounds.entry(type_path.clone()) { - std::collections::hash_map::Entry::Occupied(mut entry) => { - entry.get_mut().extend(predicate_type.bounds.clone()) - } - std::collections::hash_map::Entry::Vacant(entry) => { - entry.insert(predicate_type.bounds.clone()); - } - } - } - } - } - } - generics_with_bounds -} - -/// Extract the type ident from a `TypePath`. -#[cfg_attr(not(feature = "native"), allow(unused))] -pub fn extract_ident(type_path: &syn::TypePath) -> &Ident { - &type_path - .path - .segments - .last() - .expect("Type path must have at least one segment") - .ident -} - -/// Build the generics for a field based on the generics of the outer struct. -/// For example, given the following struct: -/// ```rust,ignore -/// struct MyStruct { -/// field1: PhantomData, -/// field2: Vec -/// } -/// ``` -/// -/// This function will return a `syn::Generics` corresponding to `` when -/// invoked on the PathArguments for field1. -#[cfg_attr(not(feature = "native"), allow(unused))] -pub(crate) fn generics_for_field( - outer_generics: &Generics, - field_generic_types: &PathArguments, -) -> Generics { - let generic_bounds = extract_generic_type_bounds(outer_generics); - match field_generic_types { - PathArguments::AngleBracketed(angle_bracketed_data) => { - let mut args_with_bounds = Punctuated::::new(); - for generic_arg in &angle_bracketed_data.args { - if let syn::GenericArgument::Type(syn::Type::Path(type_path)) = generic_arg { - let ident = extract_ident(type_path); - let bounds = generic_bounds.get(type_path).cloned().unwrap_or_default(); - - // Construct a "type param" with the appropriate bounds. 
This corresponds to a syntax - // tree like `T: Trait1 + Trait2` - let generic_type_param_with_bounds = syn::TypeParam { - attrs: Vec::new(), - ident: ident.clone(), - colon_token: Some(syn::token::Colon { - spans: [type_path.span()], - }), - bounds: bounds.clone(), - eq_token: None, - default: None, - }; - args_with_bounds.push(GenericParam::Type(generic_type_param_with_bounds)) - } - } - // Construct a `Generics` struct with the generic type parameters and their bounds. - // This corresponds to a syntax tree like `` - syn::Generics { - lt_token: Some(syn::token::Lt { - spans: [field_generic_types.span()], - }), - params: args_with_bounds, - gt_token: Some(syn::token::Gt { - spans: [field_generic_types.span()], - }), - where_clause: None, - } - } - // We don't need to do anything if the generic type parameters are not angle bracketed - _ => Default::default(), - } -} - #[cfg(test)] mod tests { - use syn::parse_quote; - - use crate::common::extract_generic_type_bounds; + use super::*; #[test] - fn test_generic_types_with_bounds() { - let test_struct: syn::ItemStruct = syn::parse_quote! { - struct TestStruct where T: SomeThirdTrait { - field: (T, U, V) - } + fn get_generic_type_param_success() { + // tests for get_generics_type_param + let generics = syn::parse_quote! { + }; - let generics = test_struct.generics; - let our_bounds = extract_generic_type_bounds(&generics); - let expected_bounds_for_t: syn::TypeParam = - syn::parse_quote!(T: SomeTrait + SomeThirdTrait); - let expected_bounds_for_u: syn::TypeParam = syn::parse_quote!(U: SomeOtherTrait); - - assert_eq!( - our_bounds.get(&parse_quote!(T)), - Some(&expected_bounds_for_t.bounds) - ); - assert_eq!( - our_bounds.get(&parse_quote!(U)), - Some(&expected_bounds_for_u.bounds) - ); - assert_eq!( - our_bounds.get(&parse_quote!(V)), - Some(&syn::punctuated::Punctuated::new()) - ); + + let generic_param = get_generics_type_param(&generics, Span::call_site()).unwrap(); + assert_eq!(generic_param, "T"); } #[test] - fn test_generic_types_with_associated_type_bounds() { - let test_struct: syn::ItemStruct = syn::parse_quote! { - struct TestStruct where T::Error: Debug { - field: (T, U, V) - } + fn get_generic_type_param_first_lifetime() { + let generics = syn::parse_quote! { + <'a, T: Trait> }; - let generics = test_struct.generics; - let our_bounds = extract_generic_type_bounds(&generics); - let expected_bounds_for_t: syn::TypeParam = syn::parse_quote!(T: SomeTrait); - let expected_bounds_for_t_error: syn::WherePredicate = syn::parse_quote!(T::Error: Debug); - if let syn::WherePredicate::Type(expected_bounds_for_t_error) = expected_bounds_for_t_error - { - assert_eq!( - our_bounds.get(&parse_quote!(T::Error)), - Some(&expected_bounds_for_t_error.bounds) - ); - } else { - unreachable!("Expected a type predicate") + let generic_param = get_generics_type_param(&generics, Span::call_site()); + let error = generic_param.unwrap_err(); + assert_eq!(error.to_string(), "Lifetime parameters are not supported."); + } + + #[test] + fn get_generic_type_param_first_const() { + // error test case for get_generics_type_param when first generic param is const + let generics = syn::parse_quote! 
{ + }; - let expected_bounds_for_u: syn::TypeParam = syn::parse_quote!(U: SomeOtherTrait); - - assert_eq!( - our_bounds.get(&parse_quote!(T)), - Some(&expected_bounds_for_t.bounds) - ); - - assert_eq!( - our_bounds.get(&parse_quote!(U)), - Some(&expected_bounds_for_u.bounds) - ); - assert_eq!( - our_bounds.get(&parse_quote!(V)), - Some(&syn::punctuated::Punctuated::new()) - ); + let generic_param = get_generics_type_param(&generics, Span::call_site()); + + let error = generic_param.unwrap_err(); + assert_eq!(error.to_string(), "Const parameters are not supported."); } } diff --git a/module-system/sov-modules-macros/src/dispatch/dispatch_call.rs b/module-system/sov-modules-macros/src/dispatch/dispatch_call.rs index 5da10e0a1..26234a18a 100644 --- a/module-system/sov-modules-macros/src/dispatch/dispatch_call.rs +++ b/module-system/sov-modules-macros/src/dispatch/dispatch_call.rs @@ -14,7 +14,8 @@ impl<'a> StructDef<'a> { let ty = &field.ty; quote::quote!( - #name(<#ty as sov_modules_api::Module>::CallMessage), + #[doc = "Module call message."] + #name(<#ty as ::sov_modules_api::Module>::CallMessage), ) }) .collect() @@ -29,7 +30,7 @@ impl<'a> StructDef<'a> { quote::quote!( #enum_ident::#name(message)=>{ - sov_modules_api::Module::call(&self.#name, message, context, working_set) + ::sov_modules_api::Module::call(&self.#name, message, context, working_set) }, ) }); @@ -40,7 +41,7 @@ impl<'a> StructDef<'a> { quote::quote!( #enum_ident::#name(message)=>{ - <#ty as sov_modules_api::ModuleInfo>::address(&self.#name) + <#ty as ::sov_modules_api::ModuleInfo>::address(&self.#name) }, ) }); @@ -53,22 +54,21 @@ impl<'a> StructDef<'a> { let call_enum = self.enum_ident(CALL); quote::quote! { - impl #impl_generics sov_modules_api::DispatchCall for #ident #type_generics #where_clause { + impl #impl_generics ::sov_modules_api::DispatchCall for #ident #type_generics #where_clause { type Context = #generic_param; type Decodable = #call_enum #ty_generics; - - fn decode_call(serialized_message: &[u8]) -> core::result::Result { - let mut data = std::io::Cursor::new(serialized_message); + fn decode_call(serialized_message: &[u8]) -> ::core::result::Result { + let mut data = ::std::io::Cursor::new(serialized_message); <#call_enum #ty_generics as ::borsh::BorshDeserialize>::deserialize_reader(&mut data) } fn dispatch_call( &self, decodable: Self::Decodable, - working_set: &mut sov_state::WorkingSet<::Storage>, + working_set: &mut ::sov_state::WorkingSet<::Storage>, context: &Self::Context, - ) -> core::result::Result { + ) -> ::core::result::Result<::sov_modules_api::CallResponse, ::sov_modules_api::Error> { match decodable { #(#match_legs)* @@ -76,7 +76,7 @@ impl<'a> StructDef<'a> { } - fn module_address(&self, decodable: &Self::Decodable) -> &::Address { + fn module_address(&self, decodable: &Self::Decodable) -> &::Address { match decodable { #(#match_legs_address)* } diff --git a/module-system/sov-modules-macros/src/dispatch/genesis.rs b/module-system/sov-modules-macros/src/dispatch/genesis.rs index 3064ee77a..15f4eabe3 100644 --- a/module-system/sov-modules-macros/src/dispatch/genesis.rs +++ b/module-system/sov-modules-macros/src/dispatch/genesis.rs @@ -102,10 +102,11 @@ impl GenesisMacro { quote::quote! 
{ #[doc = "Initial configuration for the rollup."] pub struct GenesisConfig #impl_generics #where_clause{ - #(pub #fields)* + #(#[doc = "Module configuration"] pub #fields)* } impl #impl_generics GenesisConfig #type_generics #where_clause { + #[doc = "GenesisConfig constructor."] pub fn new(#(#fields)*) -> Self { Self { #(#field_names),* diff --git a/module-system/sov-modules-macros/src/dispatch/message_codec.rs b/module-system/sov-modules-macros/src/dispatch/message_codec.rs index 54a13a395..57f27e915 100644 --- a/module-system/sov-modules-macros/src/dispatch/message_codec.rs +++ b/module-system/sov-modules-macros/src/dispatch/message_codec.rs @@ -1,45 +1,40 @@ use proc_macro2::{Span, TokenStream}; -use quote::format_ident; use syn::DeriveInput; use crate::common::{get_generics_type_param, StructDef, StructFieldExtractor, CALL}; impl<'a> StructDef<'a> { fn create_message_codec(&self) -> TokenStream { + let original_ident = &self.ident; let call_enum = self.enum_ident(CALL); let ty_generics = &self.type_generics; + let impl_generics = &self.impl_generics; + let where_clause = &self.where_clause; let fns = self.fields.iter().map(|field| { let variant = &field.ident; let ty = &field.ty; - let fn_call_name = format_ident!("encode_{}_call", &field.ident); - - - let call_doc = format!("Encodes {} call message.",field.ident); + let call_doc = format!("Encodes {} call message.", field.ident); // Creates functions like: // encode_*module_name*_call(data: ..) -> Vec // encode_*module_name*_query(data: ..) -> Vec quote::quote! { + impl #impl_generics sov_modules_api::EncodeCall<#ty> for #original_ident #ty_generics #where_clause { #[doc = #call_doc] - pub fn #fn_call_name(data: <#ty as sov_modules_api::Module>::CallMessage) -> std::vec::Vec { + fn encode_call(data: <#ty as sov_modules_api::Module>::CallMessage) -> std::vec::Vec { let call = #call_enum:: #ty_generics ::#variant(data); ::borsh::BorshSerialize::try_to_vec(&call).unwrap() } } + } }); - let original_ident = &self.ident; - let impl_generics = &self.impl_generics; - let where_clause = self.where_clause; - // Adds decoding functionality to the underlying type and // hides auto generated types behind impl DispatchCall. quote::quote! { - impl #impl_generics #original_ident #ty_generics #where_clause { - #(#fns)* - } + #(#fns)* } } } diff --git a/module-system/sov-modules-macros/src/lib.rs b/module-system/sov-modules-macros/src/lib.rs index 2c16326e0..4aa26ed03 100644 --- a/module-system/sov-modules-macros/src/lib.rs +++ b/module-system/sov-modules-macros/src/lib.rs @@ -235,8 +235,13 @@ fn handle_macro_error(result: Result) -> To } } -/// This proc macro generates the actual implementations for the trait created above for the module -/// It iterates over each struct +/// The macro exposes RPC endpoints from all modules in the runtime. +/// It gets storage from the Context generic +/// and utilizes output of [`#[rpc_gen]`] macro to generate RPC methods. +/// +/// It has limitations: +/// - First type generic attribute must have bound to [`sov_modules_api::Context`] trait +/// - All generic attributes must own the data, thus have bound `'static` #[cfg(feature = "native")] #[proc_macro_attribute] pub fn expose_rpc(_attr: TokenStream, input: TokenStream) -> TokenStream { @@ -280,7 +285,7 @@ pub fn cli_parser(input: TokenStream) -> TokenStream { /// [`sov_modules_api::CliWalletArg`] trait where the `CliStringRepr` type is the new struct or enum. 
/// /// As an implementation detail, `clap` requires that all types have named fields - so this macro auto generates an appropriate -/// `clap`-compatible type from the annotated item. Tor example, the struct `MyStruct(u64, u64)` would be transformed into +/// `clap`-compatible type from the annotated item. For example, the struct `MyStruct(u64, u64)` would be transformed into /// `MyStructWithNamedFields { field0: u64, field1: u64 }`. /// /// ## Example diff --git a/module-system/sov-modules-macros/src/module_info.rs b/module-system/sov-modules-macros/src/module_info.rs index 982315512..cca82959e 100644 --- a/module-system/sov-modules-macros/src/module_info.rs +++ b/module-system/sov-modules-macros/src/module_info.rs @@ -1,59 +1,18 @@ use proc_macro2::{self, Ident, Span}; -use syn::{DataStruct, DeriveInput, ImplGenerics, PathArguments, TypeGenerics, WhereClause}; +use syn::{ + Attribute, DataStruct, DeriveInput, ImplGenerics, PathArguments, TypeGenerics, WhereClause, +}; +use self::parsing::{ModuleField, ModuleFieldAttribute, StructDef}; use crate::common::get_generics_type_param; -#[derive(Clone)] -struct StructNamedField { - ident: proc_macro2::Ident, - ty: syn::Type, -} - -// A field can be either a state variable or another module. -// We don't generate prefix functions for imported modules as they are already generated. -#[derive(Clone)] -enum FieldKind { - Address(StructNamedField), - State(StructNamedField), - Module(StructNamedField), -} - -struct StructDef<'a> { - ident: proc_macro2::Ident, - impl_generics: ImplGenerics<'a>, - type_generics: TypeGenerics<'a>, - generic_param: &'a Ident, - - fields: Result, syn::Error>, - where_clause: Option<&'a WhereClause>, -} - pub(crate) fn derive_module_info( input: DeriveInput, ) -> Result { - let DeriveInput { - data, - ident, - generics, - .. - } = input; - - let generic_param = get_generics_type_param(&generics, Span::call_site())?; - - let (impl_generics, type_generics, where_clause) = generics.split_for_impl(); - let fields = get_fields_from_struct(&data); - - let struct_def = StructDef { - ident, - fields, - impl_generics, - type_generics, - generic_param: &generic_param, - where_clause, - }; + let struct_def = StructDef::parse(&input)?; - let impl_prefix_functions = struct_def.impl_prefix_functions()?; - let impl_new = struct_def.impl_module_info()?; + let impl_prefix_functions = impl_prefix_functions(&struct_def)?; + let impl_new = impl_module_info(&struct_def)?; Ok(quote::quote! { #impl_prefix_functions @@ -63,162 +22,102 @@ pub(crate) fn derive_module_info( .into()) } -impl<'a> StructDef<'a> { - // Creates a prefix function for each field of the underlying structure. - fn impl_prefix_functions(&self) -> Result { - let fields = self.fields.clone()?; - - let impl_prefix_functions = fields.iter().filter_map(|field| match field { - FieldKind::State(field) => Some(make_prefix_func(field, &self.ident)), - // Don't generate prefix functions for modules - FieldKind::Module(_) => None, - // Don't generate prefix functions for address - FieldKind::Address(_) => None, - }); - - let impl_generics = &self.impl_generics; - let ident = &self.ident; - let ty_generics = &self.type_generics; - let where_clause = self.where_clause; - - Ok(quote::quote! { - impl #impl_generics #ident #ty_generics #where_clause{ - #(#impl_prefix_functions)* - } - }) - } - - // Implements the `ModuleInfo` trait. 
- fn impl_module_info(&self) -> Result { - let fields = self.fields.clone()?; - let type_generics = &self.type_generics; - - let mut impl_self_init = Vec::default(); - let mut impl_self_body = Vec::default(); - let mut modules = Vec::default(); - - let mut module_address = None; - for field in fields.iter() { - match field { - FieldKind::State(field) => { - impl_self_init.push(make_init_state(field)?); - impl_self_body.push(&field.ident); - } - FieldKind::Module(field) => { - impl_self_init.push(make_init_module(field)?); - impl_self_body.push(&field.ident); - modules.push(&field.ident); - } - FieldKind::Address(field) => { - impl_self_init.push(make_init_address( - field, - &self.ident, - module_address, - self.generic_param, - )?); - impl_self_body.push(&field.ident); - module_address = Some(&field.ident); - } - }; - } - - let generic_param = self.generic_param; - let impl_generics = &self.impl_generics; - let ident = &self.ident; - - let where_clause = self.where_clause; +// Creates a prefix function for each field of the underlying structure. +fn impl_prefix_functions(struct_def: &StructDef) -> Result { + let StructDef { + ident, + impl_generics, + type_generics, + fields, + where_clause, + .. + } = struct_def; - let fn_address = make_fn_address(module_address)?; - let fn_dependencies = make_fn_dependencies(modules); + let prefix_functions = fields + .iter() + // Don't generate prefix functions for modules or addresses; only state. + .filter(|field| matches!(field.attr, ModuleFieldAttribute::State { .. })) + .map(|field| make_prefix_func(field, ident)); - Ok(quote::quote! { - impl #impl_generics ::std::default::Default for #ident #type_generics #where_clause{ + Ok(quote::quote! { + impl #impl_generics #ident #type_generics #where_clause{ + #(#prefix_functions)* + } + }) +} - fn default() -> Self { - #(#impl_self_init)* +// Implements the `ModuleInfo` trait. +fn impl_module_info(struct_def: &StructDef) -> Result { + let module_address = struct_def.module_address(); - Self{ - #(#impl_self_body),* - } - } + let StructDef { + ident, + impl_generics, + type_generics, + generic_param, + fields, + where_clause, + } = struct_def; + + let mut impl_self_init = Vec::default(); + let mut impl_self_body = Vec::default(); + let mut modules = Vec::default(); + + for field in fields.iter() { + match &field.attr { + ModuleFieldAttribute::State { codec_builder } => { + impl_self_init.push(make_init_state( + field, + &codec_builder + .as_ref() + .cloned() + .unwrap_or_else(default_codec_builder), + )?); + impl_self_body.push(&field.ident); } - - impl #impl_generics ::sov_modules_api::ModuleInfo for #ident #type_generics #where_clause{ - type Context = #generic_param; - - #fn_address - - #fn_dependencies + ModuleFieldAttribute::Module => { + impl_self_init.push(make_init_module(field)?); + impl_self_body.push(&field.ident); + modules.push(&field.ident); } - }) + ModuleFieldAttribute::Address => { + impl_self_init.push(make_init_address(field, ident, generic_param)?); + impl_self_body.push(&field.ident); + } + }; } -} -// Extracts named fields form a struct or emits an error. 
-fn get_fields_from_struct(data: &syn::Data) -> Result, syn::Error> { - match data { - syn::Data::Struct(data_struct) => get_fields_from_data_struct(data_struct), - syn::Data::Enum(en) => Err(syn::Error::new_spanned( - en.enum_token, - "The `ModuleInfo` macro supports structs only.", - )), - syn::Data::Union(un) => Err(syn::Error::new_spanned( - un.union_token, - "The `ModuleInfo` macro supports structs only.", - )), - } -} + let fn_address = make_fn_address(&module_address.ident)?; + let fn_dependencies = make_fn_dependencies(modules); -fn get_fields_from_data_struct(data_struct: &DataStruct) -> Result, syn::Error> { - let mut output_fields = Vec::default(); + Ok(quote::quote! { + impl #impl_generics ::std::default::Default for #ident #type_generics #where_clause{ - for original_field in data_struct.fields.iter() { - let field_ident = original_field - .ident - .as_ref() - .ok_or(syn::Error::new_spanned( - &original_field.ident, - "The `ModuleInfo` macro supports structs only, unnamed fields witnessed.", - ))?; + fn default() -> Self { + #(#impl_self_init)* - if original_field.attrs.is_empty() { - return Err(syn::Error::new_spanned( - &original_field.ident, - "This field is missing an attribute: add `#[module]`, `#[state]` or `#[address]`. ", - )); + Self{ + #(#impl_self_body),* + } + } } - for attribute in &original_field.attrs { - let field = StructNamedField { - ident: field_ident.clone(), - ty: original_field.ty.clone(), - }; - - if attribute.path.segments[0].ident == "state" { - output_fields.push(FieldKind::State(field)); - } else if attribute.path.segments[0].ident == "module" { - output_fields.push(FieldKind::Module(field)) - } else if attribute.path.segments[0].ident == "address" { - output_fields.push(FieldKind::Address(field)) - } else if attribute.path.segments[0].ident == "doc" { - // Skip doc comments. - } else { - return Err(syn::Error::new_spanned( - field_ident, - "Only `#[module]`, `#[state]` or `#[address]` attributes are supported.", - )); - }; + impl #impl_generics ::sov_modules_api::ModuleInfo for #ident #type_generics #where_clause{ + type Context = #generic_param; + + #fn_address + + #fn_dependencies } - } - Ok(output_fields) + }) } -fn prefix_func_ident(ident: &proc_macro2::Ident) -> proc_macro2::Ident { - syn::Ident::new(&format!("_prefix_{ident}"), ident.span()) +fn default_codec_builder() -> syn::Path { + syn::parse_str("::core::default::Default::default").unwrap() } fn make_prefix_func( - field: &StructNamedField, + field: &ModuleField, module_ident: &proc_macro2::Ident, ) -> proc_macro2::TokenStream { let field_ident = &field.ident; @@ -237,20 +136,18 @@ fn make_prefix_func( } } +fn prefix_func_ident(ident: &proc_macro2::Ident) -> proc_macro2::Ident { + syn::Ident::new(&format!("_prefix_{ident}"), ident.span()) +} + fn make_fn_address( - address_ident: Option<&proc_macro2::Ident>, + address_ident: &proc_macro2::Ident, ) -> Result { - match address_ident { - Some(address_ident) => Ok(quote::quote! { - fn address(&self) -> &::Address { - &self.#address_ident - } - }), - None => Err(syn::Error::new( - Span::call_site(), - "The `ModuleInfo` macro requires `[address]` attribute.", - )), - } + Ok(quote::quote! 
{ + fn address(&self) -> &::Address { + &self.#address_ident + } + }) } fn make_fn_dependencies(modules: Vec<&proc_macro2::Ident>) -> proc_macro2::TokenStream { @@ -266,7 +163,10 @@ fn make_fn_dependencies(modules: Vec<&proc_macro2::Ident>) -> proc_macro2::Token } } } -fn make_init_state(field: &StructNamedField) -> Result { +fn make_init_state( + field: &ModuleField, + encoding_constructor: &syn::Path, +) -> Result { let prefix_fun = prefix_func_ident(&field.ident); let field_ident = &field.ident; let ty = &field.ty; @@ -297,11 +197,11 @@ fn make_init_state(field: &StructNamedField) -> Result Result { +fn make_init_module(field: &ModuleField) -> Result { let field_ident = &field.ident; let ty = &field.ty; @@ -311,28 +211,224 @@ fn make_init_module(field: &StructNamedField) -> Result, generic_param: &Ident, ) -> Result { let field_ident = &field.ident; - match address { - Some(addr) => Err(syn::Error::new_spanned( - addr, - format!( - "The `address` attribute is defined more than once, revisit field: {}", - addr - ), - )), - None => Ok(quote::quote! { - use ::sov_modules_api::digest::Digest as _; - let module_path = module_path!(); - let prefix = sov_modules_api::Prefix::new_module(module_path, stringify!(#struct_ident)); - let #field_ident : <#generic_param as sov_modules_api::Spec>::Address = - <#generic_param as ::sov_modules_api::Spec>::Address::try_from(&prefix.hash::<#generic_param>()) - .unwrap_or_else(|e| panic!("ModuleInfo macro error, unable to create an Address for module: {}", e)); - }), + Ok(quote::quote! { + use ::sov_modules_api::digest::Digest as _; + let module_path = module_path!(); + let prefix = sov_modules_api::Prefix::new_module(module_path, stringify!(#struct_ident)); + let #field_ident : <#generic_param as sov_modules_api::Spec>::Address = + <#generic_param as ::sov_modules_api::Spec>::Address::try_from(&prefix.hash::<#generic_param>()) + .unwrap_or_else(|e| panic!("ModuleInfo macro error, unable to create an Address for module: {}", e)); + }) +} + +/// Internal `proc macro` parsing utilities. 
+pub mod parsing { + use super::*; + + pub struct StructDef<'a> { + pub ident: &'a proc_macro2::Ident, + pub impl_generics: ImplGenerics<'a>, + pub type_generics: TypeGenerics<'a>, + pub generic_param: Ident, + + pub fields: Vec, + pub where_clause: Option<&'a WhereClause>, + } + + impl<'a> StructDef<'a> { + pub fn parse(input: &'a DeriveInput) -> syn::Result { + let ident = &input.ident; + let generic_param = get_generics_type_param(&input.generics, Span::call_site())?; + let (impl_generics, type_generics, where_clause) = input.generics.split_for_impl(); + let fields = parse_module_fields(&input.data)?; + check_exactly_one_address(&fields)?; + + Ok(StructDef { + ident, + fields, + impl_generics, + type_generics, + generic_param, + where_clause, + }) + } + + pub fn module_address(&self) -> &ModuleField { + self.fields + .iter() + .find(|field| matches!(field.attr, ModuleFieldAttribute::Address)) + .expect("Module address not found but it was validated already; this is a bug") + } + } + + #[derive(Clone)] + pub struct ModuleField { + pub ident: syn::Ident, + pub ty: syn::Type, + pub attr: ModuleFieldAttribute, + } + + #[derive(Clone)] + pub enum ModuleFieldAttribute { + Module, + State { codec_builder: Option }, + Address, + } + + impl ModuleFieldAttribute { + fn parse(attr: &Attribute) -> syn::Result { + match attr.path.segments[0].ident.to_string().as_str() { + "module" => { + if attr.tokens.is_empty() { + Ok(Self::Module) + } else { + Err(syn::Error::new_spanned( + attr, + "The `#[module]` attribute does not accept any arguments.", + )) + } + } + "address" => { + if attr.tokens.is_empty() { + Ok(Self::Address) + } else { + Err(syn::Error::new_spanned( + attr, + "The `#[address]` attribute does not accept any arguments.", + )) + } + } + "state" => parse_state_attr(attr), + _ => unreachable!("attribute names were validated already; this is a bug"), + } + } + } + + fn parse_state_attr(attr: &Attribute) -> syn::Result { + let syntax_err = + syn::Error::new_spanned(attr, "Invalid syntax for the `#[state]` attribute."); + + let meta = if attr.tokens.is_empty() { + return Ok(ModuleFieldAttribute::State { + codec_builder: None, + }); + } else { + attr.parse_meta()? 
+ }; + + let meta_list = match meta { + syn::Meta::List(l) if !l.nested.is_empty() => l, + _ => return Err(syntax_err), + }; + let name_value = match &meta_list.nested[0] { + syn::NestedMeta::Meta(syn::Meta::NameValue(nv)) => nv, + _ => return Err(syntax_err), + }; + + if name_value.path.get_ident().map(Ident::to_string).as_deref() != Some("codec_builder") { + return Err(syntax_err); + } + + let codec_builder_path = match &name_value.lit { + syn::Lit::Str(lit) => lit.parse_with(syn::Path::parse_mod_style)?, + _ => return Err(syntax_err), + }; + Ok(ModuleFieldAttribute::State { + codec_builder: Some(codec_builder_path), + }) + } + + fn parse_module_fields(data: &syn::Data) -> syn::Result> { + let data_struct = data_to_struct(data)?; + let mut parsed_fields = vec![]; + + for field in data_struct.fields.iter() { + let ident = get_field_ident(field)?; + let ty = field.ty.clone(); + let attr = get_field_attribute(field)?; + + parsed_fields.push(ModuleField { + ident: ident.clone(), + ty, + attr: ModuleFieldAttribute::parse(attr)?, + }); + } + + Ok(parsed_fields) + } + + fn check_exactly_one_address(fields: &[ModuleField]) -> syn::Result<()> { + let address_fields = fields + .iter() + .filter(|field| matches!(field.attr, ModuleFieldAttribute::Address)) + .collect::>(); + + match address_fields.len() { + 0 => Err(syn::Error::new( + Span::call_site(), + "The `ModuleInfo` macro requires `[address]` attribute.", + )), + 1 => Ok(()), + _ => Err(syn::Error::new_spanned( + address_fields[1].ident.clone(), + format!( + "The `address` attribute is defined more than once, revisit field: {}", + address_fields[1].ident, + ), + )), + } + } + + fn data_to_struct(data: &syn::Data) -> syn::Result<&DataStruct> { + match data { + syn::Data::Struct(data_struct) => Ok(data_struct), + syn::Data::Enum(en) => Err(syn::Error::new_spanned( + en.enum_token, + "The `ModuleInfo` macro supports structs only.", + )), + syn::Data::Union(un) => Err(syn::Error::new_spanned( + un.union_token, + "The `ModuleInfo` macro supports structs only.", + )), + } + } + + fn get_field_ident(field: &syn::Field) -> syn::Result<&syn::Ident> { + field.ident.as_ref().ok_or(syn::Error::new_spanned( + field, + "The `ModuleInfo` macro supports structs only, unnamed fields witnessed.", + )) + } + + fn get_field_attribute(field: &syn::Field) -> syn::Result<&Attribute> { + let ident = get_field_ident(field)?; + let mut attr = None; + for a in field.attrs.iter() { + match a.path.segments[0].ident.to_string().as_str() { + "state" | "module" | "address" => { + if attr.is_some() { + return Err(syn::Error::new_spanned(ident, "Only one attribute out of `#[module]`, `#[state]` and `#[address]` is allowed per field.")); + } else { + attr = Some(a); + } + } + _ => {} + } + } + + if let Some(attr) = attr { + Ok(attr) + } else { + Err(syn::Error::new_spanned( + ident, + "This field is missing an attribute: add `#[module]`, `#[state]` or `#[address]`.", + )) + } } } diff --git a/module-system/sov-modules-macros/src/rpc/expose_rpc.rs b/module-system/sov-modules-macros/src/rpc/expose_rpc.rs index 54829eff7..073a5f979 100644 --- a/module-system/sov-modules-macros/src/rpc/expose_rpc.rs +++ b/module-system/sov-modules-macros/src/rpc/expose_rpc.rs @@ -1,7 +1,7 @@ use quote::quote; use syn::DeriveInput; -use crate::common::{generics_for_field, StructFieldExtractor}; +use crate::common::StructFieldExtractor; pub(crate) struct ExposeRpcMacro { field_extractor: StructFieldExtractor, @@ -19,8 +19,15 @@ impl ExposeRpcMacro { original: proc_macro::TokenStream, input: 
DeriveInput, ) -> Result { - let DeriveInput { data, generics, .. } = input; - let (impl_generics, _ty_generics, where_clause) = generics.split_for_impl(); + let DeriveInput { + data, + generics, + ident: input_ident, + .. + } = input; + + let (impl_generics, ty_generics, where_clause) = generics.split_for_impl(); + let context_type = generics .params .iter() @@ -33,16 +40,30 @@ impl ExposeRpcMacro { }) .ok_or(syn::Error::new_spanned( &generics, - "a runtime must be generic over a sov_modules_api::Context to derive CliWallet", + "a runtime must be generic over a sov_modules_api::Context to generate rpc methods", ))?; let fields = self.field_extractor.get_fields_from_struct(&data)?; let rpc_storage_struct = quote! { - #[derive(Clone)] - struct RpcStorage { - storage: C::Storage + struct RpcStorage #impl_generics #where_clause { + storage: #context_type::Storage, + _phantom: ::std::marker::PhantomData< #input_ident #ty_generics >, } + + // Manually implementing clone, as in reality only cloning storage + impl #impl_generics ::std::clone::Clone for RpcStorage #ty_generics #where_clause { + fn clone(&self) -> Self { + Self { + storage: self.storage.clone(), + _phantom: ::std::marker::PhantomData, + } + } + } + + // As long as RpcStorage only cares about C::Storage, which is Sync + Send, we can do this: + unsafe impl #impl_generics ::std::marker::Sync for RpcStorage #ty_generics #where_clause {} + unsafe impl #impl_generics ::std::marker::Send for RpcStorage #ty_generics #where_clause {} }; let mut merge_operations = proc_macro2::TokenStream::new(); @@ -60,7 +81,6 @@ impl ExposeRpcMacro { .last() .expect("A type path must have at least one segment") .arguments; - let field_generics = generics_for_field(&generics, field_path_args); let module_ident = ty.path.segments.last().unwrap().clone().ident; @@ -79,21 +99,25 @@ impl ExposeRpcMacro { merge_operations.extend(merge_operation); let rpc_trait_impl = quote! { - impl #field_generics #rpc_trait_ident #field_path_args for RpcStorage<#context_type> { + impl #impl_generics #rpc_trait_ident #field_path_args for RpcStorage #ty_generics #where_clause { + /// Get a working set on top of the current storage fn get_working_set(&self) -> ::sov_state::WorkingSet<<#context_type as ::sov_modules_api::Spec>::Storage> { ::sov_state::WorkingSet::new(self.storage.clone()) } } }; + rpc_trait_impls.extend(rpc_trait_impl); } let get_rpc_methods: proc_macro2::TokenStream = quote! 
{ - pub fn get_rpc_methods #impl_generics (storage: <#context_type as ::sov_modules_api::Spec>::Storage) -> jsonrpsee::RpcModule<()> #where_clause{ - let mut module = jsonrpsee::RpcModule::new(()); - let r = RpcStorage::<#context_type> { + /// Returns a [`jsonrpsee::RpcModule`] with all the rpc methods exposed by the module + pub fn get_rpc_methods #impl_generics (storage: <#context_type as ::sov_modules_api::Spec>::Storage) -> ::jsonrpsee::RpcModule<()> #where_clause { + let mut module = ::jsonrpsee::RpcModule::new(()); + let r = RpcStorage:: #ty_generics { storage: storage.clone(), + _phantom: ::std::marker::PhantomData }; #merge_operations diff --git a/module-system/sov-modules-macros/src/rpc/rpc_gen.rs b/module-system/sov-modules-macros/src/rpc/rpc_gen.rs index fd1e027de..afc4f3c2e 100644 --- a/module-system/sov-modules-macros/src/rpc/rpc_gen.rs +++ b/module-system/sov-modules-macros/src/rpc/rpc_gen.rs @@ -6,7 +6,7 @@ use syn::parse::{Parse, ParseStream}; use syn::punctuated::Punctuated; use syn::{ parenthesized, Attribute, FnArg, ImplItem, Meta, MetaList, PatType, Path, PathSegment, - Signature, Type, + Signature, }; /// Returns an attribute with the name `rpc_method` replaced with `method`, and the index @@ -64,13 +64,14 @@ fn find_working_set_argument(sig: &Signature) -> Option<(usize, syn::Type)> { struct RpcImplBlock { pub(crate) type_name: Ident, pub(crate) methods: Vec, - pub(crate) working_set_type: Option, + pub(crate) working_set_type: Option, pub(crate) generics: syn::Generics, } struct RpcEnabledMethod { pub(crate) method_name: Ident, pub(crate) method_signature: Signature, + pub(crate) docs: Vec, pub(crate) idx_of_working_set_arg: Option, } @@ -106,6 +107,7 @@ impl RpcImplBlock { let mut signature = method.method_signature.clone(); let method_name = &method.method_name; + let docs = &method.docs; let impl_trait_method = if let Some(idx) = method.idx_of_working_set_arg { // If necessary, adjust the signature to remove the working set argument and replace it with one generated by the implementer. @@ -124,6 +126,7 @@ impl RpcImplBlock { signature.inputs = inputs.into_iter().collect(); quote! { + #( #docs )* #signature { <#type_name #ty_generics as ::std::default::Default>::default().#method_name(#(#pre_working_set_args,)* &mut Self::get_working_set(self), #(#post_working_set_args),* ) } @@ -147,12 +150,14 @@ impl RpcImplBlock { let pre_working_set_args = arg_values.clone().take(idx); let post_working_set_args = arg_values.clone().skip(idx + 1); quote! { + #( #docs )* #signature { ::#method_name(#(#pre_working_set_args,)* #(#post_working_set_args),* ) } } } else { quote! { + #( #docs )* #signature { ::#method_name(#(#arg_values),*) } @@ -164,14 +169,19 @@ impl RpcImplBlock { let rpc_impl_trait = if let Some(ref working_set_type) = self.working_set_type { quote! { - pub trait #impl_trait_name #generics { + /// Allows a Runtime to be converted into a functional RPC server by simply implementing the two required methods - + /// `get_backing_impl(&self) -> MyModule` and `get_working_set(&self) -> ::sov_modules_api::WorkingSet` + pub trait #impl_trait_name #generics #where_clause { + /// Get a clean working set on top of the latest state fn get_working_set(&self) -> #working_set_type; #(#impl_trait_methods)* } } } else { quote! 
{ - pub trait #impl_trait_name #generics { + /// Allows a Runtime to be converted into a functional RPC server by simply implementing the two required methods - + /// `get_backing_impl(&self) -> MyModule` and `get_working_set(&self) -> ::sov_modules_api::WorkingSet` + pub trait #impl_trait_name #generics #where_clause { #(#impl_trait_methods)* } } @@ -243,7 +253,7 @@ fn build_rpc_trait( let generics = &input.generics; let mut rpc_info = RpcImplBlock { - type_name, + type_name: type_name.clone(), methods: vec![], working_set_type: None, generics: generics.clone(), @@ -269,9 +279,16 @@ fn build_rpc_trait( } else { None }; + let docs = method + .attrs + .iter() + .filter(|attr| attr.path.is_ident("doc")) + .cloned() + .collect::>(); rpc_info.methods.push(RpcEnabledMethod { method_name: method.sig.ident.clone(), method_signature: method.sig.clone(), + docs: docs.clone(), idx_of_working_set_arg, }); @@ -281,6 +298,7 @@ fn build_rpc_trait( // Build the annotated signature for the intermediate trait let annotated_signature = quote! { + #( #docs )* #attr #intermediate_signature; }; @@ -303,16 +321,22 @@ fn build_rpc_trait( #input }; + let doc_string = format!("Generated RPC trait for {}", type_name); + let where_clause = &generics.where_clause; + let rpc_output = quote! { #simplified_impl #impl_rpc_trait_impl + #rpc_attribute - pub trait #intermediate_trait_name #generics { + #[doc = #doc_string] + pub trait #intermediate_trait_name #generics #where_clause { #(#intermediate_trait_items)* + /// Check the health of the RPC server #[method(name = "health")] fn health(&self) -> ::jsonrpsee::core::RpcResult<()> { Ok(()) @@ -335,12 +359,12 @@ pub(crate) fn rpc_gen( build_rpc_trait(attrs, type_name.clone(), input) } -struct TypeList(pub Punctuated); +struct TypeList(pub Punctuated); impl Parse for TypeList { fn parse(input: ParseStream) -> syn::Result { let content; parenthesized!(content in input); - Ok(TypeList(content.parse_terminated(Type::parse)?)) + Ok(TypeList(content.parse_terminated(syn::Type::parse)?)) } } diff --git a/module-system/sov-modules-macros/tests/all_tests.rs b/module-system/sov-modules-macros/tests/all_tests.rs index 30d1240b1..4111c6f3d 100644 --- a/module-system/sov-modules-macros/tests/all_tests.rs +++ b/module-system/sov-modules-macros/tests/all_tests.rs @@ -4,11 +4,13 @@ fn module_info_tests() { t.pass("tests/module_info/parse.rs"); t.pass("tests/module_info/mod_and_state.rs"); t.pass("tests/module_info/use_address_trait.rs"); + t.pass("tests/module_info/not_supported_attribute.rs"); + t.pass("tests/module_info/custom_codec_builder.rs"); + t.pass("tests/custom_codec_must_be_used.rs"); t.compile_fail("tests/module_info/derive_on_enum_not_supported.rs"); t.compile_fail("tests/module_info/field_missing_attribute.rs"); t.compile_fail("tests/module_info/missing_address.rs"); t.compile_fail("tests/module_info/no_generics.rs"); - t.compile_fail("tests/module_info/not_supported_attribute.rs"); t.compile_fail("tests/module_info/not_supported_type.rs"); t.compile_fail("tests/module_info/second_addr_not_supported.rs"); } @@ -21,14 +23,19 @@ fn module_dispatch_tests() { t.compile_fail("tests/dispatch/missing_serialization.rs"); } -#[cfg(feature = "native")] #[test] fn rpc_tests() { let t = trybuild::TestCases::new(); - t.pass("tests/derive_rpc.rs"); + t.pass("tests/rpc/derive_rpc.rs"); + t.pass("tests/rpc/derive_rpc_with_where.rs"); + t.pass("tests/rpc/expose_rpc.rs"); + t.pass("tests/rpc/expose_rpc_associated_types.rs"); + t.pass("tests/rpc/expose_rpc_associated_types_nested.rs"); + + 
t.compile_fail("tests/rpc/expose_rpc_associated_type_not_static.rs"); + t.compile_fail("tests/rpc/expose_rpc_first_generic_not_context.rs"); } -#[cfg(feature = "native")] #[test] fn cli_wallet_arg_tests() { let t: trybuild::TestCases = trybuild::TestCases::new(); diff --git a/module-system/sov-modules-macros/tests/cli_wallet_arg/derive_enum_mixed_fields.rs b/module-system/sov-modules-macros/tests/cli_wallet_arg/derive_enum_mixed_fields.rs index 031ff8dd7..6844cf511 100644 --- a/module-system/sov-modules-macros/tests/cli_wallet_arg/derive_enum_mixed_fields.rs +++ b/module-system/sov-modules-macros/tests/cli_wallet_arg/derive_enum_mixed_fields.rs @@ -15,7 +15,7 @@ fn main() { let actual_foo = ::CliStringRepr::try_parse_from(&[ "myenum", "foo", "1", "hello", ]) - .expect("parsing must succed") + .expect("parsing must succeed") .into(); assert_eq!(expected_foo, actual_foo); @@ -23,7 +23,7 @@ fn main() { let actual_bar = ::CliStringRepr::try_parse_from(&[ "myenum", "bar", "2", ]) - .expect("parsing must succed") + .expect("parsing must succeed") .into(); assert_eq!(expected_bar, actual_bar); diff --git a/module-system/sov-modules-macros/tests/cli_wallet_arg/derive_wallet.rs b/module-system/sov-modules-macros/tests/cli_wallet_arg/derive_wallet.rs index 1f5906910..58f8ee8b6 100644 --- a/module-system/sov-modules-macros/tests/cli_wallet_arg/derive_wallet.rs +++ b/module-system/sov-modules-macros/tests/cli_wallet_arg/derive_wallet.rs @@ -1,4 +1,5 @@ use clap::Parser; +use sov_modules_api::cli::JsonStringArg; use sov_modules_api::default_context::DefaultContext; use sov_modules_api::macros::{CliWallet, CliWalletArg, DefaultRuntime}; use sov_modules_api::{ @@ -9,7 +10,15 @@ use sov_state::{StateValue, WorkingSet}; pub mod first_test_module { use super::*; - #[derive(CliWalletArg, Debug, PartialEq, borsh::BorshDeserialize, borsh::BorshSerialize)] + #[derive( + CliWalletArg, + Debug, + PartialEq, + borsh::BorshDeserialize, + borsh::BorshSerialize, + serde::Serialize, + serde::Deserialize, + )] pub struct MyStruct { pub first_field: u32, pub str_field: String, @@ -60,7 +69,15 @@ pub mod second_test_module { pub state_in_second_struct: StateValue, } - #[derive(CliWalletArg, Debug, PartialEq, borsh::BorshDeserialize, borsh::BorshSerialize)] + #[derive( + CliWalletArg, + Debug, + PartialEq, + borsh::BorshDeserialize, + borsh::BorshSerialize, + serde::Serialize, + serde::Deserialize, + )] pub enum MyEnum { Foo { first_field: u32, str_field: String }, Bar(u8), @@ -102,25 +119,29 @@ fn main() { first_field: 1, str_field: "hello".to_string(), }); - let actual_foo = - as sov_modules_api::CliWallet>::CliStringRepr::try_parse_from(&[ + let foo_from_cli: RuntimeSubcommand = + >::try_parse_from(&[ "main", "first", - "my-struct", - "1", - "hello", + "--json", + r#"{"first_field": 1, "str_field": "hello"}"#, ]) .expect("parsing must succed") .into(); - assert_eq!(expected_foo, actual_foo); + let foo_ir: RuntimeMessage = foo_from_cli.try_into().unwrap(); + assert_eq!(expected_foo, foo_ir.try_into().unwrap()); let expected_bar = RuntimeCall::second(second_test_module::MyEnum::Bar(2)); - let actual_bar = - as sov_modules_api::CliWallet>::CliStringRepr::try_parse_from(&[ - "main", "second", "bar", "2", + let bar_from_cli: RuntimeSubcommand = + >::try_parse_from(&[ + "main", + "second", + "--json", + r#"{"Bar": 2}"#, ]) .expect("parsing must succed") .into(); + let bar_ir: RuntimeMessage = bar_from_cli.try_into().unwrap(); - assert_eq!(expected_bar, actual_bar); + assert_eq!(expected_bar, bar_ir.try_into().unwrap()); } 
diff --git a/module-system/sov-modules-macros/tests/custom_codec_must_be_used.rs b/module-system/sov-modules-macros/tests/custom_codec_must_be_used.rs new file mode 100644 index 000000000..1dbfb8896 --- /dev/null +++ b/module-system/sov-modules-macros/tests/custom_codec_must_be_used.rs @@ -0,0 +1,51 @@ +use std::panic::catch_unwind; + +use sov_modules_api::default_context::DefaultContext; +use sov_modules_api::{Context, ModuleInfo}; +use sov_state::codec::StateValueCodec; +use sov_state::{DefaultStorageSpec, ProverStorage, StateValue, WorkingSet}; + +#[derive(ModuleInfo)] +struct TestModule +where + C: Context, +{ + #[address] + address: C::Address, + + #[state(codec_builder = "crate::CustomCodec::new")] + state_value: StateValue, +} + +#[derive(Default)] +struct CustomCodec; + +impl CustomCodec { + fn new() -> Self { + Self + } +} + +impl StateValueCodec for CustomCodec { + type Error = String; + + fn encode_value(&self, _value: &V) -> Vec { + unimplemented!() + } + + fn try_decode_value(&self, _bytes: &[u8]) -> Result { + unimplemented!() + } +} + +fn main() { + let tempdir = tempfile::tempdir().unwrap(); + let storage: ProverStorage = ProverStorage::with_path(&tempdir).unwrap(); + let module: TestModule = TestModule::default(); + + catch_unwind(|| { + let mut working_set = WorkingSet::new(storage); + module.state_value.set(&0u32, &mut working_set); + }) + .unwrap_err(); +} diff --git a/module-system/sov-modules-macros/tests/dispatch/derive_dispatch.rs b/module-system/sov-modules-macros/tests/dispatch/derive_dispatch.rs index 41c977d76..38952914d 100644 --- a/module-system/sov-modules-macros/tests/dispatch/derive_dispatch.rs +++ b/module-system/sov-modules-macros/tests/dispatch/derive_dispatch.rs @@ -3,7 +3,9 @@ use modules::third_test_module::{self, ModuleThreeStorable}; use modules::{first_test_module, second_test_module}; use sov_modules_api::default_context::ZkDefaultContext; use sov_modules_api::macros::DefaultRuntime; -use sov_modules_api::{Address, Context, DispatchCall, Genesis, MessageCodec, ModuleInfo}; +use sov_modules_api::{ + Address, Context, DispatchCall, EncodeCall, Genesis, MessageCodec, ModuleInfo, +}; use sov_state::ZkStorage; #[derive(Genesis, DispatchCall, MessageCodec, DefaultRuntime)] @@ -31,7 +33,9 @@ fn main() { let value = 11; { let message = value; - let serialized_message = RT::encode_first_call(message); + let serialized_message = , + >>::encode_call(message); let module = RT::decode_call(&serialized_message).unwrap(); assert_eq!(runtime.module_address(&module), runtime.first.address()); @@ -48,7 +52,9 @@ fn main() { let value = 22; { let message = value; - let serialized_message = RT::encode_second_call(message); + let serialized_message = , + >>::encode_call(message); let module = RT::decode_call(&serialized_message).unwrap(); assert_eq!(runtime.module_address(&module), runtime.second.address()); diff --git a/module-system/sov-modules-macros/tests/dispatch/modules.rs b/module-system/sov-modules-macros/tests/dispatch/modules.rs index 16f28b92d..165bc0c71 100644 --- a/module-system/sov-modules-macros/tests/dispatch/modules.rs +++ b/module-system/sov-modules-macros/tests/dispatch/modules.rs @@ -49,16 +49,16 @@ pub mod second_test_module { use super::*; #[derive(ModuleInfo)] - pub struct SecondTestStruct { + pub struct SecondTestStruct { #[address] - pub address: Ctx::Address, + pub address: C::Address, #[state] pub state_in_second_struct: StateValue, } - impl SecondTestStruct { - pub fn get_state_value(&self, working_set: &mut WorkingSet) -> u8 { + impl 
SecondTestStruct { + pub fn get_state_value(&self, working_set: &mut WorkingSet) -> u8 { self.state_in_second_struct.get(working_set).unwrap() } } diff --git a/module-system/sov-modules-macros/tests/module_info/custom_codec_builder.rs b/module-system/sov-modules-macros/tests/module_info/custom_codec_builder.rs new file mode 100644 index 000000000..e5101a2b2 --- /dev/null +++ b/module-system/sov-modules-macros/tests/module_info/custom_codec_builder.rs @@ -0,0 +1,16 @@ +use sov_modules_api::{Context, ModuleInfo}; +use sov_state::StateMap; + +#[derive(ModuleInfo)] +struct FirstTestStruct +where + C: Context, +{ + #[address] + pub address: C::Address, + + #[state(codec_builder = "sov_state::codec::BorshCodec::default")] + pub state_in_first_struct_1: StateMap, +} + +fn main() {} diff --git a/module-system/sov-modules-macros/tests/module_info/not_supported_attribute.rs b/module-system/sov-modules-macros/tests/module_info/not_supported_attribute.rs index 1f01783ff..a244c7b30 100644 --- a/module-system/sov-modules-macros/tests/module_info/not_supported_attribute.rs +++ b/module-system/sov-modules-macros/tests/module_info/not_supported_attribute.rs @@ -6,7 +6,10 @@ struct TestStruct { #[address] address: C::Address, - #[other] + // Unsupported attributes should be ignored to guarantee compatibility with + // other macros. + #[allow(dead_code)] + #[state] test_state1: StateMap, #[state] diff --git a/module-system/sov-modules-macros/tests/module_info/not_supported_attribute.stderr b/module-system/sov-modules-macros/tests/module_info/not_supported_attribute.stderr deleted file mode 100644 index a36937a55..000000000 --- a/module-system/sov-modules-macros/tests/module_info/not_supported_attribute.stderr +++ /dev/null @@ -1,11 +0,0 @@ -error: Only `#[module]`, `#[state]` or `#[address]` attributes are supported. 
- --> tests/module_info/not_supported_attribute.rs:10:5 - | -10 | test_state1: StateMap, - | ^^^^^^^^^^^ - -error: cannot find attribute `other` in this scope - --> tests/module_info/not_supported_attribute.rs:9:7 - | -9 | #[other] - | ^^^^^ diff --git a/module-system/sov-modules-macros/tests/module_info/second_addr_not_supported.stderr b/module-system/sov-modules-macros/tests/module_info/second_addr_not_supported.stderr index ed877e3be..b7117973c 100644 --- a/module-system/sov-modules-macros/tests/module_info/second_addr_not_supported.stderr +++ b/module-system/sov-modules-macros/tests/module_info/second_addr_not_supported.stderr @@ -1,5 +1,5 @@ -error: The `address` attribute is defined more than once, revisit field: address_1 - --> tests/module_info/second_addr_not_supported.rs:7:5 - | -7 | address_1: C::Address, - | ^^^^^^^^^ +error: The `address` attribute is defined more than once, revisit field: address_2 + --> tests/module_info/second_addr_not_supported.rs:10:5 + | +10 | address_2: C::Address, + | ^^^^^^^^^ diff --git a/module-system/sov-modules-macros/tests/module_info/use_address_trait.rs b/module-system/sov-modules-macros/tests/module_info/use_address_trait.rs index 6948cd397..641d3f338 100644 --- a/module-system/sov-modules-macros/tests/module_info/use_address_trait.rs +++ b/module-system/sov-modules-macros/tests/module_info/use_address_trait.rs @@ -2,7 +2,7 @@ #![allow(unused_imports)] -use sov_modules_api::{AddressTrait, Context, ModuleInfo}; +use sov_modules_api::{Context, ModuleInfo, RollupAddress}; #[derive(ModuleInfo)] struct TestModule { diff --git a/module-system/sov-modules-macros/tests/derive_rpc.rs b/module-system/sov-modules-macros/tests/rpc/derive_rpc.rs similarity index 98% rename from module-system/sov-modules-macros/tests/derive_rpc.rs rename to module-system/sov-modules-macros/tests/rpc/derive_rpc.rs index 760dcdfe8..09c20f729 100644 --- a/module-system/sov-modules-macros/tests/derive_rpc.rs +++ b/module-system/sov-modules-macros/tests/rpc/derive_rpc.rs @@ -107,5 +107,5 @@ fn main() { assert_eq!(result, ()); } - println!("All tests passed!") + println!("All tests passed!"); } diff --git a/module-system/sov-modules-macros/tests/rpc/derive_rpc_with_where.rs b/module-system/sov-modules-macros/tests/rpc/derive_rpc_with_where.rs new file mode 100644 index 000000000..716d8f0e6 --- /dev/null +++ b/module-system/sov-modules-macros/tests/rpc/derive_rpc_with_where.rs @@ -0,0 +1,99 @@ +use std::hash::Hasher; + +use jsonrpsee::core::RpcResult; +use sov_modules_api::default_context::ZkDefaultContext; +use sov_modules_api::macros::rpc_gen; +use sov_modules_api::{Context, ModuleInfo}; +use sov_state::{WorkingSet, ZkStorage}; + +#[derive(ModuleInfo)] +pub struct TestStruct +where + D: std::hash::Hash + + std::clone::Clone + + borsh::BorshSerialize + + borsh::BorshDeserialize + + serde::Serialize + + serde::de::DeserializeOwned + + 'static, +{ + #[address] + pub(crate) address: C::Address, + #[state] + pub(crate) data: ::sov_state::StateValue, +} + +#[rpc_gen(client, server, namespace = "test")] +impl TestStruct +where + D: std::hash::Hash + + std::clone::Clone + + borsh::BorshSerialize + + borsh::BorshDeserialize + + serde::Serialize + + serde::de::DeserializeOwned + + 'static, +{ + #[rpc_method(name = "firstMethod")] + pub fn first_method(&self, _working_set: &mut WorkingSet) -> RpcResult { + Ok(11) + } + + #[rpc_method(name = "secondMethod")] + pub fn second_method( + &self, + result: D, + _working_set: &mut WorkingSet, + ) -> RpcResult<(D, u64)> { + let mut hasher = 
std::collections::hash_map::DefaultHasher::new(); + let value = result.clone(); + value.hash(&mut hasher); + let hashed_value = hasher.finish(); + + Ok((value, hashed_value)) + } +} + +pub struct TestRuntime { + test_struct: TestStruct, +} + +// This is generated by a macro annotating the state transition runner, +// but we do not have that in scope here so generating the struct manually. +struct RpcStorage { + pub storage: C::Storage, +} + +impl TestStructRpcImpl for RpcStorage { + fn get_working_set( + &self, + ) -> ::sov_state::WorkingSet<::Storage> { + ::sov_state::WorkingSet::new(self.storage.clone()) + } +} + +fn main() { + let storage = ZkStorage::new([1u8; 32]); + let r: RpcStorage = RpcStorage { + storage: storage.clone(), + }; + { + let result = + as TestStructRpcServer>::first_method( + &r, + ) + .unwrap(); + assert_eq!(result, 11); + } + + { + let result = + as TestStructRpcServer>::second_method( + &r, 22, + ) + .unwrap(); + assert_eq!(result, (22, 15733059416522709050)); + } + + println!("All tests passed!"); +} diff --git a/module-system/sov-modules-macros/tests/rpc/expose_rpc.rs b/module-system/sov-modules-macros/tests/rpc/expose_rpc.rs new file mode 100644 index 000000000..cc48918c6 --- /dev/null +++ b/module-system/sov-modules-macros/tests/rpc/expose_rpc.rs @@ -0,0 +1,64 @@ +use jsonrpsee::core::RpcResult; +pub use sov_modules_api::default_context::DefaultContext; +use sov_modules_api::macros::{expose_rpc, rpc_gen}; +use sov_modules_api::{CallResponse, Context, Error, Module, ModuleInfo}; +use sov_state::{StateValue, WorkingSet}; + +#[derive(ModuleInfo)] +pub struct QueryModule { + #[address] + pub address: C::Address, + + #[state] + pub data: StateValue, +} + +impl Module for QueryModule { + type Context = C; + type Config = u8; + type CallMessage = u8; + + fn genesis( + &self, + config: &Self::Config, + working_set: &mut WorkingSet, + ) -> Result<(), Error> { + self.data.set(config, working_set); + Ok(()) + } + + fn call( + &self, + msg: Self::CallMessage, + _context: &Self::Context, + working_set: &mut WorkingSet, + ) -> Result { + self.data.set(&msg, working_set); + Ok(CallResponse::default()) + } +} + +#[derive(Debug, Eq, PartialEq, Clone, serde::Serialize, serde::Deserialize)] +pub struct QueryResponse { + pub value: Option, +} + +#[rpc_gen(client, server, namespace = "queryModule")] +impl QueryModule { + #[rpc_method(name = "queryValue")] + pub fn query_value( + &self, + working_set: &mut WorkingSet, + ) -> RpcResult { + Ok(QueryResponse { + value: self.data.get(working_set), + }) + } +} + +#[expose_rpc] +struct Runtime { + pub first: QueryModule, +} + +fn main() {} diff --git a/module-system/sov-modules-macros/tests/rpc/expose_rpc_associated_type_not_static.rs b/module-system/sov-modules-macros/tests/rpc/expose_rpc_associated_type_not_static.rs new file mode 100644 index 000000000..b71e8a18e --- /dev/null +++ b/module-system/sov-modules-macros/tests/rpc/expose_rpc_associated_type_not_static.rs @@ -0,0 +1,129 @@ +use jsonrpsee::core::RpcResult; +use sov_modules_api::default_context::ZkDefaultContext; +use sov_modules_api::macros::{expose_rpc, rpc_gen, DefaultRuntime}; +use sov_modules_api::{ + Address, CallResponse, Context, DispatchCall, EncodeCall, Error, Genesis, MessageCodec, Module, + ModuleInfo, +}; +use sov_state::{StateValue, WorkingSet, ZkStorage}; + +pub trait TestSpec { + type Data: Data; +} + +pub trait Data: + Clone + + Eq + + PartialEq + + std::fmt::Debug + + serde::Serialize + + serde::de::DeserializeOwned + + borsh::BorshSerialize + + 
borsh::BorshDeserialize + + 'static +{ +} + +impl Data for u32 {} + +pub mod my_module { + use super::*; + + #[derive(ModuleInfo)] + pub struct QueryModule { + #[address] + pub address: C::Address, + + #[state] + pub data: StateValue, + } + + impl Module for QueryModule + where + D: Data, + { + type Context = C; + type Config = D; + type CallMessage = D; + + fn genesis( + &self, + config: &Self::Config, + working_set: &mut WorkingSet, + ) -> Result<(), Error> { + self.data.set(config, working_set); + Ok(()) + } + + fn call( + &self, + msg: Self::CallMessage, + _context: &Self::Context, + working_set: &mut WorkingSet, + ) -> Result { + self.data.set(&msg, working_set); + Ok(CallResponse::default()) + } + } + + pub mod query { + + use super::*; + use crate::my_module::QueryModule; + + #[derive(Debug, Eq, PartialEq, Clone, serde::Serialize, serde::Deserialize)] + pub struct QueryResponse { + pub value: Option, + } + + #[rpc_gen(client, server, namespace = "queryModule")] + impl QueryModule + where + C: Context, + { + #[rpc_method(name = "queryValue")] + pub fn query_value( + &self, + working_set: &mut WorkingSet, + ) -> RpcResult { + let value = self.data.get(working_set).map(|d| format!("{:?}", d)); + Ok(QueryResponse { value }) + } + } + } +} + +use my_module::query::{QueryModuleRpcImpl, QueryModuleRpcServer}; + +#[expose_rpc] +#[derive(Genesis, DispatchCall, MessageCodec, DefaultRuntime)] +#[serialization(borsh::BorshDeserialize, borsh::BorshSerialize)] +struct Runtime { + pub first: my_module::QueryModule, +} + +struct ActualSpec; + +impl TestSpec for ActualSpec { + type Data = u32; +} + +fn main() { + type C = ZkDefaultContext; + type RT = Runtime; + let storage = ZkStorage::new([1u8; 32]); + let working_set = &mut WorkingSet::new(storage); + let runtime = &mut Runtime::::default(); + let config = GenesisConfig::new(22); + runtime.genesis(&config, working_set).unwrap(); + + let message: u32 = 33; + let serialized_message = + >>::encode_call(message); + let module = RT::decode_call(&serialized_message).unwrap(); + let context = C::new(Address::try_from([11; 32].as_ref()).unwrap()); + + let _ = runtime + .dispatch_call(module, working_set, &context) + .unwrap(); +} diff --git a/module-system/sov-modules-macros/tests/rpc/expose_rpc_associated_type_not_static.stderr b/module-system/sov-modules-macros/tests/rpc/expose_rpc_associated_type_not_static.stderr new file mode 100644 index 000000000..fd207f1be --- /dev/null +++ b/module-system/sov-modules-macros/tests/rpc/expose_rpc_associated_type_not_static.stderr @@ -0,0 +1,11 @@ +error[E0310]: the parameter type `S` may not live long enough + --> tests/rpc/expose_rpc_associated_type_not_static.rs:98:1 + | +98 | #[expose_rpc] + | ^^^^^^^^^^^^^ ...so that the type `S` will meet its required lifetime bounds + | + = note: this error originates in the attribute macro `expose_rpc` (in Nightly builds, run with -Z macro-backtrace for more info) +help: consider adding an explicit lifetime bound... 
+ | +101| struct Runtime { + | +++++++++ diff --git a/module-system/sov-modules-macros/tests/rpc/expose_rpc_associated_types.rs b/module-system/sov-modules-macros/tests/rpc/expose_rpc_associated_types.rs new file mode 100644 index 000000000..352553613 --- /dev/null +++ b/module-system/sov-modules-macros/tests/rpc/expose_rpc_associated_types.rs @@ -0,0 +1,129 @@ +use jsonrpsee::core::RpcResult; +use sov_modules_api::default_context::ZkDefaultContext; +use sov_modules_api::macros::{expose_rpc, rpc_gen, DefaultRuntime}; +use sov_modules_api::{ + Address, CallResponse, Context, DispatchCall, EncodeCall, Error, Genesis, MessageCodec, Module, + ModuleInfo, +}; +use sov_state::{StateValue, WorkingSet, ZkStorage}; + +pub trait TestSpec: 'static { + type Data: Data; +} + +pub trait Data: + Clone + + Eq + + PartialEq + + std::fmt::Debug + + serde::Serialize + + serde::de::DeserializeOwned + + borsh::BorshSerialize + + borsh::BorshDeserialize + + 'static +{ +} + +impl Data for u32 {} + +pub mod my_module { + use super::*; + + #[derive(ModuleInfo)] + pub struct QueryModule { + #[address] + pub address: C::Address, + + #[state] + pub data: StateValue, + } + + impl Module for QueryModule + where + D: Data, + { + type Context = C; + type Config = D; + type CallMessage = D; + + fn genesis( + &self, + config: &Self::Config, + working_set: &mut WorkingSet, + ) -> Result<(), Error> { + self.data.set(config, working_set); + Ok(()) + } + + fn call( + &self, + msg: Self::CallMessage, + _context: &Self::Context, + working_set: &mut WorkingSet, + ) -> Result { + self.data.set(&msg, working_set); + Ok(CallResponse::default()) + } + } + + pub mod query { + + use super::*; + use crate::my_module::QueryModule; + + #[derive(Debug, Eq, PartialEq, Clone, serde::Serialize, serde::Deserialize)] + pub struct QueryResponse { + pub value: Option, + } + + #[rpc_gen(client, server, namespace = "queryModule")] + impl QueryModule + where + C: Context, + { + #[rpc_method(name = "queryValue")] + pub fn query_value( + &self, + working_set: &mut WorkingSet, + ) -> RpcResult { + let value = self.data.get(working_set).map(|d| format!("{:?}", d)); + Ok(QueryResponse { value }) + } + } + } +} + +use my_module::query::{QueryModuleRpcImpl, QueryModuleRpcServer}; + +#[expose_rpc] +#[derive(Genesis, DispatchCall, MessageCodec, DefaultRuntime)] +#[serialization(borsh::BorshDeserialize, borsh::BorshSerialize)] +struct Runtime { + pub first: my_module::QueryModule, +} + +struct ActualSpec; + +impl TestSpec for ActualSpec { + type Data = u32; +} + +fn main() { + type C = ZkDefaultContext; + type RT = Runtime; + let storage = ZkStorage::new([1u8; 32]); + let working_set = &mut WorkingSet::new(storage); + let runtime = &mut Runtime::::default(); + let config = GenesisConfig::new(22); + runtime.genesis(&config, working_set).unwrap(); + + let message: u32 = 33; + let serialized_message = + >>::encode_call(message); + let module = RT::decode_call(&serialized_message).unwrap(); + let context = C::new(Address::try_from([11; 32].as_ref()).unwrap()); + + let _ = runtime + .dispatch_call(module, working_set, &context) + .unwrap(); +} diff --git a/module-system/sov-modules-macros/tests/rpc/expose_rpc_associated_types_nested.rs b/module-system/sov-modules-macros/tests/rpc/expose_rpc_associated_types_nested.rs new file mode 100644 index 000000000..87242a9ea --- /dev/null +++ b/module-system/sov-modules-macros/tests/rpc/expose_rpc_associated_types_nested.rs @@ -0,0 +1,140 @@ +use jsonrpsee::core::RpcResult; +use 
sov_modules_api::default_context::ZkDefaultContext; +use sov_modules_api::macros::{expose_rpc, rpc_gen, DefaultRuntime}; +use sov_modules_api::{ + Address, CallResponse, Context, DispatchCall, EncodeCall, Error, Genesis, MessageCodec, Module, + ModuleInfo, +}; +use sov_state::{StateValue, WorkingSet, ZkStorage}; + +pub trait Message: 'static { + type Caller: std::fmt::Display; + type Data: Data; +} +pub trait TestSpec: 'static { + type Message: Message; +} + +pub trait Data: + Clone + + Eq + + PartialEq + + std::fmt::Debug + + serde::Serialize + + serde::de::DeserializeOwned + + borsh::BorshSerialize + + borsh::BorshDeserialize + + 'static +{ +} + +impl Data for u32 {} + +pub mod my_module { + use super::*; + + #[derive(ModuleInfo)] + pub struct QueryModule { + #[address] + pub address: C::Address, + + #[state] + pub data: StateValue, + } + + impl Module for QueryModule + where + D: Data, + { + type Context = C; + type Config = D; + type CallMessage = D; + + fn genesis( + &self, + config: &Self::Config, + working_set: &mut WorkingSet, + ) -> Result<(), Error> { + self.data.set(config, working_set); + Ok(()) + } + + fn call( + &self, + msg: Self::CallMessage, + _context: &Self::Context, + working_set: &mut WorkingSet, + ) -> Result { + self.data.set(&msg, working_set); + Ok(CallResponse::default()) + } + } + + pub mod query { + + use super::*; + use crate::my_module::QueryModule; + + #[derive(Debug, Eq, PartialEq, Clone, serde::Serialize, serde::Deserialize)] + pub struct QueryResponse { + pub value: Option, + } + + #[rpc_gen(client, server, namespace = "queryModule")] + impl QueryModule + where + C: Context, + { + #[rpc_method(name = "queryValue")] + pub fn query_value( + &self, + working_set: &mut WorkingSet, + ) -> RpcResult { + let value = self.data.get(working_set).map(|d| format!("{:?}", d)); + Ok(QueryResponse { value }) + } + } + } +} + +use my_module::query::{QueryModuleRpcImpl, QueryModuleRpcServer}; + +#[expose_rpc] +#[derive(Genesis, DispatchCall, MessageCodec, DefaultRuntime)] +#[serialization(borsh::BorshDeserialize, borsh::BorshSerialize)] +struct Runtime { + pub first: my_module::QueryModule::Message as Message>::Data>, +} + +struct ActualMessage; + +impl Message for ActualMessage { + type Caller = String; + type Data = u32; +} + +struct ActualSpec; + +impl TestSpec for ActualSpec { + type Message = ActualMessage; +} + +fn main() { + type C = ZkDefaultContext; + type RT = Runtime; + let storage = ZkStorage::new([1u8; 32]); + let working_set = &mut WorkingSet::new(storage); + let runtime = &mut Runtime::::default(); + let config = GenesisConfig::new(22); + runtime.genesis(&config, working_set).unwrap(); + + let message: u32 = 33; + let serialized_message = + >>::encode_call(message); + let module = RT::decode_call(&serialized_message).unwrap(); + let context = C::new(Address::try_from([11; 32].as_ref()).unwrap()); + + let _ = runtime + .dispatch_call(module, working_set, &context) + .unwrap(); +} diff --git a/module-system/sov-modules-macros/tests/rpc/expose_rpc_first_generic_not_context.rs b/module-system/sov-modules-macros/tests/rpc/expose_rpc_first_generic_not_context.rs new file mode 100644 index 000000000..70af8d1aa --- /dev/null +++ b/module-system/sov-modules-macros/tests/rpc/expose_rpc_first_generic_not_context.rs @@ -0,0 +1,129 @@ +use jsonrpsee::core::RpcResult; +use sov_modules_api::default_context::ZkDefaultContext; +use sov_modules_api::macros::{expose_rpc, rpc_gen, DefaultRuntime}; +use sov_modules_api::{ + Address, CallResponse, Context, DispatchCall, 
EncodeCall, Error, Genesis, MessageCodec, Module, + ModuleInfo, +}; +use sov_state::{StateValue, WorkingSet, ZkStorage}; + +pub trait TestSpec: 'static { + type Data: Data; +} + +pub trait Data: + Clone + + Eq + + PartialEq + + std::fmt::Debug + + serde::Serialize + + serde::de::DeserializeOwned + + borsh::BorshSerialize + + borsh::BorshDeserialize + + 'static +{ +} + +impl Data for u32 {} + +pub mod my_module { + use super::*; + + #[derive(ModuleInfo)] + pub struct QueryModule { + #[address] + pub address: C::Address, + + #[state] + pub data: StateValue, + } + + impl Module for QueryModule + where + D: Data, + { + type Context = C; + type Config = D; + type CallMessage = D; + + fn genesis( + &self, + config: &Self::Config, + working_set: &mut WorkingSet, + ) -> Result<(), Error> { + self.data.set(config, working_set); + Ok(()) + } + + fn call( + &self, + msg: Self::CallMessage, + _context: &Self::Context, + working_set: &mut WorkingSet, + ) -> Result { + self.data.set(&msg, working_set); + Ok(CallResponse::default()) + } + } + + pub mod query { + + use super::*; + use crate::my_module::QueryModule; + + #[derive(Debug, Eq, PartialEq, Clone, serde::Serialize, serde::Deserialize)] + pub struct QueryResponse { + pub value: Option, + } + + #[rpc_gen(client, server, namespace = "queryModule")] + impl QueryModule + where + C: Context, + { + #[rpc_method(name = "queryValue")] + pub fn query_value( + &self, + working_set: &mut WorkingSet, + ) -> RpcResult { + let value = self.data.get(working_set).map(|d| format!("{:?}", d)); + Ok(QueryResponse { value }) + } + } + } +} + +use my_module::query::{QueryModuleRpcImpl, QueryModuleRpcServer}; + +#[expose_rpc] +#[derive(Genesis, DispatchCall, MessageCodec, DefaultRuntime)] +#[serialization(borsh::BorshDeserialize, borsh::BorshSerialize)] +struct Runtime { + pub first: my_module::QueryModule, +} + +struct ActualSpec; + +impl TestSpec for ActualSpec { + type Data = u32; +} + +fn main() { + type C = ZkDefaultContext; + type RT = Runtime; + let storage = ZkStorage::new([1u8; 32]); + let working_set = &mut WorkingSet::new(storage); + let runtime = &mut Runtime::::default(); + let config = GenesisConfig::new(22); + runtime.genesis(&config, working_set).unwrap(); + + let message: u32 = 33; + let serialized_message = + >>::encode_call(message); + let module = RT::decode_call(&serialized_message).unwrap(); + let context = C::new(Address::try_from([11; 32].as_ref()).unwrap()); + + let _ = runtime + .dispatch_call(module, working_set, &context) + .unwrap(); +} diff --git a/module-system/sov-modules-macros/tests/rpc/expose_rpc_first_generic_not_context.stderr b/module-system/sov-modules-macros/tests/rpc/expose_rpc_first_generic_not_context.stderr new file mode 100644 index 000000000..f0d5ed575 --- /dev/null +++ b/module-system/sov-modules-macros/tests/rpc/expose_rpc_first_generic_not_context.stderr @@ -0,0 +1,7 @@ +error[E0220]: associated type `Storage` not found for `S` + --> tests/rpc/expose_rpc_first_generic_not_context.rs:98:1 + | +98 | #[expose_rpc] + | ^^^^^^^^^^^^^ associated type `Storage` not found + | + = note: this error originates in the attribute macro `expose_rpc` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/module-system/sov-modules-stf-template/Cargo.toml b/module-system/sov-modules-stf-template/Cargo.toml index 577470e7a..46803d44a 100644 --- a/module-system/sov-modules-stf-template/Cargo.toml +++ b/module-system/sov-modules-stf-template/Cargo.toml @@ -13,6 +13,7 @@ resolver = "2" [dependencies] anyhow = { workspace 
= true } +thiserror = { workspace = true } borsh = { workspace = true } serde = { workspace = true, features = ["derive"] } tracing = { workspace = true } @@ -20,5 +21,17 @@ jmt = { workspace = true } hex = { workspace = true } sov-rollup-interface = { path = "../../rollup-interface", version = "0.1" } -sov-state = { path = "../sov-state", version = "0.1", default-features = false } -sov-modules-api = { path = "../sov-modules-api", version = "0.1", default-features = false } +sov-state = { path = "../sov-state", version = "0.1" } +sov-modules-api = { path = "../sov-modules-api", version = "0.1" } +zk-cycle-macros = { path = "../../utils/zk-cycle-macros", optional = true } +risc0-zkvm = { version = "0.16", default-features = false, features = ["std"], optional = true } +risc0-zkvm-platform = { version = "0.16", optional = true } +zk-cycle-utils = { path = "../../utils/zk-cycle-utils", optional = true } + +[features] +bench = ["zk-cycle-macros/bench", "zk-cycle-utils", "risc0-zkvm", "risc0-zkvm-platform"] +default = [] +native = [ + "sov-state/native", + "sov-modules-api/native", +] \ No newline at end of file diff --git a/module-system/sov-modules-stf-template/README.md b/module-system/sov-modules-stf-template/README.md index 20f0078f8..149e2aa4b 100644 --- a/module-system/sov-modules-stf-template/README.md +++ b/module-system/sov-modules-stf-template/README.md @@ -4,28 +4,6 @@ This crate contains an implementation of a `StateTransitionFunction` called `AppTemplate` that is specifically designed to work with the Module System. The `AppTemplate` relies on a set of traits that, when combined, define the logic for transitioning the rollup state. -```rust ignore -pub struct AppTemplate { - pub current_storage: C::Storage, - pub runtime: RT, - pub (crate) working_set: Option>, - phantom_vm: PhantomData, -} - -impl AppTemplate -where - RT: DispatchCall - + Genesis - + TxHooks - + ApplyBlobHooks, -{ - - pub fn new(storage: C::Storage, runtime: RT) -> Self { - ... - } - ... -} -``` 1. The `DispatchCall` trait is responsible for decoding serialized messages and forwarding them to the appropriate module. 1. The `Genesis` trait handles the initialization process of the rollup. It sets up the initial state upon the rollup deployment. 
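A hedged sketch of how those two traits are driven in practice, mirroring the dispatch and `expose_rpc` tests added earlier in this patch (`Runtime`, `MyModule`, and `GenesisConfig` stand in for the macro-generated test types and are not defined here; the `22` and `33` values are simply the ones those tests use):

```rust
use sov_modules_api::default_context::ZkDefaultContext;
use sov_modules_api::{Address, Context, DispatchCall, EncodeCall, Genesis};
use sov_state::{WorkingSet, ZkStorage};

fn genesis_and_dispatch_sketch() {
    type C = ZkDefaultContext;
    type RT = Runtime<C>; // derives Genesis, DispatchCall and MessageCodec

    let storage = ZkStorage::new([1u8; 32]);
    let working_set = &mut WorkingSet::new(storage);
    let runtime = &mut RT::default();

    // `Genesis` initializes the rollup state from the per-module configs.
    runtime
        .genesis(&GenesisConfig::new(22), working_set)
        .expect("genesis must succeed");

    // `DispatchCall` decodes a serialized message and forwards it to the
    // module that owns it.
    let serialized = <RT as EncodeCall<MyModule<C>>>::encode_call(33u32);
    let call = RT::decode_call(&serialized).expect("call must decode");
    let context = C::new(Address::try_from([11; 32].as_ref()).unwrap());
    runtime
        .dispatch_call(call, working_set, &context)
        .expect("dispatch must succeed");
}
```

`AppTemplate::new(storage, runtime)`, shown in the next file, wires a runtime of this shape together with its backing storage.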
diff --git a/module-system/sov-modules-stf-template/src/app_template.rs b/module-system/sov-modules-stf-template/src/app_template.rs index 8f2d828e5..c210e57b4 100644 --- a/module-system/sov-modules-stf-template/src/app_template.rs +++ b/module-system/sov-modules-stf-template/src/app_template.rs @@ -2,43 +2,51 @@ use std::marker::PhantomData; use borsh::BorshDeserialize; use sov_modules_api::{Context, DispatchCall}; -use sov_rollup_interface::da::{BlobReaderTrait, CountedBufReader}; +use sov_rollup_interface::da::{BlobReaderTrait, CountedBufReader, DaSpec}; use sov_rollup_interface::stf::{BatchReceipt, TransactionReceipt}; -use sov_rollup_interface::Buf; +use sov_rollup_interface::{BasicAddress, Buf}; use sov_state::StateCheckpoint; use tracing::{debug, error}; use crate::tx_verifier::{verify_txs_stateless, TransactionAndRawHash}; use crate::{Batch, Runtime, SequencerOutcome, SlashingReason, TxEffect}; -type ApplyBatchResult = Result; +type ApplyBatchResult = Result>; + +#[allow(type_alias_bounds)] +type ApplyBatch = ApplyBatchResult< + BatchReceipt::Address>, TxEffect>, + ::Address, +>; +#[cfg(all(target_os = "zkvm", feature = "bench"))] +use zk_cycle_macros::cycle_tracker; /// An implementation of the /// [`StateTransitionFunction`](sov_rollup_interface::stf::StateTransitionFunction) /// that is specifically designed to work with the module-system. -pub struct AppTemplate { +pub struct AppTemplate> { /// State storage used by the rollup. pub current_storage: C::Storage, /// The runtime includes all the modules that the rollup supports. pub runtime: RT, pub(crate) checkpoint: Option>, phantom_vm: PhantomData, - phantom_blob: PhantomData, + phantom_da: PhantomData, } -pub(crate) enum ApplyBatchError { +pub(crate) enum ApplyBatchError { // Contains batch hash Ignored([u8; 32]), Slashed { // Contains batch hash hash: [u8; 32], reason: SlashingReason, - sequencer_da_address: Vec, + sequencer_da_address: A, }, } -impl From for BatchReceipt { - fn from(value: ApplyBatchError) -> Self { +impl From> for BatchReceipt, TxEffect> { + fn from(value: ApplyBatchError) -> Self { match value { ApplyBatchError::Ignored(hash) => BatchReceipt { batch_hash: hash, @@ -61,9 +69,11 @@ impl From for BatchReceipt { } } -impl AppTemplate +impl AppTemplate where - RT: Runtime, + C: Context, + Da: DaSpec, + RT: Runtime, { /// [`AppTemplate`] constructor. pub fn new(storage: C::Storage, runtime: RT) -> Self { @@ -72,14 +82,12 @@ where current_storage: storage, checkpoint: None, phantom_vm: PhantomData, - phantom_blob: PhantomData, + phantom_da: PhantomData, } } - pub(crate) fn apply_blob( - &mut self, - blob: &mut B, - ) -> ApplyBatchResult> { + #[cfg_attr(all(target_os = "zkvm", feature = "bench"), cycle_tracker)] + pub(crate) fn apply_blob(&mut self, blob: &mut Da::BlobTransaction) -> ApplyBatch { debug!( "Applying batch from sequencer: 0x{}", hex::encode(blob.sender()) @@ -100,6 +108,7 @@ where ); // TODO: will be covered in https://github.com/Sovereign-Labs/sovereign-sdk/issues/421 self.checkpoint = Some(batch_workspace.revert()); + return Err(ApplyBatchError::Ignored(blob.hash())); } batch_workspace = batch_workspace.checkpoint().to_revertable(); @@ -112,7 +121,7 @@ where Err(reason) => { // Explicitly revert on slashing, even though nothing has changed in pre_process. 
let mut batch_workspace = batch_workspace.revert().to_revertable(); - let sequencer_da_address = blob.sender().as_ref().to_vec(); + let sequencer_da_address = blob.sender(); let sequencer_outcome = SequencerOutcome::Slashed { reason, sequencer_da_address: sequencer_da_address.clone(), @@ -222,6 +231,7 @@ where }; self.checkpoint = Some(batch_workspace.checkpoint()); + Ok(BatchReceipt { batch_hash: blob.hash(), tx_receipts, diff --git a/module-system/sov-modules-stf-template/src/lib.rs b/module-system/sov-modules-stf-template/src/lib.rs index cf5c1f3cd..5a2f3c08d 100644 --- a/module-system/sov-modules-stf-template/src/lib.rs +++ b/module-system/sov-modules-stf-template/src/lib.rs @@ -6,21 +6,33 @@ mod tx_verifier; pub use app_template::AppTemplate; pub use batch::Batch; -use sov_modules_api::hooks::{ApplyBlobHooks, TxHooks}; +use sov_modules_api::capabilities::BlobSelector; +use sov_modules_api::hooks::{ApplyBlobHooks, SlotHooks, TxHooks}; use sov_modules_api::{Context, DispatchCall, Genesis, Spec}; -use sov_rollup_interface::da::BlobReaderTrait; +use sov_rollup_interface::da::{BlobReaderTrait, DaSpec}; +use sov_rollup_interface::services::da::SlotData; use sov_rollup_interface::stf::{SlotResult, StateTransitionFunction}; use sov_rollup_interface::zk::Zkvm; -use sov_state::{StateCheckpoint, Storage}; +use sov_rollup_interface::BasicAddress; +use sov_state::{StateCheckpoint, Storage, WorkingSet}; use tracing::info; pub use tx_verifier::RawTx; +#[cfg(all(target_os = "zkvm", feature = "bench"))] +use zk_cycle_macros::cycle_tracker; /// This trait has to be implemented by a runtime in order to be used in `AppTemplate`. -pub trait Runtime: +pub trait Runtime: DispatchCall + Genesis + TxHooks - + ApplyBlobHooks + + SlotHooks + + ApplyBlobHooks< + Da::BlobTransaction, + Context = C, + BlobResult = SequencerOutcome< + <::BlobTransaction as BlobReaderTrait>::Address, + >, + > + BlobSelector { } @@ -34,20 +46,17 @@ pub enum TxEffect { } #[derive(Debug, Clone, PartialEq, Eq, serde::Serialize, serde::Deserialize)] -// TODO: Should be generic for Address for pretty printing https://github.com/Sovereign-Labs/sovereign-sdk/issues/465 /// Represents the different outcomes that can occur for a sequencer after batch processing. -pub enum SequencerOutcome { +pub enum SequencerOutcome { /// Sequencer receives reward amount in defined token and can withdraw its deposit Rewarded(u64), /// Sequencer loses its deposit and receives no reward Slashed { /// Reason why sequencer was slashed. reason: SlashingReason, - // Keep this comment for so it doesn't need to investigate serde issue again. - // https://github.com/Sovereign-Labs/sovereign-sdk/issues/465 - // #[serde(bound(deserialize = ""))] + #[serde(bound(deserialize = ""))] /// Sequencer address on DA. - sequencer_da_address: Vec, + sequencer_da_address: A, }, /// Batch was ignored, sequencer deposit left untouched. 
Ignored, @@ -64,28 +73,49 @@ pub enum SlashingReason { InvalidTransactionEncoding, } -impl AppTemplate { - fn begin_slot(&mut self, witness: <::Storage as Storage>::Witness) { - self.checkpoint = Some(StateCheckpoint::with_witness( - self.current_storage.clone(), - witness, - )); +impl AppTemplate +where + C: Context, + Vm: Zkvm, + Da: DaSpec, + RT: Runtime, +{ + #[cfg_attr(all(target_os = "zkvm", feature = "bench"), cycle_tracker)] + fn begin_slot( + &mut self, + slot_data: &impl SlotData, + witness: >::Witness, + ) { + let state_checkpoint = StateCheckpoint::with_witness(self.current_storage.clone(), witness); + let mut working_set = state_checkpoint.to_revertable(); + + self.runtime.begin_slot_hook(slot_data, &mut working_set); + + self.checkpoint = Some(working_set.checkpoint()); } + #[cfg_attr(all(target_os = "zkvm", feature = "bench"), cycle_tracker)] fn end_slot(&mut self) -> (jmt::RootHash, <::Storage as Storage>::Witness) { let (cache_log, witness) = self.checkpoint.take().unwrap().freeze(); - let root_hash = self + let (root_hash, authenticated_node_batch) = self .current_storage - .validate_and_commit(cache_log, &witness) + .compute_state_update(cache_log, &witness) .expect("jellyfish merkle tree update must succeed"); + + let mut working_set = WorkingSet::new(self.current_storage.clone()); + self.runtime.end_slot_hook(root_hash, &mut working_set); + + self.current_storage.commit(&authenticated_node_batch); (jmt::RootHash(root_hash), witness) } } -impl StateTransitionFunction - for AppTemplate +impl StateTransitionFunction for AppTemplate where - RT: Runtime, + C: Context, + Da: DaSpec, + Vm: Zkvm, + RT: Runtime, { type StateRoot = jmt::RootHash; @@ -93,11 +123,13 @@ where type TxReceiptContents = TxEffect; - type BatchReceiptContents = SequencerOutcome; + type BatchReceiptContents = SequencerOutcome<::Address>; type Witness = <::Storage as Storage>::Witness; - fn init_chain(&mut self, params: Self::InitialState) { + type Condition = Da::ValidityCondition; + + fn init_chain(&mut self, params: Self::InitialState) -> jmt::RootHash { let mut working_set = StateCheckpoint::new(self.current_storage.clone()).to_revertable(); self.runtime @@ -105,14 +137,19 @@ where .expect("module initialization must succeed"); let (log, witness) = working_set.checkpoint().freeze(); - self.current_storage - .validate_and_commit(log, &witness) + let (genesis_hash, node_batch) = self + .current_storage + .compute_state_update(log, &witness) .expect("Storage update must succeed"); + + self.current_storage.commit(&node_batch); + jmt::RootHash(genesis_hash) } - fn apply_slot<'a, I>( + fn apply_slot<'a, I, Data>( &mut self, witness: Self::Witness, + slot_data: &Data, blobs: I, ) -> SlotResult< Self::StateRoot, @@ -121,16 +158,40 @@ where Self::Witness, > where - I: IntoIterator, + I: IntoIterator, + Data: SlotData, { - self.begin_slot(witness); + self.begin_slot(slot_data, witness); + + // Initialize batch workspace + let mut batch_workspace = self + .checkpoint + .take() + .expect("Working_set was initialized in begin_slot") + .to_revertable(); + + let selected_blobs = self + .runtime + .get_blobs_for_this_slot(blobs, &mut batch_workspace) + .expect("blob selection must succeed, probably serialization failed"); + + info!( + "Selected {} blob(s) for execution in current slot", + selected_blobs.len() + ); + + self.checkpoint = Some(batch_workspace.checkpoint()); let mut batch_receipts = vec![]; - for (blob_idx, blob) in blobs.into_iter().enumerate() { - let batch_receipt = 
self.apply_blob(blob).unwrap_or_else(Into::into); + + for (blob_idx, mut blob) in selected_blobs.into_iter().enumerate() { + let batch_receipt = self + .apply_blob(blob.as_mut_ref()) + .unwrap_or_else(Into::into); info!( - "blob #{} with blob_hash 0x{} has been applied with #{} transactions, sequencer outcome {:?}", + "blob #{} from sequencer {} with blob_hash 0x{} has been applied with #{} transactions, sequencer outcome {:?}", blob_idx, + blob.as_mut_ref().sender(), hex::encode(batch_receipt.batch_hash), batch_receipt.tx_receipts.len(), batch_receipt.inner @@ -143,15 +204,20 @@ where tx_receipt.receipt ); } - batch_receipts.push(batch_receipt); + batch_receipts.push(batch_receipt.clone()); } let (state_root, witness) = self.end_slot(); - SlotResult { state_root, batch_receipts, witness, } } + + fn get_current_state_root(&self) -> anyhow::Result { + self.current_storage + .get_state_root(&Default::default()) + .map(jmt::RootHash) + } } diff --git a/module-system/sov-state/Cargo.toml b/module-system/sov-state/Cargo.toml index 5d2d181af..12950032e 100644 --- a/module-system/sov-state/Cargo.toml +++ b/module-system/sov-state/Cargo.toml @@ -13,7 +13,9 @@ resolver = "2" [dependencies] anyhow = { workspace = true } +arbitrary = { workspace = true, optional = true } borsh = { workspace = true } +bcs = { workspace = true } serde = { workspace = true } thiserror = { workspace = true } sov-rollup-interface = { path = "../../rollup-interface", version = "0.1" } @@ -23,9 +25,14 @@ jmt = { workspace = true } hex = { workspace = true } sha2 = { workspace = true } +zk-cycle-macros = { path = "../../utils/zk-cycle-macros", optional = true } +risc0-zkvm = { version = "0.16", default-features = false, features = ["std"], optional = true } +risc0-zkvm-platform = { version = "0.16", optional = true } + [dev-dependencies] tempfile = { workspace = true } [features] -default = ["native"] +bench = ["zk-cycle-macros", "risc0-zkvm", "risc0-zkvm-platform"] +default = [] native = ["sov-db"] diff --git a/module-system/sov-state/src/borsh_codec.rs b/module-system/sov-state/src/borsh_codec.rs new file mode 100644 index 000000000..20e139d35 --- /dev/null +++ b/module-system/sov-state/src/borsh_codec.rs @@ -0,0 +1,35 @@ +use crate::codec::{StateKeyCodec, StateValueCodec}; + +/// A [`StateCodec`] that uses [`borsh`] for all keys and values. +#[derive(Debug, Default, PartialEq, Eq, Clone, borsh::BorshDeserialize, borsh::BorshSerialize)] +pub struct BorshCodec; + +impl StateKeyCodec for BorshCodec +where + K: borsh::BorshSerialize + borsh::BorshDeserialize, +{ + type KeyError = std::io::Error; + + fn encode_key(&self, key: &K) -> Vec { + key.try_to_vec().expect("Failed to serialize key") + } + + fn try_decode_key(&self, bytes: &[u8]) -> Result { + K::try_from_slice(bytes) + } +} + +impl StateValueCodec for BorshCodec +where + V: borsh::BorshSerialize + borsh::BorshDeserialize, +{ + type ValueError = std::io::Error; + + fn encode_value(&self, value: &V) -> Vec { + value.try_to_vec().expect("Failed to serialize value") + } + + fn try_decode_value(&self, bytes: &[u8]) -> Result { + V::try_from_slice(bytes) + } +} diff --git a/module-system/sov-state/src/codec/bcs_codec.rs b/module-system/sov-state/src/codec/bcs_codec.rs new file mode 100644 index 000000000..2a8fc467f --- /dev/null +++ b/module-system/sov-state/src/codec/bcs_codec.rs @@ -0,0 +1,20 @@ +use crate::codec::StateValueCodec; + +/// A [`StateCodec`] that uses [`bcs`] for all keys and values. 
+#[derive(Debug, Default, PartialEq, Eq, Clone)] +pub struct BcsCodec; + +impl<V> StateValueCodec<V> for BcsCodec +where + V: serde::Serialize + for<'a> serde::Deserialize<'a>, +{ + type Error = bcs::Error; + + fn encode_value(&self, value: &V) -> Vec<u8> { + bcs::to_bytes(value).expect("Failed to serialize value") + } + + fn try_decode_value(&self, bytes: &[u8]) -> Result<V, Self::Error> { + bcs::from_bytes(bytes) + } +} diff --git a/module-system/sov-state/src/codec/borsh_codec.rs b/module-system/sov-state/src/codec/borsh_codec.rs new file mode 100644 index 000000000..4647755de --- /dev/null +++ b/module-system/sov-state/src/codec/borsh_codec.rs @@ -0,0 +1,20 @@ +use crate::codec::StateValueCodec; + +/// A [`StateValueCodec`] that uses [`borsh`] for all values. +#[derive(Debug, Default, PartialEq, Eq, Clone, borsh::BorshDeserialize, borsh::BorshSerialize)] +pub struct BorshCodec; + +impl<V> StateValueCodec<V> for BorshCodec +where + V: borsh::BorshSerialize + borsh::BorshDeserialize, +{ + type Error = std::io::Error; + + fn encode_value(&self, value: &V) -> Vec<u8> { + value.try_to_vec().expect("Failed to serialize value") + } + + fn try_decode_value(&self, bytes: &[u8]) -> Result<V, Self::Error> { + V::try_from_slice(bytes) + } +} diff --git a/module-system/sov-state/src/codec/mod.rs b/module-system/sov-state/src/codec/mod.rs new file mode 100644 index 000000000..cf3cfe91d --- /dev/null +++ b/module-system/sov-state/src/codec/mod.rs @@ -0,0 +1,42 @@ +//! Serialization and deserialization-related logic. + +mod bcs_codec; +mod borsh_codec; + +pub use bcs_codec::BcsCodec; +pub use borsh_codec::BorshCodec; + +/// A trait for types that can serialize and deserialize values for storage +/// access. +pub trait StateValueCodec<V> { + /// Error type that can arise during deserialization. + type Error: std::fmt::Debug; + + /// Serializes a value into a bytes vector. + /// + /// This method **must** not panic as all instances of the value type are + /// supposed to be serializable. + fn encode_value(&self, value: &V) -> Vec<u8>; + + /// Tries to deserialize a value from a bytes slice, and returns a + /// [`Result`] with either the deserialized value or an error. + fn try_decode_value(&self, bytes: &[u8]) -> Result<V, Self::Error>; + + /// Deserializes a value from a bytes slice. + /// + /// # Panics + /// Panics if the call to [`StateValueCodec::try_decode_value`] fails. Use + /// [`StateValueCodec::try_decode_value`] if you need to gracefully handle + /// errors. + fn decode_value_unwrap(&self, bytes: &[u8]) -> V { + self.try_decode_value(bytes) + .map_err(|err| { + format!( + "Failed to decode value 0x{}, error: {:?}", + hex::encode(bytes), + err + ) + }) + .unwrap() + } +} diff --git a/module-system/sov-state/src/internal_cache.rs b/module-system/sov-state/src/internal_cache.rs index 34428225b..efda9f5b5 100644 --- a/module-system/sov-state/src/internal_cache.rs +++ b/module-system/sov-state/src/internal_cache.rs @@ -42,21 +42,19 @@ impl StorageInternalCache { /// Gets a value from the cache or reads it from the provided `ValueReader`.
pub(crate) fn get_or_fetch( &mut self, - key: StorageKey, + key: &StorageKey, value_reader: &S, witness: &S::Witness, ) -> Option { - let cache_key = key.clone().as_cache_key(); - let cache_value = self.get_value_from_cache(cache_key.clone()); + let cache_key = key.to_cache_key(); + let cache_value = self.get_value_from_cache(&cache_key); match cache_value { - cache::ValueExists::Yes(cache_value_exists) => { - cache_value_exists.map(StorageValue::new_from_cache_value) - } + cache::ValueExists::Yes(cache_value_exists) => cache_value_exists.map(Into::into), // If the value does not exist in the cache, then fetch it from an external source. cache::ValueExists::No => { let storage_value = value_reader.get(key, witness); - let cache_value = storage_value.as_ref().map(|v| v.clone().as_cache_value()); + let cache_value = storage_value.as_ref().map(|v| v.clone().into_cache_value()); self.add_read(cache_key, cache_value); storage_value @@ -64,24 +62,24 @@ impl StorageInternalCache { } } - pub fn try_get(&self, key: StorageKey) -> ValueExists { - let cache_key = key.as_cache_key(); - self.get_value_from_cache(cache_key) + pub fn try_get(&self, key: &StorageKey) -> ValueExists { + let cache_key = key.to_cache_key(); + self.get_value_from_cache(&cache_key) } - pub(crate) fn set(&mut self, key: StorageKey, value: StorageValue) { - let cache_key = key.as_cache_key(); - let cache_value = value.as_cache_value(); + pub(crate) fn set(&mut self, key: &StorageKey, value: StorageValue) { + let cache_key = key.to_cache_key(); + let cache_value = value.into_cache_value(); self.tx_cache.add_write(cache_key, Some(cache_value)); } - pub(crate) fn delete(&mut self, key: StorageKey) { - let cache_key = key.as_cache_key(); + pub(crate) fn delete(&mut self, key: &StorageKey) { + let cache_key = key.to_cache_key(); self.tx_cache.add_write(cache_key, None); } - fn get_value_from_cache(&self, cache_key: CacheKey) -> cache::ValueExists { - self.tx_cache.get_value(&cache_key) + fn get_value_from_cache(&self, cache_key: &CacheKey) -> cache::ValueExists { + self.tx_cache.get_value(cache_key) } pub fn merge_left( diff --git a/module-system/sov-state/src/lib.rs b/module-system/sov-state/src/lib.rs index 0002eea8d..da556515c 100644 --- a/module-system/sov-state/src/lib.rs +++ b/module-system/sov-state/src/lib.rs @@ -1,16 +1,26 @@ +pub mod codec; mod internal_cache; mod map; + #[cfg(feature = "native")] mod prover_storage; -mod scratchpad; -pub mod storage; + #[cfg(feature = "native")] mod tree_db; + +mod scratchpad; + +pub mod storage; + mod utils; mod value; +mod vec; mod witness; + mod zk_storage; +pub use zk_storage::ZkStorage; + pub mod config; #[cfg(test)] mod state_tests; @@ -18,15 +28,16 @@ mod state_tests; use std::fmt::Display; use std::str; -pub use map::StateMap; +pub use map::{StateMap, StateMapError}; #[cfg(feature = "native")] pub use prover_storage::{delete_storage, ProverStorage}; pub use scratchpad::*; pub use sov_first_read_last_write_cache::cache::CacheLog; +use sov_rollup_interface::digest::Digest; pub use storage::Storage; use utils::AlignedVec; pub use value::StateValue; -pub use zk_storage::ZkStorage; +pub use vec::StateVec; pub use crate::witness::{ArrayWitness, TreeWitnessReader, Witness}; @@ -34,8 +45,8 @@ pub use crate::witness::{ArrayWitness, TreeWitnessReader, Witness}; // All the collection types in this crate are backed by the same storage instance, this means that insertions of the same key // to two different `StorageMaps` would collide with each other. 
We solve it by instantiating every collection type with a unique // prefix that is prepended to each key. - #[derive(borsh::BorshDeserialize, borsh::BorshSerialize, Debug, PartialEq, Eq, Clone)] +#[cfg_attr(feature = "arbitrary", derive(arbitrary::Arbitrary))] pub struct Prefix { prefix: AlignedVec, } @@ -73,6 +84,19 @@ impl Prefix { pub fn is_empty(&self) -> bool { self.prefix.is_empty() } + + pub fn extended(&self, bytes: &[u8]) -> Self { + let mut prefix = self.clone(); + prefix.extend(bytes.iter().copied()); + prefix + } +} + +impl Extend for Prefix { + fn extend>(&mut self, iter: T) { + self.prefix + .extend(&AlignedVec::new(iter.into_iter().collect())) + } } /// A trait specifying the hash function and format of the witness used in @@ -84,7 +108,7 @@ pub trait MerkleProofSpec { type Hasher: Digest; } -use sha2::{Digest, Sha256}; +use sha2::Sha256; #[derive(Clone)] pub struct DefaultStorageSpec; diff --git a/module-system/sov-state/src/map.rs b/module-system/sov-state/src/map.rs index be677d0bd..aa545e253 100644 --- a/module-system/sov-state/src/map.rs +++ b/module-system/sov-state/src/map.rs @@ -1,77 +1,205 @@ +use std::borrow::Borrow; +use std::hash::Hash; use std::marker::PhantomData; -use borsh::{BorshDeserialize, BorshSerialize}; use thiserror::Error; +use crate::codec::{BorshCodec, StateValueCodec}; use crate::storage::StorageKey; use crate::{Prefix, Storage, WorkingSet}; /// A container that maps keys to values. - -#[derive(borsh::BorshDeserialize, borsh::BorshSerialize, Debug, PartialEq, Clone)] -pub struct StateMap { +/// +/// # Type parameters +/// [`StateMap`] is generic over: +/// - a key type `K`; +/// - a value type `V`; +/// - a [`StateValueCodec`] `VC`. +#[derive(Debug, Clone, PartialEq, borsh::BorshDeserialize, borsh::BorshSerialize)] +pub struct StateMap { _phantom: (PhantomData, PhantomData), + value_codec: VC, prefix: Prefix, } -/// Error type for `StateMap` get method. +/// Error type for the [`StateMap::get`] method. #[derive(Debug, Error)] -pub enum Error { +pub enum StateMapError { #[error("Value not found for prefix: {0} and: storage key {1}")] MissingValue(Prefix, StorageKey), } -impl StateMap { +impl StateMap { + /// Creates a new [`StateMap`] with the given prefix and the default + /// [`StateValueCodec`] (i.e. [`BorshCodec`]). pub fn new(prefix: Prefix) -> Self { + Self::with_codec(prefix, BorshCodec) + } +} + +impl StateMap { + /// Creates a new [`StateMap`] with the given prefix and [`StateValueCodec`]. + pub fn with_codec(prefix: Prefix, codec: VC) -> Self { Self { _phantom: (PhantomData, PhantomData), + value_codec: codec, prefix, } } + /// Returns the prefix used when this [`StateMap`] was created. + pub fn prefix(&self) -> &Prefix { + &self.prefix + } +} + +impl StateMap +where + K: Hash + Eq, + VC: StateValueCodec, +{ /// Inserts a key-value pair into the map. - pub fn set(&self, key: &K, value: &V, working_set: &mut WorkingSet) { - working_set.set_value(self.prefix(), key, value) + /// + /// Much like [`StateMap::get`], the key may be any borrowed form of the + /// map’s key type. + pub fn set(&self, key: &Q, value: &V, working_set: &mut WorkingSet) + where + K: Borrow, + Q: Hash + Eq + ?Sized, + { + working_set.set_value(self.prefix(), key, value, &self.value_codec) } - /// Returns the value corresponding to the key or None if key is absent in the StateMap. 
- pub fn get(&self, key: &K, working_set: &mut WorkingSet) -> Option { - working_set.get_value(self.prefix(), key) + /// Returns the value corresponding to the key, or [`None`] if the map + /// doesn't contain the key. + /// + /// # Examples + /// + /// The key may be any borrowed form of the map’s key type. Note that + /// [`Hash`] and [`Eq`] on the borrowed form must match those for the key + /// type. + /// + /// ``` + /// use sov_state::{StateMap, Storage, WorkingSet}; + /// + /// fn foo(map: StateMap, u64>, key: &[u8], ws: &mut WorkingSet) -> Option + /// where + /// S: Storage, + /// { + /// // We perform the `get` with a slice, and not the `Vec`. it is so because `Vec` borrows + /// // `[T]`. + /// map.get(key, ws) + /// } + /// ``` + /// + /// If the map's key type does not implement [`Borrow`] for your desired + /// target type, you'll have to convert the key to something else. An + /// example of this would be "slicing" an array to use in [`Vec`]-keyed + /// maps: + /// + /// ``` + /// use sov_state::{StateMap, Storage, WorkingSet}; + /// + /// fn foo(map: StateMap, u64>, key: [u8; 32], ws: &mut WorkingSet) -> Option + /// where + /// S: Storage, + /// { + /// map.get(&key[..], ws) + /// } + /// ``` + pub fn get(&self, key: &Q, working_set: &mut WorkingSet) -> Option + where + K: Borrow, + Q: Hash + Eq + ?Sized, + { + working_set.get_value(self.prefix(), key, &self.value_codec) } - /// Returns the value corresponding to the key or Error if key is absent in the StateMap. - pub fn get_or_err( + /// Returns the value corresponding to the key or [`StateMapError`] if key is absent in + /// the map. + pub fn get_or_err( &self, - key: &K, + key: &Q, working_set: &mut WorkingSet, - ) -> Result { + ) -> Result + where + K: Borrow, + Q: Hash + Eq + ?Sized, + { self.get(key, working_set).ok_or_else(|| { - Error::MissingValue(self.prefix().clone(), StorageKey::new(self.prefix(), key)) + StateMapError::MissingValue(self.prefix().clone(), StorageKey::new(self.prefix(), key)) }) } - /// Removes a key from the StateMap, returning the corresponding value (or None if the key is absent). - pub fn remove(&self, key: &K, working_set: &mut WorkingSet) -> Option { - working_set.remove_value(self.prefix(), key) + /// Removes a key from the map, returning the corresponding value (or + /// [`None`] if the key is absent). + pub fn remove(&self, key: &Q, working_set: &mut WorkingSet) -> Option + where + K: Borrow, + Q: Hash + Eq + ?Sized, + { + working_set.remove_value(self.prefix(), key, &self.value_codec) } - /// Removes a key from the StateMap, returning the corresponding value (or Error if the key is absent). - pub fn remove_or_err( + /// Removes a key from the map, returning the corresponding value (or + /// [`StateMapError`] if the key is absent). + /// + /// Use [`StateMap::remove`] if you want an [`Option`] instead of a [`Result`]. + pub fn remove_or_err( &self, - key: &K, + key: &Q, working_set: &mut WorkingSet, - ) -> Result { + ) -> Result + where + K: Borrow, + Q: Hash + Eq + ?Sized, + { self.remove(key, working_set).ok_or_else(|| { - Error::MissingValue(self.prefix().clone(), StorageKey::new(self.prefix(), key)) + StateMapError::MissingValue(self.prefix().clone(), StorageKey::new(self.prefix(), key)) }) } - /// Deletes a key from the StateMap. - pub fn delete(&self, key: &K, working_set: &mut WorkingSet) { + /// Deletes a key-value pair from the map. + /// + /// This is equivalent to [`StateMap::remove`], but doesn't deserialize and + /// return the value beforing deletion. 
+ pub fn delete(&self, key: &Q, working_set: &mut WorkingSet) + where + K: Borrow, + Q: Hash + Eq + ?Sized, + { working_set.delete_value(self.prefix(), key); } +} - pub fn prefix(&self) -> &Prefix { - &self.prefix +#[cfg(feature = "arbitrary")] +impl<'a, K, V, VC> StateMap +where + K: arbitrary::Arbitrary<'a> + Hash + Eq, + V: arbitrary::Arbitrary<'a> + Hash + Eq, + VC: StateValueCodec + Default, +{ + pub fn arbitrary_workset( + u: &mut arbitrary::Unstructured<'a>, + working_set: &mut WorkingSet, + ) -> arbitrary::Result + where + S: Storage, + { + use arbitrary::Arbitrary; + + let prefix = Prefix::arbitrary(u)?; + let len = u.arbitrary_len::<(K, V)>()?; + let codec = VC::default(); + let map = StateMap::with_codec(prefix, codec); + + (0..len).try_fold(map, |map, _| { + let key = K::arbitrary(u)?; + let value = V::arbitrary(u)?; + + map.set(&key, &value, working_set); + + Ok(map) + }) } } diff --git a/module-system/sov-state/src/prover_storage.rs b/module-system/sov-state/src/prover_storage.rs index 6bfaffc05..74da92eba 100644 --- a/module-system/sov-state/src/prover_storage.rs +++ b/module-system/sov-state/src/prover_storage.rs @@ -3,13 +3,13 @@ use std::marker::PhantomData; use std::path::Path; use std::sync::Arc; -use jmt::storage::TreeWriter; -use jmt::{JellyfishMerkleTree, KeyHash}; +use jmt::storage::{NodeBatch, TreeWriter}; +use jmt::{JellyfishMerkleTree, KeyHash, RootHash, Version}; use sov_db::state_db::StateDB; use crate::config::Config; use crate::internal_cache::OrderedReadsAndWrites; -use crate::storage::{StorageKey, StorageValue}; +use crate::storage::{NativeStorage, StorageKey, StorageProof, StorageValue}; use crate::tree_db::TreeReadLogger; use crate::witness::Witness; use crate::{MerkleProofSpec, Storage}; @@ -41,37 +41,50 @@ impl ProverStorage { }) } - fn read_value(&self, key: StorageKey) -> Option { + fn read_value(&self, key: &StorageKey) -> Option { match self .db .get_value_option_by_key(self.db.get_next_version(), key.as_ref()) { - Ok(value) => value.map(StorageValue::new_from_bytes), + Ok(value) => value.map(Into::into), // It is ok to panic here, we assume the db is available and consistent. 
Err(e) => panic!("Unable to read value from db: {e}"), } } + + fn get_root_hash(&self, version: Version) -> Result { + let temp_merkle: JellyfishMerkleTree<'_, StateDB, S::Hasher> = + JellyfishMerkleTree::new(&self.db); + temp_merkle.get_root_hash(version) + } } impl Storage for ProverStorage { type Witness = S::Witness; type RuntimeConfig = Config; + type Proof = jmt::proof::SparseMerkleProof; + type StateUpdate = NodeBatch; fn with_config(config: Self::RuntimeConfig) -> Result { Self::with_path(config.path.as_path()) } - fn get(&self, key: StorageKey, witness: &Self::Witness) -> Option { + fn get(&self, key: &StorageKey, witness: &Self::Witness) -> Option { let val = self.read_value(key); witness.add_hint(val.clone()); val } - fn validate_and_commit( + fn get_state_root(&self, _witness: &Self::Witness) -> anyhow::Result<[u8; 32]> { + self.get_root_hash(self.db.get_next_version() - 1) + .map(|root| root.0) + } + + fn compute_state_update( &self, state_accesses: OrderedReadsAndWrites, witness: &Self::Witness, - ) -> Result<[u8; 32], anyhow::Error> { + ) -> Result<([u8; 32], Self::StateUpdate), anyhow::Error> { let latest_version = self.db.get_next_version() - 1; witness.add_hint(latest_version); @@ -127,17 +140,57 @@ impl Storage for ProverStorage { .put_value_set(batch, next_version) .expect("JMT update must succeed"); + Ok((new_root.0, tree_update.node_batch)) + } + + fn commit(&self, node_batch: &Self::StateUpdate) { self.db - .write_node_batch(&tree_update.node_batch) + .write_node_batch(node_batch) .expect("db write must succeed"); self.db.inc_next_version(); - Ok(new_root.0) } // Based on assumption `validate_and_commit` increments version. fn is_empty(&self) -> bool { self.db.get_next_version() <= 1 } + + fn open_proof( + &self, + state_root: [u8; 32], + state_proof: StorageProof, + ) -> Result<(StorageKey, Option), anyhow::Error> { + let StorageProof { key, value, proof } = state_proof; + let key_hash = KeyHash::with::(key.as_ref()); + + proof.verify( + jmt::RootHash(state_root), + key_hash, + value.as_ref().map(|v| v.value()), + )?; + Ok((key, value)) + } +} + +impl NativeStorage for ProverStorage { + fn get_with_proof( + &self, + key: StorageKey, + _witness: &Self::Witness, + ) -> StorageProof { + let merkle = JellyfishMerkleTree::::new(&self.db); + let (val_opt, proof) = merkle + .get_with_proof( + KeyHash::with::(key.as_ref()), + self.db.get_next_version() - 1, + ) + .unwrap(); + StorageProof { + key, + value: val_opt.map(StorageValue::from), + proof, + } + } } pub fn delete_storage(path: impl AsRef) { @@ -197,7 +250,7 @@ mod test { .validate_and_commit(cache, &witness) .expect("storage is valid"); - assert_eq!(test.value, prover_storage.get(test.key, &witness).unwrap()); + assert_eq!(test.value, prover_storage.get(&test.key, &witness).unwrap()); assert_eq!(prover_storage.db.get_next_version(), test.version + 1) } } @@ -208,7 +261,7 @@ mod test { for test in tests { assert_eq!( test.value, - storage.get(test.key, &Default::default()).unwrap() + storage.get(&test.key, &Default::default()).unwrap() ); } } @@ -241,7 +294,10 @@ mod test { { let prover_storage = ProverStorage::::with_path(path).unwrap(); assert!(!prover_storage.is_empty()); - assert_eq!(value, prover_storage.get(key, &Default::default()).unwrap()); + assert_eq!( + value, + prover_storage.get(&key, &Default::default()).unwrap() + ); } } } diff --git a/module-system/sov-state/src/scratchpad.rs b/module-system/sov-state/src/scratchpad.rs index f28f537e6..1badd0ec5 100644 --- a/module-system/sov-state/src/scratchpad.rs 
+++ b/module-system/sov-state/src/scratchpad.rs @@ -1,10 +1,11 @@ use std::collections::HashMap; use std::fmt::Debug; +use std::hash::Hash; -use borsh::{BorshDeserialize, BorshSerialize}; use sov_first_read_last_write_cache::{CacheKey, CacheValue}; use sov_rollup_interface::stf::Event; +use crate::codec::StateValueCodec; use crate::internal_cache::{OrderedReadsAndWrites, StorageInternalCache}; use crate::storage::{StorageKey, StorageValue}; use crate::{Prefix, Storage}; @@ -53,6 +54,10 @@ impl StateCheckpoint { } } + pub fn get(&mut self, key: &StorageKey) -> Option { + self.delta.get(key) + } + pub fn with_witness(inner: S, witness: S::Witness) -> Self { Self { delta: Delta::with_witness(inner, witness), @@ -125,76 +130,91 @@ impl WorkingSet { &self.events } - #[cfg(test)] pub fn backing(&self) -> &S { &self.delta.inner.inner } } impl WorkingSet { - pub(crate) fn set_value( + pub(crate) fn set_value( &mut self, prefix: &Prefix, storage_key: &K, value: &V, - ) { + codec: &VC, + ) where + K: Hash + Eq + ?Sized, + VC: StateValueCodec, + { let storage_key = StorageKey::new(prefix, storage_key); - let storage_value = StorageValue::new(value); + let storage_value = StorageValue::new(value, codec); self.set(storage_key, storage_value); } - pub(crate) fn get_value( + pub(crate) fn get_value( &mut self, prefix: &Prefix, storage_key: &K, - ) -> Option { + codec: &VC, + ) -> Option + where + K: Hash + Eq + ?Sized, + VC: StateValueCodec, + { let storage_key = StorageKey::new(prefix, storage_key); - self.get_decoded(storage_key) + self.get_decoded(storage_key, codec) } - pub(crate) fn remove_value( + pub(crate) fn remove_value( &mut self, prefix: &Prefix, storage_key: &K, - ) -> Option { + codec: &VC, + ) -> Option + where + K: Hash + Eq + ?Sized, + VC: StateValueCodec, + { let storage_key = StorageKey::new(prefix, storage_key); - let storage_value = self.get_decoded(storage_key.clone())?; + let storage_value = self.get_decoded(storage_key.clone(), codec)?; self.delete(storage_key); Some(storage_value) } - pub(crate) fn delete_value(&mut self, prefix: &Prefix, storage_key: &K) { + pub(crate) fn delete_value(&mut self, prefix: &Prefix, storage_key: &K) + where + K: Hash + Eq + ?Sized, + { let storage_key = StorageKey::new(prefix, storage_key); self.delete(storage_key); } - fn get_decoded(&mut self, storage_key: StorageKey) -> Option { + fn get_decoded(&mut self, storage_key: StorageKey, codec: &VC) -> Option + where + VC: StateValueCodec, + { let storage_value = self.get(storage_key)?; - // It is ok to panic here. Deserialization problem means that something is terribly wrong. 
- Some( - V::deserialize_reader(&mut storage_value.value()) - .unwrap_or_else(|e| panic!("Unable to deserialize storage value {e:?}")), - ) + Some(codec.decode_value_unwrap(storage_value.value())) } } impl RevertableDelta { fn get(&mut self, key: StorageKey) -> Option { - let key = key.as_cache_key(); + let key = key.to_cache_key(); if let Some(value) = self.writes.get(&key) { - return value.clone().map(StorageValue::new_from_cache_value); + return value.clone().map(Into::into); } - self.inner.get(key.into()) + self.inner.get(&key.into()) } fn set(&mut self, key: StorageKey, value: StorageValue) { self.writes - .insert(key.as_cache_key(), Some(value.as_cache_value())); + .insert(key.to_cache_key(), Some(value.into_cache_value())); } fn delete(&mut self, key: StorageKey) { - self.writes.insert(key.as_cache_key(), None); + self.writes.insert(key.to_cache_key(), None); } } @@ -204,9 +224,9 @@ impl RevertableDelta { for (k, v) in self.writes.into_iter() { if let Some(v) = v { - inner.set(k.into(), StorageValue::new_from_cache_value(v)); + inner.set(&k.into(), v.into()); } else { - inner.delete(k.into()); + inner.delete(&k.into()); } } @@ -250,15 +270,15 @@ impl Debug for Delta { } impl Delta { - fn get(&mut self, key: StorageKey) -> Option { + fn get(&mut self, key: &StorageKey) -> Option { self.cache.get_or_fetch(key, &self.inner, &self.witness) } - fn set(&mut self, key: StorageKey, value: StorageValue) { + fn set(&mut self, key: &StorageKey, value: StorageValue) { self.cache.set(key, value) } - fn delete(&mut self, key: StorageKey) { + fn delete(&mut self, key: &StorageKey) { self.cache.delete(key) } } diff --git a/module-system/sov-state/src/state_tests.rs b/module-system/sov-state/src/state_tests.rs index a0e77cf9d..3329818ae 100644 --- a/module-system/sov-state/src/state_tests.rs +++ b/module-system/sov-state/src/state_tests.rs @@ -1,7 +1,9 @@ use std::path::Path; +use borsh::{BorshDeserialize, BorshSerialize}; + use super::*; -use crate::{ArrayWitness, DefaultStorageSpec, ProverStorage}; +use crate::{DefaultStorageSpec, ProverStorage}; enum Operation { Merge, @@ -205,3 +207,205 @@ fn test_witness_roundtrip() { .expect("ZK validation should succeed"); }; } + +fn create_state_vec_and_storage( + values: Vec, + path: impl AsRef, +) -> (StateVec, WorkingSet>) { + let mut working_set = WorkingSet::new(ProverStorage::with_path(&path).unwrap()); + + let state_vec = StateVec::new(Prefix::new(vec![0])); + state_vec.set_all(values, &mut working_set); + (state_vec, working_set) +} + +#[test] +fn test_state_vec_len() { + let tempdir = tempfile::tempdir().unwrap(); + let path = tempdir.path(); + for (before_len, after_len) in create_storage_operations() { + let values = vec![11, 22, 33]; + let (state_vec, mut working_set) = create_state_vec_and_storage(values.clone(), path); + + working_set = before_len.execute(working_set); + + working_set = after_len.execute(working_set); + + assert_eq!(state_vec.len(&mut working_set), values.len()); + } +} + +#[test] +fn test_state_vec_get() { + let tempdir = tempfile::tempdir().unwrap(); + let path = tempdir.path(); + for (before_get, after_get) in create_storage_operations() { + let values = vec![56, 55, 54]; + let (state_vec, mut working_set) = create_state_vec_and_storage(values.clone(), path); + + working_set = before_get.execute(working_set); + + let val = state_vec.get(1, &mut working_set); + let err_val = state_vec.get_or_err(3, &mut working_set); + assert!(val.is_some()); + assert!(err_val.is_err()); + + let val = val.unwrap(); + assert_eq!(val, 
values.get(1).unwrap().clone()); + + working_set = after_get.execute(working_set); + let val = state_vec.get(1, &mut working_set); + let err_val = state_vec.get_or_err(3, &mut working_set); + assert!(val.is_some()); + assert!(err_val.is_err()); + + let val = val.unwrap(); + assert_eq!(val, values.get(1).unwrap().clone()); + } +} + +#[test] +fn test_state_vec_set() { + let tempdir = tempfile::tempdir().unwrap(); + let path = tempdir.path(); + for (before_set, after_set) in create_storage_operations() { + let values = vec![56, 55, 54]; + let (state_vec, mut working_set) = create_state_vec_and_storage(values.clone(), path); + + working_set = before_set.execute(working_set); + let val = state_vec.set(1, &99, &mut working_set); + assert!(val.is_ok()); + + let val_err = state_vec.set(3, &99, &mut working_set); + assert!(val_err.is_err()); + + working_set = after_set.execute(working_set); + + let val = state_vec.get(1, &mut working_set); + let err_val = state_vec.get_or_err(3, &mut working_set); + + assert!(val.is_some()); + assert!(err_val.is_err()); + + let val = val.unwrap(); + assert_eq!(val, 99); + } +} + +#[test] +fn test_state_vec_push() { + let tempdir = tempfile::tempdir().unwrap(); + let path = tempdir.path(); + for (before_push, after_push) in create_storage_operations() { + let values = vec![56, 55, 54]; + let (state_vec, mut working_set) = create_state_vec_and_storage(values.clone(), path); + + working_set = before_push.execute(working_set); + + state_vec.push(&53, &mut working_set); + + working_set = after_push.execute(working_set); + + let len = state_vec.len(&mut working_set); + assert_eq!(len, 4); + + let val = state_vec.get(3, &mut working_set); + assert!(val.is_some()); + + let val = val.unwrap(); + assert_eq!(val, 53); + } +} + +#[test] +fn test_state_vec_pop() { + let tempdir = tempfile::tempdir().unwrap(); + let path = tempdir.path(); + for (before_pop, after_pop) in create_storage_operations() { + let values = vec![56, 55, 54]; + let (state_vec, mut working_set) = create_state_vec_and_storage(values.clone(), path); + + working_set = before_pop.execute(working_set); + + let popped = state_vec.pop(&mut working_set); + + assert_eq!(popped.unwrap(), 54); + + working_set = after_pop.execute(working_set); + + let len = state_vec.len(&mut working_set); + assert_eq!(len, 2); + + let val = state_vec.get(1, &mut working_set); + assert!(val.is_some()); + + let val = val.unwrap(); + assert_eq!(val, 55); + } +} + +#[test] +fn test_state_vec_set_all() { + let tempdir = tempfile::tempdir().unwrap(); + let path = tempdir.path(); + for (before_set_all, after_set_all) in create_storage_operations() { + let values = vec![56, 55, 54]; + let (state_vec, mut working_set) = create_state_vec_and_storage(values.clone(), path); + + working_set = before_set_all.execute(working_set); + + let new_values: Vec = vec![1]; + state_vec.set_all(new_values, &mut working_set); + + working_set = after_set_all.execute(working_set); + + let val = state_vec.get(0, &mut working_set); + + assert!(val.is_some()); + + let val = val.unwrap(); + assert_eq!(val, 1); + + let len = state_vec.len(&mut working_set); + assert_eq!(len, 1); + + let val = state_vec.get_or_err(1, &mut working_set); + + assert!(val.is_err()); + } +} + +#[test] +fn test_state_vec_diff_type() { + let tempdir = tempfile::tempdir().unwrap(); + let path = tempdir.path(); + for (before_ops, after_ops) in create_storage_operations() { + let values = vec![String::from("Hello"), String::from("World")]; + let (state_vec, mut working_set) = 
create_state_vec_and_storage(values.clone(), path); + + working_set = before_ops.execute(working_set); + + let val0 = state_vec.get(0, &mut working_set); + let val1 = state_vec.pop(&mut working_set); + state_vec.push(&String::from("new str"), &mut working_set); + + working_set = after_ops.execute(working_set); + + assert!(val0.is_some()); + assert!(val1.is_some()); + + let val0 = val0.unwrap(); + let val1 = val1.unwrap(); + assert_eq!(val0, String::from("Hello")); + assert_eq!(val1, String::from("World")); + + let val = state_vec.get(1, &mut working_set); + assert!(val.is_some()); + + let val = val.unwrap(); + assert_eq!(val, String::from("new str")); + + let len = state_vec.len(&mut working_set); + assert_eq!(len, 2); + } +} diff --git a/module-system/sov-state/src/storage.rs b/module-system/sov-state/src/storage.rs index 60dada67d..d4bd3ebb4 100644 --- a/module-system/sov-state/src/storage.rs +++ b/module-system/sov-state/src/storage.rs @@ -1,18 +1,22 @@ use std::fmt::Display; +use std::hash::Hash; use std::sync::Arc; +use anyhow::ensure; use borsh::{BorshDeserialize, BorshSerialize}; use hex; +use serde::de::DeserializeOwned; use serde::{Deserialize, Serialize}; use sov_first_read_last_write_cache::{CacheKey, CacheValue}; +use crate::codec::StateValueCodec; use crate::internal_cache::OrderedReadsAndWrites; use crate::utils::AlignedVec; use crate::witness::Witness; -use crate::Prefix; +use crate::{Prefix, StateMap}; // `Key` type for the `Storage` -#[derive(Clone, PartialEq, Eq, Debug)] +#[derive(Clone, PartialEq, Eq, Debug, Serialize, Deserialize, BorshDeserialize, BorshSerialize)] pub struct StorageKey { key: Arc>, } @@ -28,7 +32,13 @@ impl StorageKey { self.key.clone() } - pub fn as_cache_key(self) -> CacheKey { + pub fn to_cache_key(&self) -> CacheKey { + CacheKey { + key: self.key.clone(), + } + } + + pub fn into_cache_key(self) -> CacheKey { CacheKey { key: self.key } } } @@ -47,8 +57,11 @@ impl Display for StorageKey { impl StorageKey { /// Creates a new StorageKey that combines a prefix and a key. - pub fn new(prefix: &Prefix, key: &K) -> Self { - let encoded_key = key.try_to_vec().unwrap(); + pub fn new(prefix: &Prefix, key: &K) -> Self + where + K: Hash + ?Sized, + { + let encoded_key = nohash_serialize(key); let encoded_key = AlignedVec::new(encoded_key); let full_key = Vec::::with_capacity(prefix.len() + encoded_key.len()); @@ -62,59 +75,164 @@ impl StorageKey { } } -// `Value` type for the `Storage` -#[derive(Clone, Debug, PartialEq, Eq, BorshSerialize, BorshDeserialize, Serialize, Deserialize)] +// Serializes a value into a `Vec` using `std::hash::Hasher` +// writer methods, but without actually ever hashing anything. +fn nohash_serialize(item: T) -> Vec { + struct NoHasher(Vec); + + impl std::hash::Hasher for NoHasher { + fn finish(&self) -> u64 { + 0 + } + + fn write(&mut self, bytes: &[u8]) { + self.0.extend_from_slice(bytes); + } + } + + let mut hasher = NoHasher(vec![]); + item.hash(&mut hasher); + hasher.0 +} + +/// A serialized value suitable for storing. Internally uses an Arc> for cheap cloning. 
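The `nohash_serialize` helper above turns a key's `Hash` implementation into a byte encoder by handing it a hasher that records writes instead of hashing. A standalone re-derivation of the trick (using only the standard library, not the crate's private helper) shows why a `String` key and an `&str` lookup resolve to the same storage key bytes:

```rust
use std::hash::{Hash, Hasher};

// A `Hasher` that never hashes: it just records every byte written to it.
struct NoHasher(Vec<u8>);

impl Hasher for NoHasher {
    fn finish(&self) -> u64 {
        0
    }

    fn write(&mut self, bytes: &[u8]) {
        self.0.extend_from_slice(bytes);
    }
}

// Encode any `Hash` value into bytes by feeding it through `NoHasher`.
fn encode_via_hash<T: Hash + ?Sized>(item: &T) -> Vec<u8> {
    let mut hasher = NoHasher(Vec::new());
    item.hash(&mut hasher);
    hasher.0
}

fn main() {
    // `String` delegates its `Hash` impl to `str`, so both write identical
    // bytes, which is what makes `Borrow`-based lookups hit the same key.
    assert_eq!(encode_via_hash(&"alice".to_string()), encode_via_hash("alice"));
}
```

The byte layout is whatever each type's `Hash` impl writes (for example, `str` appends a separator byte), so it is an internal key encoding rather than a general-purpose serialization format.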
+#[derive( + Clone, Debug, PartialEq, Eq, BorshSerialize, BorshDeserialize, Serialize, Deserialize, Default, +)] pub struct StorageValue { value: Arc>, } +impl From for StorageValue { + fn from(cache_value: CacheValue) -> Self { + Self { + value: cache_value.value, + } + } +} + +impl From> for StorageValue { + fn from(value: Vec) -> Self { + Self { + value: Arc::new(value), + } + } +} + impl StorageValue { - pub fn new(value: &V) -> Self { - let encoded_value = value.try_to_vec().unwrap(); + /// Create a new storage value by serializing the input with the given codec. + pub fn new(value: &V, codec: &VC) -> Self + where + VC: StateValueCodec, + { + let encoded_value = codec.encode_value(value); Self { value: Arc::new(encoded_value), } } + /// Get the bytes of this value. pub fn value(&self) -> &[u8] { &self.value } - pub fn as_cache_value(self) -> CacheValue { + /// Convert this value into a [`CacheValue`]. + pub fn into_cache_value(self) -> CacheValue { CacheValue { value: self.value } } +} - pub fn new_from_cache_value(cache_value: CacheValue) -> Self { - Self { - value: cache_value.value, - } - } - - pub fn new_from_bytes(value: Vec) -> Self { - Self { - value: Arc::new(value), - } - } +#[derive(Debug, Clone, Serialize, Deserialize, BorshDeserialize, BorshSerialize)] +/// A proof that a particular storage key has a particular value, or is absent. +pub struct StorageProof
<P>
{ + /// The key which is proven + pub key: StorageKey, + /// The value, if any, which is proven + pub value: Option, + /// The cryptographic proof + pub proof: P, } /// An interface for storing and retrieving values in the storage. pub trait Storage: Clone { + /// The witness type for this storage instance. type Witness: Witness; + /// The runtime config for this storage instance. type RuntimeConfig; + /// A cryptographic proof that a particular key has a particular value, or is absent. + type Proof: Serialize + + DeserializeOwned + + core::fmt::Debug + + Clone + + BorshSerialize + + BorshDeserialize; + + /// State update that will be committed to the database. + type StateUpdate; + fn with_config(config: Self::RuntimeConfig) -> Result; /// Returns the value corresponding to the key or None if key is absent. - fn get(&self, key: StorageKey, witness: &Self::Witness) -> Option; + fn get(&self, key: &StorageKey, witness: &Self::Witness) -> Option; + + /// Returns the latest state root hash from the storage. + fn get_state_root(&self, witness: &Self::Witness) -> anyhow::Result<[u8; 32]>; + + /// Calculates new state root but does not commit any changes to the database. + fn compute_state_update( + &self, + state_accesses: OrderedReadsAndWrites, + witness: &Self::Witness, + ) -> Result<([u8; 32], Self::StateUpdate), anyhow::Error>; + + /// Commits state changes to the database. + fn commit(&self, node_batch: &Self::StateUpdate); /// Validate all of the storage accesses in a particular cache log, - /// returning the new state root after applying all writes + /// returning the new state root after applying all writes. + /// This function is equivalent to calling: + /// `self.compute_state_update & self.commit` fn validate_and_commit( &self, state_accesses: OrderedReadsAndWrites, witness: &Self::Witness, - ) -> Result<[u8; 32], anyhow::Error>; + ) -> Result<[u8; 32], anyhow::Error> { + let (root_hash, node_batch) = self.compute_state_update(state_accesses, witness)?; + self.commit(&node_batch); + + Ok(root_hash) + } + + /// Opens a storage access proof and validates it against a state root. + /// It returns a result with the opened leaf (key, value) pair in case of success. + fn open_proof( + &self, + state_root: [u8; 32], + proof: StorageProof, + ) -> Result<(StorageKey, Option), anyhow::Error>; + + fn verify_proof( + &self, + state_root: [u8; 32], + proof: StorageProof, + expected_key: &K, + storage_map: &StateMap, + ) -> Result, anyhow::Error> + where + K: Hash + Eq, + { + let (storage_key, storage_value) = self.open_proof(state_root, proof)?; + + // We have to check that the storage key is the same as the external key + ensure!( + storage_key == StorageKey::new(storage_map.prefix(), expected_key), + "The storage key from the proof doesn't match the expected storage key." + ); + + Ok(storage_value) + } /// Indicates if storage is empty or not. /// Useful during initialization @@ -140,3 +258,22 @@ impl From<&'static str> for StorageValue { } } } + +pub trait NativeStorage: Storage { + /// Returns the value corresponding to the key or None if key is absent and a proof to + /// get the value. Panics if [`get_with_proof_opt`] returns `None` in place of the proof. 
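With `compute_state_update` and `commit` split apart, and `validate_and_commit` reduced to a default method that chains them, a node can inspect the prospective root before persisting anything. A rough sketch, assuming the usual `checkpoint()`/`freeze()` helpers for turning a `WorkingSet` into its ordered reads/writes plus witness (adjust to the crate's actual API if those names differ):

```rust
use sov_state::{Storage, WorkingSet};

/// Compute the new state root first, then commit the update as a second step.
fn apply_changes<S: Storage>(storage: &S, ws: WorkingSet<S>) -> anyhow::Result<[u8; 32]> {
    // Assumed helpers: collapse the working set into the accesses it recorded
    // and the witness accumulated while executing.
    let mut checkpoint = ws.checkpoint();
    let (state_accesses, witness) = checkpoint.freeze();

    // Phase 1: compute the prospective root without touching the database.
    let (new_root, state_update) = storage.compute_state_update(state_accesses, &witness)?;

    // ...a node could publish or sanity-check `new_root` here...

    // Phase 2: persist the node batch. `validate_and_commit` performs both
    // phases back to back as its default implementation.
    storage.commit(&state_update);
    Ok(new_root)
}
```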
+ fn get_with_proof(&self, key: StorageKey, witness: &Self::Witness) + -> StorageProof; + + fn get_with_proof_from_state_map( + &self, + key: &K, + state_map: &StateMap, + witness: &Self::Witness, + ) -> StorageProof + where + K: Hash + Eq, + { + self.get_with_proof(StorageKey::new(state_map.prefix(), key), witness) + } +} diff --git a/module-system/sov-state/src/utils.rs b/module-system/sov-state/src/utils.rs index 1ca65306f..1dfc152a7 100644 --- a/module-system/sov-state/src/utils.rs +++ b/module-system/sov-state/src/utils.rs @@ -8,15 +8,22 @@ pub struct AlignedVec { } impl AlignedVec { - // Creates a new AlignedVec whose length is aligned to 4 bytes. + /// The length of the chunks of the aligned vector. + pub const ALIGNMENT: usize = 4; + + // Creates a new AlignedVec whose length is aligned to [Self::ALIGNMENT] bytes. pub fn new(vector: Vec) -> Self { - // TODO pad the vector to Self { inner: vector } } // Extends self with the contents of the other AlignedVec. pub fn extend(&mut self, other: &Self) { // TODO check if the standard extend method does the right thing. + // debug_assert_eq!( + // self.inner.len() % Self::ALIGNMENT, + // 0, + // "`AlignedVec` is expected to have well-formed chunks" + // ); self.inner.extend(&other.inner); } @@ -39,3 +46,10 @@ impl AsRef> for AlignedVec { &self.inner } } + +#[cfg(feature = "arbitrary")] +impl<'a> arbitrary::Arbitrary<'a> for AlignedVec { + fn arbitrary(u: &mut arbitrary::Unstructured<'a>) -> arbitrary::Result { + u.arbitrary().map(Self::new) + } +} diff --git a/module-system/sov-state/src/value.rs b/module-system/sov-state/src/value.rs index 6f7e1b065..89a1bd7cb 100644 --- a/module-system/sov-state/src/value.rs +++ b/module-system/sov-state/src/value.rs @@ -1,29 +1,16 @@ -use std::io::Write; use std::marker::PhantomData; use borsh::{BorshDeserialize, BorshSerialize}; use thiserror::Error; +use crate::codec::{BorshCodec, StateValueCodec}; use crate::{Prefix, Storage, WorkingSet}; -// SingletonKey is very similar to the unit type `()` i.e. it has only one value. -#[derive(Debug, BorshDeserialize)] -pub struct SingletonKey; - -impl BorshSerialize for SingletonKey { - fn serialize(&self, _writer: &mut W) -> std::io::Result<()> { - Ok(()) - } - - fn try_to_vec(&self) -> std::io::Result> { - Ok(vec![]) - } -} - /// Container for a single value. #[derive(Debug, PartialEq, Eq, Clone, BorshDeserialize, BorshSerialize)] -pub struct StateValue { +pub struct StateValue { _phantom: PhantomData, + codec: VC, prefix: Prefix, } @@ -34,22 +21,42 @@ pub enum Error { MissingValue(Prefix), } -impl StateValue { +impl StateValue { + /// Crates a new [`StateValue`] with the given prefix and the default + /// [`StateValueCodec`] (i.e. [`BorshCodec`]). pub fn new(prefix: Prefix) -> Self { + Self::with_codec(prefix, BorshCodec) + } +} + +impl StateValue { + /// Creates a new [`StateValue`] with the given prefix and codec. + pub fn with_codec(prefix: Prefix, codec: VC) -> Self { Self { _phantom: PhantomData, + codec, prefix, } } + /// Returns the prefix used when this [`StateValue`] was created. + pub fn prefix(&self) -> &Prefix { + &self.prefix + } +} + +impl StateValue +where + VC: StateValueCodec, +{ /// Sets a value in the StateValue. pub fn set(&self, value: &V, working_set: &mut WorkingSet) { - working_set.set_value(self.prefix(), &SingletonKey, value) + working_set.set_value(self.prefix(), &SingletonKey, value, &self.codec) } /// Gets a value from the StateValue or None if the value is absent. 
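The native-side `get_with_proof_from_state_map` pairs with `Storage::verify_proof` for a full proof round trip. A sketch under the assumptions that `NativeStorage` and `StorageProof` are reachable through `sov_state::storage`, that the elided generic parameters on `StateMap` are the key/value types shown here, and that at least one batch has already been committed so the tree has a root; for a key that was never written, `Ok(None)` is a verified proof of absence:

```rust
use sov_state::storage::{NativeStorage, StorageProof};
use sov_state::{Prefix, StateMap, Storage};

fn prove_and_verify<S: NativeStorage>(storage: &S, witness: &S::Witness) -> anyhow::Result<()> {
    let balances: StateMap<String, u64> = StateMap::new(Prefix::new(b"balances".to_vec()));

    // Native side: read the value together with a merkle proof.
    let proof: StorageProof<S::Proof> =
        storage.get_with_proof_from_state_map(&"alice".to_string(), &balances, witness);

    // Verifier side: check the proof against the current root. `None` means
    // the key is provably absent under that root.
    let root = storage.get_state_root(witness)?;
    let proven = storage.verify_proof(root, proof, &"alice".to_string(), &balances)?;
    println!("proven value bytes: {:?}", proven.map(|v| v.value().to_vec()));
    Ok(())
}
```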
pub fn get(&self, working_set: &mut WorkingSet) -> Option { - working_set.get_value(self.prefix(), &SingletonKey) + working_set.get_value(self.prefix(), &SingletonKey, &self.codec) } /// Gets a value from the StateValue or Error if the value is absent. @@ -60,7 +67,7 @@ impl StateValue { /// Removes a value from the StateValue, returning the value (or None if the key is absent). pub fn remove(&self, working_set: &mut WorkingSet) -> Option { - working_set.remove_value(self.prefix(), &SingletonKey) + working_set.remove_value(self.prefix(), &SingletonKey, &self.codec) } /// Removes a value and from the StateValue, returning the value (or Error if the key is absent). @@ -73,8 +80,8 @@ impl StateValue { pub fn delete(&self, working_set: &mut WorkingSet) { working_set.delete_value(self.prefix(), &SingletonKey); } - - pub fn prefix(&self) -> &Prefix { - &self.prefix - } } + +// SingletonKey is very similar to the unit type `()` i.e. it has only one value. +#[derive(Debug, PartialEq, Eq, Hash)] +struct SingletonKey; diff --git a/module-system/sov-state/src/vec.rs b/module-system/sov-state/src/vec.rs new file mode 100644 index 000000000..95ff45903 --- /dev/null +++ b/module-system/sov-state/src/vec.rs @@ -0,0 +1,331 @@ +use std::iter::FusedIterator; +use std::marker::PhantomData; + +use thiserror::Error; + +use crate::codec::{BorshCodec, StateValueCodec}; +use crate::{Prefix, StateMap, StateValue, Storage, WorkingSet}; + +#[derive(Debug, Clone)] +pub struct StateVec +where + VC: StateValueCodec, +{ + _phantom: PhantomData, + prefix: Prefix, + len_value: StateValue, + elems: StateMap, +} + +/// Error type for `StateVec` get method. +#[derive(Debug, Error)] +pub enum Error { + #[error("Index out of bounds for index: {0}")] + IndexOutOfBounds(usize), + #[error("Value not found for prefix: {0} and index: {1}")] + MissingValue(Prefix, usize), +} + +impl StateVec +where + BorshCodec: StateValueCodec, +{ + /// Crates a new [`StateVec`] with the given prefix and the default + /// [`StateValueCodec`] (i.e. [`BorshCodec`]). + pub fn new(prefix: Prefix) -> Self { + Self::with_codec(prefix, BorshCodec) + } +} + +impl StateVec +where + VC: StateValueCodec, +{ + /// Creates a new [`StateVec`] with the given prefix and codec. + pub fn with_codec(prefix: Prefix, codec: VC) -> Self { + // Differentiating the prefixes for the length and the elements + // shouldn't be necessary, but it's best not to rely on implementation + // details of `StateValue` and `StateMap` as they both have the right to + // reserve the whole key space for themselves. + let len_value = StateValue::::new(prefix.extended(b"l")); + let elems = StateMap::with_codec(prefix.extended(b"e"), codec); + Self { + _phantom: PhantomData, + prefix, + len_value, + elems, + } + } + + /// Returns the prefix used when this [`StateVec`] was created. + pub fn prefix(&self) -> &Prefix { + &self.prefix + } + + fn set_len(&self, length: usize, working_set: &mut WorkingSet) { + self.len_value.set(&length, working_set); + } + + /// Sets a value in the [`StateVec`]. + /// If the index is out of bounds, returns an error. + /// To push a value to the end of the StateVec, use [`StateVec::push`]. + pub fn set( + &self, + index: usize, + value: &V, + working_set: &mut WorkingSet, + ) -> Result<(), Error> { + let len = self.len(working_set); + + if index < len { + self.elems.set(&index, value, working_set); + Ok(()) + } else { + Err(Error::IndexOutOfBounds(index)) + } + } + + /// Returns the value for the given index. 
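`StateValue` now keeps the singleton-key plumbing private and, like `StateMap`, defaults to `BorshCodec` unless `with_codec` is supplied. A small usage sketch; `ProverStorage` assumes the crate's `native` feature, and `tempfile` is only used for a scratch directory:

```rust
use sov_state::{DefaultStorageSpec, Prefix, ProverStorage, StateValue, WorkingSet};

fn main() {
    let tmpdir = tempfile::tempdir().unwrap();
    let storage = ProverStorage::<DefaultStorageSpec>::with_path(tmpdir.path()).unwrap();
    let mut ws = WorkingSet::new(storage);

    // `new` uses the default `BorshCodec`; `with_codec` would plug in another
    // `StateValueCodec` implementation for the same prefix.
    let counter: StateValue<u64> = StateValue::new(Prefix::new(b"counter".to_vec()));

    counter.set(&1, &mut ws);
    assert_eq!(counter.get(&mut ws), Some(1));

    // `remove` decodes and returns the old value; `delete` just drops it.
    assert_eq!(counter.remove(&mut ws), Some(1));
    assert_eq!(counter.get(&mut ws), None);
}
```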
+ pub fn get(&self, index: usize, working_set: &mut WorkingSet) -> Option { + self.elems.get(&index, working_set) + } + + /// Returns the value for the given index. + /// If the index is out of bounds, returns an error. + /// If the value is absent, returns an error. + pub fn get_or_err( + &self, + index: usize, + working_set: &mut WorkingSet, + ) -> Result { + let len = self.len(working_set); + + if index < len { + self.elems + .get(&index, working_set) + .ok_or_else(|| Error::MissingValue(self.prefix().clone(), index)) + } else { + Err(Error::IndexOutOfBounds(index)) + } + } + + /// Returns the length of the [`StateVec`]. + pub fn len(&self, working_set: &mut WorkingSet) -> usize { + self.len_value.get(working_set).unwrap_or_default() + } + + /// Pushes a value to the end of the [`StateVec`]. + pub fn push(&self, value: &V, working_set: &mut WorkingSet) { + let len = self.len(working_set); + + self.elems.set(&len, value, working_set); + self.set_len(len + 1, working_set); + } + + /// Pops a value from the end of the [`StateVec`] and returns it. + pub fn pop(&self, working_set: &mut WorkingSet) -> Option { + let len = self.len(working_set); + let last_i = len.checked_sub(1)?; + let elem = self.elems.remove(&last_i, working_set)?; + + let new_len = last_i; + self.set_len(new_len, working_set); + + Some(elem) + } + + pub fn clear(&self, working_set: &mut WorkingSet) { + let len = self.len_value.remove(working_set).unwrap_or_default(); + + for i in 0..len { + self.elems.delete(&i, working_set); + } + } + + /// Sets all values in the [`StateVec`]. + /// + /// If the length of the provided values is less than the length of the + /// [`StateVec`], the remaining values will be removed from storage. + pub fn set_all(&self, values: Vec, working_set: &mut WorkingSet) { + let old_len = self.len(working_set); + let new_len = values.len(); + + for i in new_len..old_len { + self.elems.delete(&i, working_set); + } + + for (i, value) in values.into_iter().enumerate() { + self.elems.set(&i, &value, working_set); + } + + self.set_len(new_len, working_set); + } + + /// Returns an iterator over all the values in the [`StateVec`]. + pub fn iter<'a, 'ws, S: Storage>( + &'a self, + working_set: &'ws mut WorkingSet, + ) -> StateVecIter<'a, 'ws, V, VC, S> { + let len = self.len(working_set); + StateVecIter { + state_vec: self, + ws: working_set, + len, + next_i: 0, + } + } +} + +/// An [`Iterator`] over a [`StateVec`] +/// +/// See [`StateVec::iter`] for more details. 
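Putting the `StateVec` API together, a usage sketch mirroring the crate's own tests (same assumptions as above: `ProverStorage` behind the `native` feature and `tempfile` for a scratch path):

```rust
use sov_state::{DefaultStorageSpec, Prefix, ProverStorage, StateVec, WorkingSet};

fn main() {
    let tmpdir = tempfile::tempdir().unwrap();
    let storage = ProverStorage::<DefaultStorageSpec>::with_path(tmpdir.path()).unwrap();
    let mut ws = WorkingSet::new(storage);

    let queue: StateVec<u32> = StateVec::new(Prefix::new(b"queue".to_vec()));

    queue.push(&1, &mut ws);
    queue.push(&2, &mut ws);
    queue.push(&3, &mut ws);
    assert_eq!(queue.len(&mut ws), 3);

    // `set` only overwrites existing indices; out-of-bounds writes are errors
    // rather than silent growth.
    assert!(queue.set(1, &20, &mut ws).is_ok());
    assert!(queue.set(9, &99, &mut ws).is_err());

    // The iterator reads elements lazily through the working set.
    let all: Vec<u32> = queue.iter(&mut ws).collect();
    assert_eq!(all, vec![1, 20, 3]);

    assert_eq!(queue.pop(&mut ws), Some(3));
    assert_eq!(queue.len(&mut ws), 2);
}
```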
+pub struct StateVecIter<'a, 'ws, V, VC, S> +where + VC: StateValueCodec, + S: Storage, +{ + state_vec: &'a StateVec, + ws: &'ws mut WorkingSet, + len: usize, + next_i: usize, +} + +impl<'a, 'ws, V, VC, S> Iterator for StateVecIter<'a, 'ws, V, VC, S> +where + VC: StateValueCodec, + S: Storage, +{ + type Item = V; + + fn next(&mut self) -> Option { + let elem = self.state_vec.get(self.next_i, self.ws); + if elem.is_some() { + self.next_i += 1; + } + + elem + } +} + +impl<'a, 'ws, V, VC, S> ExactSizeIterator for StateVecIter<'a, 'ws, V, VC, S> +where + VC: StateValueCodec, + S: Storage, +{ + fn len(&self) -> usize { + self.len - self.next_i + } +} + +impl<'a, 'ws, V, VC, S> FusedIterator for StateVecIter<'a, 'ws, V, VC, S> +where + VC: StateValueCodec, + S: Storage, +{ +} + +#[cfg(all(test, feature = "native"))] +mod test { + use std::fmt::Debug; + + use super::*; + use crate::{DefaultStorageSpec, ProverStorage}; + + enum TestCaseAction { + Push(T), + Pop(T), + Set(usize, T), + SetAll(Vec), + CheckLen(usize), + CheckContents(Vec), + CheckGet(usize, Option), + Clear, + } + + fn test_cases() -> Vec> { + vec![ + TestCaseAction::Push(1), + TestCaseAction::Push(2), + TestCaseAction::CheckContents(vec![1, 2]), + TestCaseAction::CheckLen(2), + TestCaseAction::Pop(2), + TestCaseAction::Set(0, 10), + TestCaseAction::CheckContents(vec![10]), + TestCaseAction::Push(8), + TestCaseAction::CheckContents(vec![10, 8]), + TestCaseAction::SetAll(vec![10]), + TestCaseAction::CheckContents(vec![10]), + TestCaseAction::CheckGet(1, None), + TestCaseAction::Set(0, u32::MAX), + TestCaseAction::Push(8), + TestCaseAction::Push(0), + TestCaseAction::CheckContents(vec![u32::MAX, 8, 0]), + TestCaseAction::SetAll(vec![11, 12]), + TestCaseAction::CheckContents(vec![11, 12]), + TestCaseAction::SetAll(vec![]), + TestCaseAction::CheckLen(0), + TestCaseAction::Push(42), + TestCaseAction::Push(1337), + TestCaseAction::Clear, + TestCaseAction::CheckContents(vec![]), + TestCaseAction::CheckGet(0, None), + TestCaseAction::SetAll(vec![1, 2, 3]), + TestCaseAction::CheckContents(vec![1, 2, 3]), + ] + } + + #[test] + fn test_state_vec() { + let tmpdir = tempfile::tempdir().unwrap(); + let storage = ProverStorage::::with_path(tmpdir.path()).unwrap(); + let mut working_set = WorkingSet::new(storage); + + let prefix = Prefix::new("test".as_bytes().to_vec()); + let state_vec = StateVec::::new(prefix); + + for test_case_action in test_cases() { + check_test_case_action(&state_vec, test_case_action, &mut working_set); + } + } + + fn check_test_case_action( + state_vec: &StateVec, + action: TestCaseAction, + ws: &mut WorkingSet, + ) where + S: Storage, + BorshCodec: StateValueCodec + StateValueCodec, + T: Eq + Debug, + { + match action { + TestCaseAction::CheckContents(expected) => { + let contents: Vec = state_vec.iter(ws).collect(); + assert_eq!(expected, contents); + } + TestCaseAction::CheckLen(expected) => { + let actual = state_vec.len(ws); + assert_eq!(actual, expected); + } + TestCaseAction::Pop(expected) => { + let actual = state_vec.pop(ws); + assert_eq!(actual, Some(expected)); + } + TestCaseAction::Push(value) => { + state_vec.push(&value, ws); + } + TestCaseAction::Set(index, value) => { + state_vec.set(index, &value, ws).unwrap(); + } + TestCaseAction::SetAll(values) => { + state_vec.set_all(values, ws); + } + TestCaseAction::CheckGet(index, expected) => { + let actual = state_vec.get(index, ws); + assert_eq!(actual, expected); + } + TestCaseAction::Clear => { + state_vec.clear(ws); + } + } + } +} diff --git 
a/module-system/sov-state/src/zk_storage.rs b/module-system/sov-state/src/zk_storage.rs index 6f6021325..6cb215a09 100644 --- a/module-system/sov-state/src/zk_storage.rs +++ b/module-system/sov-state/src/zk_storage.rs @@ -1,13 +1,19 @@ use std::marker::PhantomData; use std::sync::Arc; +use jmt::storage::NodeBatch; use jmt::{JellyfishMerkleTree, KeyHash, Version}; +#[cfg(all(target_os = "zkvm", feature = "bench"))] +use zk_cycle_macros::cycle_tracker; use crate::internal_cache::OrderedReadsAndWrites; -use crate::storage::{StorageKey, StorageValue}; +use crate::storage::{StorageKey, StorageProof, StorageValue}; use crate::witness::{TreeWitnessReader, Witness}; use crate::{MerkleProofSpec, Storage}; +#[cfg(all(target_os = "zkvm", feature = "bench"))] +extern crate risc0_zkvm; + pub struct ZkStorage { prev_state_root: [u8; 32], _phantom_hasher: PhantomData, @@ -33,22 +39,28 @@ impl ZkStorage { impl Storage for ZkStorage { type Witness = S::Witness; - type RuntimeConfig = [u8; 32]; + type Proof = jmt::proof::SparseMerkleProof; + type StateUpdate = NodeBatch; fn with_config(config: Self::RuntimeConfig) -> Result { Ok(Self::new(config)) } - fn get(&self, _key: StorageKey, witness: &S::Witness) -> Option { + fn get(&self, _key: &StorageKey, witness: &Self::Witness) -> Option { witness.get_hint() } - fn validate_and_commit( + fn get_state_root(&self, witness: &Self::Witness) -> anyhow::Result<[u8; 32]> { + Ok(witness.get_hint()) + } + + #[cfg_attr(all(target_os = "zkvm", feature = "bench"), cycle_tracker)] + fn compute_state_update( &self, state_accesses: OrderedReadsAndWrites, witness: &Self::Witness, - ) -> Result<[u8; 32], anyhow::Error> { + ) -> Result<([u8; 32], Self::StateUpdate), anyhow::Error> { let latest_version: Version = witness.get_hint(); let reader = TreeWitnessReader::new(witness); @@ -84,14 +96,32 @@ impl Storage for ZkStorage { // because the TreeReader is trusted let jmt = JellyfishMerkleTree::<_, S::Hasher>::new(&reader); - let (new_root, _tree_update) = jmt + let (new_root, tree_update) = jmt .put_value_set(batch, next_version) .expect("JMT update must succeed"); - - Ok(new_root.0) + Ok((new_root.0, tree_update.node_batch)) } + #[cfg_attr(all(target_os = "zkvm", feature = "bench"), cycle_tracker)] + fn commit(&self, _node_batch: &Self::StateUpdate) {} + fn is_empty(&self) -> bool { unimplemented!("Needs simplification in JellyfishMerkleTree: https://github.com/Sovereign-Labs/sovereign-sdk/issues/362") } + + fn open_proof( + &self, + state_root: [u8; 32], + state_proof: StorageProof, + ) -> Result<(StorageKey, Option), anyhow::Error> { + let StorageProof { key, value, proof } = state_proof; + let key_hash = KeyHash::with::(key.as_ref()); + + proof.verify( + jmt::RootHash(state_root), + key_hash, + value.as_ref().map(|v| v.value()), + )?; + Ok((key, value)) + } } diff --git a/module-system/utils/sov-data-generators/Cargo.toml b/module-system/utils/sov-data-generators/Cargo.toml new file mode 100644 index 000000000..f4ae2a971 --- /dev/null +++ b/module-system/utils/sov-data-generators/Cargo.toml @@ -0,0 +1,26 @@ +[package] +name = "sov-data-generators" +description = "A set of generator utils used to automatically produce and serialize transaction data" +authors = { workspace = true } +edition = { workspace = true } +homepage = { workspace = true } +license = { workspace = true } +repository = { workspace = true } +rust-version = { workspace = true } +version = { workspace = true } +readme = "README.md" +resolver = "2" + + +[dependencies] +sov-modules-api = { path = 
"../../sov-modules-api", features = ["native"] } +sov-modules-stf-template = { path = "../../sov-modules-stf-template", features = ["native"] } +sov-value-setter = { path = "../../module-implementations/examples/sov-value-setter", features = ["native"] } +sov-bank = { path = "../../module-implementations/sov-bank", features = ["native"] } +sov-state = { path = "../../sov-state" } +sov-rollup-interface = { path = "../../../rollup-interface", features = ["mocks"] } + +borsh = { workspace = true } + +[dev-dependencies] +proptest = { workspace = true } diff --git a/module-system/utils/sov-data-generators/src/bank_data.rs b/module-system/utils/sov-data-generators/src/bank_data.rs new file mode 100644 index 000000000..b1ab56c37 --- /dev/null +++ b/module-system/utils/sov-data-generators/src/bank_data.rs @@ -0,0 +1,361 @@ +use std::rc::Rc; + +use sov_bank::{get_token_address, Bank, CallMessage, Coins}; +use sov_modules_api::default_context::DefaultContext; +use sov_modules_api::default_signature::private_key::DefaultPrivateKey; +use sov_modules_api::transaction::Transaction; +use sov_modules_api::utils::generate_address; +use sov_modules_api::{Context, EncodeCall, Module, PrivateKey, Spec}; + +use crate::{Message, MessageGenerator}; + +pub struct TransferData { + pub sender_pkey: Rc, + pub receiver_address: ::Address, + pub token_address: ::Address, + pub transfer_amount: u64, +} + +pub struct MintData { + pub token_name: String, + pub salt: u64, + pub initial_balance: u64, + pub minter_address: ::Address, + pub minter_pkey: Rc, + pub authorized_minters: Vec<::Address>, +} + +pub struct BankMessageGenerator { + pub token_mint_txs: Vec>, + pub transfer_txs: Vec>, +} + +const DEFAULT_TOKEN_NAME: &str = "Token1"; +const DEFAULT_SALT: u64 = 10; +const DEFAULT_PVT_KEY: &str = "236e80cb222c4ed0431b093b3ac53e6aa7a2273fe1f4351cd354989a823432a27b758bf2e7670fafaf6bf0015ce0ff5aa802306fc7e3f45762853ffc37180fe6"; + +pub fn get_default_token_address() -> ::Address { + let minter_key = DefaultPrivateKey::from_hex(DEFAULT_PVT_KEY).unwrap(); + let minter_address = minter_key.default_address(); + let salt = DEFAULT_SALT; + let token_name = DEFAULT_TOKEN_NAME.to_owned(); + get_token_address::(&token_name, minter_address.as_ref(), salt) +} + +pub fn get_default_private_key() -> DefaultPrivateKey { + DefaultPrivateKey::from_hex(DEFAULT_PVT_KEY).unwrap() +} + +impl Default for BankMessageGenerator { + fn default() -> Self { + let minter_key = DefaultPrivateKey::from_hex(DEFAULT_PVT_KEY).unwrap(); + let minter_address = minter_key.default_address(); + let salt = DEFAULT_SALT; + let token_name = DEFAULT_TOKEN_NAME.to_owned(); + let mint_data = MintData { + token_name: token_name.clone(), + salt, + initial_balance: 1000, + minter_address, + minter_pkey: Rc::new(minter_key), + authorized_minters: Vec::from([minter_address]), + }; + Self { + token_mint_txs: Vec::from([mint_data]), + transfer_txs: Vec::from([TransferData { + sender_pkey: Rc::new(DefaultPrivateKey::from_hex(DEFAULT_PVT_KEY).unwrap()), + transfer_amount: 15, + receiver_address: generate_address::("just_receiver"), + token_address: get_token_address::( + &token_name, + minter_address.as_ref(), + salt, + ), + }]), + } + } +} + +impl BankMessageGenerator { + pub fn create_invalid_transfer() -> Self { + let minter_key = DefaultPrivateKey::from_hex(DEFAULT_PVT_KEY).unwrap(); + let minter_address = minter_key.default_address(); + let salt = DEFAULT_SALT; + let token_name = DEFAULT_TOKEN_NAME.to_owned(); + let mint_data = MintData { + token_name: token_name.clone(), 
+ salt, + initial_balance: 1000, + minter_address, + minter_pkey: Rc::new(minter_key), + authorized_minters: Vec::from([minter_address]), + }; + Self { + token_mint_txs: Vec::from([mint_data]), + transfer_txs: Vec::from([ + TransferData { + sender_pkey: Rc::new(DefaultPrivateKey::from_hex(DEFAULT_PVT_KEY).unwrap()), + transfer_amount: 15, + receiver_address: generate_address::("just_receiver"), + token_address: get_token_address::( + &token_name, + minter_address.as_ref(), + salt, + ), + }, + TransferData { + sender_pkey: Rc::new(DefaultPrivateKey::from_hex(DEFAULT_PVT_KEY).unwrap()), + // invalid transfer because transfer_amount > minted supply + transfer_amount: 5000, + receiver_address: generate_address::("just_receiver"), + token_address: get_token_address::( + &token_name, + minter_address.as_ref(), + salt, + ), + }, + ]), + } + } +} + +pub(crate) fn mint_token_tx(mint_data: &MintData) -> CallMessage { + CallMessage::CreateToken { + salt: mint_data.salt, + token_name: mint_data.token_name.clone(), + initial_balance: mint_data.initial_balance, + minter_address: mint_data.minter_address.clone(), + authorized_minters: mint_data.authorized_minters.clone(), + } +} + +pub(crate) fn transfer_token_tx(transfer_data: &TransferData) -> CallMessage { + CallMessage::Transfer { + to: transfer_data.receiver_address.clone(), + coins: Coins { + amount: transfer_data.transfer_amount, + token_address: transfer_data.token_address.clone(), + }, + } +} + +impl MessageGenerator for BankMessageGenerator { + type Module = Bank; + type Context = C; + + fn create_messages(&self) -> Vec> { + let mut messages = Vec::>>::new(); + + let mut nonce = 0; + + for mint_message in &self.token_mint_txs { + messages.push(Message::new( + mint_message.minter_pkey.clone(), + mint_token_tx::(mint_message), + nonce, + )); + nonce += 1; + } + + for transfer_message in &self.transfer_txs { + messages.push(Message::new( + transfer_message.sender_pkey.clone(), + transfer_token_tx::(transfer_message), + nonce, + )); + nonce += 1; + } + + messages + } + + fn create_tx>( + &self, + sender: &::PrivateKey, + message: ::CallMessage, + nonce: u64, + _is_last: bool, + ) -> sov_modules_api::transaction::Transaction { + let message = Encoder::encode_call(message); + Transaction::::new_signed_tx(sender, message, nonce) + } +} + +pub struct BadSerializationBankCallMessages; + +impl BadSerializationBankCallMessages { + pub fn new() -> Self { + Self {} + } +} + +impl Default for BadSerializationBankCallMessages { + fn default() -> Self { + Self::new() + } +} + +impl MessageGenerator for BadSerializationBankCallMessages { + type Module = Bank; + type Context = DefaultContext; + + fn create_messages(&self) -> Vec> { + let mut messages = Vec::>>::new(); + let minter_key = DefaultPrivateKey::from_hex(DEFAULT_PVT_KEY).unwrap(); + let minter_address = minter_key.default_address(); + let salt = DEFAULT_SALT; + let token_name = DEFAULT_TOKEN_NAME.to_owned(); + messages.push(Message::new( + Rc::new(DefaultPrivateKey::from_hex(DEFAULT_PVT_KEY).unwrap()), + CallMessage::CreateToken { + salt, + token_name, + initial_balance: 1000, + minter_address, + authorized_minters: Vec::from([minter_address]), + }, + 0, + )); + messages.push(Message::new( + Rc::new(DefaultPrivateKey::from_hex(DEFAULT_PVT_KEY).unwrap()), + CallMessage::Transfer { + to: generate_address::("just_receiver"), + coins: Coins { + amount: 50, + token_address: get_default_token_address(), + }, + }, + 0, + )); + messages + } + + fn create_tx>( + &self, + sender: &DefaultPrivateKey, + message: 
as Module>::CallMessage, + nonce: u64, + is_last: bool, + ) -> Transaction { + // just some random bytes that won't deserialize to a valid txn + let call_data = if is_last { + vec![1, 2, 3] + } else { + Encoder::encode_call(message) + }; + + Transaction::::new_signed_tx(sender, call_data, nonce) + } +} + +pub struct BadSignatureBankCallMessages; + +impl BadSignatureBankCallMessages { + pub fn new() -> Self { + Self {} + } +} + +impl Default for BadSignatureBankCallMessages { + fn default() -> Self { + Self::new() + } +} + +impl MessageGenerator for BadSignatureBankCallMessages { + type Module = Bank; + type Context = DefaultContext; + + fn create_messages(&self) -> Vec> { + let mut messages = Vec::>>::new(); + let minter_key = DefaultPrivateKey::from_hex(DEFAULT_PVT_KEY).unwrap(); + let minter_address = minter_key.default_address(); + let salt = DEFAULT_SALT; + let token_name = DEFAULT_TOKEN_NAME.to_owned(); + messages.push(Message::new( + Rc::new(DefaultPrivateKey::from_hex(DEFAULT_PVT_KEY).unwrap()), + CallMessage::CreateToken { + salt, + token_name, + initial_balance: 1000, + minter_address, + authorized_minters: Vec::from([minter_address]), + }, + 0, + )); + messages + } + + fn create_tx>( + &self, + sender: &DefaultPrivateKey, + message: as Module>::CallMessage, + nonce: u64, + is_last: bool, + ) -> Transaction { + let call_data = Encoder::encode_call(message); + + if is_last { + let tx = Transaction::::new_signed_tx(sender, call_data.clone(), nonce); + Transaction::new( + DefaultPrivateKey::generate().pub_key(), + call_data, + tx.signature().clone(), + nonce, + ) + } else { + Transaction::::new_signed_tx(sender, call_data, nonce) + } + } +} + +pub struct BadNonceBankCallMessages; + +impl BadNonceBankCallMessages { + pub fn new() -> Self { + Self {} + } +} + +impl Default for BadNonceBankCallMessages { + fn default() -> Self { + Self::new() + } +} + +impl MessageGenerator for BadNonceBankCallMessages { + type Module = Bank; + type Context = DefaultContext; + + fn create_messages(&self) -> Vec> { + let mut messages = Vec::>>::new(); + let minter_key = DefaultPrivateKey::from_hex(DEFAULT_PVT_KEY).unwrap(); + let minter_address = minter_key.default_address(); + let salt = DEFAULT_SALT; + let token_name = DEFAULT_TOKEN_NAME.to_owned(); + messages.push(Message::new( + Rc::new(DefaultPrivateKey::from_hex(DEFAULT_PVT_KEY).unwrap()), + CallMessage::CreateToken { + salt, + token_name, + initial_balance: 1000, + minter_address, + authorized_minters: Vec::from([minter_address]), + }, + 0, + )); + messages + } + + fn create_tx>( + &self, + sender: &DefaultPrivateKey, + message: as Module>::CallMessage, + _nonce: u64, + _is_last: bool, + ) -> Transaction { + let message = Encoder::encode_call(message); + // hard-coding the nonce to 1000 + Transaction::::new_signed_tx(sender, message, 1000) + } +} diff --git a/module-system/utils/sov-data-generators/src/lib.rs b/module-system/utils/sov-data-generators/src/lib.rs new file mode 100644 index 000000000..083753953 --- /dev/null +++ b/module-system/utils/sov-data-generators/src/lib.rs @@ -0,0 +1,102 @@ +use std::rc::Rc; + +use borsh::ser::BorshSerialize; +use sov_modules_api::transaction::Transaction; +pub use sov_modules_api::EncodeCall; +use sov_modules_api::{Context, Module, Spec}; +use sov_modules_stf_template::{Batch, RawTx, SequencerOutcome, TxEffect}; +use sov_rollup_interface::da::DaSpec; +use sov_rollup_interface::mocks::{MockAddress, MockBlob, MockDaSpec}; +use sov_rollup_interface::stf::BatchReceipt; +use sov_rollup_interface::RollupAddress; + 
+pub mod bank_data; +pub mod value_setter_data; + +pub fn new_test_blob_from_batch( + batch: Batch, + address: &[u8], + hash: [u8; 32], +) -> ::BlobTransaction { + let address = MockAddress::try_from(address).unwrap(); + let data = batch.try_to_vec().unwrap(); + MockBlob::new(data, address, hash) +} + +pub fn has_tx_events( + apply_blob_outcome: &BatchReceipt, TxEffect>, +) -> bool { + let events = apply_blob_outcome + .tx_receipts + .iter() + .flat_map(|receipts| receipts.events.iter()); + + events.peekable().peek().is_some() +} + +/// A generic message object used to create transactions. +pub struct Message { + /// The sender's private key. + pub sender_key: Rc<::PrivateKey>, + /// The message content. + pub content: Mod::CallMessage, + /// The message nonce. + pub nonce: u64, +} + +impl Message { + fn new(sender_key: Rc<::PrivateKey>, content: Mod::CallMessage, nonce: u64) -> Self { + Self { + sender_key, + content, + nonce, + } + } +} + +/// Trait used to generate messages from the DA layer to automate module testing +pub trait MessageGenerator { + /// Module where the messages originate from. + type Module: Module; + + /// Module context + type Context: Context; + + /// Generates a list of messages originating from the module. + fn create_messages(&self) -> Vec>; + + /// Creates a transaction object associated with a call message, for a given module. + fn create_tx>( + &self, + // Private key of the sender + sender: &::PrivateKey, + // The message itself + message: ::CallMessage, + // The message nonce + nonce: u64, + // A boolean that indicates whether this message is the last one to be sent. + // Useful to perform some operations specifically on the last message. + is_last: bool, + ) -> Transaction; + + /// Creates a vector of raw transactions from the module. + fn create_raw_txs>(&self) -> Vec { + let mut messages_iter = self.create_messages().into_iter().peekable(); + let mut serialized_messages = Vec::default(); + while let Some(message) = messages_iter.next() { + let is_last = messages_iter.peek().is_none(); + + let tx = self.create_tx::( + &message.sender_key, + message.content, + message.nonce, + is_last, + ); + + serialized_messages.push(RawTx { + data: tx.try_to_vec().unwrap(), + }) + } + serialized_messages + } +} diff --git a/module-system/utils/sov-data-generators/src/value_setter_data.rs b/module-system/utils/sov-data-generators/src/value_setter_data.rs new file mode 100644 index 000000000..9bd80195c --- /dev/null +++ b/module-system/utils/sov-data-generators/src/value_setter_data.rs @@ -0,0 +1,71 @@ +use std::vec; + +use sov_modules_api::default_context::DefaultContext; +use sov_modules_api::default_signature::private_key::DefaultPrivateKey; +use sov_modules_api::PrivateKey; +use sov_value_setter::ValueSetter; + +use super::*; +use crate::EncodeCall; + +pub struct ValueSetterMessage { + pub admin: Rc, + pub messages: Vec, +} + +pub struct ValueSetterMessages { + pub messages: Vec>, +} + +impl ValueSetterMessages { + pub fn new(messages: Vec>) -> Self { + Self { messages } + } +} + +impl Default for ValueSetterMessages { + /// This function will return a dummy value setter message containing one admin and two value setter messages. 
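`create_raw_txs` plus `new_test_blob_from_batch` is the intended path from generated messages to a mock DA blob. A sketch of the final step, assuming `Batch` exposes a public `txs` field as in `sov-modules-stf-template`; in a real test the `RawTx`s come from a `MessageGenerator` such as `ValueSetterMessages::default()`, parameterized by the runtime's `EncodeCall` implementation:

```rust
use sov_data_generators::new_test_blob_from_batch;
use sov_modules_stf_template::{Batch, RawTx};
use sov_rollup_interface::da::DaSpec;
use sov_rollup_interface::mocks::MockDaSpec;

/// Wrap already-serialized transactions into a mock DA blob for an STF test.
fn blob_for_test(txs: Vec<RawTx>) -> <MockDaSpec as DaSpec>::BlobTransaction {
    // The sender address bytes and blob hash are arbitrary test values;
    // `new_test_blob_from_batch` converts the address with `MockAddress::try_from`.
    new_test_blob_from_batch(Batch { txs }, &[0u8; 32], [11u8; 32])
}
```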
+ fn default() -> Self { + Self::new(vec![ValueSetterMessage { + admin: Rc::new(DefaultPrivateKey::generate()), + messages: vec![99, 33], + }]) + } +} + +impl MessageGenerator for ValueSetterMessages { + type Module = ValueSetter; + type Context = C; + + fn create_messages(&self) -> Vec> { + let mut messages = Vec::default(); + for value_setter_message in &self.messages { + let admin = value_setter_message.admin.clone(); + + for (value_setter_admin_nonce, new_value) in + value_setter_message.messages.iter().enumerate() + { + let set_value_msg: sov_value_setter::CallMessage = + sov_value_setter::CallMessage::SetValue(*new_value); + + messages.push(Message::new( + admin.clone(), + set_value_msg, + value_setter_admin_nonce.try_into().unwrap(), + )); + } + } + messages + } + + fn create_tx>( + &self, + sender: &C::PrivateKey, + message: ::CallMessage, + nonce: u64, + _is_last: bool, + ) -> Transaction { + let message = Encoder::encode_call(message); + Transaction::::new_signed_tx(sender, message, nonce) + } +} diff --git a/rollup-interface/Cargo.toml b/rollup-interface/Cargo.toml index e971e4d99..0ace650a7 100644 --- a/rollup-interface/Cargo.toml +++ b/rollup-interface/Cargo.toml @@ -24,15 +24,21 @@ serde = { workspace = true } bytes = { workspace = true } hex = { workspace = true, features = ["serde"] } digest = { workspace = true } - sha2 = { workspace = true, optional = true } +# TODO: Replace with serde-compatible borsh implementation when it becomes availabile +# see https://github.com/Sovereign-Labs/sovereign-sdk/issues/215 +bincode = { workspace = true } + anyhow = { workspace = true } # Proptest should be a dev-dependency, but those can't be optional proptest = { workspace = true, optional = true } proptest-derive = { workspace = true, optional = true } +tokio = { workspace = true, optional = true} + + [dev-dependencies] serde_json = "1" proptest = { workspace = true } @@ -40,5 +46,6 @@ proptest-derive = { workspace = true } [features] default = [] +native = ["tokio"] fuzzing = ["proptest", "proptest-derive", "sha2"] -mocks = ["sha2", "bytes/serde"] +mocks = ["sha2", "bytes/serde", "tokio"] diff --git a/rollup-interface/specs/overview.md b/rollup-interface/specs/overview.md index 6a57a2787..2fa5b8f5e 100644 --- a/rollup-interface/specs/overview.md +++ b/rollup-interface/specs/overview.md @@ -33,5 +33,5 @@ Once a proof for a given batch has been posted on chain, the batch is subjective (2) a block header of the underlying L1 ("Data Availability") chain or (3) a batch header. - Batch: a group of 1 or more rollup transactions which are submitted as a single data blob on the DA chain. - Batch Header: A summary of a given batch, posted on the L1 alongside the transactions. Rollups may define this header - to contain any relevant information, but may also choose omit it entirely. + to contain any relevant information, but may also choose to omit it entirely. - JMT: Jellyfish Merkle Tree - an optimized sparse merkle tree invented by Diem and used in many modern blockchains. diff --git a/rollup-interface/src/node/rpc/mod.rs b/rollup-interface/src/node/rpc/mod.rs index 7796045d3..ebe004958 100644 --- a/rollup-interface/src/node/rpc/mod.rs +++ b/rollup-interface/src/node/rpc/mod.rs @@ -1,9 +1,14 @@ //! The rpc module defines types and traits for querying chain history //! via an RPC interface. 
+#[cfg(feature = "native")] use serde::de::DeserializeOwned; use serde::{Deserialize, Serialize}; +#[cfg(feature = "native")] +use tokio::sync::broadcast::Receiver; -use crate::stf::{Event, EventKey}; +#[cfg(feature = "native")] +use crate::stf::Event; +use crate::stf::EventKey; /// A struct containing enough information to uniquely specify single batch. #[derive(Debug, PartialEq, Serialize, Deserialize)] @@ -187,6 +192,7 @@ pub enum ItemOrHash { } /// A LedgerRpcProvider provides a way to query the ledger for information about slots, batches, transactions, and events. +#[cfg(feature = "native")] pub trait LedgerRpcProvider { /// Get the latest slot in the ledger. fn get_head( @@ -294,6 +300,9 @@ pub trait LedgerRpcProvider { end: u64, query_mode: QueryMode, ) -> Result>>, anyhow::Error>; + + /// Get a notification each time a slot is processed + fn subscribe_slots(&self) -> Result, anyhow::Error>; } mod rpc_hex { diff --git a/rollup-interface/src/node/services/da.rs b/rollup-interface/src/node/services/da.rs index 39221f0f1..267d1411f 100644 --- a/rollup-interface/src/node/services/da.rs +++ b/rollup-interface/src/node/services/da.rs @@ -1,11 +1,12 @@ //! The da module defines traits used by the full node to interact with the DA layer. -use std::fmt; +use std::fmt::{self, Display}; use async_trait::async_trait; use serde::de::DeserializeOwned; use serde::Serialize; use crate::da::{BlockHeaderTrait, DaSpec}; +use crate::zk::ValidityCondition; /// A DaService is the local side of an RPC connection talking to a node of the DA layer /// It is *not* part of the logic that is zk-proven. @@ -13,24 +14,18 @@ use crate::da::{BlockHeaderTrait, DaSpec}; /// The DaService has two responsibilities - fetching data from the DA layer, transforming the /// data into a representation that can be efficiently verified in circuit. #[async_trait] -pub trait DaService { - /// A handle to the types used by the DA layer. - type RuntimeConfig: DeserializeOwned; - +pub trait DaService: Send + Sync + 'static { /// A handle to the types used by the DA layer. type Spec: DaSpec; /// A DA layer block, possibly excluding some irrelevant information. - type FilteredBlock: SlotData::BlockHeader>; + type FilteredBlock: SlotData< + BlockHeader = ::BlockHeader, + Cond = ::ValidityCondition, + >; /// The error type for fallible methods. - type Error: fmt::Debug + Send + Sync; - - /// Create a new instance of the DaService - async fn new( - config: Self::RuntimeConfig, - chain_params: ::ChainParams, - ) -> Self; + type Error: fmt::Debug + Send + Sync + Display; /// Retrieve the data for the given height, waiting for it to be /// finalized if necessary. The block, once returned, must not be reverted @@ -100,8 +95,14 @@ pub trait SlotData: /// For these fields, we only ever store their *serialized* representation in memory or on disk. Only a few special /// fields like `data_root` are stored in decoded form in the `CelestiaHeader` struct. type BlockHeader: BlockHeaderTrait; + + /// The validity condition associated with the slot data. + type Cond: ValidityCondition; + /// The canonical hash of the DA layer block. fn hash(&self) -> [u8; 32]; /// The header of the DA layer block. 
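`subscribe_slots` hands out a tokio broadcast receiver, so a native-only consumer can react whenever the node finishes processing a slot. A sketch, assuming the `native` feature is enabled and deliberately ignoring the notification payload since its exact type is not shown here:

```rust
use sov_rollup_interface::rpc::LedgerRpcProvider;

/// React each time the ledger reports a processed slot.
async fn watch_slots<L: LedgerRpcProvider>(ledger: &L) -> anyhow::Result<()> {
    let mut slot_rx = ledger.subscribe_slots()?;

    // `recv` returns `Err` once all senders are dropped, which ends the loop.
    while slot_rx.recv().await.is_ok() {
        // Re-query whatever an indexer or dashboard needs; here we just log.
        println!("a new slot was processed");
    }
    Ok(())
}
```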
fn header(&self) -> &Self::BlockHeader; + /// Get the validity condition set associated with the slot + fn validity_condition(&self) -> Self::Cond; } diff --git a/rollup-interface/src/state_machine/da.rs b/rollup-interface/src/state_machine/da.rs index 68386c115..d73328c9d 100644 --- a/rollup-interface/src/state_machine/da.rs +++ b/rollup-interface/src/state_machine/da.rs @@ -11,10 +11,10 @@ use serde::de::DeserializeOwned; use serde::{Deserialize, Serialize}; use crate::zk::ValidityCondition; -use crate::AddressTrait; +use crate::BasicAddress; /// A specification for the types used by a DA layer. -pub trait DaSpec { +pub trait DaSpec: 'static { /// The hash of a DA layer block type SlotHash: BlockHashTrait; @@ -24,6 +24,9 @@ pub trait DaSpec { /// The transaction type used by the DA layer. type BlobTransaction: BlobReaderTrait; + /// Any conditions imposed by the DA layer which need to be checked outside of the SNARK + type ValidityCondition: ValidityCondition; + /// A proof that each tx in a set of blob transactions is included in a given block. type InclusionMultiProof: Serialize + DeserializeOwned; @@ -52,9 +55,6 @@ pub trait DaVerifier { /// TODO: Should we add `std::Error` bound so it can be `()?` ? type Error: Debug; - /// Any conditions imposed by the DA layer which need to be checked outside of the SNARK - type ValidityCondition: ValidityCondition; - /// Create a new da verifier with the given chain parameters fn new(params: ::ChainParams) -> Self; @@ -65,7 +65,18 @@ pub trait DaVerifier { txs: &[::BlobTransaction], inclusion_proof: ::InclusionMultiProof, completeness_proof: ::CompletenessProof, - ) -> Result; + ) -> Result<::ValidityCondition, Self::Error>; +} + +/// [`AccumulatorStatus`] is a wrapper around an accumulator vector that specifies +/// whether a [`CountedBufReader`] has finished reading the underlying buffer. +#[derive(BorshDeserialize, BorshSerialize, Serialize, Deserialize, Debug, Clone, PartialEq)] +pub enum Accumulator { + /// The underlying buffer has been completely read and [`Vec`] contains the result + Completed(Vec), + /// The underlying buffer still contains elements to be read. [`Vec`] contains the + /// accumulated elements. + InProgress(Vec), } #[derive(Debug, Clone, Serialize, Deserialize, BorshDeserialize, BorshSerialize, PartialEq)] @@ -83,7 +94,7 @@ pub struct CountedBufReader { /// An accumulator that stores the data read from the blob buffer into a vector. 
/// Allows easy access to the data that has already been read - reading_acc: Vec, + accumulator: Accumulator, } impl CountedBufReader { @@ -93,7 +104,7 @@ impl CountedBufReader { CountedBufReader { inner, counter: 0, - reading_acc: Vec::with_capacity(buf_size), + accumulator: Accumulator::InProgress(Vec::with_capacity(buf_size)), } } @@ -103,9 +114,13 @@ impl CountedBufReader { } /// Getter: returns a reference to an accumulator of the blob data read by the rollup - /// TODO: Refactor - pub fn acc(&self) -> &Vec { - &self.reading_acc + pub fn accumulator(&self) -> &Accumulator { + &self.accumulator + } + + /// Contains the total length of the data (length already read + length remaining) + pub fn total_len(&self) -> usize { + self.inner.remaining() + self.counter } } @@ -119,7 +134,25 @@ impl Read for CountedBufReader { let num_read = len_before_reading - self.inner.remaining(); - self.reading_acc.extend_from_slice(&buf[..buf_end]); + let inner_acc_vec = match &mut self.accumulator { + Accumulator::Completed(_) => { + // The accumulator is completed, we return 0 as no data was read into self + return Ok(0); + } + + Accumulator::InProgress(inner_vec) => inner_vec, + }; + + inner_acc_vec.extend_from_slice(&buf[..buf_end]); + + match self.inner.remaining() { + 0 => { + self.accumulator = Accumulator::Completed(inner_acc_vec.to_vec()); + } + _ => { + self.accumulator = Accumulator::InProgress(inner_acc_vec.to_vec()); + } + } self.counter += num_read; @@ -133,7 +166,7 @@ pub trait BlobReaderTrait: Serialize + DeserializeOwned + Send + Sync + 'static type Data: Buf; /// The type used to represent addresses on the DA layer. - type Address: AddressTrait; + type Address: BasicAddress; /// Returns the address (on the DA layer) of the entity which submitted the blob transaction fn sender(&self) -> Self::Address; @@ -144,10 +177,8 @@ pub trait BlobReaderTrait: Serialize + DeserializeOwned + Send + Sync + 'static /// This function returns a mutable reference to the blob data fn data_mut(&mut self) -> &mut CountedBufReader; - /// The raw data of the blob. For example, the "calldata" of an Ethereum rollup transaction - /// This function clones the data of the blob to an external BufWithCounter - /// - /// This function returns a simple reference to the blob data + /// Returns a reference to a `CountedBufReader`, which allows the caller to re-read + /// any data read so far, but not to advance the buffer fn data(&self) -> &CountedBufReader; /// Returns the hash of the blob. If not provided with a hint, it is computed by hashing the blob data diff --git a/rollup-interface/src/state_machine/mocks.rs b/rollup-interface/src/state_machine/mocks.rs deleted file mode 100644 index 9962e86b5..000000000 --- a/rollup-interface/src/state_machine/mocks.rs +++ /dev/null @@ -1,271 +0,0 @@ -//! Defines mock instantiations of many important traits, which are useful -//! for testing, fuzzing, and benchmarking. -use std::fmt::Display; -use std::io::Write; - -use anyhow::{ensure, Error}; -use borsh::{BorshDeserialize, BorshSerialize}; -use bytes::Bytes; -use serde::{Deserialize, Serialize}; -use sha2::Digest; - -use crate::da::{BlobReaderTrait, BlockHashTrait, BlockHeaderTrait, CountedBufReader, DaSpec}; -use crate::services::da::SlotData; -use crate::zk::{Matches, Zkvm}; -use crate::AddressTrait; - -/// A mock commitment to a particular zkVM program. 
-#[derive(Debug, Clone, PartialEq, Eq, BorshDeserialize, BorshSerialize, Serialize, Deserialize)] -pub struct MockCodeCommitment(pub [u8; 32]); - -impl Matches for MockCodeCommitment { - fn matches(&self, other: &MockCodeCommitment) -> bool { - self.0 == other.0 - } -} - -/// A mock proof generated by a zkVM. -#[derive(Debug, Clone, PartialEq, Eq, BorshDeserialize, BorshSerialize, Serialize, Deserialize)] -pub struct MockProof<'a> { - /// The ID of the program this proof might be valid for. - pub program_id: MockCodeCommitment, - /// Whether the proof is valid. - pub is_valid: bool, - /// The tamper-proof outputs of the proof. - pub log: &'a [u8], -} - -impl<'a> MockProof<'a> { - /// Serializes a proof into a writer. - pub fn encode(&self, mut writer: impl Write) { - writer.write_all(&self.program_id.0).unwrap(); - let is_valid_byte = if self.is_valid { 1 } else { 0 }; - writer.write_all(&[is_valid_byte]).unwrap(); - writer.write_all(self.log).unwrap(); - } - - /// Serializes a proof into a vector. - pub fn encode_to_vec(&self) -> Vec { - let mut encoded = Vec::new(); - self.encode(&mut encoded); - encoded - } - - /// Tries to deserialize a proof from a byte slice. - pub fn decode(input: &'a [u8]) -> Result { - ensure!(input.len() >= 33, "Input is too short"); - let program_id = MockCodeCommitment(input[0..32].try_into().unwrap()); - let is_valid = input[32] == 1; - let log = &input[33..]; - Ok(Self { - program_id, - is_valid, - log, - }) - } -} - -/// A mock implementing the zkVM trait. -pub struct MockZkvm; - -impl Zkvm for MockZkvm { - type CodeCommitment = MockCodeCommitment; - - type Error = anyhow::Error; - - fn verify<'a>( - serialized_proof: &'a [u8], - code_commitment: &Self::CodeCommitment, - ) -> Result<&'a [u8], Self::Error> { - let proof = MockProof::decode(serialized_proof)?; - anyhow::ensure!( - proof.program_id.matches(code_commitment), - "Proof failed to verify against requested code commitment" - ); - anyhow::ensure!(proof.is_valid, "Proof is not valid"); - Ok(proof.log) - } -} - -#[test] -fn test_mock_proof_roundtrip() { - let proof = MockProof { - program_id: MockCodeCommitment([1; 32]), - is_valid: true, - log: &[2; 50], - }; - - let mut encoded = Vec::new(); - proof.encode(&mut encoded); - - let decoded = MockProof::decode(&encoded).unwrap(); - assert_eq!(proof, decoded); -} - -/// A mock address type used for testing. Internally, this type is standard 32 byte array. 
-#[derive(Debug, PartialEq, Clone, Eq, Copy, serde::Serialize, serde::Deserialize, Hash)] -pub struct MockAddress { - addr: [u8; 32], -} - -impl core::str::FromStr for MockAddress { - type Err = anyhow::Error; - - fn from_str(s: &str) -> Result { - let addr = hex::decode(s)?; - if addr.len() != 32 { - return Err(anyhow::anyhow!("Invalid address length")); - } - - let mut array = [0; 32]; - array.copy_from_slice(&addr); - Ok(MockAddress { addr: array }) - } -} - -impl<'a> TryFrom<&'a [u8]> for MockAddress { - type Error = Error; - - fn try_from(addr: &'a [u8]) -> Result { - if addr.len() != 32 { - anyhow::bail!("Address must be 32 bytes long"); - } - let mut addr_bytes = [0u8; 32]; - addr_bytes.copy_from_slice(addr); - Ok(Self { addr: addr_bytes }) - } -} - -impl AsRef<[u8]> for MockAddress { - fn as_ref(&self) -> &[u8] { - &self.addr - } -} - -impl From<[u8; 32]> for MockAddress { - fn from(addr: [u8; 32]) -> Self { - MockAddress { addr } - } -} - -impl Display for MockAddress { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - write!(f, "{:?}", self.addr) - } -} - -impl AddressTrait for MockAddress {} - -#[derive( - Debug, - Clone, - PartialEq, - borsh::BorshDeserialize, - borsh::BorshSerialize, - serde::Serialize, - serde::Deserialize, -)] - -/// A mock BlobTransaction from a DA layer used for testing. -pub struct TestBlob

{ - address: Address, - hash: [u8; 32], - data: CountedBufReader, -} - -impl BlobReaderTrait for TestBlob
{ - type Data = Bytes; - type Address = Address; - - fn sender(&self) -> Self::Address { - self.address.clone() - } - - fn hash(&self) -> [u8; 32] { - self.hash - } - - fn data_mut(&mut self) -> &mut CountedBufReader { - &mut self.data - } - - fn data(&self) -> &CountedBufReader { - &self.data - } -} - -impl TestBlob
{ - /// Creates a new mock blob with the given data, claiming to have been published by the provided address. - pub fn new(data: Vec, address: Address, hash: [u8; 32]) -> Self { - Self { - address, - data: CountedBufReader::new(bytes::Bytes::from(data)), - hash, - } - } -} - -/// A mock hash digest. -#[derive(Clone, Copy, Debug, PartialEq, serde::Serialize, serde::Deserialize)] -pub struct TestHash(pub [u8; 32]); - -impl AsRef<[u8]> for TestHash { - fn as_ref(&self) -> &[u8] { - &self.0 - } -} - -impl BlockHashTrait for TestHash {} - -/// A mock block header used for testing. -#[derive(Serialize, Deserialize, PartialEq, core::fmt::Debug, Clone)] -pub struct TestBlockHeader { - /// The hash of the previous block. - pub prev_hash: TestHash, -} - -impl BlockHeaderTrait for TestBlockHeader { - type Hash = TestHash; - - fn prev_hash(&self) -> Self::Hash { - self.prev_hash - } - - fn hash(&self) -> Self::Hash { - TestHash(sha2::Sha256::digest(self.prev_hash.0).into()) - } -} - -/// A mock block type used for testing. -#[derive(Serialize, Deserialize, PartialEq, core::fmt::Debug, Clone)] -pub struct TestBlock { - /// The hash of this block. - pub curr_hash: [u8; 32], - /// The header of this block. - pub header: TestBlockHeader, - /// The height of this block - pub height: u64, -} - -impl SlotData for TestBlock { - type BlockHeader = TestBlockHeader; - fn hash(&self) -> [u8; 32] { - self.curr_hash - } - - fn header(&self) -> &Self::BlockHeader { - &self.header - } -} - -/// A [`DaSpec`] suitable for testing. -pub struct MockDaSpec; - -impl DaSpec for MockDaSpec { - type SlotHash = TestHash; - type BlockHeader = TestBlockHeader; - type BlobTransaction = TestBlob; - type InclusionMultiProof = [u8; 32]; - type CompletenessProof = (); - type ChainParams = (); -} diff --git a/rollup-interface/src/state_machine/mocks/da.rs b/rollup-interface/src/state_machine/mocks/da.rs new file mode 100644 index 000000000..c778d5c28 --- /dev/null +++ b/rollup-interface/src/state_machine/mocks/da.rs @@ -0,0 +1,290 @@ +use std::fmt::Display; +use std::sync::Arc; + +use async_trait::async_trait; +use bytes::Bytes; +use serde::{Deserialize, Serialize}; +use sha2::Digest; + +use crate::da::{BlobReaderTrait, BlockHashTrait, BlockHeaderTrait, CountedBufReader, DaSpec}; +use crate::mocks::MockValidityCond; +use crate::services::da::{DaService, SlotData}; +use crate::{BasicAddress, RollupAddress}; + +/// A mock address type used for testing. Internally, this type is standard 32 byte array. +#[derive( + Debug, + PartialEq, + Clone, + Eq, + Copy, + serde::Serialize, + serde::Deserialize, + Hash, + Default, + borsh::BorshDeserialize, + borsh::BorshSerialize, +)] +pub struct MockAddress { + /// Underlying mock address. 
+ pub addr: [u8; 32], +} + +impl core::str::FromStr for MockAddress { + type Err = anyhow::Error; + + fn from_str(s: &str) -> Result { + let addr = hex::decode(s)?; + if addr.len() != 32 { + return Err(anyhow::anyhow!("Invalid address length")); + } + + let mut array = [0; 32]; + array.copy_from_slice(&addr); + Ok(MockAddress { addr: array }) + } +} + +impl<'a> TryFrom<&'a [u8]> for MockAddress { + type Error = anyhow::Error; + + fn try_from(addr: &'a [u8]) -> Result { + if addr.len() != 32 { + anyhow::bail!("Address must be 32 bytes long"); + } + let mut addr_bytes = [0u8; 32]; + addr_bytes.copy_from_slice(addr); + Ok(Self { addr: addr_bytes }) + } +} + +impl AsRef<[u8]> for MockAddress { + fn as_ref(&self) -> &[u8] { + &self.addr + } +} + +impl From<[u8; 32]> for MockAddress { + fn from(addr: [u8; 32]) -> Self { + MockAddress { addr } + } +} + +impl Display for MockAddress { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "{:?}", self.addr) + } +} + +impl BasicAddress for MockAddress {} +impl RollupAddress for MockAddress {} + +#[derive( + Debug, + Clone, + PartialEq, + borsh::BorshDeserialize, + borsh::BorshSerialize, + serde::Serialize, + serde::Deserialize, +)] + +/// A mock BlobTransaction from a DA layer used for testing. +pub struct MockBlob { + address: MockAddress, + hash: [u8; 32], + data: CountedBufReader, +} + +impl BlobReaderTrait for MockBlob { + type Data = Bytes; + type Address = MockAddress; + + fn sender(&self) -> Self::Address { + self.address + } + + fn data_mut(&mut self) -> &mut CountedBufReader { + &mut self.data + } + + fn data(&self) -> &CountedBufReader { + &self.data + } + + fn hash(&self) -> [u8; 32] { + self.hash + } +} + +impl MockBlob { + /// Creates a new mock blob with the given data, claiming to have been published by the provided address. + pub fn new(data: Vec, address: MockAddress, hash: [u8; 32]) -> Self { + Self { + address, + data: CountedBufReader::new(bytes::Bytes::from(data)), + hash, + } + } +} + +/// A mock hash digest. +#[derive(Clone, Copy, Debug, PartialEq, serde::Serialize, serde::Deserialize)] +pub struct MockHash(pub [u8; 32]); + +impl AsRef<[u8]> for MockHash { + fn as_ref(&self) -> &[u8] { + &self.0 + } +} + +impl BlockHashTrait for MockHash {} + +/// A mock block header used for testing. +#[derive(Serialize, Deserialize, PartialEq, core::fmt::Debug, Clone, Copy)] +pub struct MockBlockHeader { + /// The hash of the previous block. + pub prev_hash: MockHash, +} + +impl BlockHeaderTrait for MockBlockHeader { + type Hash = MockHash; + + fn prev_hash(&self) -> Self::Hash { + self.prev_hash + } + + fn hash(&self) -> Self::Hash { + MockHash(sha2::Sha256::digest(self.prev_hash.0).into()) + } +} + +/// A mock block type used for testing. +#[derive(Serialize, Deserialize, PartialEq, core::fmt::Debug, Clone)] +pub struct MockBlock { + /// The hash of this block. + pub curr_hash: [u8; 32], + /// The header of this block. 
+ pub header: MockBlockHeader, + /// The height of this block + pub height: u64, + /// Validity condition + pub validity_cond: MockValidityCond, + /// Blobs + pub blobs: Vec, +} + +impl Default for MockBlock { + fn default() -> Self { + Self { + curr_hash: [0; 32], + header: MockBlockHeader { + prev_hash: MockHash([0; 32]), + }, + height: 0, + validity_cond: Default::default(), + blobs: Default::default(), + } + } +} + +impl SlotData for MockBlock { + type BlockHeader = MockBlockHeader; + type Cond = MockValidityCond; + + fn hash(&self) -> [u8; 32] { + self.curr_hash + } + + fn header(&self) -> &Self::BlockHeader { + &self.header + } + + fn validity_condition(&self) -> MockValidityCond { + self.validity_cond + } +} + +/// A [`DaSpec`] suitable for testing. +#[derive(serde::Serialize, serde::Deserialize)] +pub struct MockDaSpec; + +impl DaSpec for MockDaSpec { + type SlotHash = MockHash; + type BlockHeader = MockBlockHeader; + type BlobTransaction = MockBlob; + type ValidityCondition = MockValidityCond; + type InclusionMultiProof = [u8; 32]; + type CompletenessProof = (); + type ChainParams = (); +} + +use tokio::sync::mpsc::{self, Receiver, Sender}; +use tokio::sync::Mutex; + +#[derive(Clone)] +/// DaService used in tests. +pub struct MockDaService { + sender: Sender>, + receiver: Arc>>>, + sequencer_da_address: MockAddress, +} + +impl MockDaService { + /// Creates a new MockDaService. + pub fn new(sequencer_da_address: MockAddress) -> Self { + let (sender, receiver) = mpsc::channel(100); + Self { + sender, + receiver: Arc::new(Mutex::new(receiver)), + sequencer_da_address, + } + } +} + +#[async_trait] +impl DaService for MockDaService { + type Spec = MockDaSpec; + type FilteredBlock = MockBlock; + type Error = anyhow::Error; + + async fn get_finalized_at(&self, _height: u64) -> Result { + let data = self.receiver.lock().await.recv().await; + let data = data.unwrap(); + let hash = [0; 32]; + + let blob = MockBlob::new(data, self.sequencer_da_address, hash); + + Ok(MockBlock { + blobs: vec![blob], + ..Default::default() + }) + } + + async fn get_block_at(&self, height: u64) -> Result { + self.get_finalized_at(height).await + } + + fn extract_relevant_txs( + &self, + block: &Self::FilteredBlock, + ) -> Vec<::BlobTransaction> { + block.blobs.clone() + } + + async fn get_extraction_proof( + &self, + _block: &Self::FilteredBlock, + _blobs: &[::BlobTransaction], + ) -> ( + ::InclusionMultiProof, + ::CompletenessProof, + ) { + todo!() + } + + async fn send_transaction(&self, blob: &[u8]) -> Result<(), Self::Error> { + self.sender.send(blob.to_vec()).await.unwrap(); + Ok(()) + } +} diff --git a/rollup-interface/src/state_machine/mocks/mod.rs b/rollup-interface/src/state_machine/mocks/mod.rs new file mode 100644 index 000000000..ea32bcf5a --- /dev/null +++ b/rollup-interface/src/state_machine/mocks/mod.rs @@ -0,0 +1,11 @@ +//! Defines mock instantiations of many important traits, which are useful +//! for testing, fuzzing, and benchmarking. 
+ +mod da; +mod validity_condition; +mod zk_vm; +pub use da::{ + MockAddress, MockBlob, MockBlock, MockBlockHeader, MockDaService, MockDaSpec, MockHash, +}; +pub use validity_condition::{MockValidityCond, MockValidityCondChecker}; +pub use zk_vm::{MockCodeCommitment, MockProof, MockZkvm}; diff --git a/rollup-interface/src/state_machine/mocks/validity_condition.rs b/rollup-interface/src/state_machine/mocks/validity_condition.rs new file mode 100644 index 000000000..250afb532 --- /dev/null +++ b/rollup-interface/src/state_machine/mocks/validity_condition.rs @@ -0,0 +1,59 @@ +use std::marker::PhantomData; + +use anyhow::Error; +use borsh::{BorshDeserialize, BorshSerialize}; +use serde::{Deserialize, Serialize}; +use sha2::Digest; + +use crate::zk::{ValidityCondition, ValidityConditionChecker}; + +/// A trivial test validity condition structure that only contains a boolean +#[derive( + Debug, BorshDeserialize, BorshSerialize, Serialize, Deserialize, PartialEq, Clone, Copy, Default, +)] +pub struct MockValidityCond { + /// The associated validity condition field. If it is true, the validity condition is verified + pub is_valid: bool, +} + +impl ValidityCondition for MockValidityCond { + type Error = Error; + fn combine(&self, rhs: Self) -> Result { + Ok(MockValidityCond { + is_valid: self.is_valid & rhs.is_valid, + }) + } +} + +#[derive(BorshDeserialize, BorshSerialize, Debug)] +/// A mock validity condition checker that always evaluate to cond +pub struct MockValidityCondChecker { + phantom: PhantomData, +} + +impl ValidityConditionChecker for MockValidityCondChecker { + type Error = Error; + + fn check(&mut self, condition: &MockValidityCond) -> Result<(), Self::Error> { + if condition.is_valid { + Ok(()) + } else { + Err(anyhow::format_err!("Invalid mock validity condition")) + } + } +} + +impl MockValidityCondChecker { + /// Creates new test validity condition + pub fn new() -> Self { + Self { + phantom: Default::default(), + } + } +} + +impl Default for MockValidityCondChecker { + fn default() -> Self { + Self::new() + } +} diff --git a/rollup-interface/src/state_machine/mocks/zk_vm.rs b/rollup-interface/src/state_machine/mocks/zk_vm.rs new file mode 100644 index 000000000..e066c0fc1 --- /dev/null +++ b/rollup-interface/src/state_machine/mocks/zk_vm.rs @@ -0,0 +1,95 @@ +use std::io::Write; + +use anyhow::ensure; +use borsh::{BorshDeserialize, BorshSerialize}; +use serde::{Deserialize, Serialize}; + +use crate::zk::{Matches, Zkvm}; + +/// A mock commitment to a particular zkVM program. +#[derive(Debug, Clone, PartialEq, Eq, BorshDeserialize, BorshSerialize, Serialize, Deserialize)] +pub struct MockCodeCommitment(pub [u8; 32]); + +impl Matches for MockCodeCommitment { + fn matches(&self, other: &MockCodeCommitment) -> bool { + self.0 == other.0 + } +} + +/// A mock proof generated by a zkVM. +#[derive(Debug, Clone, PartialEq, Eq, BorshDeserialize, BorshSerialize, Serialize, Deserialize)] +pub struct MockProof<'a> { + /// The ID of the program this proof might be valid for. + pub program_id: MockCodeCommitment, + /// Whether the proof is valid. + pub is_valid: bool, + /// The tamper-proof outputs of the proof. + pub log: &'a [u8], +} + +impl<'a> MockProof<'a> { + /// Serializes a proof into a writer. + pub fn encode(&self, mut writer: impl Write) { + writer.write_all(&self.program_id.0).unwrap(); + let is_valid_byte = if self.is_valid { 1 } else { 0 }; + writer.write_all(&[is_valid_byte]).unwrap(); + writer.write_all(self.log).unwrap(); + } + + /// Serializes a proof into a vector. 
+ pub fn encode_to_vec(&self) -> Vec { + let mut encoded = Vec::new(); + self.encode(&mut encoded); + encoded + } + + /// Tries to deserialize a proof from a byte slice. + pub fn decode(input: &'a [u8]) -> Result { + ensure!(input.len() >= 33, "Input is too short"); + let program_id = MockCodeCommitment(input[0..32].try_into().unwrap()); + let is_valid = input[32] == 1; + let log = &input[33..]; + Ok(Self { + program_id, + is_valid, + log, + }) + } +} + +/// A mock implementing the zkVM trait. +pub struct MockZkvm; + +impl Zkvm for MockZkvm { + type CodeCommitment = MockCodeCommitment; + + type Error = anyhow::Error; + + fn verify<'a>( + serialized_proof: &'a [u8], + code_commitment: &Self::CodeCommitment, + ) -> Result<&'a [u8], Self::Error> { + let proof = MockProof::decode(serialized_proof)?; + anyhow::ensure!( + proof.program_id.matches(code_commitment), + "Proof failed to verify against requested code commitment" + ); + anyhow::ensure!(proof.is_valid, "Proof is not valid"); + Ok(proof.log) + } +} + +#[test] +fn test_mock_proof_roundtrip() { + let proof = MockProof { + program_id: MockCodeCommitment([1; 32]), + is_valid: true, + log: &[2; 50], + }; + + let mut encoded = Vec::new(); + proof.encode(&mut encoded); + + let decoded = MockProof::decode(&encoded).unwrap(); + assert_eq!(proof, decoded); +} diff --git a/rollup-interface/src/state_machine/mod.rs b/rollup-interface/src/state_machine/mod.rs index 52196cd6f..e5242e2e1 100644 --- a/rollup-interface/src/state_machine/mod.rs +++ b/rollup-interface/src/state_machine/mod.rs @@ -12,21 +12,26 @@ use serde::Serialize; #[cfg(feature = "mocks")] pub mod mocks; -/// A marker trait for addresses. -pub trait AddressTrait: - PartialEq +pub mod optimistic; + +/// A marker trait for general addresses. +pub trait BasicAddress: + Eq + + PartialEq + core::fmt::Debug + + core::fmt::Display + + Send + + Sync + Clone + + std::hash::Hash + AsRef<[u8]> + for<'a> TryFrom<&'a [u8], Error = anyhow::Error> - + Eq + + std::str::FromStr + Serialize + DeserializeOwned - + From<[u8; 32]> - + Send - + Sync - + core::fmt::Display - + std::hash::Hash + 'static { } + +/// An address used inside rollup +pub trait RollupAddress: BasicAddress + From<[u8; 32]> {} diff --git a/rollup-interface/src/state_machine/optimistic.rs b/rollup-interface/src/state_machine/optimistic.rs new file mode 100644 index 000000000..84668f7df --- /dev/null +++ b/rollup-interface/src/state_machine/optimistic.rs @@ -0,0 +1,46 @@ +//! Utilities for building an optimistic state machine +use borsh::{BorshDeserialize, BorshSerialize}; +use serde::{Deserialize, Serialize}; + +use crate::zk::StateTransition; + +/// A proof that the attester was bonded at the transition num `transition_num`. 
+/// For rollups using the `jmt`, this will be a `jmt::SparseMerkleProof` +#[derive( + Debug, Clone, PartialEq, Eq, BorshSerialize, BorshDeserialize, Serialize, Deserialize, Default, +)] +pub struct ProofOfBond { + /// The transition number for which the proof of bond applies + pub claimed_transition_num: u64, + /// The actual state proof that the attester was bonded + pub proof: StateProof, +} + +/// An attestation that a particular DA layer block transitioned the rollup state to some value +#[derive( + Debug, Clone, PartialEq, Eq, BorshSerialize, BorshDeserialize, Serialize, Deserialize, Default, +)] +pub struct Attestation { + /// The alleged state root before applying the contents of the da block + pub initial_state_root: [u8; 32], + /// The hash of the block in which the transition occurred + pub da_block_hash: [u8; 32], + /// The alleged post-state root + pub post_state_root: [u8; 32], + /// A proof that the attester was bonded at some point in time before the attestation is generated + pub proof_of_bond: ProofOfBond, +} + +/// The contents of a challenge to an attestation, which are contained as a public output of the proof +/// Generic over an address type and a validity condition +#[derive(Debug, Clone, PartialEq, Eq, BorshSerialize, BorshDeserialize, Serialize, Deserialize)] +pub struct ChallengeContents { + /// The rollup address of the originator of this challenge + pub challenger_address: Address, + /// The state transition that was proven + pub state_transition: StateTransition, +} + +#[derive(Debug, Clone, PartialEq, Eq, BorshSerialize, Serialize, Deserialize)] +/// This struct contains the challenge as a raw blob +pub struct Challenge<'a>(&'a [u8]); diff --git a/rollup-interface/src/state_machine/stf.rs b/rollup-interface/src/state_machine/stf.rs index 744b456f3..06fcc747b 100644 --- a/rollup-interface/src/state_machine/stf.rs +++ b/rollup-interface/src/state_machine/stf.rs @@ -8,7 +8,8 @@ use serde::de::DeserializeOwned; use serde::{Deserialize, Serialize}; use crate::da::BlobReaderTrait; -use crate::zk::Zkvm; +use crate::services::da::SlotData; +use crate::zk::{ValidityCondition, Zkvm}; #[cfg(any(test, feature = "fuzzing"))] pub mod fuzzing; @@ -42,6 +43,7 @@ mod sealed { /// and may be queried via RPC. Receipts are generic over a type `R` which the rollup can use to /// store additional data, such as the status code of the transaction or the amout of gas used.s #[derive(Debug, Clone, Serialize, Deserialize)] +/// A receipt showing the result of a transaction pub struct TransactionReceipt { /// The canonical hash of this transaction pub tx_hash: [u8; 32], @@ -60,6 +62,7 @@ pub struct TransactionReceipt { /// can use to store arbitrary typed data, like the gas used by the batch. They are also generic over a type `TxReceiptContents`, /// since they contain a vectors of [`TransactionReceipt`]s. #[derive(Debug, Clone, Serialize, Deserialize)] +/// A receipt giving the outcome of a batch of transactions pub struct BatchReceipt { /// The canonical hash of this batch pub batch_hash: [u8; 32], @@ -99,6 +102,7 @@ pub trait StateTransitionFunction { /// The contents of a transaction receipt. This is the data that is persisted in the database type TxReceiptContents: Serialize + DeserializeOwned + Clone; + /// The contents of a batch receipt. 
This is the data that is persisted in the database type BatchReceiptContents: Serialize + DeserializeOwned + Clone; @@ -106,8 +110,12 @@ pub trait StateTransitionFunction { /// or validated together with proof during verification type Witness: Default + Serialize; - /// Perform one-time initialization for the genesis block. - fn init_chain(&mut self, params: Self::InitialState); + /// The validity condition that must be verified outside of the Vm + type Condition: ValidityCondition; + + /// Perform one-time initialization for the genesis block and returns the resulting root hash wrapped in a result. + /// If the init chain fails we panic. + fn init_chain(&mut self, params: Self::InitialState) -> Self::StateRoot; /// Called at each **DA-layer block** - whether or not that block contains any /// data relevant to the rollup. @@ -116,13 +124,16 @@ pub trait StateTransitionFunction { /// /// Applies batches of transactions to the rollup, /// slashing the sequencer who proposed the blob on failure. + /// The blobs are contained into a slot whose data is contained within the `slot_data` parameter, + /// this parameter is mainly used within the begin_slot hook. /// The concrete blob type is defined by the DA layer implementation, /// which is why we use a generic here instead of an associated type. /// /// Commits state changes to the database - fn apply_slot<'a, I>( + fn apply_slot<'a, I, Data>( &mut self, witness: Self::Witness, + slot_data: &Data, blobs: I, ) -> SlotResult< Self::StateRoot, @@ -131,7 +142,12 @@ pub trait StateTransitionFunction { Self::Witness, > where - I: IntoIterator; + I: IntoIterator, + Data: SlotData; + + /// Gets the state root from the associated state. If not available (because the chain has not been initialized yet), + /// return None. + fn get_current_state_root(&self) -> anyhow::Result; } /// A key-value pair representing a change to the rollup state diff --git a/rollup-interface/src/state_machine/zk/mod.rs b/rollup-interface/src/state_machine/zk/mod.rs index 04a7a9c5d..5241c08d4 100644 --- a/rollup-interface/src/state_machine/zk/mod.rs +++ b/rollup-interface/src/state_machine/zk/mod.rs @@ -8,10 +8,13 @@ //! maintained by the Sovereign Labs team. use core::fmt::Debug; +use borsh::{BorshDeserialize, BorshSerialize}; use digest::Digest; use serde::de::DeserializeOwned; use serde::{Deserialize, Serialize}; +use crate::RollupAddress; + /// A trait implemented by the prover ("host") of a zkVM program. pub trait ZkvmHost: Zkvm { /// Give the guest a piece of advice non-deterministically @@ -29,7 +32,7 @@ pub trait Zkvm { + DeserializeOwned; /// The error type which is returned when a proof fails to verify - type Error: Debug; + type Error: Debug + From; /// Interpret a sequence of a bytes as a proof and attempt to verify it against the code commitment. /// If the proof is valid, return a reference to the public outputs of the proof. @@ -37,6 +40,43 @@ pub trait Zkvm { serialized_proof: &'a [u8], code_commitment: &Self::CodeCommitment, ) -> Result<&'a [u8], Self::Error>; + + /// Same as [`verify`], except that instead of returning the output as a serialized array, + /// it returns a state transition structure. 
+ /// TODO: specify a deserializer for the output + fn verify_and_extract_output< + C: ValidityCondition, + Add: RollupAddress + BorshDeserialize + BorshSerialize, + >( + serialized_proof: &[u8], + code_commitment: &Self::CodeCommitment, + ) -> Result, Self::Error> { + let mut output = Self::verify(serialized_proof, code_commitment)?; + Ok(BorshDeserialize::deserialize_reader(&mut output)?) + } +} + +/// A wrapper around a code commitment which implements borsh serialization +#[derive(Clone, Debug)] +pub struct StoredCodeCommitment { + /// The inner field of the wrapper that contains the code commitment. + pub commitment: Vm::CodeCommitment, +} + +impl BorshSerialize for StoredCodeCommitment { + fn serialize(&self, writer: &mut W) -> std::io::Result<()> { + bincode::serialize_into(writer, &self.commitment) + .expect("Serialization to vec is infallible"); + Ok(()) + } +} + +impl BorshDeserialize for StoredCodeCommitment { + fn deserialize_reader(reader: &mut R) -> std::io::Result { + let commitment: Vm::CodeCommitment = bincode::deserialize_from(reader) + .map_err(|e| std::io::Error::new(std::io::ErrorKind::Other, e))?; + Ok(Self { commitment }) + } } /// A trait which is accessible from within a zkVM program. @@ -48,7 +88,18 @@ pub trait ZkvmGuest: Zkvm { } /// This trait is implemented on the struct/enum which expresses the validity condition -pub trait ValidityCondition: Serialize + DeserializeOwned { +pub trait ValidityCondition: + Serialize + + DeserializeOwned + + BorshDeserialize + + BorshSerialize + + Debug + + Clone + + Copy + + PartialEq + + Send + + Sync +{ /// The error type returned when two [`ValidityCondition`]s cannot be combined. type Error: Into; /// Combine two conditions into one (typically run inside a recursive proof). @@ -61,12 +112,18 @@ pub trait ValidityCondition: Serialize + DeserializeOwned { /// if and only if the condition `validity_condition` is satisfied. /// /// The period of time covered by a state transition proof may be a single slot, or a range of slots on the DA layer. -#[derive(Clone, Debug, Serialize, Deserialize)] -pub struct StateTransition { +#[derive(Clone, Debug, Serialize, Deserialize, BorshSerialize, BorshDeserialize, PartialEq, Eq)] +pub struct StateTransition { /// The state of the rollup before the transition pub initial_state_root: [u8; 32], /// The state of the rollup after the transition pub final_state_root: [u8; 32], + /// The slot hash of the state transition + pub slot_hash: [u8; 32], + + /// Rewarded address: the account that has produced the transition proof. + pub rewarded_address: Address, + /// An additional validity condition for the state transition which needs /// to be checked outside of the zkVM circuit. This typically corresponds to /// some claim about the DA layer history, such as (X) is a valid block on the DA layer @@ -74,7 +131,9 @@ pub struct StateTransition { } /// This trait expresses that a type can check a validity condition. -pub trait ValidityConditionChecker { +pub trait ValidityConditionChecker: + BorshDeserialize + BorshSerialize + Debug +{ /// The error type returned when a [`ValidityCondition`] is invalid. type Error: Into; /// Check a validity condition diff --git a/utils/README.md b/utils/README.md new file mode 100644 index 000000000..2ff10d9a2 --- /dev/null +++ b/utils/README.md @@ -0,0 +1,4 @@ +## Utils +This folder contains a general set of utils that are used as helpers for the SDK. 
+* Non-SDK macros (eg: macro for wrapping a function to track the number of risc0 cycles) +* Binary to perform stack analysis on the risc0 ELF and count the number of instruction cyles \ No newline at end of file diff --git a/utils/zk-cycle-macros/Cargo.toml b/utils/zk-cycle-macros/Cargo.toml new file mode 100644 index 000000000..6539e9e7a --- /dev/null +++ b/utils/zk-cycle-macros/Cargo.toml @@ -0,0 +1,37 @@ +[package] +name = "zk-cycle-macros" +description = "cycle counting utils" +authors = { workspace = true } +edition = { workspace = true } +homepage = { workspace = true } +license = { workspace = true } +repository = { workspace = true } +rust-version = { workspace = true } +version = { workspace = true } +readme = "README.md" +resolver = "2" +autotests = false + +[lib] +proc-macro = true + +[[test]] +name = "tests" +path = "tests/all_tests.rs" + +[dependencies] +anyhow = { workspace = true } +syn = { version = "1.0", features = ["full"] } +quote = "1.0" +proc-macro2 = "1.0" +borsh = { workspace = true } + +[dev-dependencies] +trybuild = "1.0" +zk-cycle-macros = {path = "../../utils/zk-cycle-macros"} +risc0-zkvm = { version = "0.16", default-features = false, features = ["std"]} +risc0-zkvm-platform = { version = "0.16"} +zk-cycle-utils = {path = "../../utils/zk-cycle-utils"} + +[features] +bench=[] \ No newline at end of file diff --git a/utils/zk-cycle-macros/README.md b/utils/zk-cycle-macros/README.md new file mode 100644 index 000000000..b2e4323f4 --- /dev/null +++ b/utils/zk-cycle-macros/README.md @@ -0,0 +1,39 @@ +## Zk-cycle-macros +* Contains the `cycle-tracker` macro which can be used to annotate functions that run inside the risc0 vm +* In order to use the macro, the following changes need to be made +* Cargo.toml +```toml +[dependencies] +zk-cycle-macros = {path = "../../utils/zk-cycle-macros", optional=true} +risc0-zkvm = { version = "0.16", default-features = false, features = ["std"], optional=true} +risc0-zkvm-platform = { version = "0.16", optional=true} +zk-cycle-utils = {path = "../../utils/zk-cycle-utils", optional=true} + +[features] +bench = ["zk-cycle-macros/bench","zk-cycle-utils", "risc0-zkvm","risc0-zkvm-platform"] +``` +* The feature gating is needed because we don't want the cycle tracker scaffolding to be used unless the `bench` feature is enabled +* If the `bench` feature is not enabled, the risc0 host will not be built with the necessary syscalls to support tracking cycles +* The additional imports are necessary because the macro wraps the user function with the necessary code for tracking the number of cycles before and after the function execution +* The rust code that needs to use the `cycle_tracker` macro needs to import it and then annotate the function with it +```rust,ignore +// +#[cfg(all(target_os = "zkvm", feature = "bench"))] +use zk_cycle_macros::cycle_tracker; +// +// +#[cfg_attr(all(target_os = "zkvm", feature = "bench"), cycle_tracker)] +fn begin_slot( + &mut self, + slot_data: &impl SlotData, + witness: >::Witness, +) { + let state_checkpoint = StateCheckpoint::with_witness(self.current_storage.clone(), witness); + + let mut working_set = state_checkpoint.to_revertable(); + + self.runtime.begin_slot_hook(slot_data, &mut working_set); + + self.checkpoint = Some(working_set.checkpoint()); +} +``` \ No newline at end of file diff --git a/utils/zk-cycle-macros/src/lib.rs b/utils/zk-cycle-macros/src/lib.rs new file mode 100644 index 000000000..3b9fa72f0 --- /dev/null +++ b/utils/zk-cycle-macros/src/lib.rs @@ -0,0 +1,68 @@ 
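For orientation, the wrapper emitted by `#[cycle_tracker]` (implemented below) behaves roughly like the following hand-written equivalent. The function name, body, and the `<u8, u8>` type parameters on `send_recv_slice` are assumptions for illustration:

```rust,ignore
// Rough hand-written equivalent of what `#[cycle_tracker]` generates for a
// guest function, assuming the `bench` feature and the risc0 guest target.
fn work(x: u64) -> u64 {
    let before = risc0_zkvm::guest::env::get_cycle_count();
    // The original body runs inside a closure so early returns still work.
    let result = (|| x * 2)();
    let after = risc0_zkvm::guest::env::get_cycle_count();

    // Encode ("work", cycle delta) as name bytes, a 0 separator, and a u64.
    let mut serialized = Vec::new();
    serialized.extend("work".as_bytes());
    serialized.push(0);
    serialized.extend(&((after - before) as u64).to_ne_bytes());

    // Hand the measurement to the host via the `cycle_metrics` syscall
    // (type parameters on `send_recv_slice` assumed to be `<u8, u8>`).
    let syscall = zk_cycle_utils::get_syscall_name();
    risc0_zkvm::guest::env::send_recv_slice::<u8, u8>(syscall, &serialized);
    result
}
```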
+#![deny(missing_docs)]
+#![doc = include_str!("../README.md")]
+extern crate proc_macro;
+
+use proc_macro::TokenStream;
+use quote::quote;
+use syn::{parse_macro_input, ItemFn};
+
+/// This macro is used to annotate functions for which we want to track the number of RISC-V cycles
+/// generated inside the VM. The purpose of this macro is to measure how many cycles a Rust
+/// function takes, because prover time is directly proportional to the number of RISC-V cycles
+/// generated. It does this by making use of a function provided by risc0:
+/// ```rust,ignore
+/// risc0_zkvm::guest::env::get_cycle_count
+/// ```
+/// The macro generates a new function with the same name by wrapping the body with a `get_cycle_count`
+/// call at the beginning and end of the function, subtracting the two counts, and then emitting the
+/// difference using a custom syscall that is generated when the prover is run with the `bench` feature.
+/// `send_recv_slice` is used to communicate and pass a slice to the syscall that we defined.
+/// The handler for the syscall can be seen in adapters/risc0/src/host.rs and adapters/risc0/src/metrics.rs.
+#[proc_macro_attribute]
+pub fn cycle_tracker(_attr: TokenStream, item: TokenStream) -> TokenStream {
+    let input = parse_macro_input!(item as ItemFn);
+
+    match wrap_function(input) {
+        Ok(ok) => ok,
+        Err(err) => err.to_compile_error().into(),
+    }
+}
+
+fn wrap_function(input: ItemFn) -> Result<TokenStream, syn::Error> {
+    let visibility = &input.vis;
+    let name = &input.sig.ident;
+    let inputs = &input.sig.inputs;
+    let output = &input.sig.output;
+    let block = &input.block;
+    let generics = &input.sig.generics;
+    let where_clause = &input.sig.generics.where_clause;
+    let risc0_zkvm = syn::Ident::new("risc0_zkvm", proc_macro2::Span::call_site());
+    let risc0_zkvm_platform =
+        syn::Ident::new("risc0_zkvm_platform", proc_macro2::Span::call_site());
+
+    let result = quote! {
+        #visibility fn #name #generics (#inputs) #output #where_clause {
+            let before = #risc0_zkvm::guest::env::get_cycle_count();
+            let result = (|| #block)();
+            let after = #risc0_zkvm::guest::env::get_cycle_count();
+
+            // simple serialization to avoid pulling in bincode or other libs
+            let tuple = (stringify!(#name).to_string(), (after - before) as u64);
+            let mut serialized = Vec::new();
+            serialized.extend(tuple.0.as_bytes());
+            serialized.push(0);
+            let size_bytes = tuple.1.to_ne_bytes();
+            serialized.extend(&size_bytes);
+
+            // calculate the syscall name.
+ let cycle_string = String::from("cycle_metrics\0"); + let metrics_syscall_name = unsafe { + #risc0_zkvm_platform::syscall::SyscallName::from_bytes_with_nul(cycle_string.as_ptr()) + }; + + #risc0_zkvm::guest::env::send_recv_slice::(metrics_syscall_name, &serialized); + result + } + }; + Ok(result.into()) +} diff --git a/utils/zk-cycle-macros/tests/all_tests.rs b/utils/zk-cycle-macros/tests/all_tests.rs new file mode 100644 index 000000000..b46f71d6c --- /dev/null +++ b/utils/zk-cycle-macros/tests/all_tests.rs @@ -0,0 +1,5 @@ +#[test] +fn cycle_macro_tests() { + let t = trybuild::TestCases::new(); + t.pass("tests/cycle_macro.rs"); +} diff --git a/utils/zk-cycle-macros/tests/cycle_macro.rs b/utils/zk-cycle-macros/tests/cycle_macro.rs new file mode 100644 index 000000000..5248fbe09 --- /dev/null +++ b/utils/zk-cycle-macros/tests/cycle_macro.rs @@ -0,0 +1,74 @@ +use zk_cycle_macros::cycle_tracker; +use std::marker::PhantomData; + +struct TestStruct; + +impl TestStruct { + #[cycle_tracker] + fn _struct_method() {} + + #[cycle_tracker] + fn _struct_method_with_self(&self) {} +} + +struct TestStructGeneric { + _phantom_t: PhantomData, + _phantom_u: PhantomData, +} + +impl TestStructGeneric { + #[cycle_tracker] + fn _generic_method(&self, _t: T, _u: U) {} +} + +#[cycle_tracker] +fn _generic_function(_t: T, _u: U) {} + +#[cycle_tracker] +fn _lifetime_function<'a>(s: &'a str) -> &'a str { + s +} + +struct TestStructLifetime<'a> { + s: &'a str, +} + +impl<'a> TestStructLifetime<'a> { + #[cycle_tracker] + fn _lifetime_method(&self) -> &'a str { + self.s + } +} + +struct TestStructAssociated; + +impl TestStructAssociated { + #[cycle_tracker] + fn _associated_function_no_self(_value: T) {} +} + +#[cycle_tracker] +fn _type_param_clause_function(_t: T) {} + +#[cycle_tracker] +fn _where_clause_function(value: T) + where T: Clone + std::fmt::Debug { + println!("{:?}", value.clone()); +} + + +#[cycle_tracker] +fn _function_without_params() { +} + +#[cycle_tracker] +fn _function_with_params(_a: u32, _b: usize) { +} + +#[cycle_tracker] +pub fn _function_with_access_specifier(_a: u32, _b: usize) { +} + +fn main() { + +} \ No newline at end of file diff --git a/utils/zk-cycle-utils/Cargo.toml b/utils/zk-cycle-utils/Cargo.toml new file mode 100644 index 000000000..17cd6f00b --- /dev/null +++ b/utils/zk-cycle-utils/Cargo.toml @@ -0,0 +1,16 @@ +[package] +name = "zk-cycle-utils" +authors = { workspace = true } +edition = { workspace = true } +homepage = { workspace = true } +license = { workspace = true } +repository = { workspace = true } +rust-version = { workspace = true } +version = { workspace = true } +readme = "README.md" +resolver = "2" +autotests = false + +[dependencies] +risc0-zkvm = { version = "0.16", default-features = false, features = ['std'] } +risc0-zkvm-platform = { version = "0.16" } diff --git a/utils/zk-cycle-utils/README.md b/utils/zk-cycle-utils/README.md new file mode 100644 index 000000000..8a426efdc --- /dev/null +++ b/utils/zk-cycle-utils/README.md @@ -0,0 +1,64 @@ +## Tracer +* The tracer binary makes use of goblin, capstone and rustc-demangle to disassemble the riscV ELF and build a symbol table +* The execution trace for the rollup ELF is generated by running the `demo-prover` with the `ROLLUP_TRACE` environment variable set to the desired path +```bash +cd examples/demo-prover/host +ROLLUP_TRACE=rollup.trace cargo bench --bench prover_bench --features bench +``` +* `rollup.trace` now contains the program counter values for the run of the demo-prover (how many ever blocks, blobs and 
transactions we configured it for) +* The corresponding ELF used for the trace is found at +```bash +$ file target/riscv-guest/riscv32im-risc0-zkvm-elf/release/rollup +target/riscv-guest/riscv32im-risc0-zkvm-elf/release/rollup: ELF 32-bit LSB executable, UCB RISC-V, soft-float ABI, version 1 (SYSV), statically linked, with debug_info, not stripped +``` +* Both the above files are passed as input to the tracer +```bash +% cargo run -- --rollup-elf ../../../examples/demo-prover/target/riscv-guest/riscv32im-risc0-zkvm-elf/release/rollup --rollup-trace ../../../examples/demo-prover/host/rollup.trace + Finished dev [unoptimized + debuginfo] target(s) in 0.11s + Running `target/debug/tracer --rollup-elf ../../../examples/demo-prover/target/riscv-guest/riscv32im-risc0-zkvm-elf/release/rollup --rollup-trace ../../../examples/demo-prover/rollup.trace` + [00:01:04] [########################################] 16154861/16154861 (0s) + +Total instructions in trace: 16154861 + + + Instruction counts considering call graph ++--------------------------------------------------------------------------------------------+-------------------+ +| Function Name | Instruction Count | ++============================================================================================+===================+ +| __start | 16154854 | ++--------------------------------------------------------------------------------------------+-------------------+ +| main | 16153565 | ++--------------------------------------------------------------------------------------------+-------------------+ +| rollup::main::h089e03181dac539a | 16153559 | ++--------------------------------------------------------------------------------------------+-------------------+ +| sov_modules_stf_template:: for | | +| sov_modules_stf_template::app_template::AppTemplate>::apply_slot::h820293d | | +| c8d744201 | | ++--------------------------------------------------------------------------------------------+-------------------+ +| ed25519_dalek::public::PublicKey::verify_strict::hfad17f99efbd2cd0 | 13528566 | ++--------------------------------------------------------------------------------------------+-------------------+ +| <&curve25519_dalek::backend::serial::u64::field::FieldElement51 as | 8295218 | +| core::ops::arith::Mul<&curve25519_dalek::backend::serial::u64::field::FieldElement51>>::mu | | +| l::h39351372a97422dd | | +. +. +. ++--------------------------------------------------------------------------------------------+-------------------+ +``` +* The tracer also generates a pure count of instructions without considering the stack +* There are other options that can be passed which are self documented in the cli +```bash +Usage: tracer [OPTIONS] --rollup-elf --rollup-trace + +Options: + -t, --top Include the "top" number of functions [default: 30] + --no-stack-counts Don't print stack aware instruction counts + --no-raw-counts Don't print raw (stack un-aware) instruction counts + --rollup-elf Path to the riscv32 elf + --rollup-trace Path to the rollup trace. 
File must be one u64 program counter per line + -s, --strip-hashes Strip the hashes from the function name while printing + -h, --help Print help + -V, --version Print version + +``` \ No newline at end of file diff --git a/utils/zk-cycle-utils/src/lib.rs b/utils/zk-cycle-utils/src/lib.rs new file mode 100644 index 000000000..4837645c9 --- /dev/null +++ b/utils/zk-cycle-utils/src/lib.rs @@ -0,0 +1,30 @@ +use risc0_zkvm_platform::syscall::SyscallName; + +pub fn get_syscall_name() -> SyscallName { + let cycle_string = "cycle_metrics\0"; + let bytes = cycle_string.as_bytes(); + unsafe { SyscallName::from_bytes_with_nul(bytes.as_ptr()) } +} + +pub fn cycle_count_callback(input: &[u8]) -> Vec { + if input.len() == std::mem::size_of::() { + let mut array = [0u8; std::mem::size_of::()]; + array.copy_from_slice(input); + println!("== syscall ==> {}", usize::from_le_bytes(array)); + } else { + println!("NONE"); + } + vec![] +} + +pub fn get_syscall_name_cycles() -> SyscallName { + let cycle_string = "cycle_count\0"; + let bytes = cycle_string.as_bytes(); + unsafe { SyscallName::from_bytes_with_nul(bytes.as_ptr()) } +} + +pub fn print_cycle_count() { + let metrics_syscall_name = get_syscall_name_cycles(); + let serialized = (risc0_zkvm::guest::env::get_cycle_count() as u64).to_le_bytes(); + risc0_zkvm::guest::env::send_recv_slice::(metrics_syscall_name, &serialized); +} diff --git a/utils/zk-cycle-utils/tracer/Cargo.lock b/utils/zk-cycle-utils/tracer/Cargo.lock new file mode 100644 index 000000000..0969d930a --- /dev/null +++ b/utils/zk-cycle-utils/tracer/Cargo.lock @@ -0,0 +1,782 @@ +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. +version = 3 + +[[package]] +name = "aho-corasick" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "86b8f9420f797f2d9e935edf629310eb938a0d839f984e25327f3c7eed22300c" +dependencies = [ + "memchr", +] + +[[package]] +name = "anstream" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0ca84f3628370c59db74ee214b3263d58f9aadd9b4fe7e711fd87dc452b7f163" +dependencies = [ + "anstyle", + "anstyle-parse", + "anstyle-query", + "anstyle-wincon", + "colorchoice", + "is-terminal", + "utf8parse", +] + +[[package]] +name = "anstyle" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3a30da5c5f2d5e72842e00bcb57657162cdabef0931f40e2deb9b4140440cecd" + +[[package]] +name = "anstyle-parse" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "938874ff5980b03a87c5524b3ae5b59cf99b1d6bc836848df7bc5ada9643c333" +dependencies = [ + "utf8parse", +] + +[[package]] +name = "anstyle-query" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5ca11d4be1bab0c8bc8734a9aa7bf4ee8316d462a08c6ac5052f888fef5b494b" +dependencies = [ + "windows-sys 0.48.0", +] + +[[package]] +name = "anstyle-wincon" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c677ab05e09154296dd37acecd46420c17b9713e8366facafa8fc0885167cf4c" +dependencies = [ + "anstyle", + "windows-sys 0.48.0", +] + +[[package]] +name = "bitflags" +version = "1.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" + +[[package]] +name = "bitflags" +version = "2.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum 
= "630be753d4e58660abd17930c71b647fe46c27ea6b63cc59e1e3851406972e42" + +[[package]] +name = "capstone" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1097e608594dad3bad608295567f757742b883606fe150faf7a9740b849730d8" +dependencies = [ + "capstone-sys", + "libc", +] + +[[package]] +name = "capstone-sys" +version = "0.15.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2e7f651d5ec4c2a2e6c508f2c8032655003cd728ec85663e9796616990e25b5a" +dependencies = [ + "cc", + "libc", +] + +[[package]] +name = "cc" +version = "1.0.82" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "305fe645edc1442a0fa8b6726ba61d422798d37a52e12eaecf4b022ebbb88f01" +dependencies = [ + "libc", +] + +[[package]] +name = "cfg-if" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" + +[[package]] +name = "clap" +version = "4.3.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c27cdf28c0f604ba3f512b0c9a409f8de8513e4816705deb0498b627e7c3a3fd" +dependencies = [ + "clap_builder", + "clap_derive", + "once_cell", +] + +[[package]] +name = "clap_builder" +version = "4.3.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "08a9f1ab5e9f01a9b81f202e8562eb9a10de70abf9eaeac1be465c28b75aa4aa" +dependencies = [ + "anstream", + "anstyle", + "clap_lex", + "strsim", +] + +[[package]] +name = "clap_derive" +version = "4.3.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "54a9bb5758fc5dfe728d1019941681eccaf0cf8a4189b692a0ee2f2ecf90a050" +dependencies = [ + "heck", + "proc-macro2", + "quote", + "syn 2.0.28", +] + +[[package]] +name = "clap_lex" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2da6da31387c7e4ef160ffab6d5e7f00c42626fe39aea70a7b0f1773f7dd6c1b" + +[[package]] +name = "colorchoice" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "acbf1af155f9b9ef647e42cdc158db4b64a1b61f743629225fde6f3e0be2a7c7" + +[[package]] +name = "console" +version = "0.15.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c926e00cc70edefdc64d3a5ff31cc65bb97a3460097762bd23afb4d8145fccf8" +dependencies = [ + "encode_unicode 0.3.6", + "lazy_static", + "libc", + "unicode-width", + "windows-sys 0.45.0", +] + +[[package]] +name = "csv" +version = "1.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "626ae34994d3d8d668f4269922248239db4ae42d538b14c398b74a52208e8086" +dependencies = [ + "csv-core", + "itoa", + "ryu", + "serde", +] + +[[package]] +name = "csv-core" +version = "0.1.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2b2466559f260f48ad25fe6317b3c8dac77b5bdb5763ac7d9d6103530663bc90" +dependencies = [ + "memchr", +] + +[[package]] +name = "dirs-next" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b98cf8ebf19c3d1b223e151f99a4f9f0690dca41414773390fc824184ac833e1" +dependencies = [ + "cfg-if", + "dirs-sys-next", +] + +[[package]] +name = "dirs-sys-next" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4ebda144c4fe02d1f7ea1a7d9641b6fc6b580adcfa024ae48797ecdeb6825b4d" +dependencies = [ + "libc", + "redox_users", + "winapi", +] + +[[package]] +name = 
"encode_unicode" +version = "0.3.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a357d28ed41a50f9c765dbfe56cbc04a64e53e5fc58ba79fbc34c10ef3df831f" + +[[package]] +name = "encode_unicode" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "34aa73646ffb006b8f5147f3dc182bd4bcb190227ce861fc4a4844bf8e3cb2c0" + +[[package]] +name = "errno" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6b30f669a7961ef1631673d2766cc92f52d64f7ef354d4fe0ddfd30ed52f0f4f" +dependencies = [ + "errno-dragonfly", + "libc", + "windows-sys 0.48.0", +] + +[[package]] +name = "errno-dragonfly" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "aa68f1b12764fab894d2755d2518754e71b4fd80ecfb822714a1206c2aab39bf" +dependencies = [ + "cc", + "libc", +] + +[[package]] +name = "getrandom" +version = "0.2.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "be4136b2a15dd319360be1c07d9933517ccf0be8f16bf62a3bee4f0d618df427" +dependencies = [ + "cfg-if", + "libc", + "wasi", +] + +[[package]] +name = "goblin" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d20fd25aa456527ce4f544271ae4fea65d2eda4a6561ea56f39fb3ee4f7e3884" +dependencies = [ + "log", + "plain", + "scroll", +] + +[[package]] +name = "heck" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8" + +[[package]] +name = "hermit-abi" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "443144c8cdadd93ebf52ddb4056d257f5b52c04d3c804e657d19eb73fc33668b" + +[[package]] +name = "indicatif" +version = "0.17.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b297dc40733f23a0e52728a58fa9489a5b7638a324932de16b41adc3ef80730" +dependencies = [ + "console", + "instant", + "number_prefix", + "portable-atomic", + "unicode-width", +] + +[[package]] +name = "instant" +version = "0.1.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a5bbe824c507c5da5956355e86a746d82e0e1464f65d862cc5e71da70e94b2c" +dependencies = [ + "cfg-if", +] + +[[package]] +name = "is-terminal" +version = "0.4.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cb0889898416213fab133e1d33a0e5858a48177452750691bde3666d0fdbaf8b" +dependencies = [ + "hermit-abi", + "rustix", + "windows-sys 0.48.0", +] + +[[package]] +name = "itoa" +version = "1.0.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "af150ab688ff2122fcef229be89cb50dd66af9e01a4ff320cc137eecc9bacc38" + +[[package]] +name = "lazy_static" +version = "1.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" + +[[package]] +name = "libc" +version = "0.2.147" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b4668fb0ea861c1df094127ac5f1da3409a82116a4ba74fca2e58ef927159bb3" + +[[package]] +name = "linux-raw-sys" +version = "0.4.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "57bcfdad1b858c2db7c38303a6d2ad4dfaf5eb53dfeb0910128b2c26d6158503" + +[[package]] +name = "log" +version = "0.4.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"b06a4cde4c0f271a446782e3eff8de789548ce57dbc8eca9292c27f4a42004b4" + +[[package]] +name = "memchr" +version = "2.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2dffe52ecf27772e601905b7522cb4ef790d2cc203488bbd0e2fe85fcb74566d" + +[[package]] +name = "number_prefix" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "830b246a0e5f20af87141b25c173cd1b609bd7779a4617d6ec582abaf90870f3" + +[[package]] +name = "once_cell" +version = "1.18.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dd8b5dd2ae5ed71462c540258bedcb51965123ad7e7ccf4b9a8cafaa4a63576d" + +[[package]] +name = "plain" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b4596b6d070b27117e987119b4dac604f3c58cfb0b191112e24771b2faeac1a6" + +[[package]] +name = "portable-atomic" +version = "1.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f32154ba0af3a075eefa1eda8bb414ee928f62303a54ea85b8d6638ff1a6ee9e" + +[[package]] +name = "prettytable-rs" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eea25e07510aa6ab6547308ebe3c036016d162b8da920dbb079e3ba8acf3d95a" +dependencies = [ + "csv", + "encode_unicode 1.0.0", + "is-terminal", + "lazy_static", + "term", + "unicode-width", +] + +[[package]] +name = "proc-macro2" +version = "1.0.66" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "18fb31db3f9bddb2ea821cde30a9f70117e3f119938b5ee630b7403aa6e2ead9" +dependencies = [ + "unicode-ident", +] + +[[package]] +name = "quote" +version = "1.0.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "50f3b39ccfb720540debaa0164757101c08ecb8d326b15358ce76a62c7e85965" +dependencies = [ + "proc-macro2", +] + +[[package]] +name = "redox_syscall" +version = "0.2.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fb5a58c1855b4b6819d59012155603f0b22ad30cad752600aadfcb695265519a" +dependencies = [ + "bitflags 1.3.2", +] + +[[package]] +name = "redox_users" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b033d837a7cf162d7993aded9304e30a83213c648b6e389db233191f891e5c2b" +dependencies = [ + "getrandom", + "redox_syscall", + "thiserror", +] + +[[package]] +name = "regex" +version = "1.9.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "81bc1d4caf89fac26a70747fe603c130093b53c773888797a6329091246d651a" +dependencies = [ + "aho-corasick", + "memchr", + "regex-automata", + "regex-syntax", +] + +[[package]] +name = "regex-automata" +version = "0.3.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fed1ceff11a1dddaee50c9dc8e4938bd106e9d89ae372f192311e7da498e3b69" +dependencies = [ + "aho-corasick", + "memchr", + "regex-syntax", +] + +[[package]] +name = "regex-syntax" +version = "0.7.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e5ea92a5b6195c6ef2a0295ea818b312502c6fc94dde986c5553242e18fd4ce2" + +[[package]] +name = "rustc-demangle" +version = "0.1.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d626bb9dae77e28219937af045c257c28bfd3f69333c512553507f5f9798cb76" + +[[package]] +name = "rustix" +version = "0.38.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "172891ebdceb05aa0005f533a6cbfca599ddd7d966f6f5d4d9b2e70478e70399" 
+dependencies = [ + "bitflags 2.3.3", + "errno", + "libc", + "linux-raw-sys", + "windows-sys 0.48.0", +] + +[[package]] +name = "rustversion" +version = "1.0.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7ffc183a10b4478d04cbbbfc96d0873219d962dd5accaff2ffbd4ceb7df837f4" + +[[package]] +name = "ryu" +version = "1.0.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1ad4cc8da4ef723ed60bced201181d83791ad433213d8c24efffda1eec85d741" + +[[package]] +name = "scroll" +version = "0.10.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fda28d4b4830b807a8b43f7b0e6b5df875311b3e7621d84577188c175b6ec1ec" +dependencies = [ + "scroll_derive", +] + +[[package]] +name = "scroll_derive" +version = "0.10.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "aaaae8f38bb311444cfb7f1979af0bc9240d95795f75f9ceddf6a59b79ceffa0" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "serde" +version = "1.0.183" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "32ac8da02677876d532745a130fc9d8e6edfa81a269b107c5b00829b91d8eb3c" + +[[package]] +name = "smawk" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f67ad224767faa3c7d8b6d91985b78e70a1324408abcb1cfcc2be4c06bc06043" + +[[package]] +name = "strsim" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "73473c0e59e6d5812c5dfe2a064a6444949f089e20eec9a2e5506596494e4623" + +[[package]] +name = "syn" +version = "1.0.109" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + +[[package]] +name = "syn" +version = "2.0.28" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "04361975b3f5e348b2189d8dc55bc942f278b2d482a6a0365de5bdd62d351567" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + +[[package]] +name = "term" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c59df8ac95d96ff9bede18eb7300b0fda5e5d8d90960e76f8e14ae765eedbf1f" +dependencies = [ + "dirs-next", + "rustversion", + "winapi", +] + +[[package]] +name = "textwrap" +version = "0.16.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "222a222a5bfe1bba4a77b45ec488a741b3cb8872e5e499451fd7d0129c9c7c3d" +dependencies = [ + "smawk", + "unicode-linebreak", + "unicode-width", +] + +[[package]] +name = "thiserror" +version = "1.0.44" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "611040a08a0439f8248d1990b111c95baa9c704c805fa1f62104b39655fd7f90" +dependencies = [ + "thiserror-impl", +] + +[[package]] +name = "thiserror-impl" +version = "1.0.44" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "090198534930841fab3a5d1bb637cde49e339654e606195f8d9c76eeb081dc96" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.28", +] + +[[package]] +name = "tracer" +version = "0.1.0" +dependencies = [ + "capstone", + "clap", + "goblin", + "indicatif", + "prettytable-rs", + "regex", + "rustc-demangle", + "textwrap", +] + +[[package]] +name = "unicode-ident" +version = "1.0.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"301abaae475aa91687eb82514b328ab47a211a533026cb25fc3e519b86adfc3c" + +[[package]] +name = "unicode-linebreak" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3b09c83c3c29d37506a3e260c08c03743a6bb66a9cd432c6934ab501a190571f" + +[[package]] +name = "unicode-width" +version = "0.1.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c0edd1e5b14653f783770bce4a4dabb4a5108a5370a5f5d8cfe8710c361f6c8b" + +[[package]] +name = "utf8parse" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "711b9620af191e0cdc7468a8d14e709c3dcdb115b36f838e601583af800a370a" + +[[package]] +name = "wasi" +version = "0.11.0+wasi-snapshot-preview1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" + +[[package]] +name = "winapi" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" +dependencies = [ + "winapi-i686-pc-windows-gnu", + "winapi-x86_64-pc-windows-gnu", +] + +[[package]] +name = "winapi-i686-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" + +[[package]] +name = "winapi-x86_64-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" + +[[package]] +name = "windows-sys" +version = "0.45.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "75283be5efb2831d37ea142365f009c02ec203cd29a3ebecbc093d52315b66d0" +dependencies = [ + "windows-targets 0.42.2", +] + +[[package]] +name = "windows-sys" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9" +dependencies = [ + "windows-targets 0.48.1", +] + +[[package]] +name = "windows-targets" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e5180c00cd44c9b1c88adb3693291f1cd93605ded80c250a75d472756b4d071" +dependencies = [ + "windows_aarch64_gnullvm 0.42.2", + "windows_aarch64_msvc 0.42.2", + "windows_i686_gnu 0.42.2", + "windows_i686_msvc 0.42.2", + "windows_x86_64_gnu 0.42.2", + "windows_x86_64_gnullvm 0.42.2", + "windows_x86_64_msvc 0.42.2", +] + +[[package]] +name = "windows-targets" +version = "0.48.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "05d4b17490f70499f20b9e791dcf6a299785ce8af4d709018206dc5b4953e95f" +dependencies = [ + "windows_aarch64_gnullvm 0.48.0", + "windows_aarch64_msvc 0.48.0", + "windows_i686_gnu 0.48.0", + "windows_i686_msvc 0.48.0", + "windows_x86_64_gnu 0.48.0", + "windows_x86_64_gnullvm 0.48.0", + "windows_x86_64_msvc 0.48.0", +] + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "597a5118570b68bc08d8d59125332c54f1ba9d9adeedeef5b99b02ba2b0698f8" + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "91ae572e1b79dba883e0d315474df7305d12f569b400fcf90581b06062f7e1bc" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.42.2" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "e08e8864a60f06ef0d0ff4ba04124db8b0fb3be5776a5cd47641e942e58c4d43" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b2ef27e0d7bdfcfc7b868b317c1d32c641a6fe4629c171b8928c7b08d98d7cf3" + +[[package]] +name = "windows_i686_gnu" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c61d927d8da41da96a81f029489353e68739737d3beca43145c8afec9a31a84f" + +[[package]] +name = "windows_i686_gnu" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "622a1962a7db830d6fd0a69683c80a18fda201879f0f447f065a3b7467daa241" + +[[package]] +name = "windows_i686_msvc" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "44d840b6ec649f480a41c8d80f9c65108b92d89345dd94027bfe06ac444d1060" + +[[package]] +name = "windows_i686_msvc" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4542c6e364ce21bf45d69fdd2a8e455fa38d316158cfd43b3ac1c5b1b19f8e00" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8de912b8b8feb55c064867cf047dda097f92d51efad5b491dfb98f6bbb70cb36" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ca2b8a661f7628cbd23440e50b05d705db3686f894fc9580820623656af974b1" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "26d41b46a36d453748aedef1486d5c7a85db22e56aff34643984ea85514e94a3" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7896dbc1f41e08872e9d5e8f8baa8fdd2677f29468c4e156210174edc7f7b953" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9aec5da331524158c6d1a4ac0ab1541149c0b9505fde06423b02f5ef0106b9f0" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1a515f5799fe4961cb532f983ce2b23082366b898e52ffbce459c86f67c8378a" diff --git a/utils/zk-cycle-utils/tracer/Cargo.toml b/utils/zk-cycle-utils/tracer/Cargo.toml new file mode 100644 index 000000000..82aa8d505 --- /dev/null +++ b/utils/zk-cycle-utils/tracer/Cargo.toml @@ -0,0 +1,16 @@ +[package] +name = "tracer" +version = "0.1.0" +edition = "2018" + +[workspace] + +[dependencies] +rustc-demangle = "0.1.18" +goblin = "0.2" +capstone = "0.11.0" +regex = "1.5.4" +prettytable-rs = "^0.10" +textwrap = "0.16.0" +indicatif = "0.17.6" +clap = {version = "4.3.21", features = ['derive']} \ No newline at end of file diff --git a/utils/zk-cycle-utils/tracer/src/main.rs b/utils/zk-cycle-utils/tracer/src/main.rs new file mode 100644 index 000000000..2f6e5dfd5 --- /dev/null +++ b/utils/zk-cycle-utils/tracer/src/main.rs @@ -0,0 +1,367 @@ +use std::process::Command; +use std::str; +use std::cmp::Ordering; +use std::collections::HashMap; +use std::fs::read_to_string; +use prettytable::{Table, Row, Cell, format}; +use textwrap::wrap; +use indicatif::{ProgressBar, ProgressStyle}; +use goblin::elf::{Elf, sym::STT_FUNC}; +use rustc_demangle::demangle; +use regex::Regex; +use 
clap::Parser; + +#[derive(Parser, Debug)] +#[command(author, version, about, long_about = None)] +struct Args { + #[arg(short, long, default_value_t = 30)] + /// Include the "top" number of functions + top: usize, + + #[arg(long)] + /// Don't print stack aware instruction counts + no_stack_counts: bool, + + #[arg(long)] + /// Don't print raw (stack un-aware) instruction counts + no_raw_counts: bool, + + #[arg(long, required=true)] + /// Path to the riscv32 elf + rollup_elf: String, + + #[arg(long, required=true)] + /// Path to the rollup trace. + /// File must be one u64 program counter per line + rollup_trace: String, + + #[arg(short, long)] + /// Strip the hashes from the function name while printing + strip_hashes: bool, + + #[arg(short, long)] + /// Function name to target for getting stack counts + function_name: Option<String>, + + #[arg(short, long)] + /// Exclude functions matching these patterns from display + /// usage: -e func1 -e func2 -e func3 + exclude_view: Vec<String>, +} + +fn strip_hash(name_with_hash: &str) -> String { + let re = Regex::new(r"::h[0-9a-fA-F]+$").unwrap(); + re.replace(name_with_hash, "").to_string() +} + +fn get_cycle_count(insn: &str) -> Result<usize, &str> { + // The opcodes and their cycle counts are taken from + // https://github.com/risc0/risc0/blob/main/risc0/zkvm/src/host/server/opcode.rs + match insn { + "LB" | "LH" | "LW" | "LBU" | "LHU" | "ADDI" | "SLLI" | "SLTI" | "SLTIU" | + "AUIPC" | "SB" | "SH" | "SW" | "ADD" | "SUB" | "SLL" | "SLT" | "SLTU" | + "XOR" | "SRL" | "SRA" | "OR" | "AND" | "MUL" | "MULH" | "MULSU" | "MULU" | + "LUI" | "BEQ" | "BNE" | "BLT" | "BGE" | "BLTU" | "BGEU" | "JALR" | "JAL" | + "ECALL" | "EBREAK" => Ok(1), + + // Don't see this in the risc0 code base, but MUL, MULH, MULSU, and MULU all take 1 cycle, + // so going with that for MULHU as well. 
+ "MULHU" => Ok(1), + + "XORI" | "ORI" | "ANDI" | "SRLI" | "SRAI" | "DIV" | "DIVU" | "REM" | "REMU" => Ok(2), + + _ => Err("Decode error"), + } +} + +fn print_intruction_counts(first_header: &str, + count_vec: Vec<(String, usize)>, + top_n: usize, + strip_hashes: bool, + exclude_list: Option<&[String]>) { + let mut table = Table::new(); + table.set_format(*format::consts::FORMAT_DEFAULT); + table.set_titles(Row::new(vec![ + Cell::new(first_header), + Cell::new("Instruction Count"), + ])); + + let wrap_width = 90; + let mut row_count = 0; + for (key, value) in count_vec { + let mut cont = false; + if let Some(ev) = exclude_list { + for e in ev { + if key.contains(e) { + cont = true; + break + } + } + if cont { + continue + } + } + let mut stripped_key = key.clone(); + if strip_hashes { + stripped_key = strip_hash(&key); + } + row_count+=1; + if row_count > top_n { + break; + } + let wrapped_key = wrap(&stripped_key, wrap_width); + let key_cell_content = wrapped_key.join("\n"); + table.add_row(Row::new(vec![ + Cell::new(&key_cell_content), + Cell::new(&value.to_string()), + ])); + } + + table.printstd(); +} + +fn focused_stack_counts(function_stack: &[String], + filtered_stack_counts: &mut HashMap, usize>, + function_name: &str, + instruction: &str) { + if let Some(index) = function_stack.iter().position(|s| s == function_name) { + let truncated_stack = &function_stack[0..=index]; + let count = filtered_stack_counts.entry(truncated_stack.to_vec()).or_insert(0); + *count += get_cycle_count(instruction).unwrap(); + } +} + +fn _build_radare2_lookups( + start_lookup: &mut HashMap, + end_lookup: &mut HashMap, + func_range_lookup: &mut HashMap, + elf_name: &str +) -> std::io::Result<()> { + let output = Command::new("r2") + .arg("-q") + .arg("-c") + .arg("aa;afl") + .arg(elf_name) + .output()?; + + if output.status.success() { + let result_str = str::from_utf8(&output.stdout).unwrap(); + for line in result_str.lines() { + let parts: Vec<&str> = line.split_whitespace().collect(); + let address = u64::from_str_radix(&parts[0][2..], 16).unwrap(); + let size = parts[2].parse::().unwrap(); + let end_address = address + size - 4; + let function_name = parts[3]; + start_lookup.insert(address, function_name.to_string()); + end_lookup.insert(end_address, function_name.to_string()); + func_range_lookup.insert(function_name.to_string(), (address,end_address)); + } + } else { + eprintln!("Error executing command: {}", str::from_utf8(&output.stderr).unwrap()); + } + Ok(()) +} + +fn build_goblin_lookups( + start_lookup: &mut HashMap, + end_lookup: &mut HashMap, + func_range_lookup: &mut HashMap, + elf_name: &str +) -> std::io::Result<()> { + let buffer = std::fs::read(elf_name).unwrap(); + let elf = Elf::parse(&buffer).unwrap(); + + for sym in &elf.syms { + if sym.st_type() == STT_FUNC { + let name = elf.strtab.get(sym.st_name).unwrap_or(Ok("")).unwrap_or(""); + let demangled_name = demangle(name); + let size = sym.st_size; + let start_address = sym.st_value; + let end_address = start_address + size - 4; + start_lookup.insert(start_address, demangled_name.to_string()); + end_lookup.insert(end_address, demangled_name.to_string()); + func_range_lookup.insert(demangled_name.to_string(), (start_address,end_address)); + } + } + Ok(()) +} + +fn increment_stack_counts(instruction_counts: &mut HashMap, + function_stack: &[String], + filtered_stack_counts: &mut HashMap, usize>, + function_name: &Option, + instruction: &str) { + for f in function_stack { + *instruction_counts.entry(f.clone()).or_insert(0) += 
get_cycle_count(instruction).unwrap(); + } + if let Some(f) = function_name { + focused_stack_counts(function_stack, filtered_stack_counts, &f, instruction) + } + +} + +fn main() -> std::io::Result<()> { + + let args = Args::parse(); + let top_n = args.top; + let rollup_elf_path = args.rollup_elf; + let rollup_trace_path = args.rollup_trace; + let no_stack_counts = args.no_stack_counts; + let no_raw_counts = args.no_raw_counts; + let strip_hashes = args.strip_hashes; + let function_name = args.function_name; + let exclude_view = args.exclude_view; + + let mut start_lookup = HashMap::new(); + let mut end_lookup = HashMap::new(); + let mut func_range_lookup = HashMap::new(); + build_goblin_lookups(&mut start_lookup, &mut end_lookup, &mut func_range_lookup, &rollup_elf_path).unwrap(); + + let mut function_ranges: Vec<(u64, u64, String)> = func_range_lookup + .iter() + .map(|(f, &(start, end))| (start, end, f.clone())) + .collect(); + + function_ranges.sort_by_key(|&(start, _, _)| start); + + let file_content = read_to_string(&rollup_trace_path).unwrap(); + let mut function_stack: Vec<String> = Vec::new(); + let mut instruction_counts: HashMap<String, usize> = HashMap::new(); + let mut counts_without_callgraph: HashMap<String, usize> = HashMap::new(); + let mut filtered_stack_counts: HashMap<Vec<String>, usize> = HashMap::new(); + let total_lines = file_content.lines().count() as u64; + let mut current_function_range : (u64,u64) = (0,0); + + let update_interval = 1000usize; + let pb = ProgressBar::new(total_lines); + pb.set_style(ProgressStyle::default_bar() + .template("{spinner:.green} [{elapsed_precise}] [{bar:40.cyan/blue}] {pos}/{len} ({eta})").unwrap() + .progress_chars("#>-")); + + + for (c,line) in file_content.lines().enumerate() { + if c % &update_interval == 0 { + pb.inc(update_interval as u64); + } + let mut parts = line.split("\t"); + let pc = parts.next().unwrap_or_default().parse().unwrap(); + let instruction = parts.next().unwrap_or_default(); + + // Raw counts without considering the callgraph at all + // we're just checking if the PC belongs to a function + // if so we're incrementing. 
This would ignore the call stack + // so for example "main" would only have a hundred instructions or so + if let Ok(index) = function_ranges.binary_search_by( + |&(start, end, _)| { + if pc < start { + Ordering::Greater + } else if pc > end { + Ordering::Less + } else { + Ordering::Equal + } }) + { + let (_, _, fname) = &function_ranges[index]; + *counts_without_callgraph.entry(fname.clone()).or_insert(0) += get_cycle_count(instruction).unwrap(); + } else { + *counts_without_callgraph.entry("anonymous".to_string()).or_insert(0) += get_cycle_count(instruction).unwrap(); + } + + // The next section considers the callstack + // We build a callstack and maintain it based on some rules + // Functions lower in the stack get their counts incremented + + // we are still in the current function + if pc > current_function_range.0 && pc <= current_function_range.1 { + increment_stack_counts(&mut instruction_counts, &function_stack, &mut filtered_stack_counts, &function_name,instruction); + continue; + } + + // jump to a new function (or the same one) + if let Some(f) = start_lookup.get(&pc) { + increment_stack_counts(&mut instruction_counts, &function_stack, &mut filtered_stack_counts, &function_name,instruction); + // jump to a new function (not recursive) + if !function_stack.contains(&f) { + function_stack.push(f.clone()); + current_function_range = *func_range_lookup.get(f).unwrap(); + } + } else { + // this means pc now points to an instruction that is + // 1. not in the current function's range + // 2. not a new function call + // we now account for a new possibility where we're returning to a function in the stack + // this need not be the immediate parent and can be any of the existing functions in the stack + // due to some optimizations that the compiler can make + let mut unwind_point = 0; + let mut unwind_found = false; + for (c,f) in function_stack.iter().enumerate() { + let (s, e) = func_range_lookup.get(f).unwrap(); + if pc > *s && pc <=*e { + unwind_point = c; + unwind_found = true; + break + } + } + // unwinding until the parent + if unwind_found { + + function_stack.truncate(unwind_point + 1); + increment_stack_counts(&mut instruction_counts, &function_stack, &mut filtered_stack_counts, &function_name,instruction); + continue; + } + + // if no unwind point has been found, that means we jumped to some random location + // so we'll just increment the counts for everything in the stack + increment_stack_counts(&mut instruction_counts, &function_stack, &mut filtered_stack_counts, &function_name,instruction); + } + + } + + pb.finish_with_message("done"); + + let mut raw_counts: Vec<(String, usize)> = instruction_counts + .iter() + .map(|(key, value)| (key.clone(), value.clone())) + .collect(); + raw_counts.sort_by(|a, b| b.1.cmp(&a.1)); + + println!("\n\nTotal instructions in trace: {}", total_lines); + if !no_stack_counts { + println!("\n\n Instruction counts considering call graph"); + print_intruction_counts("Function Name", raw_counts, top_n, strip_hashes,Some(&exclude_view)); + } + + let mut raw_counts: Vec<(String, usize)> = counts_without_callgraph + .iter() + .map(|(key, value)| (key.clone(), value.clone())) + .collect(); + raw_counts.sort_by(|a, b| b.1.cmp(&a.1)); + if !no_raw_counts { + println!("\n\n Instruction counts ignoring call graph"); + print_intruction_counts("Function Name",raw_counts, top_n, strip_hashes,Some(&exclude_view)); + } + + let mut raw_counts: Vec<(String, usize)> = filtered_stack_counts + .iter() + .map(|(stack, count)| { + let numbered_stack = stack + 
.iter() + .rev() + .enumerate() + .map(|(index, line)| { + let modified_line = if strip_hashes { strip_hash(line) } else { line.clone() }; + format!("({}) {}", index + 1, modified_line) + }) + .collect::<Vec<String>>() + .join("\n"); + (numbered_stack, *count) + }) + .collect(); + + raw_counts.sort_by(|a, b| b.1.cmp(&a.1)); + if let Some(f) = function_name { + println!("\n\n Stack patterns for function '{f}' "); + print_intruction_counts("Function Stack",raw_counts, top_n, strip_hashes,None); + } + Ok(()) +}