diff --git a/.cargo/config.toml b/.cargo/config.toml
index e9173e0425d9..df41b62502d0 100644
--- a/.cargo/config.toml
+++ b/.cargo/config.toml
@@ -1,9 +1,15 @@
+[alias]
+# Permits `cargo cli --chain calibnet ...`
+cli = "run --bin forest-cli --"
+daemon = "run --bin forest --"
+
 [build]
 incremental = true
 
 # TODO(aatifsyed): remove - this can be pushed out to readme
 # In all cases, pass --cfg=tokio_unstable for tokio console integration
 # See (https://github.com/ChainSafe/forest/pull/2245)
+# Note that this may be overridden by user configuration at ~/.cargo/config.toml
 rustflags = ["--cfg=tokio_unstable"]
 
 [net]
diff --git a/.clippy.toml b/.clippy.toml
index cfcb0c672085..ca62fc64107e 100644
--- a/.clippy.toml
+++ b/.clippy.toml
@@ -1,7 +1,21 @@
-# https://rust-lang.github.io/rust-clippy/master/index.html#disallowed_types
-disallowed-types = [
- { path = "std::collections::HashMap", reason = "use ahash::HashMap instead" },
- { path = "std::collections::HashSet", reason = "use ahash::HashSet instead" },
- { path = "std::sync::RwLock", reason = "use parking_lot::RwLock instead" },
- { path = "std::sync::Mutex", reason = "use parking_lot::Mutex instead" },
-]
+# Banned in #2454, trading security/being idiomatic for speed
+[[disallowed-types]]
+path = "std::collections::HashMap"
+reason = """the standard library hasher is secure by default, but not very fast.
+use ahash::HashMap instead."""
+
+[[disallowed-types]]
+path = "std::collections::HashSet"
+reason = """the standard library hasher is secure by default, but not very fast.
+use ahash::HashSet instead."""
+
+# Banned in #2600, presumably so that poisoning won't need to be user-handled
+[[disallowed-types]]
+path = "std::sync::RwLock"
+reason = """the standard library synchronization primitives are poisoned when acquiring threads panic.
+use parking_lot::RwLock instead to silently ignore panics."""
+
+[[disallowed-types]]
+path = "std::sync::Mutex"
+reason = """the standard library synchronization primitives are poisoned when acquiring threads panic.
+use parking_lot::Mutex instead to silently ignore panics."""
diff --git a/.config/forest.dic b/.config/forest.dic
index 896a415dadb7..8ab432e36767 100644
--- a/.config/forest.dic
+++ b/.config/forest.dic
@@ -26,6 +26,7 @@ Ethereum
 exa
 FIL
 Filecoin/M
+Filops
 FVM
 GC
 GiB
@@ -58,7 +59,10 @@ serializable
 serializer/SM
 statediff
 stateful
+stderr
+stdout
 synchronizer
+syscall/S
 TCP
 tipset/SM
 tipsetkey/S
diff --git a/.config/spellcheck.md b/.config/spellcheck.md
new file mode 100644
index 000000000000..eff81b740180
--- /dev/null
+++ b/.config/spellcheck.md
@@ -0,0 +1,72 @@
+# Spellcheck
+
+We run spellchecks using
+[`cargo-spellcheck`](https://crates.io/crates/cargo-spellcheck).
+
+It delegates to a program called
+[`Hunspell`](https://github.com/hunspell/hunspell).
+
+Hunspell uses `dictionary` files for words and `affix` files to define
+acceptable modifications to those words.
+
+Note that cargo-spellcheck comes with
+[default dictionary and affix files](https://github.com/drahnr/cargo-spellcheck/blob/dff48db8ca954fce14a0cd5aea127ce59a929624/src/checker/hunspell.rs#L32).
+Our vendored `en_US.dic` is larger than theirs.
+
+## How it works
+
+See `forest.dic` in this directory:
+
+```dic
+Filecoin/M
+```
+
+`Filecoin` is the word, and `/M` applies the `M` flag in the
+[affix file](https://github.com/drahnr/cargo-spellcheck/blob/dff48db8ca954fce14a0cd5aea127ce59a929624/hunspell-data/en_US.aff#L103):
+
+```aff
+SFX M 0 's .
+``` + +In this case, `'s` and `s` are acceptable suffixes, so the following are +allowed: + +- `Filecoin` +- `Filecoins` +- `Filecoin's` + +As another example, take the following entry: + +```dic +syscall/S +``` + +Where the `S` flag is +[as follows](https://github.com/drahnr/cargo-spellcheck/blob/dff48db8ca954fce14a0cd5aea127ce59a929624/hunspell-data/en_US.aff#L91-L95): + + + +```c +// Define a suffix, called S, allow mixing prefixes and suffixes, with 4 rules. +SFX S Y 4 +// Remove a trailing `y`, replace it with `ies`, if the word ends in a `y` not preceded by a vowel. +SFX S y ies [^aeiou]y +// Don't remove any trailing characters, add an s, if the word ends in a `y` preceded by a vowel. +SFX S 0 s [aeiou]y +SFX S 0 es [sxzh] +SFX S 0 s [^sxzhy] +``` + +So the following would be allowed: + +- `syscall` +- `syscalls` + +Flags may be combined - you will often see `/SM`, for example. + +For more information see +[the `Hunspell` manual](https://manpages.ubuntu.com/manpages/bionic/man5/hunspell.5.html) + +## Tips + +- Include symbols in `backticks` - they won't have to be added to the dictionary diff --git a/CHANGELOG.md b/CHANGELOG.md index 27aa0d18f5fa..ec2da5506c08 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -25,6 +25,12 @@ ## Forest unreleased +### Breaking + +- [#2873](https://github.com/ChainSafe/forest/issues/2873) + - remove `--compressed` from the CLI. Snapshots are now always compressed. + - Remove snapshot ops - snapshots fetched to the current directory by default. + ### Added - [#2706](https://github.com/ChainSafe/forest/issues/2706): implement diff --git a/Cargo.lock b/Cargo.lock index 5fc115014ccb..2e77b1f06a7c 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -542,23 +542,6 @@ version = "1.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1181e1e0d1fce796a03db1ae795d67167da795f9cf4a39c37589e85ef57f26d3" -[[package]] -name = "attohttpc" -version = "0.19.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "262c3f7f5d61249d8c00e5546e2685cd15ebeeb1bc0f3cc5449350a1cb07319e" -dependencies = [ - "http", - "log", - "rustls 0.20.8", - "serde", - "serde_json", - "url", - "webpki 0.22.0", - "webpki-roots", - "wildmatch", -] - [[package]] name = "atty" version = "0.2.14" @@ -576,31 +559,6 @@ version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa" -[[package]] -name = "aws-creds" -version = "0.30.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5aeeee1a5defa63cba39097a510dfe63ef53658fc8995202a610f6a8a4d03639" -dependencies = [ - "attohttpc", - "dirs", - "rust-ini", - "serde", - "serde-xml-rs", - "thiserror", - "time 0.3.20", - "url", -] - -[[package]] -name = "aws-region" -version = "0.25.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "056557a61427d0e5ba29dd931031c8ffed4ee7a550e7cd55692a9d8deb0a9dba" -dependencies = [ - "thiserror", -] - [[package]] name = "axum" version = "0.6.18" @@ -1464,26 +1422,6 @@ version = "0.5.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2b62c6d3ea43cbe0bc5a081f276fd477e4291d168aacc9f9d98073325333c0d4" -[[package]] -name = "const_format" -version = "0.2.31" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c990efc7a285731f9a4378d81aff2f0e85a2c8781a05ef0f8baa8dac54d0ff48" -dependencies = [ - "const_format_proc_macros", -] - -[[package]] -name = "const_format_proc_macros" 
-version = "0.2.31" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e026b6ce194a874cb9cf32cd5772d1ef9767cc8fcb5765948d74f37a9d8b2bf6" -dependencies = [ - "proc-macro2", - "quote", - "unicode-xid", -] - [[package]] name = "constant_time_eq" version = "0.1.5" @@ -2196,15 +2134,6 @@ dependencies = [ "dirs-sys", ] -[[package]] -name = "dirs" -version = "4.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ca3aa72a6f96ea37bbc5aa912f6788242832f75369bdfdadcb0e38423f100059" -dependencies = [ - "dirs-sys", -] - [[package]] name = "dirs-next" version = "2.0.0" @@ -2248,12 +2177,6 @@ dependencies = [ "syn 1.0.109", ] -[[package]] -name = "dlv-list" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0688c2a7f92e427f44895cd63841bff7b29f8d7a1648b9e7e07a4a365b2e1257" - [[package]] name = "doc-comment" version = "0.3.3" @@ -3035,7 +2958,6 @@ dependencies = [ "serde", "serde_json", "serde_tuple", - "strfmt", "tempfile", "ticker", "tokio", @@ -3048,7 +2970,7 @@ version = "0.8.2" dependencies = [ "anyhow", "assert_cmd", - "atty", + "byte-unit", "cfg-if 1.0.0", "clap", "daemonize-me", @@ -3191,6 +3113,7 @@ dependencies = [ "log", "lru", "multihash 0.16.3", + "nonzero_ext", "num", "parking_lot 0.12.1", "serde", @@ -3233,6 +3156,7 @@ dependencies = [ "log", "lru", "nonempty", + "nonzero_ext", "num", "num-bigint", "parking_lot 0.12.1", @@ -3252,7 +3176,6 @@ dependencies = [ "ahash 0.8.3", "anyhow", "atty", - "axum", "byte-unit", "cfg-if 1.0.0", "chrono", @@ -3267,29 +3190,27 @@ dependencies = [ "forest_utils", "futures", "gethostname", - "hex", - "http", + "indicatif", "log", "mimalloc", + "nom", "num", "quickcheck", "quickcheck_macros", - "rand 0.8.5", - "regex", - "rust-s3", + "reqwest", "serde", "serde_with", - "sha2 0.10.6", - "tempfile", + "strum", + "tap", "tikv-jemallocator", "tokio", + "tokio-util", "toml 0.7.4", - "tower-http", + "tracing", "tracing-appender", "tracing-loki", "tracing-subscriber", "url", - "which", ] [[package]] @@ -3637,6 +3558,7 @@ dependencies = [ "lazy_static", "log", "lru", + "nonzero_ext", "num", "num-rational", "num-traits", @@ -3845,6 +3767,7 @@ dependencies = [ "fvm_shared 2.4.0", "lazy_static", "lru", + "nonzero_ext", "num", "num-traits", "once_cell", @@ -3936,7 +3859,6 @@ dependencies = [ "blake2b_simd", "chrono", "cid", - "const_format", "cs_serde_bytes", "digest 0.10.7", "filecoin-proofs-api", @@ -3949,7 +3871,8 @@ dependencies = [ "git-version", "human-repr", "hyper", - "hyper-rustls", + "hyper-rustls 0.23.2", + "indicatif", "libc", "log", "memory-stats", @@ -3966,6 +3889,7 @@ dependencies = [ "serde_json", "serde_yaml", "sha2 0.10.6", + "smart-default", "tempfile", "termios", "thiserror", @@ -4734,9 +4658,6 @@ name = "hashbrown" version = "0.12.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888" -dependencies = [ - "ahash 0.7.6", -] [[package]] name = "hashbrown" @@ -4890,12 +4811,6 @@ dependencies = [ "pin-project-lite 0.2.9", ] -[[package]] -name = "http-range-header" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0bfe8eed0a9285ef776bb792479ea3834e8b94e13d615c2f66d03dd50a435a29" - [[package]] name = "httparse" version = "1.8.0" @@ -4956,7 +4871,20 @@ dependencies = [ "rustls 0.20.8", "rustls-native-certs", "tokio", - "tokio-rustls", + "tokio-rustls 0.23.4", +] + +[[package]] +name = "hyper-rustls" +version = "0.24.0" 
+source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0646026eb1b3eea4cd9ba47912ea5ce9cc07713d105b1a14698f4e6433d348b7" +dependencies = [ + "http", + "hyper", + "rustls 0.21.1", + "tokio", + "tokio-rustls 0.24.0", ] [[package]] @@ -5062,6 +4990,19 @@ dependencies = [ "serde", ] +[[package]] +name = "indicatif" +version = "0.17.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cef509aa9bc73864d6756f0d34d35504af3cf0844373afe9b8669a5b8005a729" +dependencies = [ + "console", + "number_prefix", + "portable-atomic 0.3.20", + "tokio", + "unicode-width", +] + [[package]] name = "inout" version = "0.1.3" @@ -6057,17 +5998,6 @@ dependencies = [ "rawpointer", ] -[[package]] -name = "maybe-async" -version = "0.2.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0f1b8c13cb1f814b634a96b2c725449fe7ed464a7b8781de8688be5ffbd3f305" -dependencies = [ - "proc-macro2", - "quote", - "syn 1.0.109", -] - [[package]] name = "md-5" version = "0.10.5" @@ -6077,12 +6007,6 @@ dependencies = [ "digest 0.10.7", ] -[[package]] -name = "md5" -version = "0.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "490cc448043f947bae3cbee9c203358d62dbee0db12107a74be5c30ccfd09771" - [[package]] name = "memchr" version = "2.5.0" @@ -6176,25 +6100,6 @@ version = "0.3.17" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a" -[[package]] -name = "mime_guess" -version = "2.0.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4192263c238a5f0d0c6bfd21f336a313a4ce1c450542449ca191bb657b4642ef" -dependencies = [ - "mime", - "unicase", -] - -[[package]] -name = "minidom" -version = "0.15.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2e9ce45d459e358790a285e7609ff5ae4cfab88b75f237e8838e62029dda397b" -dependencies = [ - "rxml", -] - [[package]] name = "minimal-lexical" version = "0.2.1" @@ -6529,6 +6434,12 @@ version = "0.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "aeaf4ad7403de93e699c191202f017118df734d3850b01e13a3a8b2e6953d3c9" +[[package]] +name = "nonzero_ext" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "38bf9645c8b145698bb0b18a4637dcacbc421ea49bef2317e4fd8065a387cf21" + [[package]] name = "nu-ansi-term" version = "0.46.0" @@ -6639,6 +6550,12 @@ dependencies = [ "libc", ] +[[package]] +name = "number_prefix" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "830b246a0e5f20af87141b25c173cd1b609bd7779a4617d6ec582abaf90870f3" + [[package]] name = "object" version = "0.30.3" @@ -6687,16 +6604,6 @@ version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ff011a302c396a5197692431fc1948019154afc178baf7d8e37367442a4601cf" -[[package]] -name = "ordered-multimap" -version = "0.4.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ccd746e37177e1711c20dd619a1620f34f5c8b569c53590a72dedd5344d8924a" -dependencies = [ - "dlv-list", - "hashbrown 0.12.3", -] - [[package]] name = "output_vt100" version = "0.1.3" @@ -7067,6 +6974,21 @@ dependencies = [ "universal-hash 0.5.0", ] +[[package]] +name = "portable-atomic" +version = "0.3.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e30165d31df606f5726b090ec7592c308a0eaf61721ff64c9a3018e344a8753e" 
+dependencies = [ + "portable-atomic 1.3.2", +] + +[[package]] +name = "portable-atomic" +version = "1.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dc59d1bcc64fc5d021d67521f818db868368028108d37f0e98d74e33f68297b5" + [[package]] name = "positioned-io" version = "0.3.1" @@ -7527,7 +7449,7 @@ dependencies = [ "cfg-if 0.1.10", "libc", "nix 0.26.2", - "rand 0.7.3", + "rand 0.8.5", "winapi", ] @@ -7674,9 +7596,9 @@ checksum = "e3a8614ee435691de62bcffcf4a66d91b3594bf1428a5722e79103249a095690" [[package]] name = "reqwest" -version = "0.11.16" +version = "0.11.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "27b71749df584b7f4cac2c426c127a7c785a5106cc98f7a8feb044115f0fa254" +checksum = "cde824a14b7c14f85caff81225f411faacc04a2013f41670f41443742b1c1c55" dependencies = [ "base64 0.21.2", "bytes 1.4.0", @@ -7687,7 +7609,7 @@ dependencies = [ "http", "http-body", "hyper", - "hyper-rustls", + "hyper-rustls 0.24.0", "ipnet", "js-sys", "log", @@ -7695,17 +7617,19 @@ dependencies = [ "once_cell", "percent-encoding", "pin-project-lite 0.2.9", - "rustls 0.20.8", + "rustls 0.21.1", "rustls-pemfile", "serde", "serde_json", "serde_urlencoded", "tokio", - "tokio-rustls", + "tokio-rustls 0.24.0", + "tokio-util", "tower-service", "url", "wasm-bindgen", "wasm-bindgen-futures", + "wasm-streams", "web-sys", "webpki-roots", "winreg", @@ -7806,45 +7730,6 @@ dependencies = [ "webrtc-util", ] -[[package]] -name = "rust-ini" -version = "0.18.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f6d5f2436026b4f6e79dc829837d467cc7e9a55ee40e750d716713540715a2df" -dependencies = [ - "cfg-if 1.0.0", - "ordered-multimap", -] - -[[package]] -name = "rust-s3" -version = "0.32.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b6009d9d4cf910505534d62d380a0aa305805a2af0b5c3ad59a3024a0715b847" -dependencies = [ - "async-trait", - "attohttpc", - "aws-creds", - "aws-region", - "base64 0.13.1", - "cfg-if 1.0.0", - "hex", - "hmac 0.12.1", - "http", - "log", - "maybe-async", - "md5", - "minidom", - "percent-encoding", - "serde", - "serde-xml-rs", - "serde_derive", - "sha2 0.10.6", - "thiserror", - "time 0.3.20", - "url", -] - [[package]] name = "rustc-demangle" version = "0.1.23" @@ -7928,6 +7813,18 @@ dependencies = [ "webpki 0.22.0", ] +[[package]] +name = "rustls" +version = "0.21.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c911ba11bc8433e811ce56fde130ccf32f5127cab0e0194e9c68c5a5b671791e" +dependencies = [ + "log", + "ring", + "rustls-webpki", + "sct 0.7.0", +] + [[package]] name = "rustls-native-certs" version = "0.6.2" @@ -7949,6 +7846,16 @@ dependencies = [ "base64 0.21.2", ] +[[package]] +name = "rustls-webpki" +version = "0.100.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d6207cd5ed3d8dca7816f8f3725513a34609c0c765bf652b8c3cb4cfd87db46b" +dependencies = [ + "ring", + "untrusted", +] + [[package]] name = "rustversion" version = "1.0.12" @@ -7989,25 +7896,6 @@ dependencies = [ "static_assertions", ] -[[package]] -name = "rxml" -version = "0.8.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1a071866b8c681dc2cfffa77184adc32b57b0caad4e620b6292609703bceb804" -dependencies = [ - "bytes 1.4.0", - "pin-project-lite 0.2.9", - "rxml_validation", - "smartstring", - "tokio", -] - -[[package]] -name = "rxml_validation" -version = "0.8.2" -source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "53bc79743f9a66c2fb1f951cd83735f275d46bfe466259fbc5897bb60a0d00ee" - [[package]] name = "ryu" version = "1.0.13" @@ -8167,18 +8055,6 @@ dependencies = [ "serde", ] -[[package]] -name = "serde-xml-rs" -version = "0.5.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "65162e9059be2f6a3421ebbb4fef3e74b7d9e7c60c50a0e292c6239f19f1edfa" -dependencies = [ - "log", - "serde", - "thiserror", - "xml-rs", -] - [[package]] name = "serde_bytes" version = "0.11.9" @@ -8546,12 +8422,14 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a507befe795404456341dfab10cef66ead4c041f62b8b11bbb92bffe5d0953e0" [[package]] -name = "smartstring" -version = "0.2.10" +name = "smart-default" +version = "0.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e714dff2b33f2321fdcd475b71cec79781a692d846f37f415fb395a1d2bcd48e" +checksum = "0eb01866308440fc64d6c44d9e86c5cc17adfe33c4d6eed55da9145044d0ffc1" dependencies = [ - "static_assertions", + "proc-macro2", + "quote", + "syn 2.0.18", ] [[package]] @@ -8786,18 +8664,21 @@ version = "1.0.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9e08d8363704e6c71fc928674353e6b7c23dcea9d82d7012c8faf2a3a025f8d0" -[[package]] -name = "strfmt" -version = "0.2.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7a8348af2d9fc3258c8733b8d9d8db2e56f54b2363a4b5b81585c7875ed65e65" - [[package]] name = "strsim" version = "0.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "73473c0e59e6d5812c5dfe2a064a6444949f089e20eec9a2e5506596494e4623" +[[package]] +name = "strum" +version = "0.24.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "063e6045c0e62079840579a7e47a355ae92f60eb74daaf156fb1e84ba164e63f" +dependencies = [ + "strum_macros", +] + [[package]] name = "strum_macros" version = "0.24.3" @@ -9135,6 +9016,16 @@ dependencies = [ "webpki 0.22.0", ] +[[package]] +name = "tokio-rustls" +version = "0.24.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e0d409377ff5b1e3ca6437aa86c1eb7d40c134bfec254e44c830defa92669db5" +dependencies = [ + "rustls 0.21.1", + "tokio", +] + [[package]] name = "tokio-stream" version = "0.1.14" @@ -9264,31 +9155,6 @@ dependencies = [ "tracing", ] -[[package]] -name = "tower-http" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5d1d42a9b3f3ec46ba828e8d376aec14592ea199f70a06a548587ecd1c4ab658" -dependencies = [ - "bitflags", - "bytes 1.4.0", - "futures-core", - "futures-util", - "http", - "http-body", - "http-range-header", - "httpdate", - "mime", - "mime_guess", - "percent-encoding", - "pin-project-lite 0.2.9", - "tokio", - "tokio-util", - "tower-layer", - "tower-service", - "tracing", -] - [[package]] name = "tower-layer" version = "0.3.2" @@ -9526,15 +9392,6 @@ dependencies = [ "static_assertions", ] -[[package]] -name = "unicase" -version = "2.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "50f37be617794602aabbeee0be4f259dc1778fabe05e2d67ee8f79326d5cb4f6" -dependencies = [ - "version_check", -] - [[package]] name = "unicode-bidi" version = "0.3.13" @@ -9836,6 +9693,19 @@ dependencies = [ "leb128", ] +[[package]] +name = "wasm-streams" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6bbae3363c08332cadccd13b67db371814cd214c2524020932f0804b8cf7c078" +dependencies = [ + "futures-util", 
+ "js-sys", + "wasm-bindgen", + "wasm-bindgen-futures", + "web-sys", +] + [[package]] name = "wasm-timer" version = "0.2.5" @@ -10337,12 +10207,6 @@ version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "17882f045410753661207383517a6f62ec3dbeb6a4ed2acce01f0728238d1983" -[[package]] -name = "wildmatch" -version = "2.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ee583bdc5ff1cf9db20e9db5bb3ff4c3089a8f6b8b31aff265c9aba85812db86" - [[package]] name = "win-sys" version = "0.3.1" @@ -10668,12 +10532,6 @@ dependencies = [ "time 0.3.20", ] -[[package]] -name = "xml-rs" -version = "0.8.14" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "52839dc911083a8ef63efa4d039d1f58b5e409f923e44c80828f206f66e5541c" - [[package]] name = "xsalsa20poly1305" version = "0.9.1" diff --git a/Cargo.toml b/Cargo.toml index 042230b6d9ad..ea64783f323b 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -59,6 +59,7 @@ base64 = "0.21" blake2b_simd = "1.0" # blst-portable flag avoids using CPU instructions which aren't available on older CPUs bls-signatures = { version = "0.13", default-features = false, features = ["blst-portable"] } +byte-unit = "4.0" byteorder = "1.4.3" bytes = "1.2" cfg-if = "1" @@ -93,8 +94,10 @@ human-repr = "1.0" hyper = { version = "0.14", features = ["client", "stream", "http1"] } hyper-rustls = "0.23" indexmap = { version = "1.9", features = ["serde-1"] } +indicatif = { version = "0.17.3", features = ["tokio"] } jsonrpc-v2 = { version = "0.11", default-features = false, features = ["easy-errors", "macros", "bytes-v05"] } lazy_static = "1.4" +libc = "0.2" libipld = { version = "0.14", default-features = false, features = ["dag-cbor", "dag-json", "derive"] } libipld-core = "0.14" libipld-macro = "0.14" @@ -104,7 +107,9 @@ log = "0.4" lru = "0.9" multibase = "0.9" multihash = { version = "0.16", default-features = false } +nom = "7.1.3" nonempty = "0.8.0" +nonzero_ext = "0.3.0" num = "0.4.0" num-bigint = "0.4" num-derive = "0.3" @@ -115,13 +120,15 @@ once_cell = "1.15" parking_lot = "0.12" pbr = "1.1" pin-project-lite = "0.2" +pretty_assertions = "1.3.0" prometheus = "0.13" prometheus-client = "0.19" quickcheck = "1" quickcheck_macros = "1" rand = "0.8" rayon = "1.5" -regex = "1.8" +# use rustls instead of native (openSSL) tls to drop the number of build dependencies +reqwest = { version = "0.11.18", default-features = false, features = ["stream", "rustls-tls"] } serde = { version = "1.0", default-features = false } serde_ipld_dagcbor = "0.2" serde_json = "1.0" @@ -129,20 +136,22 @@ serde_tuple = "0.5" serde_with = { version = "2.0.1", features = ["chrono_0_4"] } serde_yaml = "0.9" sha2 = { version = "0.10.5", default-features = false } +smart-default = "0.7.1" +strum = { version = "0.24", features = ["derive"] } +tap = "1" tempfile = "3.4" thiserror = "1.0" tokio = "1.28" -tokio-stream = "0.1" -tokio-util = "0.7.0" +tokio-stream = { version = "0.1", features = ["fs", "io-util"] } +tokio-util = { version = "0.7.0", features = ["compat"] } toml = "0.7" -tower-http = "0.4" tracing = "0.1" tracing-appender = "0.2" tracing-loki = { version = "0.2", default-features = false, features = ["compat-0-2-1", "rustls"] } tracing-subscriber = { version = "0.3", features = ["env-filter"] } unsigned-varint = { version = "0.7", default-features = false } url = { version = "2.3", features = ["serde"] } -which = "4.3" +walkdir = "2" fil_actor_account_state = "5" fil_actor_init_state = "5" diff --git a/Makefile b/Makefile 
index 62cac6b31cfc..94374afabff2 100644 --- a/Makefile +++ b/Makefile @@ -82,7 +82,7 @@ audit: cargo audit --ignore RUSTSEC-2020-0071 spellcheck: - cargo spellcheck --code 1 + if ! cargo spellcheck --code 1; then echo "See .config/spellcheck.md for tips" && false; fi lint: license clean lint-clippy cargo fmt --all --check @@ -175,9 +175,10 @@ mdbook-build: # This isn't included by default, so we use a nightly toolchain, and the # (unstable) `--enable-index-page` option. # https://doc.rust-lang.org/nightly/rustdoc/unstable-features.html#--index-page-provide-a-top-level-landing-page-for-docs +# We document private items to ensure internal documentation is up-to-date (i.e passes lints) vendored-docs: rustup toolchain install $(VENDORED_DOCS_TOOLCHAIN) - RUSTDOCFLAGS="-Dwarnings -Zunstable-options --enable-index-page" \ + RUSTDOCFLAGS="--deny=warnings --allow=rustdoc::private-intra-doc-links --document-private-items -Zunstable-options --enable-index-page" \ cargo +$(VENDORED_DOCS_TOOLCHAIN) doc --workspace --no-deps .PHONY: clean clean-all lint lint-docker lint-clippy build release test test-all test-all-release test-release license test-vectors run-vectors pull-serialization-tests install-cli install-daemon install install-deps install-lint-tools docs run-serialization-vectors vendored-docs diff --git a/blockchain/chain/Cargo.toml b/blockchain/chain/Cargo.toml index 59b68361c40e..84be59e8a767 100644 --- a/blockchain/chain/Cargo.toml +++ b/blockchain/chain/Cargo.toml @@ -37,6 +37,7 @@ lazy_static.workspace = true libipld.workspace = true log.workspace = true lru.workspace = true +nonzero_ext.workspace = true num.workspace = true parking_lot.workspace = true serde = { workspace = true, features = ["derive"] } diff --git a/blockchain/chain/src/store/chain_store.rs b/blockchain/chain/src/store/chain_store.rs index d9a99dd33840..6b3b732a51c2 100644 --- a/blockchain/chain/src/store/chain_store.rs +++ b/blockchain/chain/src/store/chain_store.rs @@ -41,6 +41,7 @@ use fvm_ipld_encoding::{Cbor, CborStore}; use fvm_shared::clock::ChainEpoch; use log::{debug, info, trace, warn}; use lru::LruCache; +use nonzero_ext::nonzero; use parking_lot::Mutex; use serde::{de::DeserializeOwned, Serialize}; use tokio::sync::{ @@ -58,8 +59,7 @@ use crate::Scale; // A cap on the size of the future_sink const SINK_CAP: usize = 200; -const DEFAULT_TIPSET_CACHE_SIZE: NonZeroUsize = - forest_utils::const_option!(NonZeroUsize::new(8192)); +const DEFAULT_TIPSET_CACHE_SIZE: NonZeroUsize = nonzero!(8192usize); /// `Enum` for `pubsub` channel that defines message type variant and data /// contained in message type. diff --git a/blockchain/chain/src/store/index.rs b/blockchain/chain/src/store/index.rs index 6d56d0e05747..ef61e824e161 100644 --- a/blockchain/chain/src/store/index.rs +++ b/blockchain/chain/src/store/index.rs @@ -10,12 +10,12 @@ use fvm_ipld_blockstore::Blockstore; use fvm_shared::clock::ChainEpoch; use log::info; use lru::LruCache; +use nonzero_ext::nonzero; use parking_lot::Mutex; use crate::{tipset_from_keys, Error, TipsetCache}; -const DEFAULT_CHAIN_INDEX_CACHE_SIZE: NonZeroUsize = - forest_utils::const_option!(NonZeroUsize::new(32 << 10)); +const DEFAULT_CHAIN_INDEX_CACHE_SIZE: NonZeroUsize = nonzero!(32usize << 10); /// Configuration which sets the length of tipsets to skip in between each /// cached entry. 
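For readers new to `nonzero_ext`: the hunks above and below swap the in-tree `forest_utils::const_option!(NonZeroUsize::new(..))` helper for the crate's `nonzero!` macro, which checks the value at compile time and therefore never has to unwrap an `Option` inside a `const` item. A minimal sketch of the resulting pattern, illustrative only and not part of the diff, assuming just `nonzero_ext` as a dependency:

```rust
use std::num::NonZeroUsize;

use nonzero_ext::nonzero;

// The macro checks the value at compile time (a zero value is rejected),
// so no runtime `Option` unwrap is needed in a `const` context.
const DEFAULT_CHAIN_INDEX_CACHE_SIZE: NonZeroUsize = nonzero!(32usize << 10);

fn main() {
    assert_eq!(DEFAULT_CHAIN_INDEX_CACHE_SIZE.get(), 32usize << 10);
}
```

The same substitution is applied to the bad-block cache, message pool, and state manager constants in the hunks that follow.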
diff --git a/blockchain/chain_sync/Cargo.toml b/blockchain/chain_sync/Cargo.toml index 88d25b2dbc3c..feef9349c3e2 100644 --- a/blockchain/chain_sync/Cargo.toml +++ b/blockchain/chain_sync/Cargo.toml @@ -33,6 +33,7 @@ lazy_static.workspace = true log.workspace = true lru.workspace = true nonempty.workspace = true +nonzero_ext.workspace = true num-bigint.workspace = true num.workspace = true parking_lot.workspace = true diff --git a/blockchain/chain_sync/src/bad_block_cache.rs b/blockchain/chain_sync/src/bad_block_cache.rs index 76ce9892caf0..a49c35e47737 100644 --- a/blockchain/chain_sync/src/bad_block_cache.rs +++ b/blockchain/chain_sync/src/bad_block_cache.rs @@ -5,6 +5,7 @@ use std::num::NonZeroUsize; use cid::Cid; use lru::LruCache; +use nonzero_ext::nonzero; use parking_lot::Mutex; /// Thread-safe cache for tracking bad blocks. @@ -17,7 +18,7 @@ pub struct BadBlockCache { impl Default for BadBlockCache { fn default() -> Self { - Self::new(forest_utils::const_option!(NonZeroUsize::new(1 << 15))) + Self::new(nonzero!(1usize << 15)) } } diff --git a/blockchain/chain_sync/src/network_context.rs b/blockchain/chain_sync/src/network_context.rs index f5a4220f2d3c..f4b547d8be76 100644 --- a/blockchain/chain_sync/src/network_context.rs +++ b/blockchain/chain_sync/src/network_context.rs @@ -40,7 +40,7 @@ const CHAIN_EXCHANGE_TIMEOUT: Duration = Duration::from_secs(5); const MAX_CONCURRENT_CHAIN_EXCHANGE_REQUESTS: usize = 2; /// Context used in chain sync to handle network requests. -/// This contains the peer manager, P2P service interface, and [`BlockStore`] +/// This contains the peer manager, P2P service interface, and [`Blockstore`] /// required to make network requests. pub(crate) struct SyncNetworkContext { /// Channel to send network messages through P2P service diff --git a/blockchain/consensus/fil_cns/src/validation.rs b/blockchain/consensus/fil_cns/src/validation.rs index 3d193c25dc76..181c0eb3c941 100644 --- a/blockchain/consensus/fil_cns/src/validation.rs +++ b/blockchain/consensus/fil_cns/src/validation.rs @@ -34,7 +34,7 @@ fn to_errs>(e: E) -> NonEmpty /// Returns all encountered errors, so they can be merged with the common /// validations performed by the synchronizer. 
/// diff --git a/blockchain/message_pool/Cargo.toml b/blockchain/message_pool/Cargo.toml index 2cd9c25b5642..e54fc0d5df8b 100644 --- a/blockchain/message_pool/Cargo.toml +++ b/blockchain/message_pool/Cargo.toml @@ -30,6 +30,7 @@ fvm_shared3 = { workspace = true, default-features = false } lazy_static.workspace = true log.workspace = true lru.workspace = true +nonzero_ext.workspace = true num-rational.workspace = true num-traits.workspace = true num.workspace = true diff --git a/blockchain/message_pool/src/msgpool/msg_pool.rs b/blockchain/message_pool/src/msgpool/msg_pool.rs index e0f5dae59998..b4154f796c0a 100644 --- a/blockchain/message_pool/src/msgpool/msg_pool.rs +++ b/blockchain/message_pool/src/msgpool/msg_pool.rs @@ -24,11 +24,11 @@ use forest_shim::{ gas::{price_list_by_network_version, Gas}, }; use forest_state_manager::is_valid_for_sending; -use forest_utils::const_option; use futures::StreamExt; use fvm_ipld_encoding::Cbor; use log::warn; use lru::LruCache; +use nonzero_ext::nonzero; use num::BigInt; use parking_lot::{Mutex, RwLock as SyncRwLock}; use tokio::{sync::broadcast::error::RecvError, task::JoinSet, time::interval}; @@ -46,8 +46,8 @@ use crate::{ }; // LruCache sizes have been taken from the lotus implementation -const BLS_SIG_CACHE_SIZE: NonZeroUsize = const_option!(NonZeroUsize::new(40000)); -const SIG_VAL_CACHE_SIZE: NonZeroUsize = const_option!(NonZeroUsize::new(32000)); +const BLS_SIG_CACHE_SIZE: NonZeroUsize = nonzero!(40000usize); +const SIG_VAL_CACHE_SIZE: NonZeroUsize = nonzero!(32000usize); /// Simple structure that contains a hash-map of messages where k: a message /// from address, v: a message which corresponds to that address. diff --git a/blockchain/state_manager/Cargo.toml b/blockchain/state_manager/Cargo.toml index fe4707fbf802..08c248eb10d4 100644 --- a/blockchain/state_manager/Cargo.toml +++ b/blockchain/state_manager/Cargo.toml @@ -35,6 +35,7 @@ fvm_ipld_encoding.workspace = true fvm_shared = { workspace = true } lazy_static.workspace = true lru.workspace = true +nonzero_ext.workspace = true num-traits.workspace = true num.workspace = true once_cell.workspace = true diff --git a/blockchain/state_manager/src/lib.rs b/blockchain/state_manager/src/lib.rs index db17b30fffda..c55f78f49d05 100644 --- a/blockchain/state_manager/src/lib.rs +++ b/blockchain/state_manager/src/lib.rs @@ -40,6 +40,7 @@ use fvm_ipld_encoding::Cbor; use fvm_ipld_encoding::CborStore; use fvm_shared::clock::ChainEpoch; use lru::LruCache; +use nonzero_ext::nonzero; use num::BigInt; use num_traits::identities::Zero; use once_cell::unsync::Lazy; @@ -51,8 +52,7 @@ use vm_circ_supply::GenesisInfo; pub use self::errors::*; -const DEFAULT_TIPSET_CACHE_SIZE: NonZeroUsize = - forest_utils::const_option!(NonZeroUsize::new(1024)); +const DEFAULT_TIPSET_CACHE_SIZE: NonZeroUsize = nonzero!(1024usize); /// Intermediary for retrieving state objects and updating actor states. 
type CidPair = (Cid, Cid); diff --git a/forest/cli/Cargo.toml b/forest/cli/Cargo.toml index 23a20c9d15f8..99297828cae2 100644 --- a/forest/cli/Cargo.toml +++ b/forest/cli/Cargo.toml @@ -48,13 +48,12 @@ human-repr.workspace = true jsonrpc-v2.workspace = true log.workspace = true multibase.workspace = true -nom = "7.1.3" +nom.workspace = true num.workspace = true rustyline = "10.1.1" serde = { workspace = true, features = ["derive"] } serde_json.workspace = true serde_tuple.workspace = true -strfmt = "0.2.2" tempfile.workspace = true ticker = "0.1" tokio = { workspace = true, features = ["sync"] } @@ -62,7 +61,7 @@ toml.workspace = true [dev-dependencies] assert_cmd.workspace = true -pretty_assertions = "1.3.0" +pretty_assertions.workspace = true quickcheck.workspace = true rand.workspace = true diff --git a/forest/cli/src/cli/snapshot_cmd.rs b/forest/cli/src/cli/snapshot_cmd.rs index ffb104116274..42a3164faf91 100644 --- a/forest/cli/src/cli/snapshot_cmd.rs +++ b/forest/cli/src/cli/snapshot_cmd.rs @@ -1,147 +1,53 @@ // Copyright 2019-2023 ChainSafe Systems // SPDX-License-Identifier: Apache-2.0, MIT -use std::{fs, path::PathBuf, sync::Arc, time::Duration}; +use std::{path::PathBuf, sync::Arc}; use anyhow::bail; -use chrono::{Datelike, Utc}; +use chrono::Utc; use clap::Subcommand; use dialoguer::{theme::ColorfulTheme, Confirm}; use forest_blocks::{tipset_keys_json::TipsetKeysJson, Tipset, TipsetKeys}; use forest_chain::ChainStore; -use forest_cli_shared::cli::{ - default_snapshot_dir, is_car_or_zst_or_tmp, snapshot_fetch, SnapshotServer, SnapshotStore, -}; +use forest_cli_shared::snapshot; use forest_db::db_engine::{db_root, open_proxy_db}; use forest_genesis::{forest_load_car, read_genesis_header}; use forest_ipld::{recurse_links_hash, CidHashSet}; use forest_networks::NetworkChain; use forest_rpc_api::{chain_api::ChainExportParams, progress_api::GetProgressType}; use forest_rpc_client::{chain_ops::*, progress_ops::get_progress}; -use forest_utils::{ - io::{parser::parse_duration, ProgressBar}, - net::get_fetch_progress_from_file, - retry, -}; +use forest_utils::{io::ProgressBar, net::get_fetch_progress_from_file}; use fvm_shared::clock::ChainEpoch; -use log::info; -use strfmt::strfmt; use tempfile::TempDir; -use tokio::time::sleep; use super::*; use crate::cli::{cli_error_and_die, handle_rpc_err}; -pub(crate) const OUTPUT_PATH_DEFAULT_FORMAT: &str = - "forest_snapshot_{chain}_{year}-{month}-{day}_height_{height}.car"; - -pub(crate) const OUTPUT_PATH_DEFAULT_COMPRESSED_FORMAT: &str = - "forest_snapshot_{chain}_{year}-{month}-{day}_height_{height}.car.zst"; - #[derive(Debug, Subcommand)] pub enum SnapshotCommands { /// Export a snapshot of the chain to `` Export { - /// Snapshot output path. Default to - /// `forest_snapshot_{chain}_{year}-{month}-{day}_height_{height}.car(. - /// zst)` Date is in ISO 8601 date format. - /// Arguments: - /// - chain - chain name e.g. `mainnet` - /// - year - /// - month - /// - day - /// - height - the epoch + /// Snapshot output filename or directory. Defaults to + /// `./forest_snapshot_{chain}_{year}-{month}-{day}_height_{epoch}.car.zst`. #[arg(short, default_value = ".", verbatim_doc_comment)] output_path: PathBuf, - /// Export in zstd compressed format - #[arg(long)] - compressed: bool, - /// Skip creating the checksum file. Only valid when `--compressed` is - /// not supplied. + /// Skip creating the checksum file. #[arg(long)] skip_checksum: bool, - /// Skip writing to the snapshot `.car` file specified by - /// `--output-path`. 
+ /// Don't write the archive. #[arg(long)] dry_run: bool, }, /// Fetches the most recent snapshot from a trusted, pre-defined location. Fetch { - /// Directory to which the snapshot should be downloaded. If not - /// provided, it will be saved in default Forest data location. - #[arg(short, long)] - snapshot_dir: Option, - /// Snapshot trusted source - #[arg(short, long, value_enum)] - provider: Option, - /// Download zstd compressed snapshot, only supported by the Filecoin - /// provider for now. default is false. - #[arg(long)] - compressed: bool, - /// Use [`aria2`](https://aria2.github.io/) for downloading, default is false. Requires `aria2c` in PATH. - #[arg(long)] - aria2: bool, - /// Maximum number of times to retry the fetch - #[arg(short, long, default_value = "3")] - max_retries: i32, - /// Duration to wait between the retries in seconds - #[arg(short, long, default_value = "60", value_parser = parse_duration)] - delay: Duration, - }, - - /// Shows default snapshot dir - Dir, - - /// List local snapshots - List { - /// Directory to which the snapshots are downloaded. If not provided, it - /// will be the default Forest data location. - #[arg(short, long)] - snapshot_dir: Option, - }, - - /// Remove local snapshot - Remove { - /// Snapshot filename to remove - filename: PathBuf, - - /// Directory to which the snapshots are downloaded. If not provided, it - /// will be the default Forest data location. - #[arg(short, long)] - snapshot_dir: Option, - - /// Answer yes to all forest-cli yes/no questions without prompting - #[arg(long)] - force: bool, - }, - - /// Prune local snapshot, keeps the latest only. - /// Note that file names that do not match - /// forest_snapshot_{chain}_{year}-{month}-{day}_height_{height}.car - /// pattern will be ignored - Prune { - /// Directory to which the snapshots are downloaded. If not provided, it - /// will be the default Forest data location. - #[arg(short, long)] - snapshot_dir: Option, - - /// Answer yes to all forest-cli yes/no questions without prompting - #[arg(long)] - force: bool, + #[arg(short, long, default_value = ".")] + directory: PathBuf, + /// Vendor to fetch the snapshot from + #[arg(short, long, value_enum, default_value_t = snapshot::Vendor::default())] + vendor: snapshot::Vendor, }, - /// Clean all local snapshots, use with care. - Clean { - /// Directory to which the snapshots are downloaded. If not provided, it - /// will be the default Forest data location. - #[arg(short, long)] - snapshot_dir: Option, - - /// Answer yes to all forest-cli yes/no questions without prompting - #[arg(long)] - force: bool, - }, /// Validates the snapshot. 
Validate { /// Number of block headers to validate from the tip @@ -160,7 +66,6 @@ impl SnapshotCommands { match self { Self::Export { output_path, - compressed, skip_checksum, dry_run, } => { @@ -171,39 +76,17 @@ impl SnapshotCommands { let epoch = chain_head.epoch(); - let now = Utc::now(); - - let month_string = format!("{:02}", now.month() as u8); - let year = now.year(); - let day_string = format!("{:02}", now.day()); let chain_name = chain_get_name((), &config.client.rpc_token) .await .map_err(handle_rpc_err)?; - #[allow(clippy::disallowed_types)] - let vars = std::collections::HashMap::from([ - ("year".to_string(), year.to_string()), - ("month".to_string(), month_string), - ("day".to_string(), day_string), - ("chain".to_string(), chain_name), - ("height".to_string(), epoch.to_string()), - ]); - - let output_path = if output_path.is_dir() { - output_path.join(if *compressed { - OUTPUT_PATH_DEFAULT_COMPRESSED_FORMAT - } else { - OUTPUT_PATH_DEFAULT_FORMAT - }) - } else { - output_path.clone() - }; - - let output_path = match strfmt(&output_path.display().to_string(), &vars) { - Ok(path) => path.into(), - Err(e) => { - cli_error_and_die(format!("Unparsable string error: {e}"), 1); - } + let output_path = match output_path.is_dir() { + true => output_path.join(snapshot::filename( + chain_name, + Utc::now().date_naive(), + chain_head.epoch(), + )), + false => output_path.clone(), }; let params = ChainExportParams { @@ -211,7 +94,6 @@ impl SnapshotCommands { recent_roots: config.chain.recent_state_roots, output_path, tipset_keys: TipsetKeysJson(chain_head.key().clone()), - compressed: *compressed, skip_checksum: *skip_checksum, dry_run: *dry_run, }; @@ -252,59 +134,15 @@ impl SnapshotCommands { )); Ok(()) } - Self::Fetch { - snapshot_dir, - provider, - compressed: use_compressed, - aria2: use_aria2, - max_retries, - delay, - } => { - let snapshot_dir = snapshot_dir - .clone() - .unwrap_or_else(|| default_snapshot_dir(&config)); - match retry!( - snapshot_fetch, - *max_retries, - *delay, - &snapshot_dir, - &config, - provider, - *use_compressed, - *use_aria2 - ) { + Self::Fetch { directory, vendor } => { + match snapshot::fetch(directory, &config.chain.network, *vendor).await { Ok(out) => { - println!("Snapshot successfully downloaded at {}", out.display()); + println!("{}", out.display()); Ok(()) } Err(e) => cli_error_and_die(format!("Failed fetching the snapshot: {e}"), 1), } } - Self::Dir => { - let dir = default_snapshot_dir(&config); - println!("{}", dir.display()); - Ok(()) - } - Self::List { snapshot_dir } => list(&config, snapshot_dir), - Self::Remove { - filename, - snapshot_dir, - force, - } => { - remove(&config, filename, snapshot_dir, *force); - Ok(()) - } - Self::Prune { - snapshot_dir, - force, - } => { - prune(&config, snapshot_dir, *force); - Ok(()) - } - Self::Clean { - snapshot_dir, - force, - } => clean(&config, snapshot_dir, *force), Self::Validate { recent_stateroots, snapshot, @@ -314,110 +152,6 @@ impl SnapshotCommands { } } -fn list(config: &Config, snapshot_dir: &Option) -> anyhow::Result<()> { - let snapshot_dir = snapshot_dir - .clone() - .unwrap_or_else(|| default_snapshot_dir(config)); - println!("Snapshot dir: {}", snapshot_dir.display()); - let store = SnapshotStore::new(config, &snapshot_dir); - if store.snapshots.is_empty() { - println!("No local snapshots"); - } else { - println!("Local snapshots:"); - store.display(); - } - Ok(()) -} - -fn remove(config: &Config, filename: &PathBuf, snapshot_dir: &Option, force: bool) { - let snapshot_dir = snapshot_dir 
- .clone() - .unwrap_or_else(|| default_snapshot_dir(config)); - let snapshot_path = snapshot_dir.join(filename); - if snapshot_path.exists() && snapshot_path.is_file() && is_car_or_zst_or_tmp(&snapshot_path) { - println!("Deleting {}", snapshot_path.display()); - if !force && !prompt_confirm() { - println!("Aborted."); - return; - } - - delete_snapshot(&snapshot_path); - } else { - println!( - "{} is not a valid snapshot file path, to list all snapshots, run forest-cli snapshot list", - snapshot_path.display()); - } -} - -fn prune(config: &Config, snapshot_dir: &Option, force: bool) { - let snapshot_dir = snapshot_dir - .clone() - .unwrap_or_else(|| default_snapshot_dir(config)); - println!("Snapshot dir: {}", snapshot_dir.display()); - let mut store = SnapshotStore::new(config, &snapshot_dir); - if store.snapshots.len() < 2 { - println!("No files to delete"); - return; - } - store.snapshots.sort_by_key(|s| (s.date, s.height)); - store.snapshots.pop(); // Keep the latest snapshot - - println!("Files to delete:"); - store.display(); - - if !force && !prompt_confirm() { - println!("Aborted."); - } else { - for snapshot_path in store.snapshots { - delete_snapshot(&snapshot_path.path); - } - } -} - -fn clean(config: &Config, snapshot_dir: &Option, force: bool) -> anyhow::Result<()> { - let snapshot_dir = snapshot_dir - .clone() - .unwrap_or_else(|| default_snapshot_dir(config)); - println!("Snapshot dir: {}", snapshot_dir.display()); - - let read_dir = match fs::read_dir(snapshot_dir) { - Ok(read_dir) => read_dir, - // basically have the same behavior as in `rm -f` which doesn't fail if the target - // directory doesn't exist. - Err(_) if force => { - println!("Target directory not accessible. Skipping."); - return Ok(()); - } - Err(e) => bail!(e), - }; - - let snapshots_to_delete: Vec<_> = read_dir - .flatten() - .map(|entry| entry.path()) - .filter(|p| is_car_or_zst_or_tmp(p)) - .collect(); - - if snapshots_to_delete.is_empty() { - println!("No files to delete"); - return Ok(()); - } - println!("Files to delete:"); - snapshots_to_delete - .iter() - .for_each(|f| println!("{}", f.display())); - - if !force && !prompt_confirm() { - println!("Aborted."); - return Ok(()); - } - - for snapshot_path in snapshots_to_delete { - delete_snapshot(&snapshot_path); - } - - Ok(()) -} - async fn validate( config: &Config, recent_stateroots: &i64, @@ -551,16 +285,3 @@ where Ok(()) } - -fn delete_snapshot(snapshot_path: &PathBuf) { - let checksum_path = snapshot_path.with_extension("sha256sum"); - for path in [snapshot_path, &checksum_path] { - if path.exists() { - if let Err(err) = fs::remove_file(path) { - println!("Failed to delete {}\n{err}", path.display()); - } else { - println!("Deleted {}", path.display()); - } - } - } -} diff --git a/forest/cli/tests/config_tests.rs b/forest/cli/tests/config.rs similarity index 100% rename from forest/cli/tests/config_tests.rs rename to forest/cli/tests/config.rs diff --git a/forest/cli/tests/snapshot_tests.rs b/forest/cli/tests/snapshot_tests.rs deleted file mode 100644 index b1a237cd45bb..000000000000 --- a/forest/cli/tests/snapshot_tests.rs +++ /dev/null @@ -1,335 +0,0 @@ -// Copyright 2019-2023 ChainSafe Systems -// SPDX-License-Identifier: Apache-2.0, MIT - -use std::fs; - -use anyhow::{ensure, Result}; -use assert_cmd::Command; -use tempfile::TempDir; - -#[test] -fn test_snapshot_subcommand_dir() -> Result<()> { - let cmd = cli()? 
- .arg("--chain") - .arg("calibnet") - .arg("snapshot") - .arg("dir") - .assert() - .success(); - - let output = std::str::from_utf8(&cmd.get_output().stdout)? - // Normalize path for windows - .replace('\\', "/"); - ensure!(output.contains("/snapshots/calibnet"), output); - - Ok(()) -} - -#[test] -fn test_snapshot_subcommand_list() -> Result<()> { - let tmp_dir = TempDir::new().unwrap(); - let filenames = [ - "forest_snapshot_calibnet_2022-11-22_height_1.car", - "forest_snapshot_calibnet_2022-11-22_height_2.car", - ]; - setup_data_dir(&tmp_dir, filenames.as_slice())?; - - let cmd = cli()? - .arg("--chain") - .arg("calibnet") - .arg("snapshot") - .arg("list") - .arg("--snapshot-dir") - .arg(tmp_dir.path().as_os_str().to_str().unwrap_or_default()) - .assert() - .success(); - - let output = std::str::from_utf8(&cmd.get_output().stdout)?.to_owned(); - for filename in filenames { - ensure!(output.contains(filename), output); - } - - Ok(()) -} - -#[test] -fn test_snapshot_subcommand_list_invalid_dir() -> Result<()> { - let cmd = cli()? - .arg("--chain") - .arg("calibnet") - .arg("snapshot") - .arg("list") - .arg("--snapshot-dir") - .arg("/this/is/dummy/path") - .assert() - .success(); - - let output = std::str::from_utf8(&cmd.get_output().stdout)?.trim_end(); - ensure!(output.ends_with("No local snapshots")); - - Ok(()) -} - -#[test] -fn test_snapshot_subcommand_remove_invalid() -> Result<()> { - let tmp_dir = TempDir::new().unwrap(); - let filenames = ["snapshot1.car", "snapshot2.car"]; - setup_data_dir(&tmp_dir, filenames.as_slice())?; - - let cmd = cli()? - .arg("--chain") - .arg("calibnet") - .arg("snapshot") - .arg("remove") - .arg("dummy.car") - .arg("--force") - .arg("--snapshot-dir") - .arg(tmp_dir.path().as_os_str().to_str().unwrap_or_default()) - .assert() - .success(); - - let output = std::str::from_utf8(&cmd.get_output().stdout)?.to_owned(); - ensure!(output.contains("is not a valid snapshot file path"), output); - - Ok(()) -} - -#[test] -fn test_snapshot_subcommand_remove_success() -> Result<()> { - let tmp_dir = TempDir::new().unwrap(); - let filenames = ["snapshot1.car", "snapshot2.car"]; - setup_data_dir(&tmp_dir, filenames.as_slice())?; - - let cmd = cli()? - .arg("--chain") - .arg("calibnet") - .arg("snapshot") - .arg("remove") - .arg(filenames[0]) - .arg("--force") - .arg("--snapshot-dir") - .arg(tmp_dir.path().as_os_str().to_str().unwrap_or_default()) - .assert() - .success(); - - let output = std::str::from_utf8(&cmd.get_output().stdout)?.to_owned(); - ensure!(output.contains(filenames[0]), output); - ensure!( - output.contains(&filenames[0].replace(".car", ".sha256sum")), - output - ); - - Ok(()) -} - -#[test] -fn test_snapshot_subcommand_prune_empty() -> Result<()> { - let tmp_dir = TempDir::new().unwrap(); - let cmd = cli()? - .arg("--chain") - .arg("calibnet") - .arg("snapshot") - .arg("prune") - .arg("--force") - .arg("--snapshot-dir") - .arg(tmp_dir.path().as_os_str().to_str().unwrap_or_default()) - .assert() - .success(); - - let output = std::str::from_utf8(&cmd.get_output().stdout)?.to_owned(); - ensure!(output.contains("No files to delete"), output); - - Ok(()) -} - -#[test] -fn test_snapshot_subcommand_prune_single() -> Result<()> { - let tmp_dir = TempDir::new().unwrap(); - let filenames = ["forest_snapshot_calibnet_2022-09-28_height_1342143.car"]; - setup_data_dir(&tmp_dir, filenames.as_slice())?; - - let cmd = cli()? 
- .arg("--chain") - .arg("calibnet") - .arg("snapshot") - .arg("prune") - .arg("--force") - .arg("--snapshot-dir") - .arg(tmp_dir.path().as_os_str().to_str().unwrap_or_default()) - .assert() - .success(); - - let output = std::str::from_utf8(&cmd.get_output().stdout)?.to_owned(); - ensure!(output.contains("No files to delete"), output); - - Ok(()) -} - -#[test] -fn test_snapshot_subcommand_prune_single_with_custom() -> Result<()> { - let tmp_dir = TempDir::new().unwrap(); - let filenames = [ - "forest_snapshot_calibnet_2022-09-28_height_1342143.car", - "custom.car", - ]; - setup_data_dir(&tmp_dir, filenames.as_slice())?; - - let cmd = cli()? - .arg("--chain") - .arg("calibnet") - .arg("snapshot") - .arg("prune") - .arg("--force") - .arg("--snapshot-dir") - .arg(tmp_dir.path().as_os_str().to_str().unwrap_or_default()) - .assert() - .success(); - - let output = std::str::from_utf8(&cmd.get_output().stdout)?.to_owned(); - ensure!(output.contains("No files to delete"), output); - - Ok(()) -} - -#[test] -fn test_snapshot_subcommand_prune_double() -> Result<()> { - let tmp_dir = TempDir::new().unwrap(); - let filenames = [ - "forest_snapshot_calibnet_2022-10-10_height_1376736.car", - "forest_snapshot_calibnet_2022-09-28_height_1342143.car", - "custom.car", - ]; - setup_data_dir(&tmp_dir, filenames.as_slice())?; - - let cmd = cli()? - .arg("--chain") - .arg("calibnet") - .arg("snapshot") - .arg("prune") - .arg("--force") - .arg("--snapshot-dir") - .arg(tmp_dir.path().as_os_str().to_str().unwrap_or_default()) - .assert() - .success(); - - let output = std::str::from_utf8(&cmd.get_output().stdout)?.to_owned(); - ensure!( - output.contains("forest_snapshot_calibnet_2022-09-28_height_1342143.car"), - output - ); - ensure!( - output.contains("forest_snapshot_calibnet_2022-09-28_height_1342143.sha256sum"), - output - ); - - Ok(()) -} - -#[test] -fn test_snapshot_subcommand_clean_empty() -> Result<()> { - let tmp_dir = TempDir::new().unwrap(); - let cmd = cli()? - .arg("--chain") - .arg("calibnet") - .arg("snapshot") - .arg("clean") - .arg("--force") - .arg("--snapshot-dir") - .arg(tmp_dir.path().as_os_str().to_str().unwrap_or_default()) - .assert() - .success(); - - let output = std::str::from_utf8(&cmd.get_output().stdout)?.to_owned(); - ensure!(output.contains("No files to delete"), output); - - Ok(()) -} - -#[test] -fn test_snapshot_subcommand_clean_snapshot_dir_not_accessible() -> Result<()> { - let cmd = cli()? - .arg("--chain") - .arg("calibnet") - .arg("snapshot") - .arg("clean") - .arg("--force") - .arg("--snapshot-dir") - .arg("/turbo-cthulhu") - .assert() - .success(); - - let output = std::str::from_utf8(&cmd.get_output().stdout)?.to_owned(); - ensure!( - output.contains("Target directory not accessible. Skipping."), - output - ); - - Ok(()) -} - -#[test] -fn test_snapshot_subcommand_clean_one() -> Result<()> { - let tmp_dir = TempDir::new().unwrap(); - let filenames = ["forest_snapshot_calibnet_2022-10-10_height_1376736.car"]; - setup_data_dir(&tmp_dir, filenames.as_slice())?; - let cmd = cli()? 
- .arg("--chain") - .arg("calibnet") - .arg("snapshot") - .arg("clean") - .arg("--force") - .arg("--snapshot-dir") - .arg(tmp_dir.path().as_os_str().to_str().unwrap_or_default()) - .assert() - .success(); - - let output = std::str::from_utf8(&cmd.get_output().stdout)?.to_owned(); - for filename in filenames { - ensure!(output.contains(filename), output); - } - - Ok(()) -} - -#[test] -fn test_snapshot_subcommand_clean_more() -> Result<()> { - let tmp_dir = TempDir::new().unwrap(); - let filenames = [ - "forest_snapshot_calibnet_2022-10-10_height_1376736.car", - "forest_snapshot_calibnet_2022-09-28_height_1342143.car", - "custom.car", - ]; - setup_data_dir(&tmp_dir, filenames.as_slice())?; - let cmd = cli()? - .arg("--chain") - .arg("calibnet") - .arg("snapshot") - .arg("clean") - .arg("--force") - .arg("--snapshot-dir") - .arg(tmp_dir.path().as_os_str().to_str().unwrap_or_default()) - .assert() - .success(); - - let output = std::str::from_utf8(&cmd.get_output().stdout)?.to_owned(); - for filename in filenames { - ensure!(output.contains(filename), output); - } - - Ok(()) -} - -fn cli() -> Result { - Ok(Command::cargo_bin("forest-cli")?) -} - -fn setup_data_dir(tmp_dir: &TempDir, filenames: &[&str]) -> Result<()> { - for filename in filenames { - let mut path = tmp_dir.path().to_path_buf(); - path.push(filename); - fs::write(&path, "dummy")?; - path.set_extension("sha256sum"); - fs::write(&path, "dummy")?; - } - Ok(()) -} diff --git a/forest/daemon/Cargo.toml b/forest/daemon/Cargo.toml index f6ad6ad92f8a..661e3e0f3ba9 100644 --- a/forest/daemon/Cargo.toml +++ b/forest/daemon/Cargo.toml @@ -13,7 +13,7 @@ path = "src/main.rs" [dependencies] anyhow.workspace = true -atty.workspace = true +byte-unit.workspace = true cfg-if.workspace = true clap.workspace = true daemonize-me = "2.0" diff --git a/forest/daemon/src/daemon.rs b/forest/daemon/src/daemon.rs index c66988feb79c..8d8c80605e8c 100644 --- a/forest/daemon/src/daemon.rs +++ b/forest/daemon/src/daemon.rs @@ -1,20 +1,25 @@ // Copyright 2019-2023 ChainSafe Systems // SPDX-License-Identifier: Apache-2.0, MIT -use std::{net::TcpListener, path::PathBuf, sync::Arc, time, time::Duration}; +use std::{ + cell::RefCell, + net::TcpListener, + path::{Path, PathBuf}, + sync::Arc, + time, + time::Duration, +}; -use anyhow::Context; -use dialoguer::{theme::ColorfulTheme, Confirm, Password}; +use anyhow::{bail, Context}; +use dialoguer::{console::Term, theme::ColorfulTheme}; use forest_auth::{create_token, generate_priv_key, ADMIN, JWT_IDENTIFIER}; use forest_blocks::Tipset; use forest_chain::ChainStore; use forest_chain_sync::{consensus::SyncGossipSubmitter, ChainMuxer}; use forest_cli_shared::{ chain_path, - cli::{ - default_snapshot_dir, is_aria2_installed, snapshot_fetch, snapshot_fetch_size, - to_size_string, CliOpts, Client, Config, SnapshotServer, - }, + cli::{CliOpts, Config}, + snapshot, }; use forest_daemon::bundle::load_bundles; use forest_db::{ @@ -32,14 +37,15 @@ use forest_libp2p::{get_keypair, Libp2pConfig, Libp2pService, PeerId, PeerManage use forest_message_pool::{MessagePool, MpoolConfig, MpoolRpcProvider}; use forest_rpc::start_rpc; use forest_rpc_api::data_types::RPCState; -use forest_shim::version::NetworkVersion; +use forest_shim::{clock::ChainEpoch, version::NetworkVersion}; use forest_state_manager::StateManager; use forest_utils::{ io::write_to_file, monitoring::MemStatsTracker, proofs_api::paramfetch::ensure_params_downloaded, retry, version::FOREST_VERSION_STRING, + RetryArgs, }; -use futures::{select, FutureExt}; -use 
log::{debug, error, info, warn}; +use futures::{select, Future, FutureExt}; +use log::{debug, info, warn}; use raw_sync::events::{Event, EventInit, EventState}; use tokio::{ signal::{ @@ -48,7 +54,6 @@ use tokio::{ }, sync::{mpsc, RwLock}, task::JoinSet, - time::sleep, }; // Initialize Consensus @@ -141,7 +146,7 @@ pub(super) async fn start( // from. info!("PeerId: {}", PeerId::from(net_keypair.public())); - let mut keystore = create_keystore(&config).await?; + let mut keystore = load_or_create_keystore(&config).await?; if keystore.get(JWT_IDENTIFIER).is_err() { keystore.put(JWT_IDENTIFIER.to_owned(), generate_priv_key())?; @@ -272,17 +277,7 @@ pub(super) async fn start( return Ok(()); } - // XXX: This code has to be run before starting the background services. - // If it isn't, several threads will be competing for access to stdout. - // Terminate if no snapshot is provided or DB isn't recent enough - let epoch = chain_store.heaviest_tipset().epoch(); - let nv = config.chain.network_version(epoch); - let should_fetch_snapshot = if nv < NetworkVersion::V16 { - prompt_snapshot_or_die(opts.auto_download_snapshot, &config).await? - } else { - false - }; load_bundles(epoch, &config, db.clone()).await?; @@ -398,7 +393,8 @@ pub(super) async fn start( ); } - let config = maybe_fetch_snapshot(should_fetch_snapshot, config).await?; + let mut config = config; + fetch_snapshot_if_required(&mut config, epoch, opts.auto_download_snapshot).await?; if let Some(path) = &config.client.snapshot_path { let stopwatch = time::Instant::now(); @@ -436,8 +432,90 @@ pub(super) async fn start( services.spawn(p2p_service.run()); // blocking until any of the services returns an error, - let err = propagate_error(&mut services).await; - anyhow::bail!("services failure: {}", err); + propagate_error(&mut services) + .await + .context("services failure") + .map(|_| {}) +} + +/// If our current chain is below a supported height, we need a snapshot to bring it up +/// to a supported height. If we've not been given a snapshot by the user, get one. +/// +/// An [`Err`] should be considered fatal. +async fn fetch_snapshot_if_required( + config: &mut Config, + epoch: ChainEpoch, + auto_download_snapshot: bool, +) -> anyhow::Result<()> { + let vendor = snapshot::Vendor::default(); + let path = Path::new("."); + let chain = &config.chain.network; + + // What height is our chain at right now, and what network version does that correspond to? + let network_version = config.chain.network_version(epoch); + let network_version_is_small = network_version < NetworkVersion::V16; + + // We don't support small network versions (we can't validate from e.g genesis). 
+ // So we need a snapshot (which will be from a recent network version) + let require_a_snapshot = network_version_is_small; + let have_a_snapshot = config.client.snapshot_path.is_some(); + + match (require_a_snapshot, have_a_snapshot, auto_download_snapshot) { + (false, _, _) => Ok(()), // noop - don't need a snapshot + (true, true, _) => Ok(()), // noop - we need a snapshot, and we have one + (true, false, true) => { + // we need a snapshot, don't have one, and have permission to download one, so do that + let max_retries = 3; + match retry( + RetryArgs { + timeout: None, + max_retries: Some(max_retries), + delay: Some(Duration::from_secs(60)), + }, + || forest_cli_shared::snapshot::fetch(path, chain, vendor), + ) + .await + { + Ok(path) => { + config.client.snapshot_path = Some(path); + config.client.snapshot = true; + Ok(()) + } + Err(_) => bail!("failed to fetch snapshot after {max_retries} attempts"), + } + } + (true, false, false) => { + // we need a snapshot, don't have one, and don't have permission to download one, so ask the user + let (num_bytes, _url) = + forest_cli_shared::snapshot::peek(vendor, &config.chain.network) + .await + .context("couldn't get snapshot size")?; + let num_bytes = byte_unit::Byte::from(num_bytes) + .get_appropriate_unit(true) + .format(2); + let message = format!("Forest requires a snapshot to sync with the network, but automatic fetching is disabled. Fetch a {num_bytes} snapshot to the current directory? (denying will exit the program). "); + let have_permission = asyncify(|| { + dialoguer::Confirm::with_theme(&ColorfulTheme::default()) + .with_prompt(message) + .default(false) + .interact() + // e.g not a tty (or some other error), so haven't got permission. + .unwrap_or(false) + }) + .await; + if !have_permission { + bail!("Forest requires a snapshot to sync with the network, but automatic fetching is disabled.") + } + match forest_cli_shared::snapshot::fetch(path, chain, vendor).await { + Ok(path) => { + config.client.snapshot_path = Some(path); + config.client.snapshot = true; + Ok(()) + } + Err(e) => Err(e).context("downloading required snapshot failed"), + } + } + } } /// Generates, prints and optionally writes to a file the administrator JWT @@ -454,90 +532,21 @@ fn handle_admin_token(opts: &CliOpts, config: &Config, keystore: &KeyStore) -> a Ok(()) } -// returns the first error with which any of the services end -// in case all services finished without an error sleeps for more than 2 years -// and then returns with an error -async fn propagate_error(services: &mut JoinSet>) -> anyhow::Error { +/// returns the first error with which any of the services end, or never returns at all +// This should return anyhow::Result once the `Never` type is stabilized +async fn propagate_error( + services: &mut JoinSet>, +) -> anyhow::Result { while !services.is_empty() { select! { option = services.join_next().fuse() => { if let Some(Ok(Err(error_message))) = option { - return error_message + return Err(error_message) } }, } } - // In case all services are down without errors we are still willing - // to wait indefinitely for CTRL-C signal. As `tokio::time::sleep` has - // a limit of approximately 2.2 years we have to loop - loop { - tokio::time::sleep(Duration::new(64000000, 0)).await; - } -} - -/// Optionally fetches the snapshot. Returns the configuration (modified -/// accordingly if a snapshot was fetched). 
-async fn maybe_fetch_snapshot( - should_fetch_snapshot: bool, - config: Config, -) -> anyhow::Result { - if should_fetch_snapshot { - let snapshot_path = default_snapshot_dir(&config); - let provider = SnapshotServer::try_get_default(&config.chain.network)?; - // FIXME: change this to `true` once zstd compressed snapshots is supported by - // the forest provider - let use_compressed = provider == SnapshotServer::Filecoin; - let path = retry!( - snapshot_fetch, - config.daemon.default_retry, - config.daemon.default_delay, - &snapshot_path, - &config, - &Some(provider), - use_compressed, - is_aria2_installed() - )?; - Ok(Config { - client: Client { - snapshot_path: Some(path), - snapshot: true, - ..config.client - }, - ..config - }) - } else { - Ok(config) - } -} - -/// Last resort in case a snapshot is needed. If it is not to be downloaded, -/// this method fails and exits the process. -async fn prompt_snapshot_or_die( - auto_download_snapshot: bool, - config: &Config, -) -> anyhow::Result { - if config.client.snapshot_path.is_some() { - return Ok(false); - } - let should_download = if !auto_download_snapshot && atty::is(atty::Stream::Stdin) { - let required_size: u64 = snapshot_fetch_size(config).await?; - let required_size = to_size_string(&required_size.into())?; - tokio::task::spawn_blocking(move || Confirm::with_theme(&ColorfulTheme::default()) - .with_prompt( - format!("Forest needs a snapshot to sync with the network. Would you like to download one now? Required disk space {required_size}."), - ) - .default(false) - .interact() - ).await?? - } else { - auto_download_snapshot - }; - - if should_download { - Ok(true) - } else { - anyhow::bail!("Forest cannot sync without a snapshot. Download a snapshot from a trusted source and import with --import-snapshot=[file] or --auto-download-snapshot to download one automatically"); - } + std::future::pending().await } fn get_actual_chain_name(internal_network_name: &str) -> &str { @@ -548,68 +557,140 @@ fn get_actual_chain_name(internal_network_name: &str) -> &str { } } -async fn create_keystore(config: &Config) -> anyhow::Result { - let passphrase = std::env::var(FOREST_KEYSTORE_PHRASE_ENV); - let is_interactive = atty::is(atty::Stream::Stdin); - - // encrypted keystore, headless - if config.client.encrypt_keystore && passphrase.is_err() && !is_interactive { - anyhow::bail!("Passphrase for the keystore was not provided and the encryption was not explicitly disabled. 
Please set the {FOREST_KEYSTORE_PHRASE_ENV} environmental variable and re-run the command"); - // encrypted keystore, either headless or interactive, passphrase provided - } else if config.client.encrypt_keystore && passphrase.is_ok() { - let passphrase = passphrase.unwrap(); +/// This may: +/// - create a [`KeyStore`] +/// - load a [`KeyStore`] +/// - ask a user for password input +async fn load_or_create_keystore(config: &Config) -> anyhow::Result { + use std::env::VarError; + + let passphrase_from_env = std::env::var(FOREST_KEYSTORE_PHRASE_ENV); + let require_encryption = config.client.encrypt_keystore; + let keystore_already_exists = config + .client + .data_dir + .join(ENCRYPTED_KEYSTORE_NAME) + .is_dir(); + + match (require_encryption, passphrase_from_env) { + // don't need encryption, we can implicitly create a keystore + (false, maybe_passphrase) => { + warn!("Forest has encryption disabled"); + if let Ok(_) | Err(VarError::NotUnicode(_)) = maybe_passphrase { + warn!( + "Ignoring passphrase provided in {} - encryption is disabled", + FOREST_KEYSTORE_PHRASE_ENV + ) + } + KeyStore::new(KeyStoreConfig::Persistent(config.client.data_dir.clone())) + .map_err(anyhow::Error::new) + } - let keystore = KeyStore::new(KeyStoreConfig::Encrypted( - PathBuf::from(&config.client.data_dir), + // need encryption, the user has provided the password through env + (true, Ok(passphrase)) => KeyStore::new(KeyStoreConfig::Encrypted( + config.client.data_dir.clone(), passphrase, - )); + )) + .map_err(anyhow::Error::new), - keystore.map_err(|_| anyhow::anyhow!("Incorrect passphrase. Please verify the {FOREST_KEYSTORE_PHRASE_ENV} environmental variable.")) - // encrypted keystore, interactive, passphrase not provided - } else if config.client.encrypt_keystore && passphrase.is_err() && is_interactive { - loop { - let passphrase = password_prompt("Enter the keystore passphrase").await?; + // need encryption, we've not been given a password + (true, Err(error)) => { + // prompt for passphrase and try and load the keystore - let data_dir = PathBuf::from(&config.client.data_dir).join(ENCRYPTED_KEYSTORE_NAME); - if !data_dir.exists() { - let passphrase_again = password_prompt("Confirm passphrase").await?; + if let VarError::NotUnicode(_) = error { + // If we're ignoring the user's password, tell them why + warn!( + "Ignoring passphrase provided in {} - it's not utf-8", + FOREST_KEYSTORE_PHRASE_ENV + ) + } - if passphrase != passphrase_again { - error!("Passphrases do not match. Please retry."); - continue; + let data_dir = config.client.data_dir.clone(); + + match keystore_already_exists { + true => asyncify(move || input_password_to_load_encrypted_keystore(data_dir)) + .await + .context("Couldn't load keystore"), + false => { + let password = + asyncify(|| create_password("Create a password for Forest's keystore")) + .await?; + KeyStore::new(KeyStoreConfig::Encrypted(data_dir, password)) + .context("Couldn't create keystore") } } + } + } +} - let key_store_init_result = KeyStore::new(KeyStoreConfig::Encrypted( - config.client.data_dir.clone(), - passphrase, - )); +/// Run the closure on a thread where blocking is allowed +/// +/// # Panics +/// If the closure panics +fn asyncify(f: impl FnOnce() -> T + Send + 'static) -> impl Future +where + T: Send + 'static, +{ + tokio::task::spawn_blocking(f).then(|res| async { res.expect("spawned task panicked") }) +} - match key_store_init_result { - Ok(ks) => break Ok(ks), - Err(_) => { - error!("Incorrect passphrase entered. 
Please try again.") - } - }; - } - } else { - warn!("Warning: Keystore encryption disabled!"); - Ok(KeyStore::new(KeyStoreConfig::Persistent( - config.client.data_dir.clone(), - ))?) +/// Prompts for password, looping until the [`KeyStore`] is successfully loaded. +/// +/// This code makes blocking syscalls. +fn input_password_to_load_encrypted_keystore(data_dir: PathBuf) -> std::io::Result { + let keystore = RefCell::new(None); + let term = Term::stderr(); + + // Unlike `dialoguer::Confirm`, `dialoguer::Password` doesn't fail if the terminal is not a tty + // so do that check ourselves. + // This means users can't pipe their password from stdin. + if !term.is_term() { + return Err(std::io::Error::new( + std::io::ErrorKind::NotConnected, + "cannot read password from non-terminal", + )); } + + dialoguer::Password::new() + .with_prompt("Enter the password for Forest's keystore") + .allow_empty_password(true) // let validator do validation + .validate_with(|input: &String| { + KeyStore::new(KeyStoreConfig::Encrypted(data_dir.clone(), input.clone())) + .map(|created| *keystore.borrow_mut() = Some(created)) + .context( + "Error: couldn't load keystore with this password. Try again or press Ctrl+C to abort.", + ) + }) + .interact_on(&term)?; + + Ok(keystore + .into_inner() + .expect("validation succeeded, so keystore must be emplaced")) } -// Prompt for password in a blocking thread such that tokio can still process interrupts. -async fn password_prompt(prompt: impl Into) -> anyhow::Result { - let prompt: String = prompt.into(); - Ok(tokio::task::spawn_blocking(|| { - Password::with_theme(&ColorfulTheme::default()) - .allow_empty_password(true) - .with_prompt(prompt) - .interact() - }) - .await??) +/// Loops until the user provides two matching passwords. +/// +/// This code makes blocking syscalls +fn create_password(prompt: &str) -> std::io::Result { + let term = Term::stderr(); + + // Unlike `dialoguer::Confirm`, `dialoguer::Password` doesn't fail if the terminal is not a tty + // so do that check ourselves. + // This means users can't pipe their password from stdin. + if !term.is_term() { + return Err(std::io::Error::new( + std::io::ErrorKind::NotConnected, + "cannot read password from non-terminal", + )); + } + dialoguer::Password::new() + .with_prompt(prompt) + .allow_empty_password(false) + .with_confirmation( + "Confirm password", + "Error: the passwords do not match. 
Try again or press Ctrl+C to abort.", + ) + .interact_on(&term) } #[cfg(test)] @@ -623,39 +704,34 @@ mod test { use super::*; #[tokio::test] - async fn import_snapshot_from_file_valid() -> anyhow::Result<()> { - anyhow::ensure!(import_snapshot_from_file("test_files/chain4.car") + async fn import_snapshot_from_file_valid() { + import_snapshot_from_file("test_files/chain4.car") .await - .is_ok()); - Ok(()) + .unwrap(); } #[tokio::test] - async fn import_snapshot_from_compressed_file_valid() -> anyhow::Result<()> { - anyhow::ensure!(import_snapshot_from_file("test_files/chain4.car.zst") + async fn import_snapshot_from_compressed_file_valid() { + import_snapshot_from_file("test_files/chain4.car.zst") .await - .is_ok()); - Ok(()) + .unwrap() } #[tokio::test] - async fn import_snapshot_from_file_invalid() -> anyhow::Result<()> { - anyhow::ensure!(import_snapshot_from_file("Cargo.toml").await.is_err()); - Ok(()) + async fn import_snapshot_from_file_invalid() { + import_snapshot_from_file("Cargo.toml").await.unwrap_err(); } #[tokio::test] - async fn import_snapshot_from_file_not_found() -> anyhow::Result<()> { - anyhow::ensure!(import_snapshot_from_file("dummy.car").await.is_err()); - Ok(()) + async fn import_snapshot_from_file_not_found() { + import_snapshot_from_file("dummy.car").await.unwrap_err(); } #[tokio::test] - async fn import_snapshot_from_url_not_found() -> anyhow::Result<()> { - anyhow::ensure!(import_snapshot_from_file("https://dummy.com/dummy.car") + async fn import_snapshot_from_url_not_found() { + import_snapshot_from_file("https://dummy.com/dummy.car") .await - .is_err()); - Ok(()) + .unwrap_err(); } async fn import_snapshot_from_file(file_path: &str) -> anyhow::Result<()> { @@ -706,7 +782,7 @@ mod test { )?); import_chain::<_>(&sm, "test_files/chain4.car", false) .await - .expect("Failed to import chain"); + .context("Failed to import chain")?; Ok(()) } diff --git a/forest/daemon/tests/keystore_tests.rs b/forest/daemon/tests/keystore_tests.rs index 40f8c300d3d0..646e9f437be0 100644 --- a/forest/daemon/tests/keystore_tests.rs +++ b/forest/daemon/tests/keystore_tests.rs @@ -46,7 +46,7 @@ fn forest_headless_no_encrypt_no_passphrase_should_succeed() -> Result<()> { fn forest_headless_encrypt_keystore_with_passphrase_should_succeed() -> Result<()> { let (config_file, data_dir) = create_tmp_config()?; cli()? 
- .env(FOREST_KEYSTORE_PHRASE_ENV, "yuggoth") + .env(FOREST_KEYSTORE_PHRASE_ENV, "hunter2") .common_args() .arg("--config") .arg(config_file) @@ -54,7 +54,6 @@ fn forest_headless_encrypt_keystore_with_passphrase_should_succeed() -> Result<( .success(); assert!(data_dir.path().join(ENCRYPTED_KEYSTORE_NAME).exists()); - Ok(()) } diff --git a/forest/shared/Cargo.toml b/forest/shared/Cargo.toml index 68c528f6dc67..112931a5b1cc 100644 --- a/forest/shared/Cargo.toml +++ b/forest/shared/Cargo.toml @@ -11,7 +11,7 @@ edition = "2021" ahash.workspace = true anyhow.workspace = true atty.workspace = true -byte-unit = "4.0" +byte-unit.workspace = true cfg-if.workspace = true chrono.workspace = true clap.workspace = true @@ -25,35 +25,32 @@ forest_rpc-client.workspace = true forest_utils.workspace = true futures.workspace = true gethostname.workspace = true -hex.workspace = true +indicatif.workspace = true log = { workspace = true, features = ["serde"] } +nom.workspace = true num.workspace = true -regex.workspace = true -s3 = { package = "rust-s3", version = "0.32", default-features = false, features = ["sync-rustls-tls", "tags"] } +reqwest.workspace = true serde = { workspace = true, features = ["derive"] } serde_with.workspace = true -sha2.workspace = true -tokio = { workspace = true, features = ["sync"] } +strum.workspace = true +tap.workspace = true +tokio = { workspace = true, features = ["sync", "process"] } +tokio-util.workspace = true toml.workspace = true tracing-appender.workspace = true tracing-loki.workspace = true tracing-subscriber.workspace = true +tracing.workspace = true url.workspace = true -which.workspace = true # optional mimalloc = { version = "0.1.34", default-features = false, optional = true } tikv-jemallocator = { version = "0.5", optional = true } [dev-dependencies] -axum.workspace = true -http.workspace = true quickcheck.workspace = true quickcheck_macros.workspace = true -rand.workspace = true -tempfile.workspace = true tokio.workspace = true -tower-http = { workspace = true, features = ["fs"] } [features] rocksdb = ["forest_db/rocksdb"] diff --git a/forest/shared/src/cli/config.rs b/forest/shared/src/cli/config.rs index 3ba43cae7575..3d3a843c0d84 100644 --- a/forest/shared/src/cli/config.rs +++ b/forest/shared/src/cli/config.rs @@ -5,16 +5,16 @@ use core::time::Duration; use std::{path::PathBuf, sync::Arc}; use forest_chain_sync::SyncConfig; +#[cfg(any(feature = "paritydb", feature = "rocksdb"))] use forest_db::db_engine::DbConfig; use forest_libp2p::Libp2pConfig; use forest_networks::ChainConfig; use log::LevelFilter; use serde::{Deserialize, Serialize}; -use url::Url; use super::client::Client; -#[derive(Serialize, Deserialize, PartialEq, Eq)] +#[derive(Serialize, Deserialize, PartialEq, Eq, Debug, Clone)] pub struct LogConfig { pub filters: Vec, } @@ -46,7 +46,7 @@ impl Default for LogConfig { } } -#[derive(Serialize, Deserialize, PartialEq, Eq, Hash)] +#[derive(Serialize, Deserialize, PartialEq, Eq, Hash, Debug, Clone)] pub struct LogValue { pub module: String, pub level: LevelFilter, @@ -61,79 +61,8 @@ impl LogValue { } } -#[derive(Serialize, Deserialize, PartialEq, Eq, Default)] -pub struct SnapshotFetchConfig { - pub forest: ForestSnapshotFetchConfig, - pub filecoin: FilecoinSnapshotFetchConfig, -} - -#[derive(Serialize, Deserialize, PartialEq, Eq)] -pub struct FilecoinSnapshotFetchConfig { - pub mainnet_compressed: Url, - pub calibnet_compressed: Url, -} - -impl Default for FilecoinSnapshotFetchConfig { - fn default() -> Self { - // unfallible unwrap as we know that 
the value is correct - // - Self { - mainnet_compressed: Url::try_from( - "https://snapshots.mainnet.filops.net/minimal/latest", - ) - .unwrap(), - calibnet_compressed: Url::try_from( - "https://snapshots.calibrationnet.filops.net/minimal/latest", - ) - .unwrap(), - } - } -} - -#[derive(Serialize, Deserialize, PartialEq, Eq)] -pub struct ForestSnapshotFetchConfig { - pub mainnet: ForestFetchConfig, - pub calibnet: ForestFetchConfig, -} - -#[derive(Serialize, Deserialize, PartialEq, Eq)] -pub struct ForestFetchConfig { - pub snapshot_spaces_url: Url, - pub bucket_name: String, - pub region: String, - pub path: String, -} - -impl Default for ForestSnapshotFetchConfig { - fn default() -> Self { - // unfallible unwrap as we know that the value is correct - Self { - // Forest does not support snapshot service for mainnet yet. - // TODO: Update config when mainnet snapshot service is available - mainnet: ForestFetchConfig { - snapshot_spaces_url: Url::try_from( - "https://forest-snapshots.fra1.digitaloceanspaces.com", - ) - .unwrap(), - bucket_name: "forest-snapshots".to_string(), - region: "fra1".to_string(), - path: "mainnet/".to_string(), - }, - calibnet: ForestFetchConfig { - snapshot_spaces_url: Url::try_from( - "https://forest-snapshots.fra1.digitaloceanspaces.com", - ) - .unwrap(), - bucket_name: "forest-snapshots".to_string(), - region: "fra1".to_string(), - path: "calibnet/".to_string(), - }, - } - } -} - /// Structure that defines daemon configuration when process is detached -#[derive(Deserialize, Serialize, PartialEq, Eq)] +#[derive(Deserialize, Serialize, PartialEq, Eq, Debug, Clone)] pub struct DaemonConfig { pub user: Option, pub group: Option, @@ -142,8 +71,6 @@ pub struct DaemonConfig { pub stderr: PathBuf, pub work_dir: PathBuf, pub pid_file: Option, - pub default_retry: i32, - pub default_delay: Duration, } impl Default for DaemonConfig { @@ -156,13 +83,11 @@ impl Default for DaemonConfig { stderr: "forest.err".into(), work_dir: ".".into(), pid_file: None, - default_retry: 3, - default_delay: Duration::from_secs(60), } } } -#[derive(Deserialize, Serialize, PartialEq, Eq, Clone, Default)] +#[derive(Deserialize, Serialize, PartialEq, Eq, Clone, Default, Debug)] pub struct TokioConfig { pub worker_threads: Option, pub max_blocking_threads: Option, @@ -171,7 +96,7 @@ pub struct TokioConfig { pub global_queue_interval: Option, } -#[derive(Serialize, Deserialize, PartialEq, Default)] +#[derive(Serialize, Deserialize, PartialEq, Default, Debug, Clone)] #[serde(default)] pub struct Config { pub client: Client, @@ -182,7 +107,6 @@ pub struct Config { pub chain: Arc, pub daemon: DaemonConfig, pub log: LogConfig, - pub snapshot_fetch: SnapshotFetchConfig, pub tokio: TokioConfig, } @@ -217,6 +141,7 @@ mod test { /// Partial configuration, as some parts of the proper one don't implement /// required traits (i.e. 
Debug) + // This should be removed in #2965 #[derive(Clone, Debug)] struct ConfigPartial { client: Client, @@ -237,7 +162,6 @@ mod test { chain: Arc::new(ChainConfig::default()), daemon: DaemonConfig::default(), log: Default::default(), - snapshot_fetch: Default::default(), tokio: Default::default(), } } diff --git a/forest/shared/src/cli/mod.rs b/forest/shared/src/cli/mod.rs index 05c1b851162e..8ff9f0f483f4 100644 --- a/forest/shared/src/cli/mod.rs +++ b/forest/shared/src/cli/mod.rs @@ -3,7 +3,6 @@ mod client; mod config; -mod snapshot_fetch; use std::{ net::SocketAddr, @@ -20,7 +19,7 @@ use forest_utils::io::{read_file_to_string, read_toml, ProgressBarVisibility}; use log::error; use num::BigInt; -pub use self::{client::*, config::*, snapshot_fetch::*}; +pub use self::{client::*, config::*}; use crate::logger::LoggingColor; pub static HELP_MESSAGE: &str = "\ @@ -309,14 +308,6 @@ pub fn check_for_unknown_keys(path: &Path, config: &Config) { } } -pub fn default_snapshot_dir(config: &Config) -> PathBuf { - config - .client - .data_dir - .join("snapshots") - .join(config.chain.network.to_string()) -} - /// Gets chain data directory pub fn chain_path(config: &Config) -> PathBuf { PathBuf::from(&config.client.data_dir).join(config.chain.network.to_string()) diff --git a/forest/shared/src/cli/snapshot_fetch.rs b/forest/shared/src/cli/snapshot_fetch.rs deleted file mode 100644 index 6042d381ca23..000000000000 --- a/forest/shared/src/cli/snapshot_fetch.rs +++ /dev/null @@ -1,851 +0,0 @@ -// Copyright 2019-2023 ChainSafe Systems -// SPDX-License-Identifier: Apache-2.0, MIT -use std::{ - path::{Path, PathBuf}, - process::Command, - str::FromStr, - time::Duration, -}; - -use anyhow::bail; -use chrono::{DateTime, NaiveDate}; -use forest_networks::NetworkChain; -use forest_utils::{ - io::{progress_bar::Units, ProgressBar, TempFile}, - net::{ - https_client, - hyper::{self, client::connect::Connect, Body, Response}, - }, -}; -use hex::{FromHex, ToHex}; -use log::info; -use regex::Regex; -use s3::Bucket; -use sha2::{Digest, Sha256}; -use tokio::{ - fs::{create_dir_all, File}, - io::{AsyncWriteExt, BufWriter}, -}; -use url::Url; - -use super::Config; -use crate::cli::to_size_string; - -/// Snapshot fetch service provider -#[derive(Debug, Clone, Copy, Eq, PartialEq)] -pub enum SnapshotServer { - Forest, - Filecoin, -} - -impl SnapshotServer { - pub fn try_get_default(chain: &NetworkChain) -> anyhow::Result { - Ok(match chain { - NetworkChain::Mainnet => SnapshotServer::Filecoin, - NetworkChain::Calibnet => SnapshotServer::Forest, - NetworkChain::Devnet(name) => anyhow::bail!("Fetch not supported for chain {name}"), - }) - } -} - -impl FromStr for SnapshotServer { - type Err = anyhow::Error; - - fn from_str(provider: &str) -> Result { - match provider.to_lowercase().as_str() { - "forest" => Ok(SnapshotServer::Forest), - "filecoin" => Ok(SnapshotServer::Filecoin), - _ => bail!( - "Failed to fetch snapshot from: {provider}, Must be one of `forest`|`filecoin`." 
- ), - } - } -} - -/// Snapshot attributes -pub struct SnapshotInfo { - pub network: String, - pub date: NaiveDate, - pub height: i64, - pub path: PathBuf, -} - -/// Collection of snapshots -pub struct SnapshotStore { - pub snapshots: Vec, -} - -impl SnapshotStore { - pub fn new(config: &Config, snapshot_dir: &PathBuf) -> SnapshotStore { - let mut snapshots = Vec::new(); - let pattern = Regex::new( - r"^([^_]+?)_snapshot_(?P[^_]+?)_(?P\d{4}-\d{2}-\d{2})_height_(?P\d+)\.car(\.zst)?(\.tmp|\.aria2)?$", - ).unwrap(); - if let Ok(dir) = std::fs::read_dir(snapshot_dir) { - dir.flatten() - .map(|entry| entry.path()) - .filter(|p| is_car_or_zst_or_tmp(p)) - .for_each(|path| { - if let Some(filename) = path.file_name().and_then(|n| n.to_str()) { - if let Some(captures) = pattern.captures(filename) { - let network: String = captures.name("network").unwrap().as_str().into(); - if network == config.chain.network.to_string() { - let date = NaiveDate::parse_from_str( - captures.name("date").unwrap().as_str(), - "%Y-%m-%d", - ) - .unwrap(); - let height = captures - .name("height") - .unwrap() - .as_str() - .parse::() - .unwrap(); - let snapshot = SnapshotInfo { - network, - date, - height, - path, - }; - snapshots.push(snapshot); - } - } - } - }); - } - SnapshotStore { snapshots } - } - - pub fn display(&self) { - self.snapshots - .iter() - .for_each(|s| println!("{}", s.path.display())); - } -} - -pub fn is_car_or_zst_or_tmp(path: &Path) -> bool { - let ext = path.extension().unwrap_or_default(); - ext == "car" || ext == "zst" || ext == "tmp" || ext == "aria2" -} - -/// gets the size of a snapshot from Filecoin. -pub async fn snapshot_fetch_size(config: &Config) -> anyhow::Result { - let service_url = match &config.chain.network { - NetworkChain::Mainnet => config.snapshot_fetch.filecoin.mainnet_compressed.clone(), - NetworkChain::Calibnet => config.snapshot_fetch.filecoin.calibnet_compressed.clone(), - NetworkChain::Devnet(name) => bail!("Fetch not supported for chain {name}"), - }; - let client = https_client(); - - let snapshot_url = { - let head_response = client - .request(hyper::Request::head(service_url.as_str()).body("".into())?) - .await?; - - // Use the redirect if available. - match head_response.headers().get("location") { - Some(url) => url.to_str()?.try_into()?, - None => service_url, - } - }; - - let snapshot_response = client.get(snapshot_url.as_str().try_into()?).await?; - let total_size = snapshot_response - .headers() - .get("content-length") - .and_then(|ct_len| ct_len.to_str().ok()) - .and_then(|ct_len| ct_len.parse::().ok()) - .ok_or_else(|| anyhow::anyhow!("Couldn't retrieve content length"))?; - anyhow::Ok(total_size) -} - -/// Fetches snapshot from a trusted location and saves it to the given -/// directory. Chain is inferred from configuration. -pub async fn snapshot_fetch( - snapshot_out_dir: &Path, - config: &Config, - provider: &Option, - use_compressed: bool, - use_aria2: bool, -) -> anyhow::Result { - let server = match provider { - Some(s) => *s, - None => SnapshotServer::try_get_default(&config.chain.network)?, - }; - match server { - SnapshotServer::Forest => { - snapshot_fetch_forest(snapshot_out_dir, config, use_compressed, use_aria2).await - } - SnapshotServer::Filecoin => { - snapshot_fetch_filecoin(snapshot_out_dir, config, use_compressed, use_aria2).await - } - } -} - -/// Checks whether `aria2c` is available in PATH -pub fn is_aria2_installed() -> bool { - which::which("aria2c").is_ok() -} - -/// Fetches snapshot for `calibnet` from a default, trusted location. 
On -/// success, the snapshot will be saved in the given directory. In case of -/// failure (e.g. connection interrupted) it will not be removed. -async fn snapshot_fetch_forest( - snapshot_out_dir: &Path, - config: &Config, - use_compressed: bool, - use_aria2: bool, -) -> anyhow::Result { - if use_compressed { - anyhow::bail!( - "It is not yet supported by the forest provider to download zstd compressed snapshots" - ); - } - - let snapshot_fetch_config = match &config.chain.network { - NetworkChain::Mainnet => bail!( - "Mainnet snapshot fetch service not provided by Forest yet. Suggestion: use `--provider=filecoin` to fetch from Filecoin server." - ), - NetworkChain::Calibnet => &config.snapshot_fetch.forest.calibnet, - NetworkChain::Devnet(name) => bail!("Fetch not supported for chain {name}"), - }; - let name = &snapshot_fetch_config.bucket_name; - let region = &snapshot_fetch_config.region; - let bucket = Bucket::new_public(name, region.parse()?)?; - - // Grab contents of the bucket - let bucket_contents = bucket.list(snapshot_fetch_config.path.clone(), Some("/".to_string()))?; - - // Find the the last modified file that is not a directory or empty file - let last_modified = bucket_contents - .first() - .ok_or_else(|| anyhow::anyhow!("Couldn't list bucket"))? - .contents - .iter() - .filter(|obj| obj.size > 0 && obj.key.rsplit_once('.').unwrap_or_default().1 == "car") - .max_by_key(|obj| DateTime::parse_from_rfc3339(&obj.last_modified).unwrap_or_default()) - .ok_or_else(|| anyhow::anyhow!("Couldn't retrieve bucket contents"))? - .to_owned(); - - // Grab the snapshot name and create requested directory tree. - let filename = last_modified.key.rsplit_once('/').unwrap().1; - let snapshot_path = snapshot_out_dir.join(filename); - create_dir_all(snapshot_out_dir).await?; - - // Download the file - // It'd be better to use the bucket directly with `get_object_stream`, but at - // the time of writing this code the Stream API is a bit lacking, making - // adding a progress bar a pain. https://github.com/durch/rust-s3/issues/275 - let client = https_client(); - let snapshot_spaces_url = &snapshot_fetch_config.snapshot_spaces_url; - let path = &snapshot_fetch_config.path; - let url = snapshot_spaces_url.join(path)?.join(filename)?; - - let snapshot_response = client.get(url.as_str().try_into()?).await?; - - if use_aria2 { - download_snapshot_and_validate_checksum_if_needed_with_aria2( - client, - url, - &snapshot_path, - use_compressed, - ) - .await? - } else { - let total_size = last_modified.size; - download_snapshot_and_validate_checksum_if_needed( - client, - url, - &snapshot_path, - snapshot_response, - total_size, - use_compressed, - ) - .await?; - } - - Ok(snapshot_path) -} - -/// Fetches snapshot for `mainnet` from a default, trusted location. On success, -/// the snapshot will be saved in the given directory. In case of failure (e.g. -/// checksum verification fiasco) it will not be removed. -async fn snapshot_fetch_filecoin( - snapshot_out_dir: &Path, - config: &Config, - use_compressed: bool, - use_aria2: bool, -) -> anyhow::Result { - if !use_compressed { - bail!( - "Uncompressed snapshot fetch service not provided by Filecoin. \ - Suggestion: use `--compressed` to fetch compressed snapshots." 
- ); - } - let service_url = match &config.chain.network { - NetworkChain::Mainnet => config.snapshot_fetch.filecoin.mainnet_compressed.clone(), - NetworkChain::Calibnet => config.snapshot_fetch.filecoin.calibnet_compressed.clone(), - NetworkChain::Devnet(name) => bail!("Fetch not supported for chain {name}"), - }; - info!("Snapshot url: {service_url}"); - let client = https_client(); - - let snapshot_url = { - let head_response = client - .request(hyper::Request::head(service_url.as_str()).body("".into())?) - .await?; - - // Use the redirect if available. - match head_response.headers().get("location") { - Some(url) => { - let url = url.to_str()?; - if url.starts_with('/') { - let mut result_url = Url::parse(service_url.as_str())?; - result_url.set_path(url); - result_url - } else { - url.try_into()? - } - } - None => service_url, - } - }; - - let snapshot_response = client.get(snapshot_url.as_str().try_into()?).await?; - - // Grab the snapshot file name - let filename = filename_from_url(&snapshot_url)?; - // Create requested directory tree to store the snapshot - create_dir_all(snapshot_out_dir).await?; - let snapshot_name = normalize_filecoin_snapshot_name(&config.chain.network, &filename)?; - let snapshot_path = snapshot_out_dir.join(&snapshot_name); - // Download the file - if use_aria2 { - download_snapshot_and_validate_checksum_if_needed_with_aria2( - client, - snapshot_url, - &snapshot_path, - use_compressed, - ) - .await? - } else { - let total_size = snapshot_response - .headers() - .get("content-length") - .and_then(|ct_len| ct_len.to_str().ok()) - .and_then(|ct_len| ct_len.parse::().ok()) - .ok_or_else(|| anyhow::anyhow!("Couldn't retrieve content length"))?; - - download_snapshot_and_validate_checksum_if_needed( - client, - snapshot_url, - &snapshot_path, - snapshot_response, - total_size, - use_compressed, - ) - .await?; - } - Ok(snapshot_path) -} - -/// Downloads snapshot to a file with a progress bar. Returns the digest of the -/// downloaded file. -async fn download_snapshot_and_validate_checksum_if_needed( - client: hyper::Client, - url: Url, - snapshot_path: &Path, - snapshot_response: Response, - total_size: u64, - use_compressed: bool, -) -> anyhow::Result<()> -where - C: Connect + Clone + Send + Sync + 'static, -{ - info!("Snapshot url: {url}"); - info!( - "Snapshot will be downloaded to {} ({})", - snapshot_path.display(), - to_size_string(&total_size.into())? - ); - - let progress_bar = ProgressBar::new(total_size); - progress_bar.message("Downloading snapshot "); - progress_bar.set_max_refresh_rate(Some(Duration::from_millis(500))); - progress_bar.set_units(Units::Bytes); - - let snapshot_file_tmp = TempFile::new(snapshot_path.with_extension("car.tmp")); - let file = File::create(snapshot_file_tmp.path()).await?; - let mut writer = BufWriter::new(file); - let mut downloaded: u64 = 0; - let mut stream = snapshot_response.into_body(); - - let mut snapshot_hasher = if use_compressed { - None - } else { - Some(Sha256::new()) - }; - - while let Some(item) = futures::StreamExt::next(&mut stream).await { - let chunk = item?; - writer.write_all(&chunk).await?; - downloaded = total_size.min(downloaded + chunk.len() as u64); - progress_bar.set(downloaded); - if let Some(snapshot_hasher) = &mut snapshot_hasher { - snapshot_hasher.update(chunk); - } - } - writer.flush().await?; - - let file_size = std::fs::metadata(snapshot_file_tmp.path())?.len(); - if file_size != total_size { - bail!("Didn't manage to download the entire file. 
{file_size}/{total_size} [B]"); - } - - progress_bar.finish_println("Finished downloading the snapshot."); - - if let Some(snapshot_hasher) = snapshot_hasher { - fetch_checksum_and_validate(client, url, &snapshot_hasher.finalize()).await?; - } - - std::fs::rename(snapshot_file_tmp.path(), snapshot_path)?; - - Ok(()) -} - -async fn download_snapshot_and_validate_checksum_if_needed_with_aria2( - client: hyper::Client, - url: Url, - snapshot_path: &Path, - use_compressed: bool, -) -> anyhow::Result<()> -where - C: Connect + Clone + Send + Sync + 'static, -{ - info!("Snapshot url: {url}"); - info!("Snapshot will be downloaded to {}", snapshot_path.display()); - - if !is_aria2_installed() { - bail!("Command aria2c is not in PATH. To install aria2, refer to instructions on https://aria2.github.io/"); - } - - let checksum_expected = if use_compressed { - None - } else { - let checksum_url = replace_extension_url(url.clone(), "sha256sum")?; - let checksum_response = client.get(checksum_url.as_str().try_into()?).await?; - if !checksum_response.status().is_success() { - bail!("Unable to get the checksum file. Url: {checksum_url}"); - } - let checksum_bytes = hyper::body::to_bytes(checksum_response.into_body()).await? - [..Sha256::output_size() * 2] - .to_vec(); - let checksum = String::from_utf8(checksum_bytes)?; - info!("Expected sha256 checksum: {checksum}"); - Some(checksum) - }; - - download_with_aria2( - url.as_str(), - snapshot_path - .parent() - .and_then(|p| p.to_str()) - .unwrap_or_default(), - snapshot_path - .file_name() - .and_then(|f| f.to_str()) - .unwrap_or_default(), - checksum_expected.map(|checksum| format!("sha-256={checksum}")), - ) -} - -fn download_with_aria2( - url: &str, - dir: &str, - out: &str, - checksum: Option, -) -> anyhow::Result<()> { - let mut args = vec![ - "--continue=true".into(), - "--max-connection-per-server=5".into(), - "--split=5".into(), - "--max-tries=0".into(), - format!("--dir={dir}",), - format!("--out={out}",), - url.into(), - ]; - if let Some(checksum) = checksum { - args.insert(0, format!("--checksum={checksum}")); - } - - let mut child = Command::new("aria2c").args(args).spawn()?; - - let exit_code = child.wait()?; - if exit_code.success() { - Ok(()) - } else { - // https://aria2.github.io/manual/en/html/aria2c.html#exit-status - bail!(match exit_code.code() { - Some(32) => "Checksum validation failed".into(), - Some(code) =>format!("Failed with exit code {code}, checkout https://aria2.github.io/manual/en/html/aria2c.html#exit-status"), - None => "Failed with unknown exit code.".into(), - }); - } -} - -/// Tries to extract resource filename from a given URL. -fn filename_from_url(url: &Url) -> anyhow::Result { - let filename = url - .path_segments() - .ok_or_else(|| anyhow::anyhow!("Can't parse url: {url}"))? - .last() - .unwrap() // safe, there is at least one - .to_owned(); - - if filename.is_empty() { - Err(anyhow::anyhow!("can't extract filename from {url}")) - } else { - Ok(filename) - } -} - -/// Returns a normalized snapshot name -/// Filecoin snapshot files are named in the format of -/// `_TZ.car`. Normalized snapshot name are in the -/// format `filecoin_snapshot_{mainnet|calibnet}__height_. -/// car`. 
-/// # Example
-/// ```
-/// # use forest_cli_shared::cli::normalize_filecoin_snapshot_name;
-/// # use forest_networks::NetworkChain;
-/// let actual_name = "64050_2022_11_24T00_00_00Z.car";
-/// let normalized_name = "filecoin_snapshot_calibnet_2022-11-24_height_64050.car";
-/// assert_eq!(normalized_name, normalize_filecoin_snapshot_name(&NetworkChain::Calibnet, actual_name).unwrap());
-/// ```
-pub fn normalize_filecoin_snapshot_name(
-    network: &NetworkChain,
-    filename: &str,
-) -> anyhow::Result<String> {
-    let pattern = Regex::new(
-        r"(?P<height>\d+)_(?P<date>\d{4}_\d{2}_\d{2})T(?P