From 31741f19e6e1b5d72493d99754246d78958ae30b Mon Sep 17 00:00:00 2001 From: WheatNetwork <86817720+wheatnetwork@users.noreply.github.com> Date: Tue, 12 Oct 2021 15:32:45 +0800 Subject: [PATCH] 1.2.900 --- .pre-commit-config.yaml | 6 +- CHANGELOG.md | 1608 ----------------- azure-pipelines.yml | 49 +- benchmarks/coin_store.py | 244 +++ build_scripts/build_linux_deb.sh | 2 +- build_scripts/build_linux_rpm.sh | 2 +- build_scripts/build_macos.sh | 9 + build_scripts/build_macos_m1.sh | 11 +- build_scripts/build_windows.ps1 | 12 +- build_scripts/clean-runner.sh | 2 +- install-gui.sh | 12 +- install-timelord.sh | 18 +- install.sh | 4 +- setup.py | 5 +- wheat-blockchain-gui | 2 +- wheat/clvm/spend_sim.py | 19 +- wheat/cmds/configure.py | 8 + wheat/cmds/init.py | 2 +- wheat/cmds/init_funcs.py | 16 +- wheat/cmds/keys_funcs.py | 2 +- wheat/cmds/passphrase_funcs.py | 136 +- wheat/cmds/start.py | 2 +- wheat/cmds/stop.py | 2 +- wheat/cmds/wallet_funcs.py | 2 +- wheat/cmds/wheat.py | 2 +- wheat/consensus/block_body_validation.py | 9 - wheat/consensus/block_creation.py | 1 - wheat/consensus/blockchain.py | 147 +- wheat/consensus/constants.py | 3 +- wheat/consensus/default_constants.py | 1 - wheat/consensus/multiprocess_validation.py | 1 - wheat/daemon/client.py | 33 +- wheat/daemon/keychain_proxy.py | 20 +- wheat/daemon/keychain_server.py | 14 +- wheat/daemon/server.py | 151 +- wheat/full_node/block_store.py | 18 +- wheat/full_node/coin_store.py | 224 ++- wheat/full_node/full_node.py | 214 ++- wheat/full_node/full_node_api.py | 140 +- wheat/full_node/hint_store.py | 40 + wheat/full_node/mempool.py | 12 +- wheat/full_node/mempool_check_conditions.py | 353 +--- wheat/full_node/mempool_manager.py | 34 +- wheat/plotting/check_plots.py | 2 +- wheat/plotting/manager.py | 335 ++-- wheat/pools/pool_puzzles.py | 2 +- wheat/pools/pool_wallet.py | 22 +- wheat/protocols/protocol_message_types.py | 11 + wheat/protocols/protocol_state_machine.py | 64 + wheat/protocols/protocol_timing.py | 4 + 
wheat/protocols/shared_protocol.py | 2 +- wheat/protocols/wallet_protocol.py | 75 +- wheat/rpc/rpc_client.py | 6 +- wheat/rpc/wallet_rpc_api.py | 18 +- wheat/rpc/wallet_rpc_client.py | 43 +- wheat/server/address_manager_store.py | 2 - wheat/server/node_discovery.py | 7 +- wheat/server/rate_limits.py | 11 +- wheat/server/server.py | 36 +- wheat/server/upnp.py | 2 +- wheat/server/ws_connection.py | 31 +- wheat/ssl/create_ssl.py | 2 +- wheat/timelord/timelord_launcher.py | 5 +- wheat/types/coin_record.py | 12 + wheat/types/coin_spend.py | 5 +- wheat/types/spend_bundle.py | 3 +- wheat/util/api_decorators.py | 11 + wheat/util/chain_utils.py | 16 + wheat/util/condition_tools.py | 26 - wheat/util/config.py | 21 +- wheat/util/dump_keyring.py | 95 + wheat/util/errors.py | 8 +- wheat/util/file_keyring.py | 42 +- wheat/util/initial-config.yaml | 15 +- wheat/util/ints.py | 2 +- wheat/util/keychain.py | 165 +- wheat/util/keyring_wrapper.py | 307 +++- wheat/util/significant_bits.py | 2 +- wheat/util/ssl.py | 2 +- wheat/util/streamable.py | 17 +- wheat/wallet/cc_wallet/cc_wallet.py | 9 +- wheat/wallet/derive_keys.py | 2 +- wheat/wallet/did_wallet/did_info.py | 3 +- wheat/wallet/did_wallet/did_wallet.py | 134 +- wheat/wallet/did_wallet/did_wallet_puzzles.py | 10 +- wheat/wallet/key_val_store.py | 3 - wheat/wallet/puzzles/did_innerpuz.clvm | 92 +- wheat/wallet/puzzles/did_innerpuz.clvm.hex | 2 +- .../puzzles/did_innerpuz.clvm.hex.sha256tree | 2 +- .../wallet/puzzles/pool_member_innerpuz.clvm | 2 +- .../puzzles/pool_waitingroom_innerpuz.clvm | 2 +- wheat/wallet/trading/trade_store.py | 3 - wheat/wallet/util/debug_spend_bundle.py | 3 +- wheat/wallet/wallet.py | 1 - wheat/wallet/wallet_block_store.py | 3 - wheat/wallet/wallet_coin_store.py | 3 - wheat/wallet/wallet_interested_store.py | 2 - wheat/wallet/wallet_pool_store.py | 2 - wheat/wallet/wallet_puzzle_store.py | 2 - wheat/wallet/wallet_state_manager.py | 35 +- wheat/wallet/wallet_transaction_store.py | 5 +- 
wheat/wallet/wallet_user_store.py | 2 - 102 files changed, 2594 insertions(+), 2749 deletions(-) delete mode 100644 CHANGELOG.md create mode 100644 benchmarks/coin_store.py create mode 100644 wheat/full_node/hint_store.py create mode 100644 wheat/protocols/protocol_state_machine.py create mode 100644 wheat/protocols/protocol_timing.py create mode 100644 wheat/util/dump_keyring.py diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 01d91fe..aba598d 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,6 +1,6 @@ repos: - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v2.3.0 + rev: v4.0.1 hooks: - id: check-yaml - id: end-of-file-fixer @@ -10,11 +10,11 @@ repos: - id: check-ast - id: debug-statements - repo: https://github.com/psf/black - rev: 21.6b0 + rev: 21.8b0 hooks: - id: black - repo: https://gitlab.com/pycqa/flake8 - rev: 3.7.9 + rev: 3.9.2 hooks: - id: flake8 - repo: https://github.com/pre-commit/mirrors-mypy diff --git a/CHANGELOG.md b/CHANGELOG.md deleted file mode 100644 index 19c8b2e..0000000 --- a/CHANGELOG.md +++ /dev/null @@ -1,1608 +0,0 @@ -# Changelog - -All notable changes to this project will be documented in this file. - -The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), -and this project does not yet adhere to [Semantic Versioning](https://semver.org/spec/v2.0.0.html) -for setuptools_scm/PEP 440 reasons. - -## 1.2.0 Wheat blockchain 2021-07-07 - -### Added - -- Portable pooled plots are now available using our new plot NFT. These allow you to plot new plots to an NFT that can either self farm or join and leave pools. During development there were changes to the plot NFT so portable pool plots (those made with `-c` option to `wheat plots create`) using code from before June 25th are invalid on mainnet. -OG plots made before this release can continue to be farmed side by side with the new portable pool plots but can not join pools using the official pooling protocol. 
You can learn more as a farmer by checking out the [pool user guide](https://github.com/WheatNetwork/wheat-blockchain/wiki/Pooling-User-Guide). Pool operators and those wanting to understand how the official pooling protocol operates should check out our [pooling implementation reference repository](https://github.com/Chia-Network/pool-reference). If you plan to use plot NFT, all your farmers and harvesters must be on 1.2.0 to function properly for portable pool plots. -- The exact commit after which Plot NFTs should be valid is the 89f7a4b3d6329493cd2b4bc5f346a819c99d3e7b commit (in which `pools.testnet9` branch was merged to main) or 5d62b3d1481c1e225d8354a012727ab263342c0a within the `pools.testnet9` branch. -- `wheat farm summary` and the GUI now use a new RPC endpoint to properly show plots for local and remote harvesters. This should address issues #6563, #5881, #3875, #1461. -- `wheat configure` now supports command line updates to peer count and target peer count. -- Thank you @gldecurtins for adding logging support for remote syslog. -- Thanks to @maran and @Animazing for adding farmer and pool public key display to the RPC. -- We have added translations for Hungarian, Belarusian, Catalan, and Albanian. For Hungarian thanks to @SirGeoff, @azazio @onokaxxx, @rolandfarkasCOM, @HUNDavid , @horvathpalzsolt, @stishun74, @tusdavgaming, @idotitusz, @rasocsabi, @mail.kope, @gsprblnt, @mbudahazi, @csiberius, @tomatos83, @zok42, @ocel0t, @rwtoptomi, @djxpitke, @ftamas85, @zotya0330, @fnni, @kapabeates, @zamery, @viktor.gonczi, @pal.suta, @miv, and @Joeman_. For Belarusian thanks to @shurix83, @haxycgm, and @metalomaniax. For Catalan thank you to @Poliwhirl, @Pep-33, @marqmarti, @meuca, @Guiwdin, @carlescampi, @jairobtx, @Neoares, @darknsis, @augustfarrerasgimeno, and @fornons. Finally for Albanian thanks to @ATSHOOTER and @lakedeejay. We apologize if we missed anyone and welcome corrections. 
-- Our release process is now fully automated from tagging a release to publishing installers to all of the appropriate locations and now makes the release artifacts available via torrents as well. -- All Wheat repositories now automatically build M1 wheels and create a new MacOS M1 native installer. -- New CLI command `wheat plotnft` to manage pools. -- We have added a new RPC `get_harvesters` to the farmer. This returns information about remote harvesters and plots. -- We have added a new RPC `check_delete_key` to the wallet, to check keys prior to deleting them. -- We have added a new RPC `delete_unconfirmed_transactions` to the wallet which deletes these transactions for a given wallet ID. -- We have added a new RPC `get_puzzle_and_solution` to the full node, which takes in a coin ID. -- We have added a new RPC `get_recent_signage_point_or_eos` to the full node, to support pooling. -- We have added a new RPC `send_transaction_multi` to the wallet, which sends a payment with multiple payees. - -### Changed - -- We have made a host of changes to the GUI to support pooling and to improve the wallet experience. -- We updated chiapos to version 1.0.3. This adds parallel reads to GetFullProof. Thanks to @marcoabreu ! We now print target/final directory early in the logs refs and log process ID. Thanks to @grayfallstown ! We are now using Gulrak 1.5.6. -@683280 optimized code in phase1.hpp. @jespino and @mrhacky started migrating to flags instead of booleans parameters for `show_progress` and `nobitfield`. If you are providing third-party tools you may need to make adjustments if relying on the chiapos log. -- Updated chiavdf to version 1.0.2 to fix certain tests. -- Windows builds now rely upon Python 3.9 which obviates the fix in 1.1.7. -- We are now using miniupnpc version 2.2.2 so that we can support Python 3.9 on Windows. -- We updated to clvm 0.9.6 and clvm_rs 0.1.8. CLVMObject now lazily converts python types to CLVM types as elements are inspected in clvm. 
cvlm_rs now returns python objects rather than a serialized object. -- We now have rudimentary checks to makes sure that fees are less than the amount being spent. -- The harvester API no longer relies upon time:time with thanks to @x1957. -- We have increased the strictness of validating Wheatlisp in the mempool and clvm. -- Thanks to @ruslanskorb for improvements to the human-readable forms in the CLI. -- Thanks to @etr2460 for improvements to the plotting progress bar in the GUI and enhancements to human-readable sizes. -- @dkackman changed the way that configuration was found on startup. -- We now delay peer start for wallet until after backup init and make sure only one copy is started. -- Wallets now trust the local node more for enhanced wallet sync speed. -- We now store tasks used to initiate peer connections to ensure they are kept alive and to be able to wait for them if we hit the upper limit on number of pending outgoing connections. -- We improved weight proof validation. -- @cvet changed the wallet to take `override` instead of `confirm`. - -### Fixed - -- The delete plots button in the Windows GUI has been fixed and re-enabled. -- Sometimes upon startup, the GUI takes a while to load the plots to display. We've made a temporary improvement that adds a "Refresh Plots" button whenever the GUI has not yet found plots. -- Correctly display private key in `wheat keys show`. -- Thanks to @gldecurtins for removing a default printout of the private key mnemonic in `wheat keys show`. -- Shutting down the full node is cleaner and manages uPnP better. -- DNS introducer could fail. -- Fixed a potential timelord bug that could lead to a chain stall. -- Add an explicit error message when mnemonic words are not in the dictionary; should help users self-service issues like #3425 faster. Thank you to @elliotback for this PR. 
-- Thank you to @Nikolaj-K for various typo corrections around the Mozilla CA, code simplifications and improvements in converting to human-readable size estimations, and clean up in the RPCs and logging. -- Thank you to @ChiaMineJP for various improvements. -- @asdf2014 removed some useless code in the wallet node API. -- Thanks to @willi123yao for a fix to under development pool wallets. -- `wheat farm summary` better handles wallet errors. -- @Hoinor fixed formatting issues around the Chinese translation in the GUI. -- Sometimes the GUI would stop refreshing certain fields. -- We have better error handling for misbehaving peers from naive forks/clones. -- We have fixed an error where the wallet could get corrupted, which previously required restarting the application. -- We have fixed an error where transactions were not being resubmitted from the wallet. - -### Known Issues - -- If you resync your wallet, transactions made with your plot NFTs will show incorrectly in the GUI. The internal accounting, and total balance displayed is correct. - -### 1.1.7 Wheat Blockchain 2021-06-05 - -### Fixed - -Batch process weight proof epochs in groups of 900 to fit below May 2020 sqlite limit (999 for Python 3.7 on Windows). Fixes sqlite3.OperationalError: too many SQL variables error and resulting issues with syncing wallets on Windows. - -## 1.1.6 Wheat Blockchain 2021-05-20 - -### Added - -- This is the initial release of our DNS introducer. We built a far too simple basic introducer that we hoped to replace as quickly as possible with an introducer that uses DNS in the style of Bitcoin. This release will now use both and as we gain testing with the DNS version, we will slowly be phasing the classic introducers out. This should significantly help in finding nodes to connect and sync to. -- You can now whitelist networks and hosts so that a node can always connect with chosen peers. 
-- We added a simple profiler to track performance times in the application (see /wheat/util/profiler.py for instructions). -- We added a transaction filter to get_header_blocks_in_range. -- There is now an unspent coin count and pending coin removal count to wallet_rpc_api. -- Added configuration options for moving an install to testnet and back (use `wheat configure -t true|false`). -- Added Arabic language support. Thank you to the following community members for their translation contributions: @MohamedSiddig, @bilalghalib, @HoussenAlSamer, @esmailelbob, @MCoreiX, @bestq8, @bt.layth, @sam_774, @yahyakhalid, @itsmekarim44, @anasjawabreh1996, @algeria98, @abduallh, @rabee.khalil, @ajoolee. -- Added Bulgarian language support. Thank you to the following community members for their translation contributions: @shaosoft, @sitio72, @yonchevsv, @sleit2000, @TerminalX, @WoWGeleto, @DrEnderTV, @l2rx, @iliakurdalanov, @liveroy. -- Added Croatian language support. Thank you to the following community members for their translation contributions: @hrvoje555, @ATfarm, @m.filipovski2505, @goranpravda035, @Fistrake, @marko.anti12. -- Added Persian language support. Thank you to the following community members for their translation contributions: @ali.tavanaie, @soheils13, @zanaei, @parham_saeedi, @HYNazism, @IGSkyGuy, @mmbtmp, @esfandani, @amirhh00, @Alirezabayatorg, @dotMahdi, @amirisatisss, @Navidnkh, @tgmgfe, @babooshios, @mahbodius, @amiragheli, @pouyanagazadeh97, @yaghout.ad, @faramarz1985, @inert1a, @want3d, @avion.raid, @hadiamirnejad, @iziran, @hamidreza13771393, @ebrahimfalamarzi, @navidnr, @behzadmosaffa. -- Added Serbian language support. Thank you to the following community members for their translation contributions: @mihailokg, @Cheli0S, @stevica.canadi, @ukica92, @stefanmih, @acko83. -- Added Spanish (Argentina) language support. 
Thank you to the following community members for their translation contributions: @franpepper94, @gdestribats, @omar17, @tomigambi, @doldraug. -- Added Spanish (Mexico) language support. Thank you to the following community members for their translation contributions: @javieriv, @dvd101x, @cdgraff, @danielrangel6. -- Thanks to @proea for adding RockyLinux support for install-gui.sh. -- And thank you to @etr2460 for various GUI contributions. - -### Changed - -- Performance improvement of speeding up outbound connections. -- Performance improvements to block_store. -- Optimized Streamable parsing by avoiding extra post-init checks, making parsing block records from database up to 40% faster. -- Made the serialization of Coin follow the normal protocol of implementing stream(). -- Minor improvements to add_spendbundle and weight proofs. -- We now use version 1.0.2 of chiapos. We now reopen files on read or write failure with thanks to @mreid-moz! We also prevent chiapos prover crashing for more bad plots. -- Disabled deletion of running plot for Windows users as the crash/lockup bug has returned. -- We more clearly prohibit root from installing/running the GUI. -- Thanks to @sargonas for improvements to the template for creating Issues. -- Thanks to @aisk for continuing to add more types. -- Thanks for @dkackman for improving the remote harvester setup by having init -c only copy the right certs. -- And thank you to @vidschofelix, @meawoppl, and @rongou for various other changes and improvements. - -### Fixed - -- Fixed a bug where multiple wallets running at the same time would all issue a transaction. -- Fixed issues with signage points: duplicate signage point spikes, and bugs in signage point cache. -- Fixed a blank screen error during Centos install. -- Thanks to @linnik for fixing a TypeError exception in case of unresponsive service. -- Thanks to @msg7086 for fixing install.sh for Debian. 
-- And thank you to @alfonsoperez, @asdf2014, @fredericosilva, @lamba09, @Nikolaj-K, @sargonas, @aisk, @Ich5003, and @trumankain for various other fixes and improvements. - -## 1.1.5 Wheat Blockchain 2021-05-09 - -### Fixed - -- We were not checking for negative values in the uint64 constructor. Therefore coins with negative values were added to the mempool. These blocks passed validation, but they did not get added into the blockchain due to negative values not serializing in uint64. Farmers making these blocks would make blocks that did not make it into or advance the chain, so the blockchain slowed down starting at block 255518 around 6:35AM PDT 5/9/2021. The fix adds a check in the mempool and block validation, and does not disconnect peers who send these invalid blocks (any peer 1.1.4 or older), making this update not mandatory but is recommended. Users not updating might see their blocks get rejected from other peers. Upgraded nodes will show an error when they encounter an old node trying to send an invalid block. This "Consensus error 124..." can be safely ignored. - -## 1.1.4 Wheat Blockchain 2021-05-04 - -### Changed - -- Secret wallet key is hidden by default in `wheat keys show`. Use `wheat keys show --show-mnemonic-seed` for private keys. -- Performance improvement while parsing variable length field in transaction blocks. - -### Fixed - -- Spends are now sorted in the mempool by fee/cost. -- Resolved pending transaction issue during reorgs. -- Fixed bug in message send retry logic. -- Two issues with processing Weight Proofs during syncing while farming. -- Fixed a bug in the outgoing rate control logic that could prevent messages being sent. - -## 1.1.3 Wheat Blockchain 2021-05-01 - -### Added - -- Significant speed improvements have been added to the Node during P2P operations. This is mostly a performance improvement to make sure there is plenty of overhead in Node for transaction processing. 
Smaller machines like the Pi 4 are strongly advised to update to this release before Monday 5/3/21 at 10AM PDT when transactions start. -- Significant syncing speed improvements have been added. However, there is a lot of low hanging fruit to continue to increase overall chain sync speed in subsequent releases. - -### Changed - -- We now require node 12.x to build the GUI. Installers have been building using node 12.x for quite some time. -- Node will now farm while syncing. -- We changed wheatlisp singletons to take a puzzlehash as its origin. We also updated the DID wallet to use this. -- Transactions are now cached for 10 minutes in mempool to retry if there is a failure of a spending attempt. -- Thank you to @Chida82 who made the log rotation count fully configurable. Apologies to him for not initially being included here. -- Thank you to @fiveangle for making install.sh more resilient across python installations. -- Standard transactions now all have announcements. -- VDF verification during sync and normal operations are now cached to enhance node performance. -- Target peer count has been increased from 60 to 80 to increase the number of peer Nodes available to connect to. Outbound connections were lowered from 10 to 8. -- The streamables class has had significant performance improvements. -- If a proof of space lookup takes longer than 5 seconds we will now log an error at WARNING level. -- Allow configuration of the plot loading interval (default is 2 minutes) in config.yaml. -- CLI wallet UI was improved and shows syncing status of wallet. -- @martomi added logging of added coins back. -- Thank you to @aisk for additional type checking. -- @aisk added error checking in bech32m -- Wheatlisp programs now remained serialized in Node for better performance. -- Mempool is now set to be 50 times the single block size. -- Mitigate 1-3 mojo dust attacks. -- CLI now switches to EiB for netspace display as appropriate. 
- -### Fixed - -- We fixed excess memory use when displaying plot logs in GUI. This was causing the GUI application to fail in many "fun" ways after plotting a lot of plots in parallel. -- Fixed plot update issues in the GUI. -- Long syncing will now correctly show "Syncing" around the application. -- Nonce collisions in request IDs is fixed. -- Some duplicate plots were not being caught and logged by harvester on loading. -- We have removed many places where Node was making duplicate requests of other nodes. -- Daemon now waits for services to end to attempt to avoid zombie processes. -- Node is less likely to lose sync due to state inconsistency. -- A large thank you to @sargonas for diagnosing why so many technical support questions were flooding GitHub issues, PRing a fix, and mass migrating issues to Discussions. -- Thanks to @jeolcho for fixing a bug in full_node_rpc_api.py. -- Thanks @antoniobg for a typo fix in keychain.py. -- Thanks to @altendky for catching a Copyright date error. - -## 1.1.2 Wheat Blockchain 2021-04-24 - -### Changed - -- Weight proof time outs have been increased from 3 minutes to 6 minutes. - -### Fixed - -- When attempting to sync, connections could accidentally disconnect for rate limiting reasons. This was causing many to not be able to sync. -- Some temp files were not being closed during GUI plotting. - -## 1.1.1 Wheat Blockchain 2021-04-21 - -### Added - -- This is a bug fix release for 1.1.0. It is not required to upgrade from 1.1.0 if you are not experiencing the specific issue that it addresses. You are required to upgrade from any 1.0.x version before Monday evening PDT to continue farming. - -### Changed - -- Logging now includes year, month, and date. Thanks and apologies for not tipping the hat sooner to @alfonsoperez for the PR. - -### Fixed - -- Changes were made in 1.1.0 to make sure that even out of order signage points were found and responded to by as many farmers as possible. 
That change lead to a situation where the harvester could thrash on the same cached signage point. - -## 1.1.0 Wheat Blockchain 2021-04-21 - -### Added - -- This fork release includes full transaction support for the Wheat Blockchain. Transactions are still disabled until 5/3/2021 at 10:00AM PDT. It is hard to overstate how much work and clean up went into this release. -- This is the 1.0 release of Wheatlisp. Much has been massaged and finalized. We will be putting a focus on updating and expanding the documentation on [wheatlisp.com](https://wheatlisp.com) shortly. -- Farmers now compress blocks using code snippets from previous blocks. This saves storage space and allows larger smart coins to have a library of sorts on chain. -- We now support offline signing of coins. -- You can now ask for an offset wallet receive address in the cli. Thanks @jespino. -- When adding plots we attempt to detect a duplicate and not load it. - -### Changed - -- We have changed how transactions will unlock from a blockheight to a timestamp. As noted above that timestamp is 5/3/2021 at 10AM PDT. -- We have temporarily disabled the "Delete Plots" button in the Windows GUI as we are still working on debugging upstream issues that are causing it. -- There are various optimizations in node and wallet to increase sync speed and lower work to stay in sync. We expect to add additional significant performance improvements in the next release also. -- Transactions now add the agg_sig_me of the genesis block for chain compatibility reasons. -- Wallet is far less chatty to unload the classic introducers. DNS introducers will be coming shortly to replace the classic introducers that are still deployed. -- Netspace is now calculated across the previous 4068 blocks (generally the past 24 hours) in the GUI and cli. - -### Fixed - -- Performance of streamable has been increased, which should help the full node use less CPU - especially when syncing. 
-- Timelords are now successfully infusing almost 100% of blocks. -- Harvester should be a bit more tolerant of some bad plots. - -## 1.0.5 Wheat Blockchain 2021-04-14 - -### Added - -- This is a maintenance release for 1.0.4 to fix a few mostly cosmetic issues. Please refer to the 1.0.4 notes for the substantive plotting changes - for example - in that release. - -### Changed - -- The GUI now calls it an Estimated Time to Win and has enhanced explanations in the tool tip that the estimated time is often not be the actual time. We have some additional improvements we plan to make here in future releases. -- Development installers now handle semver development versions correctly. - -### Fixed - -- Temp space sizes needed for k = 33 and higher were accidentally under-reported. The values we have placed into the GUI may be conservative in being too large and appreciate feedback from the community on the new optimal temp space needed and RAM choices. -- The GUI plotting progress bar was reaching 100% too early. Thanks to @davidbb for the PR. -- Help -> About was blank. -- Our estimate for k=32 was about 0.4GiB too low in some cases. -- Building the GUI in especially ARM64 Linux was painful enough to be considered broken. - -## 1.0.4 Wheat Blockchain 2021-04-12 - -### Added - -- Starting approximately April 21, 2021, the GUI will notify you that this version will stop working at block height 193,536 and will persistently warn you from that block on that you can not use this version (or any earlier version) to farm. This is to support the upgrade to the transaction fork. -- We now have translations for Brazilian Portuguese, Australian English, and Pirate. Thanks to @fsavaget, @darkflare, @maahhh, @harold_257, @kontin, and @GunnlaugurCalvi. Yarr - don't be losing your 24 word treasure map... 
- -### Changed - -- The plotter in bitfield mode is much improved in plotting speed (~15% faster than in 1.0.3), now requires 28% less temporary space (238.3 GiB/256 GB), and now uses its maximum memory in phase 1 and only needs 3389MiB for optimal sorting of a k32. Total writes should also be down by about 20%. On almost all machines we expect bitfield to be as fast or faster. For CPUs that predate the [Nehalem architecture](https://en.wikipedia.org/wiki/Nehalem_(microarchitecture)), bitfield plotting will not work and you will need to use no bitfield. Those CPUs were generally designed before 2010. -- The `src` directory in wheat-blockchain has been changed to `apple` to avoid namespace collisions. -- GUI install builds have been simplified to rely on one `.spec` file in `apple/` -- The weight proof timeout can now be configured in config.yaml. -- Peer discovery is now retried more often after you receive initial peers. - -### Fixed - -- We have made significant improvements and bug fixes to stop blockchain and wallet database corruption issues. -- We now pass the environment into the Daemon and this should solve some Windows and MacOS startup bugs. -- The ARM64 .deb installer will now work well on Raspberry Pi OS 64 bit and Ubuntu 18.04 LTS or newer. -- We have made improvements in weight proof generation and saving. -- Wallet start up would have a race condition that output a harmless error on startup. -- Thanks for a typo fix from @alfonsoperez. - -## 1.0.3 Wheat Blockchain 2021-03-30 - -### Added - -- This is a minor bug fix release for version 1.0.2 -- You should review the [release notes for v1.0.2](https://github.com/WheatNetwork/wheat-blockchain/releases/tag/1.0.2) but we especially want to point out that wallet sync is much faster than in 1.0.1 and earlier versions. - -### Fixed - -- An incorrect merge brought in unreleased features and broke `wheat keys`. 
-- Omitted from the 1.0.2 changelog, we fixed one crash in harvester with the release of chiapos 1.0.0 as well. - -## 1.0.2 Wheat Blockchain 2021-03-30 - -### Added - -- We have released version 1.0.0 of [chiapos](https://github.com/Chia-Network/chiapos). This includes a 20% speed increase for bitfield plotting compared to the previous version on the same machine. In many cases this will mean that bitfield plotting is as fast or faster than non bitfield plotting. -- @xorinox improved our support for RedHat related distributions in `install.sh`. -- @ayaseen improved our support for RedHat related distributions in `install-timelord.sh`. -- We have added Dutch and Polish to supported translations. Thanks @psydafke, @WesleyVH, @pieterhauwaerts, @bartlomiej.tokarzewski, @abstruso, @feel.the.code, and @Axadiw for contributions to [translations on Crowdin](https://crowdin.com/project/wheat-blockchain). -- The GUI now supports "Exclude final directory" when plotting. This is found in the Advanced Options for step 2 on the plot creation page. - -### Changed - -- Wallet now uses a trusted node and, when syncing from that node, Wallet does not do as many validations. -- @jespino changed `wheat keys show` to require the `--show-mnemonic-seed` before it displays your 24 work private key mnemonic. -- We decreased the size of the block cache in node to perform better with longer chains. -- You can now add a private key mnemonic from a file with `wheat keys show`. -- @Flofie caught an error in CONTRIBUTING.md. -- We no longer rely on aiter so it has been removed. -- Keyring deprecated the use of OS_X in favor of MacOS. -- "Broken pipe" error was decreased to a warning. -- Many non critical log messages were decreased from warning to info log level. -- Harvester should now log the plot file name if it finds a bad plot at error log level. - -### Fixed - -- Peer ips were being written to the database on a per ip basis. 
This caused a lot of wasted disk activity and was costing full node performance. -- We fixed an issue where the last block wasn't fetched by the GUI. -- There was an edge case with full node store that can stall syncing. -- There was a potential node locking issue that could have prevented a Timelord from getting a new peak and cause a chain stall. -- We did not correctly support some Crowdin locales. Pirate English was starting to overwrite US English for example. - -## 1.0.1 Wheat Blockchain 2021-03-23 - -### Added - -- There is now a simple progress bar on the GUI Plot page and when you view the log from the three dots on the right. -- Users must now explicitly set the `--show-mnemonic-seed` flag to see their private keys when running `wheat keys show`. -- We are now building Linux GUI installers. These should be considered beta quality for now. -- Translations now available for German, Traditional Chinese, and Danish. Thanks to @Dravenex, @MaestroOnICe, @loudsyncro, @loppefaaret, @thirteenthd, @wong8888, @N418, and @swjz for all the translation help. You to can translate at our [Crowdin project](https://crowdin.com/project/wheat-blockchain/). - -### Changed - -- The mainnet genesis is now in the initial config.yaml and the green flag components have been removed. -- Our release process and branching strategy has changed. CONTRIBUTING.md will be updated in the main branch soon with details. -- This mainnet release does not migrate previous testnet configuration files. - -### Fixed - -- Weight proofs, especially wallet weight proofs were failing when some Blueboxed proofs of time were encountered. -- Users can now pip install e.g. wheat-blockchain==1.0.1 on most platforms. -- Sometimes the GUI had an error regarding MainWindow. - -## 1.0.0 First Release of Wheat Blockchain 2021-03-17 - -### Added - -- This is the first production release of the Wheat Blockchain. 
This can be installed and will wait for the green flag that will be dropped at approximately 7AM PDT (14:00 UTC) on Friday March 19, 2021.
-- Thank you @L3Sota for bringing this log back into 2021. -- The errant warning on Electron startup has been removed. Thanks @dkackman. - -## 1.0rc9 aka Release Candidate 9 - 2021-03-16 - -### Changed - -- This is a hard fork/breaking change from RC6/7/8. The current plan is to drop the flag at noon pacific time, today 3/16. -- Using the real prefarm keys for this test - -### Fixed - -- Found and fixed another green flag related issue -- Fixed an issue with weight proofs where all sub-epochs were sampled, and the size of the weight proof kept growing -- Fixed an issue with install-gui.sh, where npm audit fix was failing. (Thanks @Depado!) -- Migration with WHEAT_ROOT set does not crash wheat init - -## 1.0rc8 aka Release Candidate 8 - 2021-03-15 - -### Added - -- This is a hard fork/breaking change from RC6/7. TWHEAT Coins will **not** be moved forward but your plots and keys and parts of your configuration do. When you install this version before 10AM PDST on 3/16/2021 it will load up, start finding peers, and otherwise wait for the flag drop at that time to start farming. This is likely to be the last dress rehearsal for mainnet launch. Our [3/15/2021 blog post](https://www.wheat.network/2021/03/15/mainnet-update.html) has more details on the current mainnet launch plan. -- The GUI now has a tooltip that directs users to the explanation of the plot filter. -- The GUI now has a tooltip to explain the "Disable bitfield plotting" option. Thanks @shaneo257 for the idea. -- The GUI now has a tooltip to explain Hierarchical Deterministic keys next to Receive Address on the Wallet page. - -### Changed - -- We now use Python 3.9 to build MacOS installers. -- Harvester now catches another error class and continues to harvest. Thanks to @xorinox for this PR. -- We now use a smaller weight proof sample size to ease the load on smaller machines when syncing. -- Starting the GUI from Linux will now also error out if `npm run build` is run outside the venv. 
Huge thanks to @dkackman for that PR. -- `wheat farm summary` will now display TWHEAT or WHEAT as appropriate. -- We added more time to our API timeouts and improved logging around times outs. - -### Fixed - -- We no longer use the transaction cache to look up transactions for new transactions as that was causing a wallet sync bug. -- Sometimes the GUI would not pick up the fingerprint for the plotting key. -- `wheat farm summary` displayed some incorrect amounts. -- Weight proofs were timing out. -- Changes to farming rewards target addresses from the GUI were not being saved for restart correctly. -- Signage points, recent deficit blocks, and slots for overflow challenge blocks had minor issues. - -## 1.0rc7 aka Release Candidate 7 - 2021-03-13 - -### Changed - -- Our green flag test blockchain launch worked but it uncovered a flaw in our installer versions. This release is a bug fix release to address that flaw. You should read the RC6 changes below if this is your first time installing since RC5. -- Thanks to @dkackman for implementing an early exit of the GUI if you run `npm run build` without being in the `venv`. -- `wheat netspace` now defaults to 1000 blocks to mirror the GUI. -- The installer build process was spruced up some. - -### Fixed - -- Setting difficulty way too low on the testnet_6 launch revealed a Timelord edge case. The full node was hardcoding the default difficulty if block height is < EPOCH_BLOCKS. However there were many overlapping blocks, so none of the blocks reached the height, and therefore the timelord infused the wrong difficulty. -- Fixed a race condition in the Timelord, where it took time to update the state, so it ignored the new_peak_timelord form the full_node, which should have reset the timelord to a good state. -- Wallet notoriously showed "not synced" when it was in sync. -- Installers were not correctly placing root TLS certificates into the bundle. -- Weight proofs had a logic typo. 
-- There was a typo in `wheat netspace`. Thanks @altendky. -- There was a typo in `wheat plots`. Thanks @adamfiddler. - -### Known Issues - -- Some users can't plot in the GUI in MacOS Big Sur - especially on M1. See issue [1189](https://github.com/WheatNetwork/wheat-blockchain/issues/1189) - -## 1.0rc6 aka Release Candidate 6 - 2021-03-11 - -### Added - -- This is a hard fork/breaking change from RC5. TWHEAT Coins will **not** be moved forward but your plots and keys and parts of your configuration do. We will be testing the final mainnet release strategy with the launch of RC6. For the test, those who are comfortable running the dev branch will update and start up their farms. All harvesters and plots will load and until the green flag drops, peers will be gossiped so your farm can establish good network connectivity. When the flag drops, each node will pull down the signed genesis challenge and start farming. Block 1 will be broadcast to anyone who hasn't seen the flag drop yet. The only difference for mainnet is that there will be 1.0 installers and a main branch release more than 24 hours before the real green flag. -- There is now basic plot queueing functionality in the GUI. By default, plotting works as it has in the past. However you can now name a queue in Step 2 Advanced Options. Chose something like `first`. Everything you add to the `first` queue will start up like it has in the past but now you can go through the steps again and create a queue named `second` and it will immediately start plotting as if it is unaware of and parallel with `first`. A great use case is that you would set `first` to plot 5 plots sequentially and then you'd set `second` to plot 5 sequentially and that would give you two parallel queues of 5 plot's each. We will be returning to plotting speed and UI soon. Thanks @jespino for this clever work around for now. -- There is now an option on the Farm page to manage your farming rewards receive addresses. 
This makes it easy to send your farming rewards to an offline wallet. This also checks your existing rewards addresses and warns if you do not have the matching private key. That is expected if you are using an offline wallet of course. -- Functionally has been added to the farmer rpc including checking and changing your farming rewards target addresses. -- Added the ability to translate material-ui components like `Row 1 of 10`. Thanks @jespino. -- Arch linux support has been added to `sh install.sh`. Thanks @jespino. -- Update FullBlock to Allow Generator References - a list of block heights of generators to be made available to the block program of the current block at generator runtime. This sets the stage for smart coins calling existing "libraries" already on the chain to lower fees and increase the scale of complex smart coins. - -## Changed - -- Remove `wheat plots "-s" "--stripe_size"` and the strip size setting in the Advanced section of the GUI. We now always use the best default of 64K for the GUI and cli. -- `wheat keys add` takes secret words a prompt on the command line or stdin instead of command line arguments for security. -- Version 1.0.1 of chiavdf was added. This brought MPIR on Windows to the most recent release. Additionally we removed inefficient ConvertIntegerToBytes() and ConvertBytesToInt() functions, use GMP library's mpz_export/mpz_import for big integers and simple helper functions for built-in integer types. The latter are taken from chiavdf. We now require compressed forms to be encoded canonically when deserializing. This should prevent potential grinding attacks where some non-canonical encodings of a compressed form could be used to change its hash and thus the next challenges derived from it. Canonically encoded compressed forms must be reduced and must produce the same string when deserialized and serialized again. -- Version 1.0 of our BLS signature library is included. 
We brought Relic, gmp and MPIR up to their most recent releases. We again thank the Dash team for their fixes and improvements. -- We now hand build Apple Silicon native binary wheels for all wheat-blockchain dependencies and host them at [https://pypi.chia.net/simple](https://pypi.chia.net/simple). We are likely to hand build a MacOS ARM64 dmg available and certainly will for 1.0. You can install natively on M1 now with the `git clone` developer method today. Just make sure Python 3.9 is installed. `python3 --version` works. -- The GUI now shows you which network you are connected to on the Full Node page. It will also wait patiently for the green flag to drop on a network launch. -- In the GUI you can only plot k=32 or larger with the single exception of k=25 for testing. You will have to confirm choosing k=25 however. Thanks to @jespino for help on this and limiting the cli as well. -- The restore smart wallets from backup prompt has been improved to better get the intent across and that it can be skipped. -- At the top of the plotting wizard we have added text pointing out that you can plot without being in sync or on the internet. -- Wallet no longer automatically creates a new hierarchical deterministic wallet receive address on each start. You can and still should choose a new one with the `NEW ADDRESS` button for each new transaction for privacy. -- The network maximum k size is now set to k=50. We think that may be more storage than atoms in the solar system so it should be ok. But we will probably be hated for it in 200 years... -- The formula for computing iterations is simplified, so that only one division is necessary, and inverting the (1-x) into just x. -- There are new timestamp consensus rules. A block N must have a greater timestamp than block N-1. Also, a block's timestamp cannot be more than 5 minutes in the future. 
Note that we have decided that work factor difficulty resets are now going to be 24 hours on mainnet but are still shorter on testnet. -- A List[Tuple[uint16, str]] is added to the peer network handshake. These are the capabilities that the node supports, to add new features to the protocol in an easy - soft fork - manner. The message_id is now before the data in each message. -- Peer gossip limits were set. -- Generators have been re-worked in CLVM. We added a wheatlisp deserialization puzzle and improved the low-level generator. We reduce the accepted atom size to 1MB during WheatLisp native deserialization. -- When processing mempool transactions, Coin IDs are now calculated from parent coin ID and amount -- We implemented rate limiting for full node. This can and will lead to short term bans of certain peers that didn't behave in expected ways. This is ok and normal, but strong defense against many DDOS attacks. -- `requirements-dev.txt` has been removed in favor of the CI actions and test scripts. -- We have moved to a new and much higher scalability download.wheat.network to support the mainnet launch flag and additional download demand. -- To always get the latest testnet and then mainnet installers you can now use a latest URL: [Windows](https://download.wheat.network/latest/Setup-Win64.exe) and [MacOS x86_64](https://download.wheat.network/latest/Setup-MacOS.dmg). -- Wheat wheels not on Pypi and some dependecies not found there also are now on pypi.chia.net. -- Additional typing has been added to the Python code with thanks to @jespino. -- Cryptography and Keyring have been bumped to their current releases. -- PRs and commits to the wheat-blockchain-gui repository will automatically have their locales updated. - -## Fixed - -- The Farm page will now no longer get stuck at 50 TWHEAT farmed. -- `wheat farm` has had multiple bugs and spelling issues addressed. Thanks to @alfonsoperez, @soulmerge and @olivernyc for your contributions. 
-- `wheat wallet` had various bugs. -- Various weight proof improvements. -- Some users on Big Sur could not plot from the GUI as the log window would be stuck on "Loading." -- We believe we have fixed the chain stall/confused Timelord bug from ~ 13:00 UTC 3/10/21. We've added additional recovery logic as well. -- Logs from receiving a duplicate compacted Proof of Time are much more human friendly. -- We believe that the install/migrate process was bringing forward bad farming rewards receive addresses. We have attempted to stop that by only migrating RC3 and newer configurations. You can make sure you are not effected by using the Manage Farming Rewards tool mentioned above or putting a known good wallet receive address in both `wheat_target_address` sections of config.yaml. -- Wallet cached transactions incorrectly in some cases. - -## 1.0rc5 aka Release Candidate 5 - 2021-03-04 - -### Added - -- The RC5 release is a new breaking change/hard fork blockchain. Plots and keys from previous chains will work fine on RC5 but balances of TWHEAT will not come forward. -- We now support a "green flag" chain launch process. A new version of the software will poll download.wheat.network/notify/ for a signed json file that will be the genesis block of the chain for that version. This will allow unattended start at mainnet. -- Bluebox Timelords are back. These are Timelords most anyone can run. They search through the historical chain and find large proofs of times and compact them down to their smallest representation. This significantly speeds up syncing for newly started nodes. Currently this is only supported on Linux and MacOS x86_64 but we will expand that. Any desktop or server of any age will be fast enough to be a useful Bluebox Timelord. -- Thanks to @jespino there is now `wheat farm summary`. You can now get almost exactly the same farming information on the CLI as the GUI. -- We have added Romanian to the GUI translations. 
Thank you to @bicilis on [Crowdin](https://crowdin.com/project/wheat-blockchain). We also added a couple of additional target languages. Klingon anyone? -- `wheat wallet` now takes get_address to get a new wallet receive address from the CLI. -- `wheat plots check` will list out all the failed plot filenames at the end of the report. Thanks for the PR go to @eFishCent. -- Wheatlisp and the clvm have had the standard puzzle updated and we replaced `((c P A))` with `(a P A)`. - -## Changed - -- Testnets and mainnet now set their minimum `k` size and enforce it. RC5 testnet will reject plots of size less than k=32. -- Sub slots now require 16 blocks instead of 12. -- Thanks to @xdustinface of Dash, the BlS Signature library has been updated to 0.9 with clean ups and some speed ups. This changed how the G2 infinity element was handled and we now manage it inside of wheat-blockchain, etc., instead of in blspy. -- We have updated the display of peer nodes and moved adding a peer to it's own pop up in the GUI. -- Block searching in the GUI has been improved. -- @jespino added i18n support and refactored how locales are loaded in the GUI. Additionally he moved more strings into the translation infrastructure for translators. -- In chiavdf we changed n-Wesolowski proofs to include B instead of y in segments. Proof segments now have the form (iters, B, proof) instead of (iters, y, proof). This reduces proof segment size from 208 to 141 bytes. -- The new chiavdf proof format is not compatible with the old one, however zero-Wesolowski proofs are not affected as they have zero proof segments and consist only of (y, proof). -- We made two HashPrime optimizations in chiavdf. This forces numbers being tested for primality to be odd and avoids an unnecessary update of the sprout vector by stopping after the first non-zero value. This is a breaking change as it changes the prime numbers generated from a given seed. We believe this is the final breaking change for chiavdf. 
-- chiabip158 was set to a gold 1.0 version. -- Comments to Wheatlisp and clvm source have been updated for all of the Wheatlisp changes over the proceeding three weeks. -- And thanks yet again to @jespino for a host of PRs to add more detailed typing to various components in wheat-blockchain. -- aiohttp was updated to 3.7.4 to address a low severity [security issue](https://github.com/advisories/GHSA-v6wp-4m6f-gcjg). -- calccrypto/uint128_t was updated in the Windows chiapos implementation. Wheatpos required some changes its build process to support MacOS ARM64. - -### Fixed - -- Harvester would crash if it encountered more than 16,000 plot files or 256 directories. -- Nodes that were interrupted by a network crash or standby on a laptop were not syncing upon reconnection in RC4. -- Sync issues could stop syncing from restarting and could lead to a peer host that you could not remove. -- Adding Click changed the behavior of `wheat keys add -m`. The help now makes it clear that the 24 word mnemonic needs to be surrounded by a pair of quotes. -- Python root CA certificates have issues so we have added the Mozilla certificate store via curl.se and use that to connect to backup.wheat.network via https, for example. -- The difficulty adjustment calculation was simplified. -- All of the wheat sub repositories that were attempting to build MacOS Universal wheels were only generating x86_64 wheels internally. We have moved back to only generating x86_64 MacOS wheels on CI. -- However, we have updated and test compiled all Wheat dependencies on Apple Silicon and will be making available a test .dmg for MacOS ARM64 shortly. -- Various weight proof edge cases have been fixed. -- Various typos and style clean ups were made to the Click CLI implementation. `wheat -upnp f` was added to disable uPnP. -- `wheat plots check` shouldn't crash when encountering plots that cause RuntimeError. PR again thanks to @eFishCent. 
-- Coloured coin announcements had a bug that would allow counterfeiting. - -## 1.0rc4 aka Release Candidate 4 - 2021-02-25 - -### Fixed - -- This is a bug fix release for RC3. There was an unexpected interaction between the GUI and the Click cli tool regarding Windows that made GUI plotting not start on all GUIs. - -## 1.0rc3 aka Release Candidate 3 - 2021-02-25 - -### Added - -- RC3 is a new chain to support the last major wheatlisp changes. TWHEAT from the RC1/2 chain do not come forward to this chain but plots and keys continue to work as usual. -- We have lowered the transaction lock to the first 5000 blocks to facilitate testing. We also started this chain at a lower difficulty. -- A new RPC api: /push_tx. Using this RPC, you can spend custom wheatlisp programs. You need to make a SpendBundle, which includes the puzzle reveal (wheatlisp), a solution (wheatlisp) and a signature. -- You can now use the RPC apis to query the mempool. -- There are now Swedish, Spanish, and Slovak translations. Huge thanks to @ordtrogen (Swedish), @jespino and @dvd101x (Spanish), and our own @seeden (Slovak). Also thanks were due to @f00b4r (Finnish), @A-Caccese (Italian), and @Bibop182 and @LeonidShamis (Russian). Quite a few more are almost complete and ready for inclusion. You can help translate and review translations at our [crowdin project](https://crowdin.com/project/wheat-blockchain). -- You can obtain a new wallet receive address on the command line with `wheat wallet new_address`. Thanks to @jespino for this and a lot more in the next section below. -- You will now see Your Harvester Network in the GUI even if you have no plots. - -### Changed - -- All wheatlisp opcodes have been renumbered. This should be the last major breaking change for wheatlisp and the clvm. There are a couple minor enhancements still needed for mainnet launch, but they may or may not require minor breaking changes. We will be restarting testnet chains on a mostly weekly basis either way. 
-- Node batch syncing performance was increased, and it now avoids re-validating blocks that node had already validated. -- The entire CLI has been ported to [Click](https://click.palletsprojects.com/en/7.x/). Huge thanks to @jespino for the big assist and @unparalleled-js for the [recommendation and the initial start](https://github.com/WheatNetwork/wheat-blockchain/issues/464). This will make building out the CLI much easier. There are some subtle changes and some shortcuts are not there anymore. `wheat -h` and `wheat SUBCOMMAND -h` can be your guide. -- We have upgraded Electron to 11.3 to support Apple Silicon. There are still one or two issues in our build chain for Apple Silicon but we should have an M1 native build shortly. -- The websocket address is no longer displayed in the GUI unless it is running as a remote GUI. Thanks @dkackman ! -- `wheat plots check` now will continue checking after it finds an error in a plot to the total number of checks you specified. -- If you run install-gui.sh or install-timelord.sh without being in the venv, the script will warn you that you need to `. ./activate` and exit with error. -- If you attempt to install on a 32 bit Pi/ARM OS, the installer exits with a helpful error message. You can still fail when running under a 64 bit kernel but using a 32 bit Python 3. -- The application is now more aware of whether it is running a testnet or mainnet. This impacts wallet's display behavior and certain blockchain validation rules. -- Interface improvements for `wheat netspace`. -- Now that aiosqlite included our upstream improvements we install version 0.17.0. -- `wheat init` only migrates release candidate directories. The versioned sub directories under `~/wheat` will be going away before mainnet. - -### Fixed - -- The GUI was often getting stuck on connecting to wallet. We beleive we have resolved this. 
-- We identified and fixed an issue where harvester would crash, especially when loading plots or checking a large amount of plots. -- The software now reports not synced in the GUI if not synced or being behind by 7 minutes or more. -- Difficulty was set too high for the RC1/2 chain. This lead to odd rewards behaviour as well as difficulty artificially could not fall as low as it should. -- Don't load plots that don't need to be loaded. -- We made various fixes and changes to weight proofs. -- Some configuration values were improperly ignored in migrations. -- Some debug logging was accidentally left in. -- `wheat configure -log-level` was broken. -- We believe we finally have the Windows Installer obtaining the correct version information at build time. -- The application was sometimes not cancel pending items when closing certain websockets. -- Fixed filter hash and generator validation. -- Recursive replace was being called from the test suite. - -## 1.0rc2 aka Release Candidate 2 - 2021-02-18 - -### Fixed - -- This is an errata release for Release Candidate 1. There were a couple of things that did not smoothly migrate from the Beta versions. Please make sure you also consult the [release notes for RC-1](https://github.com/WheatNetwork/wheat-blockchain/releases/tag/1.0rc1) was well. -- Incorrect older spend to addresses were being migrated from Beta 27. This would send farming rewards to un-spendable coins. -- Netspace was not calculating properly in RC-1. -- The Windows installer was building with the wrong version number. -- @eFishCent didn't get correct credit in the RC 1 release notes. They have been updated below to be correct. - -## 1.0rc1 aka Release Candidate 1 - 2021-02-18 - -### Added - -- This is the first release in our release candidate series. There are still a few things that will change at the edges but the blockchain, clvm, and wheatlisp are in release form. 
We have one major change to wheatlisp/clvm that we have chosen to schedule for the next release as in this release we're breaking the way q/quote works. We also have one more revision to the VDF that will decrease the sizes of the proofs of time. We expect a few more releases in the release candidate series. -- Installers will now be of the pattern WheatSetup-0.2.1.exe. `0.2` is release candidate and the final `.1` is the first release candidate. -- Use 'wheat wallet get_transactions' in the command line to see your transactions. -- 'wheat wallet show' now shows your wallet's height. -- Last Attempted Proof is now above Latest Block Challenge on the Farm page of the GUI. -- The GUI now detects duplicate plots and also only counts unique plots and unique plot size. -- We have integrated with crowdin to make it easier to translate the GUI. Check out [Wheat Blockchain GUI](https://crowdin.com/project/wheat-blockchain) there. -- We have added Italian, Russian, and Finnish. More to come soon. -- There is now remote UI support. [Documents](https://github.com/WheatNetwork/wheat-blockchain-gui/blob/main/remote.md) will temporarily live in the repository but have moved to the [wiki](https://github.com/WheatNetwork/wheat-blockchain/wiki/Connecting-the-UI-to-a-remote-daemon). Thanks to @dkackman for this excellent addition! -- Added the ability to specify an address for the pool when making plots (-c flag), as opposed to a public key. The block -validation was changed to allow blocks like these to be made. This will enable changing pools in the future, by specifying a smart transaction for your pool rewards. -- Added `wheat plots check --challenge-start [start]` that begins at a different `[start]` for `-n [challenges]`. Useful when you want to do more detailed checks on plots without restarting from lower challenge values you already have done. 
Huge thanks to @eFishCent for this and all of the debugging work behind the scenes confirming that plot failures were machine errors and not bugs! - -### Changed - -- Sub blocks renamed to blocks, and blocks renamed to transaction blocks, everywhere. This effects the RPC, now -all fields that referred to sub blocks are changed to blocks. -- Base difficulty and weight have increased, so difficulty of "5" in the rc1 testnet will be equivalent to "21990232555520" in the previous testnet. -- 'wheat wallet send' now takes in TWHEAT or WHEAT as units instead of mojos. -- Transactions have been further sped up. -- The blockchain database has more careful validation. -- The GUI is now using bech32m. - -### Fixed - -- We updated chiapos to hopefully address some harvester crashes when moving plot files. -- Many of the cards on the Farming page have had bugs addressed including last block farmed, block rewards, and user fees. -- Improved validation of overflow blocks. - -## [1.0beta27] aka Beta 1.27 - 2021-02-11 - -### Added - -- The Beta 27 chain is a hard fork. All TWHEAT from previous releases has been reset on this chain. Your keys and plots of k=32 or larger continue to work just fine on this new chain. -- We now use the rust version of clvm, clvm_rs, in preference to validate transactions. We have additionally published binary wheels or clvm_rs for all four platforms and all three supported python versions. The rust version is approximately 50 times faster than the python version used to validate on chain transactions in previous versions. -- We have moved to compressed quadratic forms for VDFs. Using compressed representation of quadratic forms reduces their serialized size from 130 to 100 bytes (for forms with 1024-bit discriminant). This shrinks the size of VDF outputs and VDF proofs, and it's a breaking change as the compressed representation is not compatible with the older uncompressed (a, b) representation. 
Compressed forms are also used in calls to chiavdf and in timelord's communication with VDF clients. The form compression algorithm is based on ["Trustless Groups of Unknown Order with Hyperelliptic Curves"](https://eprint.iacr.org/2020/196) by Samuel Dobson, Steven D. Galbraith and Benjamin Smith. -- Last Attempted Proof on the Farm tab of the GUI now shows hours:minutes:seconds instead of just hours:minutes. This makes it much easier to see that your farmer is responding to recent challenges at a glance. -- You can now send and receive transactions with the command line. Try `wheat wallet -h` to learn more. Also, `wheat wallet` now requires a third argument of `show`, therefor you will use `wheat wallet show` to see your wallet balance. -- We have added the [Crowdin](https://crowdin.com/) translation platform to [wheat blockchain gui](https://crowdin.com/project/wheat-blockchain). We are still getting it fully set up, but helping to translate the GUI is going to be much easier. -- Full Node > Connections in the GUI now shows the peak sub block height your connected peers believe they are at. A node syncing from you will not be at the true peak sub block height until it gets into sync. -- `wheat init -c [directory]` will create new TLS certificates signed by your CA located in `[directory]`. Use this feature to configure a new remote harvester. Type `wheat init -h` to get instructions. Huge thanks to a very efficient @eFishCent for this quick and thorough pull request. -- We build both MacOS x86_64 and MacOS universal wheels for chiapos, chiavdf, blpsy, and chiabip158 in Python 3.9. The universal build allows M1 Macs to run these dependencies in ARM64 native mode. -- On first run in the GUI (or when there are no plot directories) there is now an "Add Plot Directories" on the Farm tab also. - -### Changed - -- We are moving away from the terms sub blocks and blocks in our new consensus. What used to be called sub blocks will now just be blocks. 
Some blocks are now also transaction blocks. This is simpler both in the code and to reason about. Not all the code or UI may have caught up yet. -- This release has the final mainnet rewards schedule. During the first three years, each block winner will win 2 TWHEAT/WHEAT per block for a total of 9216 TWHEAT per day from 4608 challenges per day. -- Smart transactions now use an announcement instead of 'coin consumed' or lock methods. -- The GUI is now in a separate submodule repository from wheat-blockchain, [wheat-blockchain-gui](https://github.com/WheatNetwork/wheat-blockchain-gui). The installers and install scripts have been updated and it continues to follow the same install steps. Note that the GUI directory will now be `wheat-blockchain-gui`. The workflow for this may be "touch and go" for people who use the git install methods over the short term. -- Very large coin counts are now supported. -- Various RPC endpoints have been renamed to follow our switch to "just blocks" from sub blocks. -- We've made changes to the protocol handshake and the blockchain genesis process to support mainnet launch and running/farming more than one chain at a time. That also means we can't as easily determine when an old version of the peer tries to connect so we will put warnings in the logs for now. -- We no longer replace addresses in the config. **IMPORTANT** - This means if you change the target address in config.yml, you have to make sure you control the correct keys. -- We now only migrate Beta 19 and newer installations. -- We have removed cbor2 as a dependency. -- We updated various dependencies including cryptography, packaging, portalocker, and pyyaml - most of which are only development dependencies. - -### Fixed - -- The function that estimated total farming space was counting space at twice the actual rate. Netspace will display half of the previous space estimate which is now a correct estimate of the actual space currently being farmed. 
-- We fixed many sync and stay in sync issues for both node and wallet including that both would send peaks to other peers multiple times and would validate the same transaction multiple times. -- The GUI was incorrectly reporting the time frame that the netspace estimate it displays utilizes. It is technically 312.5 minutes, on average, over the trailing 1000 sub blocks. -- Coloured coins were not working in the new consensus. -- Some Haswell processors do not have certain AVX extensions and therefore would not run. -- The cli wallet, `wheat wallet`, was incorrectly displaying TWHEAT balances as if they were Coloured Coins. -- We addressed [CVE-2020-28477](https://nvd.nist.gov/vuln/detail/CVE-2020-28477) in the GUI. -- We made changes to CI to hopefully not repeat our skipped releases from the previous release cycle. - -## [1.0beta26] aka Beta 1.26 - 2021-02-05 - -### Added - -- We now use our own faster primality test based on Baillie-PSW. The new primality test is based on the 2020 paper ["Strengthening the Baillie-PSW primality test" by Robert Baillie, Andrew Fiori, Samuel S. Wagstaff Jr](https://arxiv.org/abs/2006.14425). The new test works approximately 20% faster than GMP library's mpz_probab_prime_p() function when generating random 1024-bit primes. This lowers the load on Timelords and speeds up VDF verifications in full node. -- The GUI now checks for an already running GUI and stops the second launch. Thank you for that PR to @dkackman ! -- Transactions are now validated in a separate process in full node. -- `wheat plots check -l` will list all duplicate plot IDs found on the machine. Thanks very much for this PR @eFishCent. - -### Changed - -- Significant improvements have been made to how the full node handles the mempool. This generally cuts CPU usage of node by 2x or more. Part of this increase is that we have temporarily limited the size of transactions. 
If you want to test sending a transaction you should keep the value of your transaction below 20 TWHEAT as new consensus will cause you to use a lot of inputs. This will be returned to the expected level as soon as the integration of [clvm rust](https://github.com/Chia-Network/clvm_rs) is complete. -- We have changed the way TLS between nodes and between wheat services work. Each node now has two certificate authorities. One is a public, shared CA that signs the TLS certificates that every node uses to connect to other nodes on 8444 or 58444. You now also have a self generated private CA that must sign e.g. farmer and harvester's certificates. To run a remote harvester you need a new harvester key that is then signed by your private CA. We know this is not easy for remote harvester in this release but will address it quickly. -- We have changed the way we compile the proof of space plotter and added one additional optimization. On many modern processors this will mean that using the plotter with the `-e` flag will be 2-3% faster than the Beta 17 plotter on the same CPU. We have found this to be very sensitive to different CPUs but are now confident that, at worst, the Beta 24 plotter with `-e` will be the same speed as Beta 17 if not slightly faster on the same hardware. Huge thanks to @xorinox for meticulously tracking down and testing this. -- If a peer is not responsive during sync, node will disconnect it. -- Peers that have not sent data in the last hour are now disconnected. -- We have made the "Help Translate" button in the GUI open in your default web browser and added instructions for adding new translations and more phrases in existing translations at that [URL](https://github.com/WheatNetwork/wheat-blockchain/tree/main/electron-react/src/locales). Try the "Help Translate" option on the language selection pull down to the left of the dark/light mode selection at the top right of the GUI. 
-- Sync store now tracks all connected peers and removes them as they get removed. -- The Rate Limited Wallet has been ported to new consensus and updated Wheatlisp methods. -- We are down to only one sub dependency that does not ship binary wheels for all four platforms. The only platform still impacted is ARM64 (generally Raspberry Pi) but that only means that you still need the minor build tools as outlined on the [wiki](https://github.com/WheatNetwork/wheat-blockchain/wiki/Raspberry-Pi). -- We upgraded to Electron 9.4.2 for the GUI. -- We have upgraded to py-setproctitle 1.2.2. We now have binary wheels for setproctitle on all four platforms and make it a requirement in setup.py. It is run-time optional if you wish to disable it. - -### Fixed - -- On the Farm page of the GUI Latest Block Challenge is now populated. This shows you the actual challenge that came from the Timelord. Index is the signage point index in the current slot. There are 64 signage points every 10 minutes on average where 32 sub blocks can be won. -- Last Attempted Proof is now fixed. This will show you the last time one of your plots passed the [plot filter](https://github.com/WheatNetwork/wheat-blockchain/wiki/FAQ#what-is-the-plot-filter-and-why-didnt-my-plot-pass-it). -- Plot filename is now back in the Plots table of the GUI. -- There was a bug in adding a sub block to weight proofs and an issue in the weight proof index. -- Over time the node would think that there were no peers attached with peak sub block heights higher than 0. -- There was a potential bug in Python 3.9.0 that required us to update blspy, chiapos, chiavdf, and chiabip158. -- An off by one issue could cause syncing to ask for 1 sub block when it should ask for e.g. 32. -- Short sync and backtrack sync both had various issues. -- There was an edge case in bip158 handling. 
- -### Known issues - -- There is a remaining sync disconnect issue where your synced node will stop hearing responses from the network even though it still gets a few peaks and then stalls. Restarting node should let you quickly short sync back to the blockchain tip. - -## [1.0beta25] aka Beta 1.25 - -### Skipped - -## [1.0beta24] aka Beta 1.24 - -### Skipped - -## [1.0beta23] aka Beta 1.23 - 2021-01-26 - -### Added - -- The GUI now displays sub blocks as well as transaction blocks on the Full Node page. -- `wheat plots check` enforces a minimum of `-n 5` to decrease false negatives. Thanks to @eFishCent for these ongoing pull requests! -- Testnets and mainnets will now have an initial period of sub blocks where transactions are blocked. -- Transaction volume testing added to tests and various tests have been sped up. -- We have added connection limits for max_inbound_wallet, max_inbound_farmer, and max_inbound_timelord. - -### Changed - -- On starting full node, the weight proof cache does not attempt to load all sub blocks. Startup times are noticeably improved though there remains a hesitation when validating the mempool. Our clvm Rust implementation, which will likely ship in the next release, will drop example processing times from 180 to 3 seconds. -- Changes to weight proofs and sub block storage and caching required a new database schema. This will require a re-sync or obtaining a synced blockchain_v23.db. -- clvm bytecode is now generated and confirmed that the checked-in clvm and WheatLisp code matches the CI compiled code. -- We have removed the '-r' flag from `wheat` as it was being overridden in most cases by the `-r` for restart flag to `wheat start`. Use `wheat --root-path` instead. -- `wheat -h` now recommends `wheat netspace -d 192` which is approximately one hour's worth of sub blocks. Use `-d 1000` to get the same estimate of netspace as the RPC and GUI. -- `wheat show -c` now displays in MiB and the GUI has been changed to MiB to match. 
-- `wheat configure` now accepts the shorter `-upnp` and `-log-level` arguments also. -- `wheat plots check` now defaults to `-n 30` instead of `-n 1` - HT @eFishCent. -- `wheat plots create` now enforces a minimum of k=22. As a reminder, anything less than k=32 is just for testing and be careful extrapolating performance of a k less than 30 to a k=32 or larger. -- We have updated development dependencies for setuptools, yarl, idna, multidict, and chardet. -- Updated some copyright dates to 2021. - -### Fixed - -- We upgraded our fork of aiosqlite to version 16.0 which has significant performance improvements. Our fixes to aiosqlite are waiting to be upstreamed. -- The Plots tab in the GUI will no longer show red/error when the node is still syncing. -- Inbound and outbound peer connection limits were not being honored. -- Weight proofs were not correctly extending. -- In some cases when closing a p2p connection to another node, there was an infinite "Closing" loop. -- `wheat show -c` was showing upload MiB in the download column and vice versa. @pyl and @psydafke deserves credit for insisting it was broken and @kd637xx for the PR assist. -- `wheat show` handles sub block 0 better. - -## [1.0beta22] aka Beta 1.22 - 2021-01-19 - -### Added - -- Node now attempts to pre-validate and cache transactions. -- The harvester will try to not load a plot file that is too small for its k size. This should help keep from partial plots being found when they are copied into a harvester directory. Harvester will check again on the next challenge and find a completed copy of a plot file then. -- `wheat plots create -x` skips adding [final dir] to harvester for farming - -### Changed - -- We now use bech32m and have added the bech32m tests from Pieter Wuille (@sipa) outlined [here](https://gist.github.com/sipa/14c248c288c3880a3b191f978a34508e) with thanks. 
-- In the GUI, choosing to parallel plot with a delay now is a delay between the start of the parallel plots started in one session. -- Removed loading plot file names when starting `wheat plots create`; decreases plotter time when there are a lot of plots on the machine. Huge thanks to @eFishCent for this PR! - -### Fixed - -- Various fixes to improve node's ability to sync. There are still plenty of additional performance improvements coming for node so expect it to get easier to run on less powerful devices. -- Wallet now handles large amounts of coins much better and generally syncs better. -- Thanks to @nup002 for the PR to use scientific notation in the logs for address_manager.select_peer timings. -- `wheat show -h` now correctly states that you use the first 8 characters of the node id to remove a node on the cli. -- Thank you to @wallentx for adding better help for `wheat configure --enable-upnp`. -- Pull requests from forks won't have failures on CI. - -## [1.0beta21] aka Beta 1.21 - 2021-01-16 - -### Added - -- The cli now warns if you attempt to create a plot smaller than k=32. -- `wheat configure` now lets you enable or disable uPnP. -- If a peer gives a bad weight proof it will now be disconnected. - -### Changed - -- Harvester now only checks every 2 minutes for new files and otherwise caches the plot listing in memory and logs how long it took to load all plot files at INFO level. -- Harvester multithreading is now configurable in config.yaml. -- Websocket heartbeat timeout was increased from 30 seconds to 300 seconds. -- Bumped Colorlog to 4.7.2, and pyinstaller to 4.2. - -### Fixed - -- Weight proofs were failing to verify contributing to a chain stall. This release gets things moving again but nodes are using too much CPU and can pause/lag at times. This may resolve as people upgrade to Beta 21. -- A toxic combination of transaction limits set too high and a non performant clvm kept the chain stalled. 
A faster rust implementation of clvm is already nearing completion. -- `wheat netspace -s` would not correctly look up the start block height by block hash. Additionally netspace now flips to PiB above 1024 TiB. To compare netspace to `wheat show` of the GUI use `wheat netspace -d 1000` as `wheat netspace` defaults to `-d 192` which is one hour. - -## [1.0beta20] aka Beta 1.20 - 2021-01-14 - -### Added - -- Plotting now checks to see if there are MacOS created `._` plot files and ignores them. -- Mnemonics now autocomplete in the GUI. - -### Changed - -- Node sync is now multithreaded and much quicker. -- Peer gossip is faster and smarter. It also will no longer accidentally gossip a private IP address to another peer. -- When syncing in the GUI, estimated time to win just shows syncing until synced. -- If harvester hits an exception it will be caught, logged and skipped. This normally happens if it attempts to harvest a plot file you are still copying in. -- The Rate Limited wallet has been updated to work in new consensus. - -### Fixed - -- There was a bug in block reorg code that would keep a peer with a lower weight chain from validating and syncing to a higher weight chain when the node thought it had a double spend in the other chain. This caused a persistent chain split. -- The Farm page in the GUI should not report just error when initially starting to sync. - -## [1.0beta19] aka Beta 1.19 - 2021-01-12 - -### Added - -- Welcome to the new consensus. This release is an all but a full re-write of the blockchain in under 30 days. There is now only one tip of the blockchain but we went from two chains to three. Block times are now a little under a minute but there are a couple of sub blocks between each transaction block. A block is also itself a special kind of sub block and each sub block rewards the farmer who won it 1 TWHEAT. Sub blocks come, on average, about every 17 to 18 seconds. 
-- Starting with this Beta, there are 4608 opportunities per day for a farmer to win 1 TWHEAT compared to Beta 18 where there were 288 opportunities per day for a farmer to win 16 TWHEAT. -- There is a lot more information and explanation of the new consensus algorithm in the New Consensus Working Document linked from [wheat.network](https://wheat.network/). Among the improvements this gives the Wheat blockchain are a much higher security level against all attacks, more frequent transaction blocks that have less time variation between them and are then buried under confirmations (sub blocks also count towards re-org security) much more quickly. -- New consensus means this is a very hard fork. All of your TWHEAT from Beta 17/18 will be gone. Your plots and keys will work just fine however. You will have to sync to the new chain. -- You now have to sync 16 times more "blocks" for every 5 minutes of historical time so syncing is slower than it was on the old chain. We're aware of this and will be speeding it up and addressing blockchain database growth in the next couple of releases. -- Prior to this Beta 19, we had block times that targeted 5 minutes and rewarded 16 TWHEAT to one farmer. Moving forward we have epoch times that target 10 minutes and reward 32 TWHEAT to 32 farmers about every 17-18 seconds over that period. This has subtle naming and UI impacts in various places. -- Total transaction throughput is still targeted at 2.1x Bitcoin's throughput per hour but you will get more confirmations on a transaction much faster. This release has the errata that it doesn't limit transaction block size correctly. -- For testing purposes this chain is quickly halving block rewards. By the time you're reading this and using the chain, farmers and pools will be receiving less than 1 TWHEAT for each block won as if it were 15-20 years from now. Block rewards are given in two components, 7/8's to the pool key and 1/8 to the farmer. 
The farmer also receives any transaction fees from the block. -- You can now plot in parallel using the GUI. A known limitation is that you can't yet specify that you want 4 sets of two parallel plots. Each parallel plot added starts immediately parallel. We will continue to improve this. -- The GUI now warns if you attempt to create a plot smaller than k=32. -- Added Chinese language localization (zh-cn). A big thank you to @goomario for their pull request! -- You can now specify which private key to use for `wheat plots create`. After obtaining the fingerprint from `wheat keys show`, try `wheat plots create -a FINGERPRINT`. Thanks to @eFishCent for this pull request! -- We use a faster hash to prime function for chiavdf from the current release of gmp-6.2.1 which we have upgraded chiavdf and blspy to support. -- There is a new cli command - `wheat configure`. This allows you to update certain configuration details like log level in config.yaml from the command line. This is particularly useful in containerization and linux automation. Try `wheat configure -h`. Note that if wheat services are running and you issue this command you will have to restart them for changes to take effect but you can use this command in the venv when no services are running or call it directly by path in the venv without activating the venv. Expect the options for this command to expand. -- We now fully support Python 3.9. - -### Changed - -- The Plot tab on the GUI is now the Plots tab. It starts out with a much more friendly new user wizard and otherwise keeps all of your farming plots listed here. Use the "+ ADD A PLOT" button in the top right to plot your second or later plot. -- The new plots page offers advanced plotting options in the various "Show Advanced Options" fold outs. -- The plotter supports the new bitfield back propagation method and the old method from Beta 17. 
To choose the old method add a `-e` to the command line or choose "Disable bitfield plotting" in "Show Advanced Options" of the Plots tab. Bitfield back propagation writes about 13% less total writes and can be faster on some slower hard drive temp spaces. For now, SSD temp space will likely plot faster with bitfield back propagation disabled. We will be returning to speed enhancements to the plotter as we approach and pass our mainnet launch. -- The Farm tab in the GUI is significantly enhanced. Here you have a dashboard overview of your farm and your activity in response to blockchain challenges, how long it will take you - on average - to win a block, and how much TWHEAT you've won so far. Harvester and Full Node connections have moved to Advanced Options. -- Harvester and farmer will start when the GUI starts instead of waiting for key selection if there are already keys available. This means you will start farming on reboot if you have the Wheat application set to launch on start. -- Testnet is now running at the primary port of 58444. Update your routers appropriately. This opens 8444 for mainnet. -- All networking code has been refactored and mostly moved to websockets. -- RPCs and daemon now communicate over TLS with certificates that are generated into `~/.wheat/VERSION/config/` -- We have moved to taproot across all of our transactions and smart transactions. -- We have adopted chech32m encoding of keys and addresses in parallel to bitcoin's coming adoption of bech32m. -- The rate limited wallet was updated and re-factored. -- All appropriate Wheatlisp smart transactions have been updated to use aggsig_me. -- Full node should be more aggressive about finding other peers. -- Peer disconnect messages are now set to log level INFO down from WARNING. -- chiavdf now allows passing in input to a VDF for new consensus. -- sha256tree has been removed from Wheatlisp. -- `wheat show -s` has been refactored to support the new consensus. 
-- `wheat netspace` has been refactored for new consensus. -- aiohttp, clvm-tools, colorlog, concurrent-log-handler, keyring, cryptography, and sortedcontainers have been upgraded to their current versions. -- Tests now place a cache of blocks and plots in the ~/.wheat/ directory to speed up total testing time. -- Changes were made to chiapos to correctly support the new bitfield backpropagation on FreeBSD and OpenBSD. With the exception of needing to work around python cryptography as outlined on the wiki, FreeBSD and OpenBSD should be able to compile and run wheat-blockchain. -- With the change to new consensus many components of the chain and local database are not yet stored optimally. Startup and sync times may be slower than usual so please be patient. This will improve next release. -- Errata: Coinbase amount is missing from the GUI Block view. -- Errata: wallet Backup, and Fly-sync on the wallet are currently not working. - -### Fixed - -- There was a regression in Beta 18 where the plotter took 499GiB of temp space for a k32 when it used to only use 332GiB. The plotter should now use just slightly less than it did in Beta 17. -- blspy was bumped to 0.3.1 which now correctly supports the aggsig of no signatures and is built with gmp-6.2.1. -- Fixed a plotter crash after pulling a disk without ejecting it first. -- `sh install.sh` now works properly on Linux Mint. -- `wheat show -s` now is less brain dead when a node is initially starting to sync. 
-- Partial support for Python 3.9. That includes new versions of Wheat dependencies like chiabip158. - -### Changed - -- We have moved from using gulrak/filesystem across all platforms to only using it on MacOS. It's required on MacOS as we are still targeting Mojave compatibility. This should resolve Windows path issues. -- We upgraded to cbor 5.2.0 but expect to deprecate cbor in a future release. - -### Fixed - -- A segfault caused by memory leaks in bls-library has been fixed. This should end the random farmer and harvester crashes over time as outlined in [Issue 500](https://github.com/WheatNetwork/wheat-blockchain/issues/500). -- Plotting could hang up retrying in an "error 0" state due to a bug in table handling in some edge cases. -- CPU utilization as reported in the plotter is now accurate for Windows. -- FreeBSD and OpenBSD should be able to build and install wheat-blockchain and its dependencies again. -- Starting with recent setuptools fixes, we can no longer pass an empty string to the linker on Windows when building binary wheels in the sub repos. Thanks @jaraco for tracking this down. - -## [1.0beta17] aka Beta 1.17 - 2020-10-22 - -### Changed - -- Bumped aiohttp to 3.6.3 - -### Fixed - -- In the GUI there was [a regression](https://github.com/WheatNetwork/wheat-blockchain/issues/484) that removed the scroll bar on the Plot page. The scroll bar has returned! -- In Dark Mode you couldn't read the white on white plotting log text. -- To fix a bug in Beta 15's plotter we introduced a fix that slowed plotting by as much as 25%. -- Certain NTFS root mount points couldn't be used for plotting or farming. -- Logging had [a regression](https://github.com/WheatNetwork/wheat-blockchain/issues/485) where log level could no longer be set by service. - -## [1.0beta16] aka Beta 1.16 - 2020-10-20 - -### Added - -- The Wheat GUI now supports dark and light mode. -- The GUI now supports translations and localizations. 
If you'd like to add your language you can see the examples in [the locales directory](https://github.com/WheatNetwork/wheat-blockchain/tree/dev/electron-react/src/locales) of the wheat-blockchain repository. -- `wheat check plots` now takes a `-g` option that allows you to specify a matching path string to only check a single plot file, a wild card list of plot files, or all plots in a single directory instead of the default behavior of checking every directory listed in your config.yaml. A big thank you to @eFishCent for this pull request! -- Better documentation of the various timelord options in the default config.yaml. - -### Changed - -- The entire GUI has been refactored for code quality and performance. -- Updated to chiapos 0.12.32. This update significantly speeds up the F1/first table plot generation. It also now can log disk usage while plotting and generate graphs. More details in the [chiapos release notes](https://github.com/Chia-Network/chiapos/releases/tag/0.12.32). -- Node losing or not connecting to another peer node (which is entirely normal behaviour) is now logged at INFO and not WARNING. Your logs will be quieter. -- Both the GUI and CLI now default to putting the second temporary directory files into the specified temporary directory. -- SSL Certificate handling was refactored along with Consensus constants, service launching, and internal configuration management. -- Updated to clvm 0.5.3. This fixed a bug in the `point_add` operator, that was causing taproot issues. This also removed the `SExp.is_legit_list` function. There were significant refactoring of various smart transactions for simplicity and efficiency. -- WalletTool was generally removed. -- Deprecated pep517.build for the new standard `python -m build --sdist --outdir dist .` - -### Fixed - -- A bug in bls-signatures/blspy could cause a stack overflow if too many signatures were verified at once. This caused the block of death at 11997 of the Beta 15 chain. 
Updated to 0.2.4 to address the issue. -- GUI Wallet now correctly updates around reorgs. -- chiapos 0.12.32 fixed an out of bounds read that could crash the plotter. It also contains a fix to better handle the case of drive letters on Windows. -- Node would fail to start on Windows Server 2016 with lots of cores. This [python issue explains]( https://bugs.python.org/issue26903) the problem. - -### Known Issues - -- On NTFS, plotting and farming can't use a path that includes a non root mountpoint. This is fixed in an upcoming version but did not have enough testing time for this release. - -## [1.0beta15] aka Beta 1.15 - 2020-10-07 - -### Added - -- Choosing a larger k size in the GUI also increases the default memory buffer. - -### Changed - -- The development tool WalletTool was refactored out. -- Update to clvm 0.5.3. -- As k=30 and k=31 are now ruled out for mainnet, the GUI defaults to a plot size of k=32. - -### Fixed - -- Over time the new peer gossip protocol could slowly disconnect all peers and take your node offline. -- Sometimes on restart the peer connections database could cause fullnode to crash. - -## [1.0beta14] aka Beta 1.14 - 2020-10-01 - -### Added - -- Node peers are now gossiped between nodes with logic to keep connected nodes on disparate internet networks to partially protect from eclipse attacks. This is the second to last step to remove our temporary introducer and migrate to DNS introducers with peer gossip modeled directly off of Bitcoin. This adds a new database of valid peer nodes that will persist across node restarts. This also makes changes to config.yaml's contents. -- For 'git clone' installs there is now a separate install-gui.sh which speeds up running install.sh for those who wish to run headless and makes docker and other automation simpler. -- The rate limited wallet library now supports coin aggregation for adding additional funds after the time of creation. 
-- Fees are now used in all applicable rate limited wallet calls -- New parameters for plotting: -r (number of threads) -s (stripe size) -u (number of buckets) in cli and GUI -- chiavdf now has full IFMA optimizations for processors that support it. - -### Changed - -- Multithreading support in chiapos, as well as a new algorithm which is faster and does 70% less IO. This is a significant improvement in speed, much lower total writing, and configurability for different hardware environments. -- Default -b changed to 3072 to improve performance -- The correct amount of memory is used for plotting -- `sh install.sh` was upgraded so that on Ubuntu it will install any needed OS dependencies. -- Wallet and puzzlehash generation have been refactored and simplified. -- Wallet has had various sync speed ups added. -- The rpc interfaces of all wheat services have been refactored, simplified, and had various additional functionality added. -- Block timestamps are now stored in the wallet database. Both database versions were incremented and databases from previous versions will not work with Beta 14. However, upon re-sync all test wheat since Beta 12 should appear in your wallet. -- All vestigial references to plots.yaml have been removed. - -### Fixed - -- Temporary space required for each k size was updated with more accurate estimates. -- Tables in the README.MD were not rendering correctly on Pypi. Thanks again @altendky. -- Wheatpos issue where memory was spiking and increasing -- Fixed working space estimates so they are exact -- Log all errors in chiapos -- Fixed a bug that was causing Bluebox vdfs to fail. - -## [1.0beta13] aka Beta 1.13 - 2020-09-15 - -### Added - -### Changed - -- Long_description_content_type is now set to improve wheat-blockchain's Pypi entry. Thanks to @altendky for this pull request. - -A minor edit was made to clarify that excessive was only related to trolling in the Code of Conduct document. 
- -### Fixed - -- When starting the GUI from an installer or the command line on Linux, if you had not previously generated a key on your machine, the generate new key GUI would not launch and you would be stuck with a spinner. -- Farmer display now correctly displays balance. - -## [1.0beta12] aka Beta 1.12 - 2020-09-14 - -### Added - -- Rate limited wallets can now have unspent and un-spendable funds clawed back by the Admin wallet. -- You can now backup your wallet related metadata in an encrypted and signed file to a free service from Wheat Network at backup.wheat.network. Simply having a backup of your private key will allow you to fully restore the state of your wallet including coloured coins, rate limited wallets, distributed identity wallets and many more. Your private key is used to automatically restore the last backup you saved to the Wheat backup cloud service. This service is open source and ultimately you will be able to configure your backups to go to backup.wheat.network, your own installation, or a third party's version of it. -- Added a Code of Conduct in CODE_OF_CONDUCT.md. -- Added a bug report template in `.github/ISSUE_TEMPLATE/bug_report.md`. - -### Changed - -- This is a new blockchain as we changed how the default puzzle hashes are generated and previous coins would not be easy to spend. Plots made with Beta 8 and newer continue to work, but all previous test wheat are left on the old chain and do not migrate over. Configuration data like plot directories automatically migrate in your `~/.wheat` directory. -- Proof of Space now requires significantly less temp space to generate a new plot. A k=32 that used to require 524GiB now requires only 313GiB - generally a 40% decrease across all k sizes. -- When plotting, instead of 1 monolithic temp file, there are now 8 files - one for each of the 7 tables and one for sorting plot data. 
These files are deleted as the `-2` or `-d` final file is written so the final file can fit within the footprint of the temporary files on the same filesystem. -- We've made various additional CPU optimizations to the Proof of Space plotter that reduces plotting time by an additional 13%. These changes will also reduce CPU utilization in harvesting. -- We have ruled out k=30 for mainnet minimum plot size. k=31 may still make mainnet. k=32 and larger will be viable on mainnet. -- We moved to react-styleguidist to develop reusable components in isolation and better document the UI. Thanks to @embiem for this pull request. -- Coloured coins have been updated to simplify them, remove 'a', and stop using an 'auditor'. -- clvm has been significantly changed to support the new coloured coins implementation. -- Bumped cryptography to 3.1. Cryptography is now publishing ARM64 binary wheels to PyPi so Raspberry Pi installs should be even easier. -- `wheat init` now automatically discovers previous releases in each new release. - -### Fixed - -- `wheat show -w` should now more reliably work. Wallet balances should be more often correct. -- View -> Developer -> Developer Tools now correctly opens the developer tools. Thank you to @roxaaams for this pull request! -- Fixed 'Receive Address' typo in Wallet. Thanks @meurtn on Keybase. -- Fixed a typo in `wheat show -w` with thanks to @pyl on Keybase. -- In Windows the start menu item is now Wheat Network and the icon in Add/Remove is updated. - -## [1.0beta11] aka Beta 1.11 - 2020-08-24 - -### Added - -- The Wheat UI now has a proper About menu entry that gives the various component versions and directs people to submit issues on GitHub. Thank you to @freddiecoleman for this pull request! 
-- Ability to run only the farmer, wallet, or timelord services, for more advanced configurations (wheat run farmer-only, wallet-only, timelord-only) - -### Changed - -- To complement the new About menu, we have revamped all Electron menus and made them OS native. There are now direct links to the Wiki, Keybase, and FAQ in the Help menu. -- There are minor improvements to how working space is calculated and displayed by the plotter. The plotter also has additional debugging information in its output. -- Successful plots only have an atomic rename. - -### Fixed - -- kOffsetSize should have been 10 bits and not 9. This was causing plots, especially larger plots, to fail with "Error 0". This bug was introduced in Beta 8 with the new plot file format. -- A bug in aiosqlite was causing tests to hang - especially on the ci. This may also have been causing wallet database corruption. -- `wheat show -w` now correctly outputs all wallet types and balances from the local wallet. - -## [1.0beta10] aka Beta 1.10 - 2020-08-18 - -### Added - -- Meet our new Rate Limited wallet. You can now fund a wallet from an Admin wallet that will set how many coins can be spent over a given range of blocks for a given User wallet. Once combined with on chain wallet recovery, this makes it much easier to secure your "spending money" wallet so that if it is compromised you have time to get most of the funds back before an attacker can steal them all. This wallet should be considered alpha in this release as additional fixes and functionality will be coming in subsequent releases. -- We've added unhardened HD keys to bls-signatures for the smart wallets that need them. We've added significant cross project testing to our BLS implementation. -- The python implementation of bls-signatures is now current to the new specification. -- `wheat show -b` now returns plot public key and pool public key for each block. -- Added cbor2 binary wheels for ARM64 to the Wheat simple site. 
Raspberry Pi should be just a little easier to install. - -### Changed - -- Wallet addresses and other key related elements are now expressed in Chech32 which is the Wheat implementation of [Bech32](https://github.com/bitcoin/bips/blob/master/bip-0173.mediawiki). All of your old wallet addresses will be replaced with the new Chech32 addresses. The only thing you can't do is send test wheat between 1.8/1.9 and 1.10 software. Anyone who upgrades to 1.10 will keep their transactions and balances of test wheat from the earlier two releases however. -- We added a first few enhancements to plotting speed. For a k=30 on a ramdisk with `-b 64 GiB` it results in an 11% speedup in overall plotting speed and a 23% improvement in phase 1 speed. Many more significant increases in plotting speed are in the works. -- The proof of space document in chiapos has been updated to the new format and edited for clarity. Additionally GitHub actions now has the on demand ability to create the PDF version. -- Relic has upstreamed our changes required for the IETF BLS standard. We now build directly from the Relic repository for all but Windows and will be migrating Windows in the next release. -- Minor improvements to the Coloured Coin wallet were integrated in advance of an upcoming re-factor. -- Smart wallet backup was upgraded to encrypt and sign the contents of the backup. - -### Fixed - -- Proof of space plotting now correctly calculates the total working space used in the `-t` directory. -- `wheat show -w` now displays a message when balances cannot be displayed instead of throwing an error. Thanks to @freddiecoleman for this fix! -- Fix issue with shutting down full node (full node processes remained open, and caused a spinner when launching Wheat) -- Various code review alerts for comparing to a wider type in chiapos were fixed. Additionally, unused code was removed from chiapos -- Benchmarking has been re-enabled in bls-signatures. 
-- Various node security vulnerabilities were addressed. -- Updated keyring, various GitHub actions, colorlog, cbor2, and clvm_tools. - -## [1.0beta9] aka Beta 1.9 - 2020-07-27 - -### Added - -- See wallet balances in command line: `wheat show -w` -- Retry opening invalid plots every 20 minutes (so you can copy a large plot into a plot directory.) -- We've added `wheat keys sign` and `wheat keys verify` to allow farmers to certify their ownership of keys. -- Windows BLS Signature library now uses libsodium for additional security. -- You can now backup and restore Smart Wallet metadata. -- Binary wheels for ARM64/aarch64 also build for python 3.7. -- See and remove plot directories from the UI and command line. -- You can now specify the memory buffer in UI. -- Optimized MPIR for Sandybridge and Ivybridge CPUs under Windows - -### Changed - -- `wheat start wallet-server` changed to `wheat start wallet`, for consistency. -- All data size units are clarified to displayed in GiB instead of GB (powers of 1024 instead of 1000.) -- Better error messages for restoring wallet from mnemonic. - -### Fixed - -- Fixed open_connection not being cancelled when node exits. -- Increase the robustness of node and wallet shutdown. -- Handle disconnection and reconnection of hard drives properly. -- Addressed pre-Haswell Windows signatures failing. -- MacOS, Linux x64, and Linux aarch64 were not correctly compiling libsodium in -the blspy/bls-signatures library. -- Removed outdated "200 plots" language from Plot tab. -- Fixed spelling error for "folder" on Plot tab. -- Various node dependency security vulnerabilities have been fixed. -- Request peers was not returning currently connected peers older than 1 day. -- Fixed timeout exception inheritance changes under python 3.8 (pull 13528) - -### Deprecated - -- Removed legacy scripts such as apple-stop-server, apple-restart-harvester, etc. 
- -## [1.0beta8] aka Beta 1.8 - 2020-07-16 - -### Added - -- We have released a new plot file format. We believe that plots made in this -format and with these IETF BLS keys will work without significant changes on -mainnet at launch. -- We now use [chacha8](https://cr.yp.to/chacha.html) and -[blake3](https://github.com/BLAKE3-team/BLAKE3) for proof of space instead of -the now deprecated AES methods. This should increase plotting speed and support -more processors. -- Plot refreshing happens during all new challenges and only new/modified files -are read. -- Updated [blspy](https://github.com/Chia-Network/bls-signatures) to use the -new [IETF standard for BLS signatures](https://tools.ietf.org/html/draft-irtf-cfrg-bls-signature-02). -- Added a faster VDF process which generates n-wesolowski proofs quickly -after the VDF result is known. This requires a high number of CPUs. To use it, -set timelord.fast_algorithm = True in the config file. -- Added a new type of timelord helper - blue boxes, which generate compact -proofs of time for existing proven blocks. This helps reducing the database -size and speeds up syncing a node for new users joining the network. Full nodes -send 100 random un-compact blocks per hour to blue boxes, and if -timelord.sanitizer_mode = True, the blue box timelord will work on those -challenges. Unlike the main timelord, average machines can run blue boxes -and contribute to the chain. Expect improvements to the install method for -blue boxes in future releases. -- From the UI you can add a directory that harvester will always check for -existing and new plots. Harvester will only look in the specific directory you -specify so you'll have to add any subfolders you want to also contain plots. -- The UI now asks for confirmation before closing and shows shutdown progress. -- UI now tries to shut down servers gracefully before exiting, and also closes -the daemon before starting. -- The various sub repositories (chiapos, chiavdf, etc.) 
now build ARM64 binary -wheels for Linux with Python 3.8. This makes installing on Ubuntu 20.04 lts on -a Raspberry Pi 3 or 4 easy. -- Ci's check to see if they have secret access and attempt to fail cleanly so -that ci runs successfully complete from PRs or forked repositories. -- Farmer now sends challenges after a handshake with harvester. -- The bls-signatures binary wheels include libsodium on all but Windows which -we expect to add in future releases. -- The wheat executable is now available if installing from the Windows or MacOS -Graphical installer. Try `./wheat -h` from -`~\AppData\Local\Wheat-Blockchain\app-0.1.8\resources\app.asar.unpacked\daemon\` -in Windows or -`/Applications/Wheat.app/Contents/Resources/app.asar.unpacked/daemon` on MacOS. - -### Changed - -- Minor changes have been made across the repositories to better support -compiling on OpenBSD. HT @n1000. -- Changed WHEAT units to TWHEAT units for testnet. -- A push to a branch will cancel all ci runs still running for that branch. -- Ci's now cache pip and npm caches between runs. -- Improve test speed with smaller discriminants, less blocks, less keys, and -smaller plots. -- RPC servers and clients were refactored. -- The keychain no longer supports old keys that don't have mnemonics. -- The keychain uses BIP39 for seed derivation, using the "" passphrase, and -also stores public keys. -- Plots.yaml has been replaced. Plot secret keys are stored in the plots, - and a list of directories that harvester can find plots in are in config.yaml. -You can move plots around to any directory in config.yaml as long as the farmer -has the correct farmer's secret key too. -- Auto scanning of plot directories for .plot files. -- The block header format was changed (puzzle hashes and pool signature). -- Coinbase and fees coin are now in merkle set, and bip158 filter. -- New harvester protocol with 2/2 harvester and farmer signatures, and modified -farmer and full node protocols. 
-- 255/256 filter which allows virtually unlimited plots per harvester or drive. -- Improved create_plots and check_plots scripts, which are now -"wheat plots create" and "wheat plots check". -- Add plot directories to config.yaml from the cli with "wheat plots add". -- Use real plot sizes in UI instead of a formula. -- HD keys now use EIP 2333 format instead of BIP32, for compatibility with -other chains. -- Keys are now derived with the EIP 2334 (m/12381/8444/a/b). -- Removed the ability to pass in sk_seed to plotting, to increase security. -- Linux builds of chiavdf and blspy now use a fresh build of gmp 6.2.1. - -### Fixed - -- uPnP now works on Windows. -- Log rotation should now properly rotate every 20MB and keep 7 historical logs. -- Node had a significant memory leak under load due to an extraneous fork -in the network code. -- Skylake processors on Windows without AVX would fail to run. -- Harvester no longer runs into 512 maximum file handles open issue on Windows. -- The version generator for new installers incorrectly handled the "dev" -versions after a release tag. -- Due to a python bug, ssl connections could randomly fail. Worked around -[Python issue 29288](https://bugs.python.org/issue29288) -- Removed websocket max message limit, allowing for more plots -- Daemon was crashing when websocket gets improperly closed - -### Deprecated - -- All keys generated before Beta 1.8 are of an old format and no longer useful. -- All plots generated before Beta 1.8 are no longer compatible with testnet and -should be deleted. - -### Known Issues - -- For Windows users on pre Haswell CPUs there is a known issue that causes -"Given G1 element failed g1_is_valid check" when attempting to generate -keys. This is a regression from our previous fix when it was upstreamed into -relic. We will make a patch available for these systems shortly. 
- -## [1.0beta7] aka Beta 1.7 - 2020-06-08 - -### Added - -- Added ability to add plot from filesystem (you will need pool_pk and sk from plots.yaml.) -- Added ability to import private keys in the UI. -- Added ability to see private keys and mnemonic seeds in the keys menu -- User can specify log level in the config file (defaults to info.) -- The Windows installer is now signed by a Wheat Network certificate. It may take some time to develop enough reputation to not warn multiple times during install. - -### Changed - -- Plots are now refreshed in the UI after each plot instead of at the end of plotting. -- We have made performance improvements to plotting speed on all platforms. -- The command line plotter now supports specifying its memory buffer size. -- Test plots for the simulation and testing harness now go into `~/.wheat/test-plots/` -- We have completely refactored all networking code towards making each Wheat service use the same default networking infrastructure and move to websockets as the default networking wire protocol. -- We added additional improvements and more RPCs to the start daemon and various services to continue to make wheat start/stop reliable cross platform. -- The install.sh script now discovers if it's running on Ubuntu less than 20.04 and correctly upgrades node.js to the current stable version. -- For GitHub ci builds of the Windows installer, editbin.exe is more reliably found. -- All installer ci builds now obtain version information automatically from setuptools_scm and convert it to an installer version number that is appropriate for the platform and type of release (dev versus release.) -- We now codesign the Apple .dmg installer with the Wheat Network developer ID on both GitHub Actions and Azure Pipelines. We will be notarizing and distributing the Azure Pipelines version as it's built on MacOS Mojave (10.14.6) for stronger cross version support. 
- -### Fixed - -- Having spaces in the path to a plot or temporary directory caused plotting to fail. -- Changing keys will no longer interrupt plotting log. -- 1.6 introduced a bug where certain very many core machines would sync the blockchain very slowly. -- The plotter log in the UI should scroll more reliably. -- The plotter UI should display the correct log on all platforms -- Starting wheat now waits for the full node to be active before contacting the introducer. - -## [1.0beta6] aka Beta 1.6 - 2020-06-01 - -### Added - -- Windows and MacOS now have one click installers that then send users to a GUI on both platforms to farm or use their wallets. Windows is built on GitHub Actions and MacOS is also built on Azure Pipelines so as to build on Mojave. -- You can see and control your farmer, harvester, and plotter from the GUI on Windows, MacOS, and Linux. -- Create plots and see the plotting log from a GUI on Windows, MacOS, and Linux. -- You can now create or import private keys with a 24 word mnemonic, both in the UI and 'wheat keys' command line. -- You can delete and change active keys from the GUI and cli. -- We added a new keychain system that replaces keys.yaml, and migrates existing users from keys.yaml. It utilizes each OS's keychain for slightly more secure key storage. -- We added a `wheat keys` command line program, to see, add, and remove private keys. -- We added RPC servers and RPC client implementations for Farmer and Harvester. The new UI uses these for additional information and functionality. -- We added total network storage space estimation to the node RPC at the `/get_network_space` endpoint instead of only being available in the cli. The RPC endpoint takes two block header hashes and estimates space between those header hashes. -- Logs now autorotate. Once the debug.log reaches 20MB it is compressed and archived keeping 7 historical 20MB logs. 
-- We now have a CHANGELOG.md that adheres closely to the [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) standard. We merged in the version history and updated some previous release notes to capture items important to the change log. We are modifying our release process to accumulate changes at the top of the change log and then copy those to the release notes at the time of the release. -- We added [lgtm](https://lgtm.com/) source analysis on pull request to the wheat-blockchain, chiapos, chiavdf, chiabip158, and bls-library repositories to add some automated security analysis to our ci. - -### Changed - -- Due to an issue with aggsig and aggsig-me, the beta 1.6 blockchain is not compatible with earlier chains. -- We replaced the Electron/JavaScript interface with a React user interface which is cleaner and more responsive. -- We now have a multithreaded harvester to farm more plots concurrently. This is especially faster when there are multiple disks being harvested. The class is also made thread safe with mutex guards. This is achieved by releasing GIL in the python bindings when fetching qualities and proofs. We estimate that the former guidance of only 50 plots per physical drive should be updated to 250-350 plots per physical drive. We will continue to improve the plots per physical drive limit during the beta period. -- Syncing a node is now much faster and uses less memory. -- `wheat netspace` has been refactored to use the `/get_network_space` RPC. The command - syntax has changed slightly. By default it calculates the last 24 blocks from the - current LCA. Optionally you can use the `-b` flag to start the calculation from a different block - height. Use `-d` to specify the delta number of blocks back into history to estimate over from either LCA or your `-b` block height. -- The Full node RPC response formats have been changed. All API calls now return a dict with success, and an additional value, for example {"success": True, "block": block}. 
-- chiapos is now easier to compile with MSVC. -- create plots now takes in an optional sk_seed, it is no longer read in from keys.yaml. If not passed in, it is randomly generated. The -i argument can now only be used when you provide an sk_seed. -- Moved to PyYAML 5.3.1 which prevents arbitrary code execution during python/object/new constructor. -- Moved to Python cryptography 2.9.2 which deprecates OpenSSL 1.0.1 and now relies upon OpenSSL 1.1.1g. -- Moved to aiosqlite 0.13.0 which adds official support for Python 3.8 and fixes a possible hung thread if a connection failed. - -### Fixed - -- In beta 1.5 we introduced a bug in aggsig and aggsig-me that we have fixed in this release. That forced a hard fork of the chain so coins and balances are lost from beta 1.5. There is no impact on existing plots. -- Starting and stopping servers now works much more reliably. -- `apple-check-plots` uses the plot root and checks the plots in the same manner as harvester. -- `apple-check-plots` now does not override plots.yaml, which means concurrent plots will properly be added to plots.yaml. -- Fixed an issue where [Relic](https://github.com/relic-toolkit/relic) and thus blspy would crash on processors older than Haswell as they don't support lzc. -- Some non-critical networking errors are no longer logged. -- Blocks with compact proofs of time are now able to be updated into the node database. -- The `install-timelord.sh` script now correctly determines which version of python it is running under and correctly builds vdf_client and correctly links to vdf_bench. It also handles upgrading CMake on Ubuntu versions older than 20.04LTS to satisfy the new CMake 3.14+ requirement to build Timelord. -- An issue in asyncio was not being caught correctly and that could cause nodes to crash. -- The build status shield layout is fixed in README.md -- Raspberry Pi 3/4 with Ubuntu 20.04LTS 64 bit should compile again. 
- -## [1.0beta5] aka Beta 1.5 - 2020-05-05 - -### Added - -- This release is primarily a maintenance release for Beta 1.4. -- We have added an option to `apple-create-plots` to specify the second temporary directory. Creating a plot is a three step process. First a working file ending in `.dat.tmp` is created. This file is usually 5 times larger than the final plot file. In the later stages of plotting a second temp file is created ending in `.dat.2.tmp` which will grow to the size of the final plot file. In the final step, the `.dat.2.tmp` is copied to the final `.dat` plot file. You can now optionally set the directory for the `.dat.2.tmp` file with the `-2` flag. An example use case is plotting on a ramdisk and writing both the second temp file and the final file out to an SSD - `chia-create-plots -n 1 -k 30 -t /mnt/ramdisk -2 /mnt/SSD -d /mnt/SSD`. - -### Changed - -- `wheat init` properly migrates from previous versions including the k>=32 workaround. Additionally, the farming target key is checked to make sure that it is the valid and correct public key format. -- We have implemented a workaround for the `wheat start` issues some were having upon crash or reboot. We will be rebuilding start and stop to be robust across platforms. -- This release re-includes `apple-start-harvester`. -- Coloured coins now have a prefix to help identify them. When sending transactions, the new prefix is incompatible with older clients. -- The user interface now refers to wheat coins with their correct currency code of WHEAT. -- The next release will now be in the dev branch instead of the e.g. beta-1.5. Additionally we are enforcing linear merge into dev and prefer rebase merges or partial squash merges of particularly chatty commit histories. -- Building the sub repositories (chiapos, chiavdf, blslibrary) now requires CMake 3.14+. 
- -### Fixed - -- There was a regression in Wheat Proof of Space ([chiapos](https://github.com/Chia-Network/chiapos)) that came from our efforts to speed up plotting on Windows native. Now k>=32 plots work correctly. We made additional bug fixes and corrected limiting small k size generation. -- There was a bug in Timelord handling that could stop all VDF progress. - -### Deprecated - -- We have made significant changes to the full node database to make it more reliable and quicker to restart. This requires re-syncing the current chain. If you use `wheat init` then sync on first start will happen automatically. "\$WHEAT_ROOT" users will need to delete `$WHEAT_ROOT/db/*` before starting Beta 1.5. This also fixes the simulation issue in Beta 1.4 where tips could go "back in time." - -### Known issues - -- uPnP support on Windows may be broken. However, Windows nodes will be able to connect to other nodes and, once connected, participate fully in the network. -- Currently, there is no way to restore a Coloured Coin Wallet. - -## [1.0beta4] aka Beta 1.4 - 2020-04-29 - -### Added - -- This release adds Coloured coin support with offers. Yes that is the correct spelling. Coloured coins allow you to issue a coin, token, or asset with nearly unlimited issuance plans and functionality. They support inner smart transactions so they can inherit any of the other functionality you can implement in Wheatlisp. Offers are especially cool as they create a truly decentralized exchange capability. Read much more about them in Bram's [blog post on Coloured coins](https://wheat.network/2020/04/29/coloured-coins-launch.en.html). -- This release adds support for native Windows via a (mostly) automated installer and MacOS Mojave. Windows still requires some PowerShell command line use. You should expect ongoing improvements in ease of install and replication of the command line tools in the GUI. Again huge thanks to @dkackman for continued Windows installer development. 
Native Windows is currently slightly slower than the same version running in WSL 2 on the same machine for both block verification and plotting. -- We made some speed improvements that positively affected all platforms while trying to increase plotting speed in Windows. -- The graphical Full Node display now shows the expected finish times of each of the prospective chain tips. -- Now you can run estimates of the total space currently farming the network. Try `wheat netspace -d 12` to run an estimate over the last 12 blocks which is approximately 1 hour. -- We’ve added TLS authentication for incoming farmer connections. TLS certs and keys are generated during wheat init and only full nodes with your keys will be able to connect to your Farmer. Also, Harvester, Timelord, and Wallet will now not accept incoming connections which reduces the application attack surface. -- The node RPC has a new endpoint get_header_by_height which allows you to retrieve the block header from a block height. Try `wheat show -bh 1000` to see the block header hash of block 1000. You can then look up the block details with `wheat show -b f655e1a9f7f8c89a703e40d9ce82ae33508badaf7b37fa1a56cad27926b5e936` which will look up a block by its header hash. -- Our Windows binaries check the processor they are about to run on at runtime and choose the best processor optimizations for our [MPIR](http://mpir.org/) VDF dependency on Windows. -- Most of the content of README.md and INSTALL.md has been moved to the [repository wiki](https://github.com/WheatNetwork/wheat-blockchain/wiki) and placed in [INSTALL](https://github.com/WheatNetwork/wheat-blockchain/wiki/INSTALL) and [Quick Start Guide](https://github.com/WheatNetwork/wheat-blockchain/wiki/Quick-Start-Guide) -- Harvester is now asynchronous and will better be able to look up more plots spread across more physical drives. -- Full node startup time has been sped up significantly by optimizing the loading of the blockchain from disk. 
- -### Changed - -- Most scripts have been removed in favor of wheat action commands. You can run `wheat version` or `wheat start node` for example. Just running `apple` will show you more options. However `apple-create-plots` continues to use the hyphenated form. Also it's now `wheat generate keys` as another example. -- Wheat start commands like `wheat start farmer` and `wheat stop node` now keep track of process IDs in a run/ directory in your configuration directory. `wheat stop` is unlikely to work on Windows native for now. If `wheat start -r node` doesn't work you can force the run/ directory to be reset with `wheat start -f node`. -- We suggest you take a look at our [Upgrading documentation](https://github.com/WheatNetwork/wheat-blockchain/wiki/Updating-beta-software) if you aren't performing a new install. -- blspy now has libsodium included in the MacOS and Linux binary wheels. -- miniupnpc and setproctitle were dynamically checked for and installed at runtime. Removed those checks and we rely upon the install tools installing them before first run. -- Windows wheels that the Windows Installer packages are also available in the ci Artifacts in a .zip file. -- The script `wheat start wallet-gui` has been changed to `wheat start wallet` which launches both the GUI and server on MacOS and Linux. `wheat start wallet-server` remains for WSL 2 and Windows native. - -### Deprecated - -- This release breaks the wire protocol so it comes with a new chain. As we merged in Coloured coins we found that we needed to change how certain hashes were managed. Your 1.0beta3 coin balances will be lost when you upgrade but your plots will continue to work on the 1.0beta4 chain. Since we had to make a breaking wire protocol change we accelerated changing our hash to prime function for starting proofs of time. That was also going to be a future breaking change. - -### Known issues - -- Plots of k>=32 are not working for farming, and some broken plots can cause a memory leak. 
A [workaround is available](https://github.com/WheatNetwork/wheat-blockchain/wiki/Beta-1.4-k=32-or-larger-work-around). -- If you are running a simulation, blockchain tips are not saved in the database and this is a regression. If you stop a node it can go back in time and cause an odd state. This doesn't practically affect testnet participation as, on restart, node will just sync up a few blocks to the then current tips. -- uPnP support on Windows may be broken. However, Windows nodes will be able to connect to other nodes and, once connected, participate fully in the network. -- Coins are not currently reserved as part of trade offers and thus could potentially be spent before the offer is accepted resulting in a failed offer transaction. -- Currently, there is no way to restore a Coloured Coin Wallet. -- The `wheat stop all` command sometimes fails, use `apple-stop-all` instead. In Windows, use the task manager to stop the servers. - -## [1.0beta3] aka Beta 1.3 - 2020-04-08 - -### Added - -- Windows, WSL 2, Linux and MacOS installation is significantly streamlined. There is a new Windows installer for the Wallet GUI (huge thanks to @dkackman). -- All installs can now be from the source repository or just the binary dependencies on WSL 2, most modern Linuxes, and MacOS Catalina. Binary support is for both Python 3.7 and 3.8. -- There is a new migration tool to move from Beta1 (or 2) to Beta3. It should move everything except your plots. -- There is a new command `wheat init` that will migrate files and generate your initial configuration. If you want to use the Wallet or farm, you will also have to `apple-generate-keys`. You can read step by step instructions for [upgrading from a previous beta release](https://github.com/WheatNetwork/wheat-blockchain/wiki/Updating-beta-software). If you've set `$WHEAT_ROOT` you will have to make sure your existing configuration remains compatible manually. -- Wallet has improved paper wallet recovery support. 
-- We now also support restoring old wallets with only the wallet_sk and wallet_target. Beta3's Wallet will re-sync from scratch. -- We've made lots of little improvements that should speed up node syncing -- We added full block lookup to `wheat show`. - -### Changed - -- `apple-restart-harvester` has been renamed from `apple-start-harvester` to better reflect its functionality. Use it to restart a harvester that's farming so that it will pick up newly finished plots. -- We made the Wallet configurable to connect to a remote trusted node. -- We now have farmers reconnect to their trusted node if they lose contact. -- We updated our miniupnpc dependency to version 2.1. -- We increased the default farmer propagate threshold to reduce chain stall probability. - -### Deprecated - -- You should not copy over any prior Wallet database as they are not compatible with Beta3. Your existing full node will not have to re-sync and its database remains compatible. - -### Fixed - -- Among a lot of bug fixes was removing a regression that slowed plotting on MacOS by 3 times and may have had smaller impacts on plotting speed on other platforms. -- We've removed some race conditions in the Wallet -- We resolved the "invalid blocks could disconnect farmers" bug -- We upped the default TLS certificate size to 2048 for some unhappy operating systems. - -### Known issues - -- Windows native is close but not here yet. Also, we should be adding back MacOS Mojave support shortly. -- So why is this Beta 3 you're wondering? Well, we're getting used to our new release management tools and a hotfix devoured our beta2 nomenclature... We've marked it YANKED here. -- If you previously used the plot_root variable in config, your plot directory names might not migrate correctly. 
Please double check the filenames in `~/.wheat/beta-1.0b3/config/plots.yaml` after migrating - -## [1.0beta2] aka Beta 1.2 - 2020-04-04 [YANKED] - -## [1.0beta1] aka Beta 1.0 - 2020-04-02 - -### Added - -- There is now full transaction support on the Wheat blockchain. In this initial Beta 1.0 release, all transaction types are supported though the wallets and UIs currently only directly support basic transactions like coinbase rewards and sending coins while paying fees. UI support for our [smart transactions](https://github.com/WheatNetwork/wallets/blob/main/README.md) will be available in the UIs shortly. -- Wallet and Node GUI’s are available on Windows, Mac, and desktop Linux platforms. We now use an Electron UI that is a full light client wallet that can also serve as a node UI. Our Windows Electron Wallet can run standalone by connecting to other nodes on the network or another node you run. WSL 2 on Windows can run everything except the Wallet but you can run the Wallet on the native Windows side of the same machine. Also the WSL 2 install process is 3 times faster and _much_ easier. Windows native node/farmer/plotting functionality are coming soon. -- Install is significantly easier with fewer dependencies on all supported platforms. -- If you’re a farmer you can use the Wallet to keep track of your earnings. Either use the same keys.yaml on the same machine or copy the keys.yaml to another machine where you want to keep track of and spend your coins. -- We have continued to make improvements to the speed of VDF squaring, creating a VDF proof, and verifying a VDF proof. - -### Changed - -- We have revamped the wheat management command line. To start a farmer all you have to do is start the venv with `. ./activate` and then type `apple-start-farmer &`. The [README.md](https://github.com/WheatNetwork/wheat-blockchain/blob/main/README.md) has been updated to reflect the new commands. -- We have moved all node to node communication to TLS 1.3 by default. 
For now, all TLS is unauthenticated but certain types of over the wire node to node communications will have the ability to authenticate both by certificate and by inter protocol signature. Encrypting over the wire by default stops casual snooping of transaction origination, light wallet to trusted node communication, and harvester-farmer-node communication for example. This leaves only the mempool and the chain itself open to casual observation by the public and the various entities around the world. -- Configuration directories have been moved to a default location of HomeDirectory/.wheat/release/config, plots/ db/, wallet/ etc. This can be overridden by `export WHEAT_ROOT=~/.wheat` for example which would then put the plots directory in `HomeDirectory/.wheat/plots`. -- The libraries chia-pos, chia-fast-vdf, and chia-bip-158 have been moved to their own repositories: [chiapos](https://github.com/Chia-Network/chiapos), [chiavdf](https://github.com/Chia-Network/chiavdf), and [chaibip158](https://github.com/Chia-Network/chiabip158). They are brought in by wheat-blockchain at install time. Our BLS signature library remains at [bls-signatures](https://github.com/Chia-Network/bls-signatures). -- The install process now brings in chiapos, chiavdf, etc from Pypi where they are auto published via GitHub Actions ci using cibuildwheel. Check out `.github/workflows/build.yml` for build methods in each of the sub repositories. -- `chia-regenerate-keys` has been renamed `chia-generate-keys`. -- setproctitle is now an optional install dependency that we will continue to install in the default install methods. -- The project now defaults to `venv` without the proceeding . to better match best practices. -- Developer requirements were separated from the actual requirements. -- `install-timelord.sh` has been pulled out from `install.sh`. This script downloads the source python package for chiavdf and compiles it locally for timelords. 
vdf_client can be included or excluded to make building normal user wheels easier. - -### Removed - -- The Beta release is not compatible with the history of the Alpha blockchain and we will be ceasing support of the Alpha chain approximately two weeks after the release of this Beta. However, your plots and keys are fully compatible with the Beta chain. Please save your plot keys! Examples of how to save your keys and upgrade to the Beta are available on the [repo wiki](https://github.com/WheatNetwork/wheat-blockchain/wiki). -- The ssh ui and web ui are removed in favor of the cli ui and the Electron GUI. To mimic the ssh ui try `wheat show -s -c` and try `wheat show --help` for usage instructions. -- We have removed the inkfish vdf implementation and replaced it with the pybind11 C++ version. - -### Known Issues - -- Wallet currently has limited support for restoring from a paper wallet. Wallet uses hierarchically deterministic keys, and assumes that any keys that are at index "higher than one" have not been used yet. If you have received a payment to an address associated with a key at a higher index and you want it to appear in Wallet, the current work around is to press the "NEW ADDRESS" button multiple times shortly after sync start. That will make wallet "aware of" addresses at higher indexes. Full support for paper wallet restoration will be added soon. -- We. Don't... Have.. Windows.... Native. YET!?! But the entire project is compiling on Windows 10 natively. Assistance would be more than appreciated if you have experience building binary python wheels for Windows. We are pushing some limits like uint-128, avx-2, avx-512, and AES-NI so it's not as easy as it looks... - -## [Alpha 1.5.1] - 2020-03-24 - -### Fixed - -- Fixed a bug in harvester that caused plots not to be farmed. - -## [Alpha 1.5] - 2020-03-08 - -### Added - -- You can now provide an index to create_plots using the -i flag to create an arbitrary new plot derived from an existing plot key. 
Thanks @xorinox. -- There is a new restart_harvester.sh in scripts/ to easily restart a harvester when you want to add a newly completed plot to the farm without restarting farmer, fullnode, timelord, etc. -- Harvesters now log errors if they encounter a malformed or corrupted plot file. Again thanks @xorinox. -- New AJAX based full node UI. To access go to [http://127.0.0.1:8555/index.html](http://127.0.0.1:8555/index.html) with any modern web browser on the same machine as the full node. -- If you want to benchmark your CPU as a VDF you can use vdf_bench square_asm 500000 for the assembly optimized test or just vdf_bench square 500000 for the plain C++ code path. This tool is found in lib/chiavdf/fast_vdf/. -- Improvements to shutting down services in all of the scripts in scripts/. Another @xorinox HT. - -### Changed - -- VDF verification code is improved and is now more paranoid. -- Timelords can now be run as a cluster of VDF client instances around a central Timelord instance.. Instructions are available in the Cluster Timelord section of the repo wiki. - -### Fixed - -- Thanks @dkackman for clean ups to the proof of space code. -- Thanks to @davision for some typo fixes. - -## [Alpha 1.4.1] - 2020-03-06 - -### Fixed - -- Stack overflow in verifier - -## [Alpha 1.4] - 2020-02-19 - -### Added - -- Compiling and execution now detect AES-NI, or a lack of it, and fall back to a software AES implementation. -- Software AES adds support for Raspberry Pi 4, related ARM processors and Celeron processors. -- Added install instructions for CentOS/RHEL 8.1. -- Plotting working directory and final directory can both be specified in config.yaml -- Proof of space binary and create_plots scripts now allows passing in temp and final directories. -- Plotting now logs a timestamp at each major step. -- Added support for Python 3.8. - -### Changed - -- Due to changes to the sqlite database that are not backwards compatible, re-synch will be required. 
-- Loading the blockchain only loads headers into memory instead of header blocks (header + proofs), speeds up the startup, and reduces normal operation memory usage by 80%. -- Memory access is now synchronous to reduce use of locks and speed up block processing. -- Wheat fullnode, farmer and harvester now default to logging to wheat.log in the wheat-blockchain directory. This is configured in config.yaml and due to config.yaml changes it is recommended to edit the new template config instead of using older config.yaml’s from previous versions. -- uvloop is now an optional add on. -- Harvester/farmer will not try to farm plots that they don’t have the key for. - -### Fixed - -- Thanks to @A-Caccese for fixes to Windows WSL instructions. -- Thanks @dkackman who also fixed some compiler warnings. - -## [Alpha 1.3] - 2020-01-21 - -### Added - -- FullNode performance improvements - Syncing up to the blockchain by importing all blocks is faster due to improvements in VDF verification speed and multithreading block verification. -- VDF improvements - VDF verification and generation speed has increased and dependence on flint2 has been removed. We wish to thank Dr. William Hart (@wbhart) for dual licensing parts of his contributions in FLINT and Antic for inclusion in the Wheat blockchain. -- Implemented an RPC interface with JSON serialization for streamables - currently on port 8555. -- Added details on how to contribute in CONTRIBUTING.md. Thanks @RichardLitt. -- Added color logging -- Now wheat_harvester will periodically announce which plots it is currently farming and their k sizes. - -### Changed - -- Moved the ssh UI to use RPC. -- Changed the displayed process names for harvester, farmer, fullnode, timelords, and VDFs to to wheat_full node, wheat_harvester, etc. Fixed a bug that could cause inadvertent shutdown of other processes like an ongoing plotting session when new wheat services were started. 
-- Clarified the minimum version of boost required to build timelord/VDFs. Hat tip @AdrianScott -- Consensus and related documentation moved to the repository wiki. - -### Fixed - -- Fixed a bug where the node may not sync if it restarts close to a tip. -- Fixed a typo in the UI. Hat tip to @lvcivs for the pr. -- Fixed a memory leak in qfb_nudupl. -- Lots of smaller bug and documentation fixes. - -### Removed - -- Mongodb removed and replaced with SQLite for the blockchain database. This will require nodes to re-sync with the network. Luckily this is now faster. - -## [Alpha 1.2] - 2020-01-08 - -### Added - -- Performance improvements - - Removes database access from blockchain, and handles headers instead of blocks - - Avoid processing blocks and unfinished blocks that we have already seen. - - Also adds test for load. - -### Changed - -- Improvements to plotting via lookup table - as much as 15% faster - -### Fixed - -- Fixed a blockchain initialization bug - -## [Alpha 1.1.1] - 2019-12-25 - -### Added - -- Added install instructions for Windows using WSL and Ubuntu. -- Added install instructions for CentOS 7.7. -- Added install instructions for Amazon Linux 2. -- New install_timelord.sh. - -### Changed - -- Installation is now separated into everything except timelord/vdf and timelord/vdf. -- replaced VDF server compilation scripts with Makefile - -### Fixed - -- setuptools_scm was corrupting .zip downloads of the repository. - -## [Alpha 1.1] - 2019-12-12 - -### Added - -- Introducer now makes sure it only sends peer addresses to peers of peers that it can reach on port 8444 or their UPnP port. -- We are now using setuptools_scm for versioning. - -### Changed - -- Timelord VDF submission and management logic upgraded. - -### Fixed - -- FullNode: A long running or low ulimit situation could cause an “out of files” issue which would stop new connection creation. Removed the underlying socket leak. -- FullNode: Multiple SSH UI bugs fixed. 
-- Harvester: Farming a plot of k = 30 or greater could lead to a segfault in the harvester. -- Updated blspy requirement to address an issue in the underlying bls-signatures library. - -## [Alpha 1.0] - 2019-12-05 - -### Added - -- This is the first release of the Wheat testnet! Blockchain consensus, proof of time, and proof of space are included. -- More details on the release at [https://www.wheat.network/developer/](https://www.wheat.network/developer/) - -[unreleased]: https://github.com/WheatNetwork/wheat-blockchain/compare/1.0beta5...dev -[1.0beta5]: https://github.com/WheatNetwork/wheat-blockchain/compare/1.0beta4...1.0beta5 -[1.0beta4]: https://github.com/WheatNetwork/wheat-blockchain/compare/1.0beta3...1.0beta4 -[1.0beta3]: https://github.com/WheatNetwork/wheat-blockchain/compare/1.0beta2...1.0beta3 -[1.0beta2]: https://github.com/WheatNetwork/wheat-blockchain/compare/1.0beta1...1.0beta2 -[1.0beta1]: https://github.com/WheatNetwork/wheat-blockchain/compare/alpha-1.5.1...1.0beta1 -[alpha 1.5.1]: https://github.com/WheatNetwork/wheat-blockchain/compare/alpha-1.5...alpha-1.5.1 -[alpha 1.5]: https://github.com/WheatNetwork/wheat-blockchain/compare/alpha-1.4.1...alpha-1.5 -[alpha 1.4.1]: https://github.com/WheatNetwork/wheat-blockchain/compare/alpha-1.4...alpha-1.4.1 -[alpha 1.4]: https://github.com/WheatNetwork/wheat-blockchain/compare/alpha-1.3...alpha-1.4 -[alpha 1.3]: https://github.com/WheatNetwork/wheat-blockchain/compare/alpha-1.2...alpha-1.3 -[alpha 1.2]: https://github.com/WheatNetwork/wheat-blockchain/compare/alpha-1.1.1...alpha-1.2 -[alpha 1.1.1]: https://github.com/WheatNetwork/wheat-blockchain/compare/alpha-1.1...alpha-1.1.1 -[alpha 1.1]: https://github.com/WheatNetwork/wheat-blockchain/compare/alpha-1.0...alpha-1.1 -[alpha 1.0]: https://github.com/WheatNetwork/wheat-blockchain/releases/tag/Alpha-1.0 diff --git a/azure-pipelines.yml b/azure-pipelines.yml index 8805f30..a74cd09 100644 --- a/azure-pipelines.yml +++ b/azure-pipelines.yml @@ -9,7 +9,7 
@@ trigger: - "*" tags: include: - - "*" + - "*" pool: vmImage: "macOS-10.15" @@ -44,24 +44,18 @@ steps: deleteCert: true - script: | - python -m venv venv - ln -s venv/bin/activate . - . ./activate - python -m pip install --upgrade pip - pip install wheel pep517 setuptools_scm - node -v - pip install -i https://pypi.chia.net/simple/ miniupnpc==2.2.2 - cd build_scripts + python3 -m venv ../venv + . ../venv/bin/activate + pip install setuptools_scm touch $(System.DefaultWorkingDirectory)/build_scripts/version.txt - python -m installer-version > $(System.DefaultWorkingDirectory)/build_scripts/version.txt - displayName: "Install dependencies" + python ./build_scripts/installer-version.py > $(System.DefaultWorkingDirectory)/build_scripts/version.txt + cat $(System.DefaultWorkingDirectory)/build_scripts/version.txt + deactivate + displayName: Create installer version number - script: | - . ./activate - clang --version - pip wheel --use-pep517 --extra-index-url https://pypi.chia.net/simple/ --wheel-dir=wheels . 
- pip install --no-index --find-links=./wheels/ wheat-blockchain - displayName: "Build and install wheels" + sh install.sh + displayName: "Install dependencies" - task: NodeTool@0 inputs: @@ -75,28 +69,16 @@ steps: APPLE_NOTARIZE_PASSWORD="$(APPLE_NOTARIZE_PASSWORD)" export APPLE_NOTARIZE_PASSWORD if [ "$(APPLE_NOTARIZE_PASSWORD)" ]; then NOTARIZE="true"; export NOTARIZE; fi - git submodule update --init --recursive cd build_scripts || exit sh build_macos.sh displayName: "Build DMG with build_scripts/build_macos.sh" - - task: PublishPipelineArtifact@1 - inputs: - targetPath: $(System.DefaultWorkingDirectory)/wheels - artifactName: MacOS-wheels - displayName: "Upload MacOS wheels" - - task: PublishPipelineArtifact@1 inputs: targetPath: $(System.DefaultWorkingDirectory)/build_scripts/final_installer/ artifactName: MacOS-DMG displayName: "Upload MacOS DMG" - - bash: | - ls $(System.DefaultWorkingDirectory)/build_scripts/ - cat $(System.DefaultWorkingDirectory)/build_scripts/version.txt - displayName: "list files in dir" - - bash: | ls $(System.DefaultWorkingDirectory)/build_scripts/final_installer/ cd $(System.DefaultWorkingDirectory)/build_scripts/ @@ -122,6 +104,17 @@ steps: displayName: "Create torrent file" condition: and(succeeded(), contains(variables['build.sourceBranch'], 'refs/tags/')) + - bash: | + export AWS_ACCESS_KEY_ID=$(AccessKey) + export AWS_SECRET_ACCESS_KEY=$(SecretKey) + export AWS_DEFAULT_REGION=us-west-2 + cd $(System.DefaultWorkingDirectory)/build_scripts/ + export WHEAT_VERSION="Wheat-"$( DBWrapper: + db_filename = Path("coin-store-benchmark.db") + try: + os.unlink(db_filename) + except FileNotFoundError: + pass + connection = await aiosqlite.connect(db_filename) + await connection.execute("pragma journal_mode=wal") + await connection.execute("pragma synchronous=OFF") + return DBWrapper(connection) + + +def rand_hash() -> bytes32: + return random.randbytes(32) + + +def make_coin() -> Coin: + return Coin(rand_hash(), rand_hash(), uint64(1)) + + 
+async def run_new_block_benchmark(): + + db_wrapper: DBWrapper = await setup_db() + + try: + coin_store = await CoinStore.create(db_wrapper) + # farmer puzzle hash + ph = bytes32(b"a" * 32) + + all_added: List[bytes32] = [] + + block_height = 1 + timestamp = 1631794488 + + print("Building database ", end="") + for height in range(block_height, block_height + NUM_ITERS): + additions = [] + removals = [] + + # add some new coins + for i in range(2000): + c = make_coin() + additions.append(c) + all_added.append(c.get_hash()) + + # farm rewards + farmer_coin = create_farmer_coin(height, ph, 250000000, DEFAULT_CONSTANTS.GENESIS_CHALLENGE) + pool_coin = create_pool_coin(height, ph, 1750000000, DEFAULT_CONSTANTS.GENESIS_CHALLENGE) + reward_coins = [pool_coin, farmer_coin] + all_added += [pool_coin.name(), farmer_coin.name()] + + # remove some coins we've added previously + random.shuffle(all_added) + removals = all_added[:100] + all_added = all_added[100:] + + await coin_store.new_block( + height, + timestamp, + set(reward_coins), + additions, + removals, + ) + await db_wrapper.db.commit() + + # 19 seconds per block + timestamp += 19 + + print(".", end="") + sys.stdout.flush() + block_height += NUM_ITERS + + total_time = 0 + total_add = 0 + total_remove = 0 + print("\nProfiling mostly additions ", end="") + for height in range(block_height, block_height + NUM_ITERS): + additions = [] + removals = [] + + # add some new coins + for i in range(2000): + c = make_coin() + additions.append(c) + all_added.append(c.get_hash()) + total_add += 2000 + + farmer_coin = create_farmer_coin(height, ph, 250000000, DEFAULT_CONSTANTS.GENESIS_CHALLENGE) + pool_coin = create_pool_coin(height, ph, 1750000000, DEFAULT_CONSTANTS.GENESIS_CHALLENGE) + reward_coins = [pool_coin, farmer_coin] + all_added += [pool_coin.name(), farmer_coin.name()] + total_add += 2 + + # remove some coins we've added previously + random.shuffle(all_added) + removals = all_added[:100] + all_added = all_added[100:] + 
total_remove += 100 + + start = time() + await coin_store.new_block( + height, + timestamp, + set(reward_coins), + additions, + removals, + ) + await db_wrapper.db.commit() + stop = time() + + # 19 seconds per block + timestamp += 19 + + total_time += stop - start + print(".", end="") + sys.stdout.flush() + + block_height += NUM_ITERS + + print(f"\nMOSTLY ADDITIONS, time: {total_time:0.4f}s additions: {total_add} removals: {total_remove}") + + print("Profiling mostly removals ", end="") + total_add = 0 + total_remove = 0 + total_time = 0 + for height in range(block_height, block_height + NUM_ITERS): + additions = [] + removals = [] + + # add one new coins + c = make_coin() + additions.append(c) + all_added.append(c.get_hash()) + total_add += 1 + + farmer_coin = create_farmer_coin(height, ph, 250000000, DEFAULT_CONSTANTS.GENESIS_CHALLENGE) + pool_coin = create_pool_coin(height, ph, 1750000000, DEFAULT_CONSTANTS.GENESIS_CHALLENGE) + reward_coins = [pool_coin, farmer_coin] + all_added += [pool_coin.name(), farmer_coin.name()] + total_add += 2 + + # remove some coins we've added previously + random.shuffle(all_added) + removals = all_added[:700] + all_added = all_added[700:] + total_remove += 700 + + start = time() + await coin_store.new_block( + height, + timestamp, + set(reward_coins), + additions, + removals, + ) + await db_wrapper.db.commit() + + stop = time() + + # 19 seconds per block + timestamp += 19 + + total_time += stop - start + print(".", end="") + sys.stdout.flush() + + block_height += NUM_ITERS + + print(f"\nMOSTLY REMOVALS, time: {total_time:0.4f}s additions: {total_add} removals: {total_remove}") + + print("Profiling full block transactions", end="") + total_add = 0 + total_remove = 0 + total_time = 0 + for height in range(block_height, block_height + NUM_ITERS): + additions = [] + removals = [] + + # add some new coins + for i in range(2000): + c = make_coin() + additions.append(c) + all_added.append(c.get_hash()) + total_add += 2000 + + farmer_coin = 
create_farmer_coin(height, ph, 250000000, DEFAULT_CONSTANTS.GENESIS_CHALLENGE) + pool_coin = create_pool_coin(height, ph, 1750000000, DEFAULT_CONSTANTS.GENESIS_CHALLENGE) + reward_coins = [pool_coin, farmer_coin] + all_added += [pool_coin.name(), farmer_coin.name()] + total_add += 2 + + # remove some coins we've added previously + random.shuffle(all_added) + removals = all_added[:2000] + all_added = all_added[2000:] + total_remove += 2000 + + start = time() + await coin_store.new_block( + height, + timestamp, + set(reward_coins), + additions, + removals, + ) + await db_wrapper.db.commit() + stop = time() + + # 19 seconds per block + timestamp += 19 + + total_time += stop - start + print(".", end="") + sys.stdout.flush() + + print(f"\nFULLBLOCKS, time: {total_time:0.4f}s additions: {total_add} removals: {total_remove}") + + finally: + await db_wrapper.db.close() + + +if __name__ == "__main__": + asyncio.run(run_new_block_benchmark()) diff --git a/build_scripts/build_linux_deb.sh b/build_scripts/build_linux_deb.sh index 46a62cf..6405bd5 100644 --- a/build_scripts/build_linux_deb.sh +++ b/build_scripts/build_linux_deb.sh @@ -60,7 +60,7 @@ cp package.json package.json.orig jq --arg VER "$WHEAT_INSTALLER_VERSION" '.version=$VER' package.json > temp.json && mv temp.json package.json electron-packager . wheat-blockchain --asar.unpack="**/daemon/**" --platform=linux \ ---icon=src/assets/img/Wheat.icns --overwrite --app-bundle-id=net.wheat.blockchain \ +--icon=src/assets/img/Wheat.icns --overwrite --app-bundle-id=network.wheat.blockchain \ --appVersion=$WHEAT_INSTALLER_VERSION LAST_EXIT_CODE=$? diff --git a/build_scripts/build_linux_rpm.sh b/build_scripts/build_linux_rpm.sh index d796e46..4486f8a 100644 --- a/build_scripts/build_linux_rpm.sh +++ b/build_scripts/build_linux_rpm.sh @@ -61,7 +61,7 @@ cp package.json package.json.orig jq --arg VER "$WHEAT_INSTALLER_VERSION" '.version=$VER' package.json > temp.json && mv temp.json package.json electron-packager . 
wheat-blockchain --asar.unpack="**/daemon/**" --platform=linux \ ---icon=src/assets/img/Wheat.icns --overwrite --app-bundle-id=net.wheat.blockchain \ +--icon=src/assets/img/Wheat.icns --overwrite --app-bundle-id=network.wheat.blockchain \ --appVersion=$WHEAT_INSTALLER_VERSION LAST_EXIT_CODE=$? diff --git a/build_scripts/build_macos.sh b/build_scripts/build_macos.sh index a99fa95..1533774 100644 --- a/build_scripts/build_macos.sh +++ b/build_scripts/build_macos.sh @@ -44,10 +44,19 @@ if [ "$LAST_EXIT_CODE" -ne 0 ]; then exit $LAST_EXIT_CODE fi +# sets the version for wheat-blockchain in package.json +brew install jq +cp package.json package.json.orig +jq --arg VER "$WHEAT_INSTALLER_VERSION" '.version=$VER' package.json > temp.json && mv temp.json package.json + electron-packager . Wheat --asar.unpack="**/daemon/**" --platform=darwin \ --icon=src/assets/img/Wheat.icns --overwrite --app-bundle-id=network.wheat.blockchain \ --appVersion=$WHEAT_INSTALLER_VERSION LAST_EXIT_CODE=$? + +# reset the package.json to the original +mv package.json.orig package.json + if [ "$LAST_EXIT_CODE" -ne 0 ]; then echo >&2 "electron-packager failed!" exit $LAST_EXIT_CODE diff --git a/build_scripts/build_macos_m1.sh b/build_scripts/build_macos_m1.sh index 53cc47c..867abc2 100644 --- a/build_scripts/build_macos_m1.sh +++ b/build_scripts/build_macos_m1.sh @@ -49,10 +49,19 @@ if [ "$LAST_EXIT_CODE" -ne 0 ]; then exit $LAST_EXIT_CODE fi +# sets the version for wheat-blockchain in package.json +brew install jq +cp package.json package.json.orig +jq --arg VER "$WHEAT_INSTALLER_VERSION" '.version=$VER' package.json > temp.json && mv temp.json package.json + electron-packager . Wheat --asar.unpack="**/daemon/**" --platform=darwin \ ---icon=src/assets/img/Wheat.icns --overwrite --app-bundle-id=net.wheat.blockchain \ +--icon=src/assets/img/Wheat.icns --overwrite --app-bundle-id=network.wheat.blockchain \ --appVersion=$WHEAT_INSTALLER_VERSION LAST_EXIT_CODE=$? 
+ +# reset the package.json to the original +mv package.json.orig package.json + if [ "$LAST_EXIT_CODE" -ne 0 ]; then echo >&2 "electron-packager failed!" exit $LAST_EXIT_CODE diff --git a/build_scripts/build_windows.ps1 b/build_scripts/build_windows.ps1 index 333bd16..639262e 100644 --- a/build_scripts/build_windows.ps1 +++ b/build_scripts/build_windows.ps1 @@ -13,7 +13,7 @@ Write-Output " ---" Invoke-WebRequest -Uri "https://pypi.chia.net/simple/miniupnpc/miniupnpc-2.2.2-cp39-cp39-win_amd64.whl" -OutFile "miniupnpc-2.2.2-cp39-cp39-win_amd64.whl" Write-Output "Using win_amd64 python 3.9 wheel from https://github.com/miniupnp/miniupnp/pull/475 (2.2.0-RC1)" Write-Output "Actual build from https://github.com/miniupnp/miniupnp/commit/7783ac1545f70e3341da5866069bde88244dd848" -If ($LastExitCode -gt 0){ +If ($LastExitCode -gt 0) { Throw "Failed to download miniupnpc!" } else @@ -27,6 +27,7 @@ Write-Output "Create venv - python3.9 is required in PATH" Write-Output " ---" python -m venv venv . .\venv\Scripts\Activate.ps1 +Copy-Item E:/chia/dnspython/* ./venv/Lib/site-packages/ -recurse -force python -m pip install --upgrade pip pip install wheel pep517 pip install pywin32 @@ -107,6 +108,15 @@ $packageName = "Wheat-$packageVersion" Write-Output "packageName is $packageName" +# Write-Output " ---" +# Write-Output "fix version in package.json" +# choco install jq +# cp package.json package.json.orig +# jq --arg VER "$env:WHEAT_INSTALLER_VERSION" '.version=$VER' package.json > temp.json +# rm package.json +# mv temp.json package.json +# Write-Output " ---" + Write-Output " ---" Write-Output "electron-packager" electron-packager . 
Wheat --asar.unpack="**\daemon\**" --overwrite --icon=.\src\assets\img\wheat.ico --app-version=$packageVersion diff --git a/build_scripts/clean-runner.sh b/build_scripts/clean-runner.sh index 83b92de..1644a87 100644 --- a/build_scripts/clean-runner.sh +++ b/build_scripts/clean-runner.sh @@ -21,7 +21,7 @@ cd "$PWD" || true PATH=$(brew --prefix node@14)/bin:$PATH || true export PATH npm uninstall -g notarize-cli || true -npm uninstall -g @chia-network/notarize-cli || true +npm uninstall -g @wheat-network/notarize-cli || true npm uninstall -g electron-installer-dmg || true npm uninstall -g electron-packager || true npm uninstall -g electron/electron-osx-sign || true diff --git a/install-gui.sh b/install-gui.sh index 300e18f..69c799c 100644 --- a/install-gui.sh +++ b/install-gui.sh @@ -24,7 +24,17 @@ if [ "$(uname)" = "Linux" ]; then if type apt-get; then # Debian/Ubuntu UBUNTU=true - sudo apt-get install -y npm nodejs libxss1 + + # Check if we are running a Raspberry PI 4 + if [ "$(uname -m)" = "aarch64" ] \ + && [ "$(uname -n)" = "raspberrypi" ]; then + # Check if NodeJS & NPM is installed + type npm >/dev/null 2>&1 || { + echo >&2 "Please install NODEJS&NPM manually" + } + else + sudo apt-get install -y npm nodejs libxss1 + fi elif type yum && [ ! -f "/etc/redhat-release" ] && [ ! -f "/etc/centos-release" ] && [ ! -f /etc/rocky-release ] && [ ! -f /etc/fedora-release ]; then # AMZN 2 echo "Installing on Amazon Linux 2." 
diff --git a/install-timelord.sh b/install-timelord.sh index fc180f7..c37b4d5 100644 --- a/install-timelord.sh +++ b/install-timelord.sh @@ -13,7 +13,7 @@ echo "Python version: $PYTHON_VERSION" export BUILD_VDF_BENCH=Y # Installs the useful vdf_bench test of CPU squaring speed THE_PATH=$(python -c 'import pkg_resources; print( pkg_resources.get_distribution("chiavdf").location)' 2>/dev/null)/vdf_client -WHEATVDF_VERSION=$(python -c 'from setup import dependencies; t = [_ for _ in dependencies if _.startswith("chiavdf")][0]; print(t)') +CHIAVDF_VERSION=$(python -c 'from setup import dependencies; t = [_ for _ in dependencies if _.startswith("chiavdf")][0]; print(t)') ubuntu_cmake_install() { UBUNTU_PRE_2004=$(python -c 'import subprocess; process = subprocess.run(["lsb_release", "-rs"], stdout=subprocess.PIPE); print(float(process.stdout) < float(20.04))') @@ -62,30 +62,30 @@ else # Install remaining needed development tools - assumes venv and prior run of install.sh echo apt-get install libgmp-dev libboost-python-dev lib"$PYTHON_VERSION"-dev libboost-system-dev build-essential -y sudo apt-get install libgmp-dev libboost-python-dev lib"$PYTHON_VERSION"-dev libboost-system-dev build-essential -y - echo venv/bin/python -m pip install --force --no-binary chiavdf "$WHEATVDF_VERSION" - venv/bin/python -m pip install --force --no-binary chiavdf "$WHEATVDF_VERSION" + echo venv/bin/python -m pip install --force --no-binary chiavdf "$CHIAVDF_VERSION" + venv/bin/python -m pip install --force --no-binary chiavdf "$CHIAVDF_VERSION" symlink_vdf_bench "$PYTHON_VERSION" elif [ -e venv/bin/python ] && test $RHEL_BASED; then echo "Installing chiavdf from source on RedHat/CentOS/Fedora" # Install remaining needed development tools - assumes venv and prior run of install.sh echo yum install gcc gcc-c++ gmp-devel python3-devel libtool make autoconf automake openssl-devel libevent-devel boost-devel python3 -y sudo yum install gcc gcc-c++ gmp-devel python3-devel libtool make autoconf 
automake openssl-devel libevent-devel boost-devel python3 -y - echo venv/bin/python -m pip install --force --no-binary chiavdf "$WHEATVDF_VERSION" - venv/bin/python -m pip install --force --no-binary chiavdf "$WHEATVDF_VERSION" + echo venv/bin/python -m pip install --force --no-binary chiavdf "$CHIAVDF_VERSION" + venv/bin/python -m pip install --force --no-binary chiavdf "$CHIAVDF_VERSION" symlink_vdf_bench "$PYTHON_VERSION" elif [ -e venv/bin/python ] && test $MACOS && [ "$(brew info boost | grep -c 'Not installed')" -eq 1 ]; then echo "Installing chiavdf requirement boost for MacOS." brew install boost echo "Installing chiavdf from source." # User needs to provide required packages - echo venv/bin/python -m pip install --force --no-binary chiavdf "$WHEATVDF_VERSION" - venv/bin/python -m pip install --force --no-binary chiavdf "$WHEATVDF_VERSION" + echo venv/bin/python -m pip install --force --no-binary chiavdf "$CHIAVDF_VERSION" + venv/bin/python -m pip install --force --no-binary chiavdf "$CHIAVDF_VERSION" symlink_vdf_bench "$PYTHON_VERSION" elif [ -e venv/bin/python ]; then echo "Installing chiavdf from source." # User needs to provide required packages - echo venv/bin/python -m pip install --force --no-binary chiavdf "$WHEATVDF_VERSION" - venv/bin/python -m pip install --force --no-binary chiavdf "$WHEATVDF_VERSION" + echo venv/bin/python -m pip install --force --no-binary chiavdf "$CHIAVDF_VERSION" + venv/bin/python -m pip install --force --no-binary chiavdf "$CHIAVDF_VERSION" symlink_vdf_bench "$PYTHON_VERSION" else echo "No venv created yet, please run install.sh." diff --git a/install.sh b/install.sh index 29bfe76..eb045a8 100644 --- a/install.sh +++ b/install.sh @@ -82,7 +82,7 @@ fi find_python() { set +e unset BEST_VERSION - for V in 37 3.7 38 3.8 39 3.9 3; do + for V in 39 3.9 38 3.8 37 3.7 3; do if which python$V >/dev/null; then if [ "$BEST_VERSION" = "" ]; then BEST_VERSION=$V @@ -121,7 +121,7 @@ python -m pip install -e . 
--extra-index-url https://pypi.chia.net/simple/ echo "" echo "Wheat blockchain install.sh complete." -echo "For assistance join us on Keybase in the #testnet chat channel:" +echo "For assistance join us on Keybase in the #support chat channel:" echo "https://keybase.io/team/wheat_network.public" echo "" echo "Try the Quick Start Guide to running wheat-blockchain:" diff --git a/setup.py b/setup.py index 93a535d..bfe0773 100644 --- a/setup.py +++ b/setup.py @@ -6,7 +6,7 @@ "chiabip158==1.0", # bip158-style wallet filters "chiapos==1.0.4", # proof of space "clvm==0.9.7", - "clvm_rs==0.1.11", + "clvm_rs==0.1.14", "clvm_tools==0.4.3", "aiohttp==3.7.4", # HTTP server for full node rpc "aiosqlite==0.17.0", # asyncio wrapper for sqlite, to store blocks @@ -22,7 +22,7 @@ # See https://github.com/frispete/keyrings.cryptfile/issues/15 "PyYAML==5.4.1", # Used for config file format "setproctitle==1.2.2", # Gives the wheat processes readable names - "sortedcontainers==2.3.0", # For maintaining sorted mempools + "sortedcontainers==2.4.0", # For maintaining sorted mempools "websockets==8.1.0", # For use in wallet RPC and electron UI "click==7.1.2", # For the CLI "dnspython==2.1.0", # Query DNS seeds @@ -41,6 +41,7 @@ "black", "aiohttp_cors", # For blackd "ipython", # For asyncio debugging + "types-setuptools", ] kwargs = dict( diff --git a/wheat-blockchain-gui b/wheat-blockchain-gui index 8985329..7381b13 160000 --- a/wheat-blockchain-gui +++ b/wheat-blockchain-gui @@ -1 +1 @@ -Subproject commit 898532954d3c9a7a4df84a409987f85d400e6fb4 +Subproject commit 7381b135c48d7f6c50651610d1f51a54e6e13fcf diff --git a/wheat/clvm/spend_sim.py b/wheat/clvm/spend_sim.py index 1bbb250..4c6d5dc 100644 --- a/wheat/clvm/spend_sim.py +++ b/wheat/clvm/spend_sim.py @@ -129,8 +129,9 @@ async def farm_block(self, puzzle_hash: bytes32 = (b"0" * 32)): uint64(calculate_base_farmer_reward(next_block_height) + fees), self.defaults.GENESIS_CHALLENGE, ) - await 
self.mempool_manager.coin_store._add_coin_record(self.new_coin_record(pool_coin, True), False) - await self.mempool_manager.coin_store._add_coin_record(self.new_coin_record(farmer_coin, True), False) + await self.mempool_manager.coin_store._add_coin_records( + [self.new_coin_record(pool_coin, True), self.new_coin_record(farmer_coin, True)] + ) # Coin store gets updated generator_bundle: Optional[SpendBundle] = None @@ -147,10 +148,12 @@ async def farm_block(self, puzzle_hash: bytes32 = (b"0" * 32)): return_additions = additions return_removals = removals - for addition in additions: - await self.mempool_manager.coin_store._add_coin_record(self.new_coin_record(addition), False) - for removal in removals: - await self.mempool_manager.coin_store._set_spent(removal.name(), uint32(self.block_height + 1)) + await self.mempool_manager.coin_store._add_coin_records( + [self.new_coin_record(addition) for addition in additions] + ) + await self.mempool_manager.coin_store._set_spent( + [r.name() for r in removals], uint32(self.block_height + 1) + ) # SimBlockRecord is created generator: Optional[BlockGenerator] = await self.generate_transaction_generator(generator_bundle) @@ -200,7 +203,9 @@ def __init__(self, service): self.service = service async def push_tx(self, spend_bundle: SpendBundle) -> Tuple[MempoolInclusionStatus, Optional[Err]]: - cost_result: NPCResult = await self.service.mempool_manager.pre_validate_spendbundle(spend_bundle) + cost_result: NPCResult = await self.service.mempool_manager.pre_validate_spendbundle( + spend_bundle, spend_bundle.name() + ) cost, status, error = await self.service.mempool_manager.add_spendbundle( spend_bundle, cost_result, spend_bundle.name() ) diff --git a/wheat/cmds/configure.py b/wheat/cmds/configure.py index f0b4e7d..e8abcd7 100644 --- a/wheat/cmds/configure.py +++ b/wheat/cmds/configure.py @@ -18,6 +18,7 @@ def configure( set_outbound_peer_count: str, set_peer_count: str, testnet: str, + peer_connect_timeout: str, ): config: Dict 
= load_config(DEFAULT_ROOT_PATH, "config.yaml") change_made = False @@ -145,6 +146,10 @@ def configure( else: print("Please choose True or False") + if peer_connect_timeout is not None: + config["full_node"]["peer_connect_timeout"] = int(peer_connect_timeout) + change_made = True + if change_made: print("Restart any running wheat services for changes to take effect") save_config(root_path, "config.yaml", config) @@ -190,6 +195,7 @@ def configure( type=str, ) @click.option("--set-peer-count", help="Update the target peer count (default 80)", type=str) +@click.option("--set-peer-connect-timeout", help="Update the peer connect timeout (default 30)", type=str) @click.pass_context def configure_cmd( ctx, @@ -202,6 +208,7 @@ def configure_cmd( set_outbound_peer_count, set_peer_count, testnet, + set_peer_connect_timeout, ): configure( ctx.obj["root_path"], @@ -214,4 +221,5 @@ def configure_cmd( set_outbound_peer_count, set_peer_count, testnet, + set_peer_connect_timeout, ) diff --git a/wheat/cmds/init.py b/wheat/cmds/init.py index 6df8794..3d19ba0 100644 --- a/wheat/cmds/init.py +++ b/wheat/cmds/init.py @@ -28,7 +28,7 @@ def init_cmd(ctx: click.Context, create_certs: str, fix_ssl_permissions: bool, * - Run `wheat init -c [directory]` on your remote harvester, where [directory] is the the copy of your Farming Machine CA directory - Get more details on remote harvester on Wheat wiki: - https://github.com/Wheat-Network/wheat-blockchain/wiki/Farming-on-many-machines + https://github.com/WheatNetwork/wheat-blockchain/wiki/Farming-on-many-machines """ from pathlib import Path from .init_funcs import init diff --git a/wheat/cmds/init_funcs.py b/wheat/cmds/init_funcs.py index 308d139..9d4f76c 100644 --- a/wheat/cmds/init_funcs.py +++ b/wheat/cmds/init_funcs.py @@ -88,10 +88,14 @@ def check_keys(new_root: Path, keychain: Optional[Keychain] = None) -> None: if all_targets[-1] == config["pool"].get("wheat_target_address"): stop_searching_for_pool = True - # Set the destinations + # 
Set the destinations, if necessary + updated_target: bool = False if "wheat_target_address" not in config["farmer"]: - print(f"Setting the wheat destination address for coinbase fees reward to {all_targets[0]}") + print( + f"Setting the wheat destination for the farmer reward (1/8 plus fees, solo and pooling) to {all_targets[0]}" + ) config["farmer"]["wheat_target_address"] = all_targets[0] + updated_target = True elif config["farmer"]["wheat_target_address"] not in all_targets: print( f"WARNING: using a farmer address which we don't have the private" @@ -102,14 +106,20 @@ def check_keys(new_root: Path, keychain: Optional[Keychain] = None) -> None: if "pool" not in config: config["pool"] = {} if "wheat_target_address" not in config["pool"]: - print(f"Setting the wheat destination address for coinbase reward to {all_targets[0]}") + print(f"Setting the wheat destination address for pool reward (7/8 for solo only) to {all_targets[0]}") config["pool"]["wheat_target_address"] = all_targets[0] + updated_target = True elif config["pool"]["wheat_target_address"] not in all_targets: print( f"WARNING: using a pool address which we don't have the private" f" keys for. We searched the first {number_of_ph_to_search} addresses. Consider overriding " f"{config['pool']['wheat_target_address']} with {all_targets[0]}" ) + if updated_target: + print( + f"To change the WHEAT destination addresses, edit the `wheat_target_address` entries in" + f" {(new_root / 'config' / 'config.yaml').absolute()}." 
+ ) # Set the pool pks in the farmer pool_pubkeys_hex = set(bytes(pk).hex() for pk in pool_child_pubkeys) diff --git a/wheat/cmds/keys_funcs.py b/wheat/cmds/keys_funcs.py index 04befac..428a3cd 100644 --- a/wheat/cmds/keys_funcs.py +++ b/wheat/cmds/keys_funcs.py @@ -72,7 +72,7 @@ def show_all_keys(show_mnemonic: bool): if len(private_keys) == 0: print("There are no saved private keys") return None - msg = "Showing all public keys derived from your private keys:" + msg = "Showing all public keys derived from your master seed and private key:" if show_mnemonic: msg = "Showing all public and private keys" print(msg) diff --git a/wheat/cmds/passphrase_funcs.py b/wheat/cmds/passphrase_funcs.py index 2a3da95..9a764cd 100644 --- a/wheat/cmds/passphrase_funcs.py +++ b/wheat/cmds/passphrase_funcs.py @@ -1,13 +1,16 @@ +import click import sys -from wheat.util.keychain import Keychain, obtain_current_passphrase +from wheat.daemon.client import acquire_connection_to_daemon +from wheat.util.keychain import Keychain, obtain_current_passphrase, supports_os_passphrase_storage from wheat.util.keyring_wrapper import DEFAULT_PASSPHRASE_IF_NO_MASTER_PASSPHRASE +from wheat.util.misc import prompt_yes_no +from wheat.util.ws_message import WsRpcMessage from getpass import getpass from io import TextIOWrapper from pathlib import Path from typing import Optional, Tuple -MIN_PASSPHRASE_LEN = 8 # Click drops leading dashes, and converts remaining dashes to underscores. e.g. 
--set-passphrase -> 'set_passphrase' PASSPHRASE_CLI_OPTION_NAMES = ["keys_root_path", "set_passphrase", "passphrase_file", "current_passphrase_file"] @@ -27,44 +30,69 @@ def verify_passphrase_meets_requirements( new_passphrase: str, confirmation_passphrase: str ) -> Tuple[bool, Optional[str]]: match = new_passphrase == confirmation_passphrase - meets_len_requirement = len(new_passphrase) >= MIN_PASSPHRASE_LEN + min_length = Keychain.minimum_passphrase_length() + meets_len_requirement = len(new_passphrase) >= min_length if match and meets_len_requirement: return True, None elif not match: return False, "Passphrases do not match" elif not meets_len_requirement: - return False, f"Minimum passphrase length is {MIN_PASSPHRASE_LEN}" + return False, f"Minimum passphrase length is {min_length}" else: raise Exception("Unexpected passphrase verification case") -def tidy_passphrase(passphrase: str) -> str: - """ - Perform any string processing we want to apply to the entered passphrase. - Currently we strip leading/trailing whitespace. - """ - return passphrase.strip() +def prompt_to_save_passphrase() -> bool: + save: bool = False + + try: + if supports_os_passphrase_storage(): + location: Optional[str] = None + + if sys.platform == "darwin": + location = "macOS Keychain" + + if location is None: + raise ValueError("OS-specific credential store not specified") + + print( + "\n" + "Your passphrase can be stored in your system's secure credential store. " + "Other Wheat processes will be able to access your keys without prompting for your passphrase." 
+ ) + save = prompt_yes_no(f"Would you like to save your passphrase to the {location} (y/n) ") + + except Exception as e: + print(f"Caught exception: {e}") + return False + + return save -def prompt_for_new_passphrase() -> str: - if MIN_PASSPHRASE_LEN > 0: - n = MIN_PASSPHRASE_LEN +def prompt_for_new_passphrase() -> Tuple[str, bool]: + min_length: int = Keychain.minimum_passphrase_length() + if min_length > 0: + n = min_length print(f"\nPassphrases must be {n} or more characters in length") # lgtm [py/clear-text-logging-sensitive-data] while True: - passphrase = tidy_passphrase(getpass("New Passphrase: ")) - confirmation = tidy_passphrase(getpass("Confirm Passphrase: ")) + passphrase: str = getpass("New Passphrase: ") + confirmation: str = getpass("Confirm Passphrase: ") + save_passphrase: bool = False valid_passphrase, error_msg = verify_passphrase_meets_requirements(passphrase, confirmation) if valid_passphrase: - return passphrase + if supports_os_passphrase_storage(): + save_passphrase = prompt_to_save_passphrase() + + return passphrase, save_passphrase elif error_msg: print(f"{error_msg}\n") # lgtm [py/clear-text-logging-sensitive-data] def read_passphrase_from_file(passphrase_file: TextIOWrapper) -> str: - passphrase = tidy_passphrase(passphrase_file.read()) + passphrase = passphrase_file.read() passphrase_file.close() return passphrase @@ -78,14 +106,18 @@ def initialize_passphrase() -> None: # We'll rely on Keyring initialization to leverage the cached passphrase for # bootstrapping the keyring encryption process print("Setting keyring passphrase") - passphrase = None + passphrase: Optional[str] = None + # save_passphrase indicates whether the passphrase should be saved in the + # macOS Keychain or Windows Credential Manager + save_passphrase: bool = False + if Keychain.has_cached_passphrase(): passphrase = Keychain.get_cached_master_passphrase() if not passphrase or passphrase == default_passphrase(): - passphrase = prompt_for_new_passphrase() + 
passphrase, save_passphrase = prompt_for_new_passphrase() - Keychain.set_master_passphrase(current_passphrase=None, new_passphrase=passphrase) + Keychain.set_master_passphrase(current_passphrase=None, new_passphrase=passphrase, save_passphrase=save_passphrase) def set_or_update_passphrase(passphrase: Optional[str], current_passphrase: Optional[str]) -> bool: @@ -102,17 +134,21 @@ def set_or_update_passphrase(passphrase: Optional[str], current_passphrase: Opti print(f"Unable to confirm current passphrase: {e}") sys.exit(1) - success = False - new_passphrase = passphrase + success: bool = False + new_passphrase: Optional[str] = passphrase + save_passphrase: bool = False + try: # Prompt for the new passphrase, if necessary - if not new_passphrase: - new_passphrase = prompt_for_new_passphrase() + if new_passphrase is None: + new_passphrase, save_passphrase = prompt_for_new_passphrase() if new_passphrase == current_passphrase: raise ValueError("passphrase is unchanged") - Keychain.set_master_passphrase(current_passphrase=current_passphrase, new_passphrase=new_passphrase) + Keychain.set_master_passphrase( + current_passphrase=current_passphrase, new_passphrase=new_passphrase, save_passphrase=save_passphrase + ) success = True except Exception as e: print(f"Unable to set or update passphrase: {e}") @@ -187,25 +223,45 @@ def using_default_passphrase() -> bool: async def async_update_daemon_passphrase_cache_if_running(root_path: Path) -> None: - from wheat.daemon.client import connect_to_daemon_and_validate - + """ + Attempt to connect to the daemon and update the cached passphrase + """ new_passphrase = Keychain.get_cached_master_passphrase() assert new_passphrase is not None - daemon = None try: - daemon = await connect_to_daemon_and_validate(root_path, quiet=True) - if daemon: - response = await daemon.unlock_keyring(new_passphrase) - - if not response: - raise Exception("daemon didn't respond") - - if response["data"].get("success", False) is False: - error = 
response["data"].get("error", "unknown error") - raise Exception(error) + async with acquire_connection_to_daemon(root_path, quiet=True) as daemon: + if daemon is not None: + response = await daemon.unlock_keyring(new_passphrase) + if response is None: + raise Exception("daemon didn't respond") + + success: bool = response.get("data", {}).get("success", False) + if success is False: + error = response.get("data", {}).get("error", "unknown error") + raise Exception(error) except Exception as e: print(f"Failed to notify daemon of updated keyring passphrase: {e}") - if daemon: - await daemon.close() + +async def async_update_daemon_migration_completed_if_running() -> None: + """ + Attempt to connect to the daemon to notify that keyring migration has completed. + This allows the daemon to refresh its keyring so that it can stop using the + legacy keyring. + """ + ctx: click.Context = click.get_current_context() + root_path: Path = ctx.obj["root_path"] + + if root_path is None: + print("Missing root_path in context. Unable to notify daemon") + return None + + async with acquire_connection_to_daemon(root_path, quiet=True) as daemon: + if daemon is not None: + passphrase: str = Keychain.get_cached_master_passphrase() + + print("Updating daemon... 
", end="") + response: WsRpcMessage = await daemon.notify_keyring_migration_completed(passphrase) + success: bool = response.get("data", {}).get("success", False) + print("succeeded" if success is True else "failed") diff --git a/wheat/cmds/start.py b/wheat/cmds/start.py index 0226185..ff77abf 100644 --- a/wheat/cmds/start.py +++ b/wheat/cmds/start.py @@ -5,7 +5,7 @@ @click.command("start", short_help="Start service groups") @click.option("-r", "--restart", is_flag=True, type=bool, help="Restart running services") -@click.argument("group", type=click.Choice(all_groups()), nargs=-1, required=True) +@click.argument("group", type=click.Choice(list(all_groups())), nargs=-1, required=True) @click.pass_context def start_cmd(ctx: click.Context, restart: bool, group: str) -> None: import asyncio diff --git a/wheat/cmds/stop.py b/wheat/cmds/stop.py index e5aa257..baa3a7c 100644 --- a/wheat/cmds/stop.py +++ b/wheat/cmds/stop.py @@ -38,7 +38,7 @@ async def async_stop(root_path: Path, group: str, stop_daemon: bool) -> int: @click.command("stop", short_help="Stop services") @click.option("-d", "--daemon", is_flag=True, type=bool, help="Stop daemon") -@click.argument("group", type=click.Choice(all_groups()), nargs=-1, required=True) +@click.argument("group", type=click.Choice(list(all_groups())), nargs=-1, required=True) @click.pass_context def stop_cmd(ctx: click.Context, daemon: bool, group: str) -> None: import asyncio diff --git a/wheat/cmds/wallet_funcs.py b/wheat/cmds/wallet_funcs.py index 4d869cc..11d1e58 100644 --- a/wheat/cmds/wallet_funcs.py +++ b/wheat/cmds/wallet_funcs.py @@ -27,7 +27,7 @@ def print_transaction(tx: TransactionRecord, verbose: bool, name) -> None: to_address = encode_puzzle_hash(tx.to_puzzle_hash, name) print(f"Transaction {tx.name}") print(f"Status: {'Confirmed' if tx.confirmed else ('In mempool' if tx.is_in_mempool() else 'Pending')}") - print(f"Amount: {wheat_amount} {name}") + print(f"Amount {'sent' if tx.sent else 'received'}: {wheat_amount} 
{name}") print(f"To address: {to_address}") print("Created at:", datetime.fromtimestamp(tx.created_at_time).strftime("%Y-%m-%d %H:%M:%S")) print("") diff --git a/wheat/cmds/wheat.py b/wheat/cmds/wheat.py index 26cbb28..d76c135 100644 --- a/wheat/cmds/wheat.py +++ b/wheat/cmds/wheat.py @@ -47,7 +47,7 @@ def monkey_patch_click() -> None: @click.option( "--keys-root-path", default=DEFAULT_KEYS_ROOT_PATH, help="Keyring file root", type=click.Path(), show_default=True ) -@click.option("--passphrase-file", type=click.File("r"), help="File or descriptor to read the keyring passphase from") +@click.option("--passphrase-file", type=click.File("r"), help="File or descriptor to read the keyring passphrase from") @click.pass_context def cli( ctx: click.Context, diff --git a/wheat/consensus/block_body_validation.py b/wheat/consensus/block_body_validation.py index 471c688..670bd98 100644 --- a/wheat/consensus/block_body_validation.py +++ b/wheat/consensus/block_body_validation.py @@ -30,8 +30,6 @@ from wheat.util import cached_bls from wheat.util.condition_tools import ( pkm_pairs_for_conditions_dict, - coin_announcements_names_for_npc, - puzzle_announcements_names_for_npc, ) from wheat.util.errors import Err from wheat.util.generator_tools import ( @@ -159,8 +157,6 @@ async def validate_block_body( removals: List[bytes32] = [] coinbase_additions: List[Coin] = list(expected_reward_coins) additions: List[Coin] = [] - coin_announcement_names: Set[bytes32] = set() - puzzle_announcement_names: Set[bytes32] = set() npc_list: List[NPC] = [] removals_puzzle_dic: Dict[bytes32, bytes32] = {} cost: uint64 = uint64(0) @@ -223,8 +219,6 @@ async def validate_block_body( removals_puzzle_dic[npc.coin_name] = npc.puzzle_hash additions = additions_for_npc(npc_list) - coin_announcement_names = coin_announcements_names_for_npc(npc_list) - puzzle_announcement_names = puzzle_announcements_names_for_npc(npc_list) else: assert npc_result is None @@ -325,7 +319,6 @@ async def validate_block_body( 
min(constants.MAX_BLOCK_COST_CLVM, curr.transactions_info.cost), cost_per_byte=constants.COST_PER_BYTE, safe_mode=False, - rust_checker=curr.height > constants.RUST_CONDITION_CHECKER, ) removals_in_curr, additions_in_curr = tx_removals_and_additions(curr_npc_result.npc_list) else: @@ -450,8 +443,6 @@ async def validate_block_body( unspent = removal_coin_records[npc.coin_name] error = mempool_check_conditions_dict( unspent, - coin_announcement_names, - puzzle_announcement_names, npc.condition_dict, prev_transaction_block_height, block.foliage_transaction_block.timestamp, diff --git a/wheat/consensus/block_creation.py b/wheat/consensus/block_creation.py index d9a2def..33921fb 100644 --- a/wheat/consensus/block_creation.py +++ b/wheat/consensus/block_creation.py @@ -131,7 +131,6 @@ def create_foliage( constants.MAX_BLOCK_COST_CLVM, cost_per_byte=constants.COST_PER_BYTE, safe_mode=True, - rust_checker=height > constants.RUST_CONDITION_CHECKER, ) cost = calculate_cost_of_program(block_generator.program, result, constants.COST_PER_BYTE) diff --git a/wheat/consensus/blockchain.py b/wheat/consensus/blockchain.py index 561d9ab..e991331 100644 --- a/wheat/consensus/blockchain.py +++ b/wheat/consensus/blockchain.py @@ -6,6 +6,8 @@ from enum import Enum from typing import Dict, List, Optional, Set, Tuple, Union +from clvm.casts import int_from_bytes + from wheat.consensus.block_body_validation import validate_block_body from wheat.consensus.block_header_validation import validate_finished_header_block, validate_unfinished_header_block from wheat.consensus.block_record import BlockRecord @@ -18,12 +20,14 @@ from wheat.consensus.multiprocess_validation import PreValidationResult, pre_validate_blocks_multiprocessing from wheat.full_node.block_store import BlockStore from wheat.full_node.coin_store import CoinStore +from wheat.full_node.hint_store import HintStore from wheat.full_node.mempool_check_conditions import get_name_puzzle_conditions from 
wheat.types.blockchain_format.coin import Coin from wheat.types.blockchain_format.sized_bytes import bytes32 from wheat.types.blockchain_format.sub_epoch_summary import SubEpochSummary from wheat.types.blockchain_format.vdf import VDFInfo from wheat.types.coin_record import CoinRecord +from wheat.types.condition_opcodes import ConditionOpcode from wheat.types.end_of_slot_bundle import EndOfSubSlotBundle from wheat.types.full_block import FullBlock from wheat.types.generator_types import BlockGenerator, GeneratorArg @@ -83,12 +87,11 @@ class Blockchain(BlockchainInterface): # Lock to prevent simultaneous reads and writes lock: asyncio.Lock compact_proof_lock: asyncio.Lock + hint_store: HintStore @staticmethod async def create( - coin_store: CoinStore, - block_store: BlockStore, - consensus_constants: ConsensusConstants, + coin_store: CoinStore, block_store: BlockStore, consensus_constants: ConsensusConstants, hint_store: HintStore ): """ Initializes a blockchain with the BlockRecords from disk, assuming they have all been @@ -112,6 +115,7 @@ async def create( self._shut_down = False await self._load_chain_from_store() self._seen_compact_proofs = set() + self.hint_store = hint_store return self def shut_down(self): @@ -164,7 +168,12 @@ async def receive_block( block: FullBlock, pre_validation_result: Optional[PreValidationResult] = None, fork_point_with_peak: Optional[uint32] = None, - ) -> Tuple[ReceiveBlockResult, Optional[Err], Optional[uint32]]: + ) -> Tuple[ + ReceiveBlockResult, + Optional[Err], + Optional[uint32], + Tuple[List[CoinRecord], Dict[bytes, Dict[bytes32, CoinRecord]]], + ]: """ This method must be called under the blockchain lock Adds a new block into the blockchain, if it's valid and connected to the current @@ -174,17 +183,13 @@ async def receive_block( """ genesis: bool = block.height == 0 if self.contains_block(block.header_hash): - return ReceiveBlockResult.ALREADY_HAVE_BLOCK, None, None + return ReceiveBlockResult.ALREADY_HAVE_BLOCK, None, 
None, ([], {}) if not self.contains_block(block.prev_header_hash) and not genesis: - return ( - ReceiveBlockResult.DISCONNECTED_BLOCK, - Err.INVALID_PREV_BLOCK_HASH, - None, - ) + return (ReceiveBlockResult.DISCONNECTED_BLOCK, Err.INVALID_PREV_BLOCK_HASH, None, ([], {})) if not genesis and (self.block_record(block.prev_header_hash).height + 1) != block.height: - return ReceiveBlockResult.INVALID_BLOCK, Err.INVALID_HEIGHT, None + return ReceiveBlockResult.INVALID_BLOCK, Err.INVALID_HEIGHT, None, ([], {}) npc_result: Optional[NPCResult] = None if pre_validation_result is None: @@ -201,14 +206,13 @@ async def receive_block( try: block_generator: Optional[BlockGenerator] = await self.get_block_generator(block) except ValueError: - return ReceiveBlockResult.INVALID_BLOCK, Err.GENERATOR_REF_HAS_NO_GENERATOR, None + return ReceiveBlockResult.INVALID_BLOCK, Err.GENERATOR_REF_HAS_NO_GENERATOR, None, ([], {}) assert block_generator is not None and block.transactions_info is not None npc_result = get_name_puzzle_conditions( block_generator, min(self.constants.MAX_BLOCK_COST_CLVM, block.transactions_info.cost), cost_per_byte=self.constants.COST_PER_BYTE, safe_mode=False, - rust_checker=block.height > self.constants.RUST_CONDITION_CHECKER, ) removals, tx_additions = tx_removals_and_additions(npc_result.npc_list) else: @@ -228,7 +232,7 @@ async def receive_block( ) if error is not None: - return ReceiveBlockResult.INVALID_BLOCK, error.code, None + return ReceiveBlockResult.INVALID_BLOCK, error.code, None, ([], {}) else: npc_result = pre_validation_result.npc_result required_iters = pre_validation_result.required_iters @@ -247,7 +251,7 @@ async def receive_block( self.get_block_generator, ) if error_code is not None: - return ReceiveBlockResult.INVALID_BLOCK, error_code, None + return ReceiveBlockResult.INVALID_BLOCK, error_code, None, ([], {}) block_record = block_to_block_record( self.constants, @@ -263,7 +267,7 @@ async def receive_block( # Perform the DB operations to update 
the state, and rollback if something goes wrong await self.block_store.db_wrapper.begin_transaction() await self.block_store.add_full_block(header_hash, block, block_record) - fork_height, peak_height, records = await self._reconsider_peak( + fork_height, peak_height, records, (coin_record_change, hint_changes) = await self._reconsider_peak( block_record, genesis, fork_point_with_peak, npc_result ) await self.block_store.db_wrapper.commit_transaction() @@ -282,10 +286,26 @@ async def receive_block( self.block_store.rollback_cache_block(header_hash) await self.block_store.db_wrapper.rollback_transaction() raise + if fork_height is not None: - return ReceiveBlockResult.NEW_PEAK, None, fork_height + # new coin records added + assert coin_record_change is not None + return ReceiveBlockResult.NEW_PEAK, None, fork_height, (coin_record_change, hint_changes) else: - return ReceiveBlockResult.ADDED_AS_ORPHAN, None, None + return ReceiveBlockResult.ADDED_AS_ORPHAN, None, None, ([], {}) + + def get_hint_list(self, npc_result: NPCResult) -> List[Tuple[bytes32, bytes]]: + h_list = [] + for npc in npc_result.npc_list: + for opcode, conditions in npc.conditions: + if opcode == ConditionOpcode.CREATE_COIN: + for condition in conditions: + if len(condition.vars) > 2 and condition.vars[2] != b"": + puzzle_hash, amount_bin = condition.vars[0], condition.vars[1] + amount = int_from_bytes(amount_bin) + coin_id = Coin(npc.coin_name, puzzle_hash, amount).name() + h_list.append((coin_id, condition.vars[2])) + return h_list async def _reconsider_peak( self, @@ -293,7 +313,12 @@ async def _reconsider_peak( genesis: bool, fork_point_with_peak: Optional[uint32], npc_result: Optional[NPCResult], - ) -> Tuple[Optional[uint32], Optional[uint32], List[BlockRecord]]: + ) -> Tuple[ + Optional[uint32], + Optional[uint32], + List[BlockRecord], + Tuple[List[CoinRecord], Dict[bytes, Dict[bytes32, CoinRecord]]], + ]: """ When a new block is added, this is called, to check if the new block is the new 
peak of the chain. This also handles reorgs by reverting blocks which are not in the heaviest chain. @@ -301,6 +326,9 @@ async def _reconsider_peak( None if there was no update to the heaviest chain. """ peak = self.get_peak() + lastest_coin_state: Dict[bytes32, CoinRecord] = {} + hint_coin_state: Dict[bytes32, Dict[bytes32, CoinRecord]] = {} + if genesis: if peak is None: block: Optional[FullBlock] = await self.block_store.get_full_block(block_record.header_hash) @@ -310,10 +338,20 @@ async def _reconsider_peak( tx_removals, tx_additions = tx_removals_and_additions(npc_result.npc_list) else: tx_removals, tx_additions = [], [] - await self.coin_store.new_block(block, tx_additions, tx_removals) + if block.is_transaction_block(): + assert block.foliage_transaction_block is not None + added = await self.coin_store.new_block( + block.height, + block.foliage_transaction_block.timestamp, + block.get_included_reward_coins(), + tx_additions, + tx_removals, + ) + else: + added, _ = [], [] await self.block_store.set_peak(block_record.header_hash) - return uint32(0), uint32(0), [block_record] - return None, None, [] + return uint32(0), uint32(0), [block_record], (added, {}) + return None, None, [], ([], {}) assert peak is not None if block_record.weight > peak.weight: @@ -327,7 +365,10 @@ async def _reconsider_peak( fork_height = find_fork_point_in_chain(self, block_record, peak) if block_record.prev_hash != peak.header_hash: - await self.coin_store.rollback_to_block(fork_height) + roll_changes: List[CoinRecord] = await self.coin_store.rollback_to_block(fork_height) + for coin_record in roll_changes: + lastest_coin_state[coin_record.name] = coin_record + # Rollback sub_epoch_summaries heights_to_delete = [] for ses_included_height in self.__sub_epoch_summaries.keys(): @@ -355,25 +396,63 @@ async def _reconsider_peak( records_to_add = [] for fetched_full_block, fetched_block_record in reversed(blocks_to_add): records_to_add.append(fetched_block_record) - if 
fetched_block_record.is_transaction_block: + if fetched_full_block.is_transaction_block(): if fetched_block_record.header_hash == block_record.header_hash: - tx_removals, tx_additions = await self.get_tx_removals_and_additions( + tx_removals, tx_additions, npc_res = await self.get_tx_removals_and_additions( fetched_full_block, npc_result ) else: - tx_removals, tx_additions = await self.get_tx_removals_and_additions(fetched_full_block, None) - await self.coin_store.new_block(fetched_full_block, tx_additions, tx_removals) + tx_removals, tx_additions, npc_res = await self.get_tx_removals_and_additions( + fetched_full_block, None + ) + + assert fetched_full_block.foliage_transaction_block is not None + added_rec = await self.coin_store.new_block( + fetched_full_block.height, + fetched_full_block.foliage_transaction_block.timestamp, + fetched_full_block.get_included_reward_coins(), + tx_additions, + tx_removals, + ) + removed_rec: List[Optional[CoinRecord]] = [ + await self.coin_store.get_coin_record(name) for name in tx_removals + ] + + # Set additions first, then removals in order to handle ephemeral coin state + # Add in height order is also required + record: Optional[CoinRecord] + for record in added_rec: + assert record + lastest_coin_state[record.name] = record + for record in removed_rec: + assert record + lastest_coin_state[record.name] = record + + if npc_res is not None: + hint_list: List[Tuple[bytes32, bytes]] = self.get_hint_list(npc_res) + await self.hint_store.add_hints(hint_list) + # There can be multiple coins for the same hint + for coin_id, hint in hint_list: + key = hint + if key not in hint_coin_state: + hint_coin_state[key] = {} + hint_coin_state[key][coin_id] = lastest_coin_state[coin_id] # Changes the peak to be the new peak await self.block_store.set_peak(block_record.header_hash) - return uint32(max(fork_height, 0)), block_record.height, records_to_add + return ( + uint32(max(fork_height, 0)), + block_record.height, + records_to_add, + 
(list(lastest_coin_state.values()), hint_coin_state), + ) # This is not a heavier block than the heaviest we have seen, so we don't change the coin set - return None, None, [] + return None, None, [], ([], {}) async def get_tx_removals_and_additions( self, block: FullBlock, npc_result: Optional[NPCResult] = None - ) -> Tuple[List[bytes32], List[Coin]]: + ) -> Tuple[List[bytes32], List[Coin], Optional[NPCResult]]: if block.is_transaction_block(): if block.transactions_generator is not None: if npc_result is None: @@ -384,14 +463,13 @@ async def get_tx_removals_and_additions( self.constants.MAX_BLOCK_COST_CLVM, cost_per_byte=self.constants.COST_PER_BYTE, safe_mode=False, - rust_checker=block.height > self.constants.RUST_CONDITION_CHECKER, ) tx_removals, tx_additions = tx_removals_and_additions(npc_result.npc_list) - return tx_removals, tx_additions + return tx_removals, tx_additions, npc_result else: - return [], [] + return [], [], None else: - return [], [] + return [], [], None def get_next_difficulty(self, header_hash: bytes32, new_slot: bool) -> uint64: assert self.contains_block(header_hash) @@ -537,7 +615,6 @@ async def validate_unfinished_block( min(self.constants.MAX_BLOCK_COST_CLVM, block.transactions_info.cost), cost_per_byte=self.constants.COST_PER_BYTE, safe_mode=False, - rust_checker=uint32(prev_height + 1) > self.constants.RUST_CONDITION_CHECKER, ) error_code, cost_result = await validate_block_body( self.constants, diff --git a/wheat/consensus/constants.py b/wheat/consensus/constants.py index 67844aa..3b65a6c 100644 --- a/wheat/consensus/constants.py +++ b/wheat/consensus/constants.py @@ -44,7 +44,7 @@ class ConsensusConstants: MAX_VDF_WITNESS_SIZE: int # The maximum number of classgroup elements within an n-wesolowski proof # Size of mempool = 10x the size of block MEMPOOL_BLOCK_BUFFER: int - # Max coin amount uint(1 << 64). This allows coin amounts to fit in 64 bits. This is around 18Mwheat. + # Max coin amount uint(1 << 64). 
This allows coin amounts to fit in 64 bits. This is around 18M wheat. MAX_COIN_AMOUNT: int # Max block cost in clvm cost units MAX_BLOCK_COST_CLVM: int @@ -54,7 +54,6 @@ class ConsensusConstants: WEIGHT_PROOF_THRESHOLD: uint8 WEIGHT_PROOF_RECENT_BLOCKS: uint32 MAX_BLOCK_COUNT_PER_REQUESTS: uint32 - RUST_CONDITION_CHECKER: uint64 BLOCKS_CACHE_SIZE: uint32 NETWORK_TYPE: int MAX_GENERATOR_SIZE: uint32 diff --git a/wheat/consensus/default_constants.py b/wheat/consensus/default_constants.py index 231b472..a792004 100644 --- a/wheat/consensus/default_constants.py +++ b/wheat/consensus/default_constants.py @@ -50,7 +50,6 @@ "BLOCKS_CACHE_SIZE": 4608 + (128 * 4), "WEIGHT_PROOF_RECENT_BLOCKS": 1000, "MAX_BLOCK_COUNT_PER_REQUESTS": 32, # Allow up to 32 blocks per request - "RUST_CONDITION_CHECKER": 730000 + 138000, "NETWORK_TYPE": 0, "MAX_GENERATOR_SIZE": 1000000, "MAX_GENERATOR_REF_LIST_SIZE": 512, # Number of references allowed in the block generator ref list diff --git a/wheat/consensus/multiprocess_validation.py b/wheat/consensus/multiprocess_validation.py index 190a936..e730fc5 100644 --- a/wheat/consensus/multiprocess_validation.py +++ b/wheat/consensus/multiprocess_validation.py @@ -82,7 +82,6 @@ def batch_pre_validate_blocks( min(constants.MAX_BLOCK_COST_CLVM, block.transactions_info.cost), cost_per_byte=constants.COST_PER_BYTE, safe_mode=True, - rust_checker=block.height > constants.RUST_CONDITION_CHECKER, ) removals, tx_additions = tx_removals_and_additions(npc_result.npc_list) diff --git a/wheat/daemon/client.py b/wheat/daemon/client.py index ee160c2..283350b 100644 --- a/wheat/daemon/client.py +++ b/wheat/daemon/client.py @@ -1,6 +1,7 @@ import asyncio import json import ssl +from contextlib import asynccontextmanager from pathlib import Path from typing import Any, Dict, Optional @@ -36,9 +37,8 @@ async def listener(): id = decoded["request_id"] if id in self._request_dict: - if id in self._request_dict: - self.response_dict[id] = decoded - 
self._request_dict[id].set() + self.response_dict[id] = decoded + self._request_dict[id].set() asyncio.create_task(listener()) await asyncio.sleep(1) @@ -100,6 +100,12 @@ async def unlock_keyring(self, passphrase: str) -> WsRpcMessage: response = await self._get(request) return response + async def notify_keyring_migration_completed(self, passphrase: Optional[str]) -> WsRpcMessage: + data: Dict[str, Any] = {"key": passphrase} + request: WsRpcMessage = self.format_request("notify_keyring_migration_completed", data) + response: WsRpcMessage = await self._get(request) + return response + async def ping(self) -> WsRpcMessage: request = self.format_request("ping", {}) response = await self._get(request) @@ -147,3 +153,24 @@ async def connect_to_daemon_and_validate(root_path: Path, quiet: bool = False) - print("Daemon not started yet") return None return None + + +@asynccontextmanager +async def acquire_connection_to_daemon(root_path: Path, quiet: bool = False): + """ + Asynchronous context manager which attempts to create a connection to the daemon. + The connection object (DaemonProxy) is yielded to the caller. After the caller's + block exits scope, execution resumes in this function, wherein the connection is + closed. 
+ """ + from wheat.daemon.client import connect_to_daemon_and_validate + + daemon: Optional[DaemonProxy] = None + try: + daemon = await connect_to_daemon_and_validate(root_path, quiet=quiet) + yield daemon # <---- + except Exception as e: + print(f"Exception occurred while communicating with the daemon: {e}") + + if daemon is not None: + await daemon.close() diff --git a/wheat/daemon/keychain_proxy.py b/wheat/daemon/keychain_proxy.py index d1091c5..0d07ab5 100644 --- a/wheat/daemon/keychain_proxy.py +++ b/wheat/daemon/keychain_proxy.py @@ -55,7 +55,7 @@ def __init__( ssl_context: Optional[ssl.SSLContext] = None, local_keychain: Optional[Keychain] = None, user: str = None, - testing: bool = False, + service: str = None, ): self.log = log if local_keychain: @@ -65,7 +65,7 @@ def __init__( else: self.keychain = None # type: ignore self.keychain_user = user - self.keychain_testing = testing + self.keychain_service = service super().__init__(uri or "", ssl_context) def use_local_keychain(self) -> bool: @@ -81,9 +81,9 @@ def format_request(self, command: str, data: Dict[str, Any]) -> WsRpcMessage: if data is None: data = {} - if self.keychain_user or self.keychain_testing: + if self.keychain_user or self.keychain_service: data["kc_user"] = self.keychain_user - data["kc_testing"] = self.keychain_testing + data["kc_service"] = self.keychain_service return super().format_request(command, data) @@ -304,14 +304,14 @@ async def connect_to_keychain( ssl_context: Optional[ssl.SSLContext], log: logging.Logger, user: str = None, - testing: bool = False, + service: str = None, ) -> KeychainProxy: """ Connect to the local daemon. 
""" client = KeychainProxy( - uri=f"wss://{self_hostname}:{daemon_port}", ssl_context=ssl_context, log=log, user=user, testing=testing + uri=f"wss://{self_hostname}:{daemon_port}", ssl_context=ssl_context, log=log, user=user, service=service ) # Connect to the service if the proxy isn't using a local keychain if not client.use_local_keychain(): @@ -320,7 +320,11 @@ async def connect_to_keychain( async def connect_to_keychain_and_validate( - root_path: Path, log: logging.Logger, *, user: str = None, testing: bool = False + root_path: Path, + log: logging.Logger, + *, + user: str = None, + service: str = None, ) -> Optional[KeychainProxy]: """ Connect to the local daemon and do a ping to ensure that something is really @@ -334,7 +338,7 @@ async def connect_to_keychain_and_validate( ca_key_path = root_path / net_config["private_ssl_ca"]["key"] ssl_context = ssl_context_for_client(ca_crt_path, ca_key_path, crt_path, key_path, log=log) connection = await connect_to_keychain( - net_config["self_hostname"], net_config["daemon_port"], ssl_context, log, user, testing + net_config["self_hostname"], net_config["daemon_port"], ssl_context, log, user, service ) # If proxying to a local keychain, don't attempt to ping diff --git a/wheat/daemon/keychain_server.py b/wheat/daemon/keychain_server.py index ffbac45..d1f523a 100644 --- a/wheat/daemon/keychain_server.py +++ b/wheat/daemon/keychain_server.py @@ -36,22 +36,22 @@ def __init__(self): def get_keychain_for_request(self, request: Dict[str, Any]): """ - Keychain instances can have a user and testing flag associated with them. + Keychain instances can have user and service strings associated with them. The keychain backends ultimately point to the same data stores, but the user - and testing flags are used to partition those data stores. We attempt to - maintain a mapping of user/testing pairs to their corresponding Keychain. + and service strings are used to partition those data stores. 
We attempt to + maintain a mapping of user/service pairs to their corresponding Keychain. """ keychain = None user = request.get("kc_user", self._default_keychain.user) - testing = request.get("kc_testing", self._default_keychain.testing) - if user == self._default_keychain.user and testing == self._default_keychain.testing: + service = request.get("kc_service", self._default_keychain.service) + if user == self._default_keychain.user and service == self._default_keychain.service: keychain = self._default_keychain else: - key = (user or "unnamed") + ("test" if testing else "") + key = (user or "unnamed") + (service or "") if key in self._alt_keychains: keychain = self._alt_keychains[key] else: - keychain = Keychain(user=user, testing=testing) + keychain = Keychain(user=user, service=service) self._alt_keychains[key] = keychain return keychain diff --git a/wheat/daemon/server.py b/wheat/daemon/server.py index 6d58a92..1e56a5b 100644 --- a/wheat/daemon/server.py +++ b/wheat/daemon/server.py @@ -27,9 +27,11 @@ from wheat.util.json_util import dict_to_json_str from wheat.util.keychain import ( Keychain, - KeyringCurrentPassphaseIsInvalid, + KeyringCurrentPassphraseIsInvalid, KeyringRequiresMigration, + passphrase_requirements, supports_keyring_passphrase, + supports_os_passphrase_storage, ) from wheat.util.path import mkdir from wheat.util.service_groups import validate_service @@ -140,6 +142,7 @@ def __init__( self.net_config = load_config(root_path, "config.yaml") self.self_hostname = self.net_config["self_hostname"] self.daemon_port = self.net_config["daemon_port"] + self.daemon_max_message_size = self.net_config.get("daemon_max_message_size", 50 * 1000 * 1000) self.websocket_server = None self.ssl_context = ssl_context_for_server(ca_crt_path, ca_key_path, crt_path, key_path, log=self.log) self.shut_down = False @@ -162,7 +165,7 @@ def master_close_cb(): self.safe_handle, self.self_hostname, self.daemon_port, - max_size=50 * 1000 * 1000, + 
max_size=self.daemon_max_message_size, ping_interval=500, ping_timeout=300, ssl=self.ssl_context, @@ -309,10 +312,16 @@ async def handle_message( response = await self.keyring_status() elif command == "unlock_keyring": response = await self.unlock_keyring(cast(Dict[str, Any], data)) + elif command == "validate_keyring_passphrase": + response = await self.validate_keyring_passphrase(cast(Dict[str, Any], data)) + elif command == "migrate_keyring": + response = await self.migrate_keyring(cast(Dict[str, Any], data)) elif command == "set_keyring_passphrase": response = await self.set_keyring_passphrase(cast(Dict[str, Any], data)) elif command == "remove_keyring_passphrase": response = await self.remove_keyring_passphrase(cast(Dict[str, Any], data)) + elif command == "notify_keyring_migration_completed": + response = await self.notify_keyring_migration_completed(cast(Dict[str, Any], data)) elif command == "exit": response = await self.stop() elif command == "register_service": @@ -333,19 +342,23 @@ async def is_keyring_locked(self) -> Dict[str, Any]: async def keyring_status(self) -> Dict[str, Any]: passphrase_support_enabled: bool = supports_keyring_passphrase() - user_passphrase_is_set: bool = not using_default_passphrase() + can_save_passphrase: bool = supports_os_passphrase_storage() + user_passphrase_is_set: bool = Keychain.has_master_passphrase() and not using_default_passphrase() locked: bool = Keychain.is_keyring_locked() needs_migration: bool = Keychain.needs_migration() + requirements: Dict[str, Any] = passphrase_requirements() response: Dict[str, Any] = { "success": True, "is_keyring_locked": locked, "passphrase_support_enabled": passphrase_support_enabled, + "can_save_passphrase": can_save_passphrase, "user_passphrase_is_set": user_passphrase_is_set, "needs_migration": needs_migration, + "passphrase_requirements": requirements, } return response - async def unlock_keyring(self, request: Dict[str, Any]): + async def unlock_keyring(self, request: Dict[str, 
Any]) -> Dict[str, Any]: success: bool = False error: Optional[str] = None key: Optional[str] = request.get("key", None) @@ -356,6 +369,8 @@ async def unlock_keyring(self, request: Dict[str, Any]): if Keychain.master_passphrase_is_valid(key, force_reload=True): Keychain.set_cached_master_passphrase(key) success = True + # Inform the GUI of keyring status changes + self.keyring_status_changed(await self.keyring_status(), "wallet_ui") else: error = "bad passphrase" except Exception as e: @@ -375,7 +390,62 @@ async def unlock_keyring(self, request: Dict[str, Any]): response: Dict[str, Any] = {"success": success, "error": error} return response - async def set_keyring_passphrase(self, request: Dict[str, Any]): + async def validate_keyring_passphrase(self, request: Dict[str, Any]) -> Dict[str, Any]: + success: bool = False + error: Optional[str] = None + key: Optional[str] = request.get("key", None) + if type(key) is not str: + return {"success": False, "error": "missing key"} + + try: + success = Keychain.master_passphrase_is_valid(key, force_reload=True) + except Exception as e: + tb = traceback.format_exc() + self.log.error(f"Keyring passphrase validation failed: {e} {tb}") + error = "validation exception" + + response: Dict[str, Any] = {"success": success, "error": error} + return response + + async def migrate_keyring(self, request: Dict[str, Any]) -> Dict[str, Any]: + if Keychain.needs_migration() is False: + # If the keyring has already been migrated, we'll raise an error to the client. + # The reason for raising an error is because the migration request has side- + # effects beyond copying keys from the legacy keyring to the new keyring. The + # request may have set a passphrase and indicated that keys should be cleaned + # from the legacy keyring. If we were to return early and indicate success, + # the client and user's expectations may not match reality (were my keys + # deleted from the legacy keyring? was my passphrase set?). 
+ return {"success": False, "error": "migration not needed"} + + success: bool = False + error: Optional[str] = None + passphrase: Optional[str] = request.get("passphrase", None) + cleanup_legacy_keyring: bool = request.get("cleanup_legacy_keyring", False) + + if passphrase is not None and type(passphrase) is not str: + return {"success": False, "error": 'expected string value for "passphrase"'} + + if not Keychain.passphrase_meets_requirements(passphrase): + return {"success": False, "error": "passphrase doesn't satisfy requirements"} + + if type(cleanup_legacy_keyring) is not bool: + return {"success": False, "error": 'expected bool value for "cleanup_legacy_keyring"'} + + try: + Keychain.migrate_legacy_keyring(passphrase=passphrase, cleanup_legacy_keyring=cleanup_legacy_keyring) + success = True + # Inform the GUI of keyring status changes + self.keyring_status_changed(await self.keyring_status(), "wallet_ui") + except Exception as e: + tb = traceback.format_exc() + self.log.error(f"Legacy keyring migration failed: {e} {tb}") + error = f"keyring migration failed: {e}" + + response: Dict[str, Any] = {"success": success, "error": error} + return response + + async def set_keyring_passphrase(self, request: Dict[str, Any]) -> Dict[str, Any]: success: bool = False error: Optional[str] = None current_passphrase: Optional[str] = None @@ -393,23 +463,28 @@ async def set_keyring_passphrase(self, request: Dict[str, Any]): if type(new_passphrase) is not str: return {"success": False, "error": "missing new_passphrase"} + if not Keychain.passphrase_meets_requirements(new_passphrase): + return {"success": False, "error": "passphrase doesn't satisfy requirements"} + try: assert new_passphrase is not None # mypy, I love you Keychain.set_master_passphrase(current_passphrase, new_passphrase, allow_migration=False) except KeyringRequiresMigration: error = "keyring requires migration" - except KeyringCurrentPassphaseIsInvalid: + except KeyringCurrentPassphraseIsInvalid: error = 
"current passphrase is invalid" except Exception as e: tb = traceback.format_exc() self.log.error(f"Failed to set keyring passphrase: {e} {tb}") else: success = True + # Inform the GUI of keyring status changes + self.keyring_status_changed(await self.keyring_status(), "wallet_ui") response: Dict[str, Any] = {"success": success, "error": error} return response - async def remove_keyring_passphrase(self, request: Dict[str, Any]): + async def remove_keyring_passphrase(self, request: Dict[str, Any]) -> Dict[str, Any]: success: bool = False error: Optional[str] = None current_passphrase: Optional[str] = None @@ -423,13 +498,41 @@ async def remove_keyring_passphrase(self, request: Dict[str, Any]): try: Keychain.remove_master_passphrase(current_passphrase) - except KeyringCurrentPassphaseIsInvalid: + except KeyringCurrentPassphraseIsInvalid: error = "current passphrase is invalid" except Exception as e: tb = traceback.format_exc() self.log.error(f"Failed to remove keyring passphrase: {e} {tb}") else: success = True + # Inform the GUI of keyring status changes + self.keyring_status_changed(await self.keyring_status(), "wallet_ui") + + response: Dict[str, Any] = {"success": success, "error": error} + return response + + async def notify_keyring_migration_completed(self, request: Dict[str, Any]) -> Dict[str, Any]: + success: bool = False + error: Optional[str] = None + key: Optional[str] = request.get("key", None) + + if type(key) is not str: + return {"success": False, "error": "missing key"} + + Keychain.handle_migration_completed() + + try: + if Keychain.master_passphrase_is_valid(key, force_reload=True): + Keychain.set_cached_master_passphrase(key) + success = True + # Inform the GUI of keyring status changes + self.keyring_status_changed(await self.keyring_status(), "wallet_ui") + else: + error = "bad passphrase" + except Exception as e: + tb = traceback.format_exc() + self.log.error(f"Keyring passphrase validation failed: {e} {tb}") + error = "validation exception" 
response: Dict[str, Any] = {"success": success, "error": error} return response @@ -438,6 +541,33 @@ def get_status(self) -> Dict[str, Any]: response = {"success": True, "genesis_initialized": True} return response + async def _keyring_status_changed(self, keyring_status: Dict[str, Any], destination: str): + """ + Attempt to communicate with the GUI to inform it of any keyring status changes + (e.g. keyring becomes unlocked or migration completes) + """ + websockets = self.connections.get("wallet_ui", None) + + if websockets is None: + return None + + if keyring_status is None: + return None + + response = create_payload("keyring_status_changed", keyring_status, "daemon", destination) + + for websocket in websockets: + try: + await websocket.send(response) + except Exception as e: + tb = traceback.format_exc() + self.log.error(f"Unexpected exception trying to send to websocket: {e} {tb}") + websockets.remove(websocket) + await websocket.close() + + def keyring_status_changed(self, keyring_status: Dict[str, Any], destination: str): + asyncio.create_task(self._keyring_status_changed(keyring_status, destination)) + def plot_queue_to_payload(self, plot_queue_item, send_full_log: bool) -> Dict[str, Any]: error = plot_queue_item.get("error") has_error = error is not None @@ -682,8 +812,10 @@ async def start_plotting(self, request: Dict[str, Any]): } return response + ids: List[str] = [] for k in range(count): id = str(uuid.uuid4()) + ids.append(id) config = { "id": id, "size": size, @@ -704,7 +836,7 @@ async def start_plotting(self, request: Dict[str, Any]): # notify GUI about new plot queue item self.state_changed(service_plotter, self.prepare_plot_state_message(PlotEvent.STATE_CHANGED, id)) - # only first item can start when user selected serial plotting + # only the first item can start when user selected serial plotting can_start_serial_plotting = k == 0 and self._is_serial_plotting_running(queue) is False if parallel is True or can_start_serial_plotting: @@ -716,6 
+848,7 @@ async def start_plotting(self, request: Dict[str, Any]): response = { "success": True, + "ids": ids, "service_name": service_name, } diff --git a/wheat/full_node/block_store.py b/wheat/full_node/block_store.py index 0bb9c86..458e97d 100644 --- a/wheat/full_node/block_store.py +++ b/wheat/full_node/block_store.py @@ -28,8 +28,6 @@ async def create(cls, db_wrapper: DBWrapper): # All full blocks which have been added to the blockchain. Header_hash -> block self.db_wrapper = db_wrapper self.db = db_wrapper.db - await self.db.execute("pragma journal_mode=wal") - await self.db.execute("pragma synchronous=2") await self.db.execute( "CREATE TABLE IF NOT EXISTS full_blocks(header_hash text PRIMARY KEY, height bigint," " is_block tinyint, is_fully_compactified tinyint, block blob)" @@ -345,15 +343,19 @@ async def is_fully_compactified(self, header_hash: bytes32) -> Optional[bool]: return None return bool(row[0]) - async def get_first_not_compactified(self) -> Optional[int]: + async def get_random_not_compactified(self, number: int) -> List[int]: # Since orphan blocks do not get compactified, we need to check whether all blocks with a # certain height are not compact. And if we do have compact orphan blocks, then all that # happens is that the occasional chain block stays uncompact - not ideal, but harmless. 
cursor = await self.db.execute( - "SELECT height FROM full_blocks GROUP BY height HAVING sum(is_fully_compactified)=0 ORDER BY height LIMIT 1" + f"SELECT height FROM full_blocks GROUP BY height HAVING sum(is_fully_compactified)=0 " + f"ORDER BY RANDOM() LIMIT {number}" ) - row = await cursor.fetchone() + rows = await cursor.fetchall() await cursor.close() - if row is None: - return None - return int(row[0]) + + heights = [] + for row in rows: + heights.append(int(row[0])) + + return heights diff --git a/wheat/full_node/coin_store.py b/wheat/full_node/coin_store.py index cc1c90c..ac7ffbc 100644 --- a/wheat/full_node/coin_store.py +++ b/wheat/full_node/coin_store.py @@ -1,14 +1,16 @@ -from typing import List, Optional - +from typing import List, Optional, Set, Dict import aiosqlite - +from wheat.protocols.wallet_protocol import CoinState from wheat.types.blockchain_format.coin import Coin from wheat.types.blockchain_format.sized_bytes import bytes32 from wheat.types.coin_record import CoinRecord -from wheat.types.full_block import FullBlock from wheat.util.db_wrapper import DBWrapper from wheat.util.ints import uint32, uint64 from wheat.util.lru_cache import LRUCache +from time import time +import logging + +log = logging.getLogger(__name__) class CoinStore: @@ -29,8 +31,8 @@ async def create(cls, db_wrapper: DBWrapper, cache_size: uint32 = uint32(60000)) self.cache_size = cache_size self.db_wrapper = db_wrapper self.coin_record_db = db_wrapper.db - await self.coin_record_db.execute("pragma journal_mode=wal") - await self.coin_record_db.execute("pragma synchronous=2") + # the coin_name is unique in this table because the CoinStore always + # only represent a single peak await self.coin_record_db.execute( ( "CREATE TABLE IF NOT EXISTS coin_record(" @@ -57,31 +59,41 @@ async def create(cls, db_wrapper: DBWrapper, cache_size: uint32 = uint32(60000)) await self.coin_record_db.execute("CREATE INDEX IF NOT EXISTS coin_puzzle_hash on coin_record(puzzle_hash)") + await 
self.coin_record_db.execute("CREATE INDEX IF NOT EXISTS coin_parent_index on coin_record(coin_parent)") + await self.coin_record_db.commit() self.coin_record_cache = LRUCache(cache_size) return self - async def new_block(self, block: FullBlock, tx_additions: List[Coin], tx_removals: List[bytes32]): + async def new_block( + self, + height: uint32, + timestamp: uint64, + included_reward_coins: Set[Coin], + tx_additions: List[Coin], + tx_removals: List[bytes32], + ) -> List[CoinRecord]: """ Only called for blocks which are blocks (and thus have rewards and transactions) + Returns a list of the CoinRecords that were added by this block """ - if block.is_transaction_block() is False: - return None - assert block.foliage_transaction_block is not None + + start = time() + + additions = [] for coin in tx_additions: record: CoinRecord = CoinRecord( coin, - block.height, + height, uint32(0), False, False, - block.foliage_transaction_block.timestamp, + timestamp, ) - await self._add_coin_record(record, False) + additions.append(record) - included_reward_coins = block.get_included_reward_coins() - if block.height == 0: + if height == 0: assert len(included_reward_coins) == 0 else: assert len(included_reward_coins) >= 2 @@ -89,20 +101,26 @@ async def new_block(self, block: FullBlock, tx_additions: List[Coin], tx_removal for coin in included_reward_coins: reward_coin_r: CoinRecord = CoinRecord( coin, - block.height, + height, uint32(0), False, True, - block.foliage_transaction_block.timestamp, + timestamp, ) - await self._add_coin_record(reward_coin_r, False) + additions.append(reward_coin_r) - total_amount_spent: int = 0 - for coin_name in tx_removals: - total_amount_spent += await self._set_spent(coin_name, block.height) + await self._add_coin_records(additions) + await self._set_spent(tx_removals, height) + + end = time() + log.log( + logging.WARNING if end - start > 10 else logging.DEBUG, + f"It took {end - start:0.2f}s to apply {len(tx_additions)} additions and " + + 
f"{len(tx_removals)} removals to the coin store. Make sure " + + "blockchain database is on a fast drive", + ) - # Sanity check, already checked in block_body_validation - assert sum([a.amount for a in tx_additions]) <= total_amount_spent + return additions # Checks DB and DiffStores for CoinRecord with coin_name and returns it async def get_coin_record(self, coin_name: bytes32) -> Optional[CoinRecord]: @@ -130,6 +148,9 @@ async def get_coins_added_at_height(self, height: uint32) -> List[CoinRecord]: return coins async def get_coins_removed_at_height(self, height: uint32) -> List[CoinRecord]: + # Special case to avoid querying all unspent coins (spent_index=0) + if height == 0: + return [] cursor = await self.coin_record_db.execute("SELECT * from coin_record WHERE spent_index=?", (height,)) rows = await cursor.fetchall() await cursor.close() @@ -212,13 +233,47 @@ async def get_coin_records_by_names( f"{'' if include_spent_coins else 'AND spent=0'}", names_db + (start_height, end_height), ) - rows = await cursor.fetchall() await cursor.close() for row in rows: coin = Coin(bytes32(bytes.fromhex(row[6])), bytes32(bytes.fromhex(row[5])), uint64.from_bytes(row[7])) coins.add(CoinRecord(coin, row[1], row[2], row[3], row[4], row[8])) + + return list(coins) + + def row_to_coin_state(self, row): + coin = Coin(bytes32(bytes.fromhex(row[6])), bytes32(bytes.fromhex(row[5])), uint64.from_bytes(row[7])) + spent_h = None + if row[3]: + spent_h = row[2] + return CoinState(coin, spent_h, row[1]) + + async def get_coin_states_by_puzzle_hashes( + self, + include_spent_coins: bool, + puzzle_hashes: List[bytes32], + start_height: uint32 = uint32(0), + end_height: uint32 = uint32((2 ** 32) - 1), + ) -> List[CoinState]: + if len(puzzle_hashes) == 0: + return [] + + coins = set() + puzzle_hashes_db = tuple([ph.hex() for ph in puzzle_hashes]) + cursor = await self.coin_record_db.execute( + f'SELECT * from coin_record WHERE puzzle_hash in ({"?," * (len(puzzle_hashes_db) - 1)}?) 
' + f"AND confirmed_index>=? AND confirmed_index List[CoinState]: + if len(coin_ids) == 0: + return [] + + coins = set() + parent_ids_db = tuple([pid.hex() for pid in coin_ids]) + cursor = await self.coin_record_db.execute( + f'SELECT * from coin_record WHERE coin_name in ({"?," * (len(parent_ids_db) - 1)}?) ' + f"AND confirmed_index>=? AND confirmed_index List[CoinRecord]: """ Note that block_index can be negative, in which case everything is rolled back + Returns the list of coin records that have been modified """ # Update memory cache delete_queue: bytes32 = [] @@ -271,50 +353,78 @@ async def rollback_to_block(self, block_index: int): for coin_name in delete_queue: self.coin_record_cache.remove(coin_name) + coin_changes: Dict[bytes32, CoinRecord] = {} + cursor_deleted = await self.coin_record_db.execute( + "SELECT * FROM coin_record WHERE confirmed_index>?", (block_index,) + ) + rows = await cursor_deleted.fetchall() + for row in rows: + coin = Coin(bytes32(bytes.fromhex(row[6])), bytes32(bytes.fromhex(row[5])), uint64.from_bytes(row[7])) + record = CoinRecord(coin, uint32(0), row[2], row[3], row[4], uint64(0)) + coin_changes[record.name] = record + await cursor_deleted.close() + # Delete from storage c1 = await self.coin_record_db.execute("DELETE FROM coin_record WHERE confirmed_index>?", (block_index,)) await c1.close() + + cursor_unspent = await self.coin_record_db.execute( + "SELECT * FROM coin_record WHERE confirmed_index>?", (block_index,) + ) + rows = await cursor_unspent.fetchall() + for row in rows: + coin = Coin(bytes32(bytes.fromhex(row[6])), bytes32(bytes.fromhex(row[5])), uint64.from_bytes(row[7])) + record = CoinRecord(coin, row[1], uint32(0), False, row[4], row[8]) + if record.name not in coin_changes: + coin_changes[record.name] = record + await cursor_unspent.close() + c2 = await self.coin_record_db.execute( "UPDATE coin_record SET spent_index = 0, spent = 0 WHERE spent_index>?", (block_index,), ) await c2.close() + return 
list(coin_changes.values()) # Store CoinRecord in DB and ram cache - async def _add_coin_record(self, record: CoinRecord, allow_replace: bool) -> None: - if self.coin_record_cache.get(record.coin.name()) is not None: - self.coin_record_cache.remove(record.coin.name()) + async def _add_coin_records(self, records: List[CoinRecord]) -> None: - cursor = await self.coin_record_db.execute( - f"INSERT {'OR REPLACE ' if allow_replace else ''}INTO coin_record VALUES(?, ?, ?, ?, ?, ?, ?, ?, ?)", - ( - record.coin.name().hex(), - record.confirmed_block_index, - record.spent_block_index, - int(record.spent), - int(record.coinbase), - str(record.coin.puzzle_hash.hex()), - str(record.coin.parent_coin_info.hex()), - bytes(record.coin.amount), - record.timestamp, - ), + values = [] + for record in records: + self.coin_record_cache.put(record.coin.name(), record) + values.append( + ( + record.coin.name().hex(), + record.confirmed_block_index, + record.spent_block_index, + int(record.spent), + int(record.coinbase), + str(record.coin.puzzle_hash.hex()), + str(record.coin.parent_coin_info.hex()), + bytes(record.coin.amount), + record.timestamp, + ) + ) + + cursor = await self.coin_record_db.executemany( + "INSERT INTO coin_record VALUES(?, ?, ?, ?, ?, ?, ?, ?, ?)", + values, ) await cursor.close() # Update coin_record to be spent in DB - async def _set_spent(self, coin_name: bytes32, index: uint32) -> uint64: - current: Optional[CoinRecord] = await self.get_coin_record(coin_name) - if current is None: - raise ValueError(f"Cannot spend a coin that does not exist in db: {coin_name}") - - assert not current.spent # Redundant sanity check, already checked in block_body_validation - spent: CoinRecord = CoinRecord( - current.coin, - current.confirmed_block_index, - index, - True, - current.coinbase, - current.timestamp, - ) # type: ignore # noqa - await self._add_coin_record(spent, True) - return current.coin.amount + async def _set_spent(self, coin_names: List[bytes32], index: uint32): + + 
# if this coin is in the cache, mark it as spent in there + updates = [] + for coin_name in coin_names: + r = self.coin_record_cache.get(coin_name) + if r is not None: + self.coin_record_cache.put( + r.name, CoinRecord(r.coin, r.confirmed_block_index, index, True, r.coinbase, r.timestamp) + ) + updates.append((index, coin_name.hex())) + + await self.coin_record_db.executemany( + "UPDATE OR FAIL coin_record SET spent=1,spent_index=? WHERE coin_name=?", updates + ) diff --git a/wheat/full_node/full_node.py b/wheat/full_node/full_node.py index 9bc5a5c..2db8472 100644 --- a/wheat/full_node/full_node.py +++ b/wheat/full_node/full_node.py @@ -24,6 +24,7 @@ from wheat.full_node.bundle_tools import detect_potential_template_generator from wheat.full_node.coin_store import CoinStore from wheat.full_node.full_node_store import FullNodeStore +from wheat.full_node.hint_store import HintStore from wheat.full_node.mempool_manager import MempoolManager from wheat.full_node.signage_point import SignagePoint from wheat.full_node.sync_store import SyncStore @@ -36,6 +37,7 @@ RespondSignagePoint, ) from wheat.protocols.protocol_message_types import ProtocolMessageTypes +from wheat.protocols.wallet_protocol import CoinState, CoinStateUpdate from wheat.server.node_discovery import FullNodePeers from wheat.server.outbound_message import Message, NodeType, make_msg from wheat.server.server import WheatServer @@ -44,6 +46,7 @@ from wheat.types.blockchain_format.sized_bytes import bytes32 from wheat.types.blockchain_format.sub_epoch_summary import SubEpochSummary from wheat.types.blockchain_format.vdf import CompressibleVDFField, VDFInfo, VDFProof +from wheat.types.coin_record import CoinRecord from wheat.types.end_of_slot_bundle import EndOfSubSlotBundle from wheat.types.full_block import FullBlock from wheat.types.header_block import HeaderBlock @@ -58,6 +61,7 @@ from wheat.util.path import mkdir, path_from_root from wheat.util.safe_cancel_task import cancel_task_safe from 
wheat.util.profiler import profile_task +from datetime import datetime class FullNode: @@ -110,6 +114,11 @@ def __init__( db_path_replaced: str = config["database_path"].replace("CHALLENGE", config["selected_network"]) self.db_path = path_from_root(root_path, db_path_replaced) + self.coin_subscriptions: Dict[bytes32, Set[bytes32]] = {} # Puzzle Hash : Set[Peer ID] + self.ph_subscriptions: Dict[bytes32, Set[bytes32]] = {} # Puzzle Hash : Set[Peer ID] + self.peer_coin_ids: Dict[bytes32, Set[bytes32]] = {} # Peer ID: Set[Coin ids] + self.peer_puzzle_hash: Dict[bytes32, Set[bytes32]] = {} # Peer ID: Set[puzzle_hash] + self.peer_sub_counter: Dict[bytes32, int] = {} # Peer ID: int (subscription count) mkdir(self.db_path.parent) def _set_state_changed_callback(self, callback: Callable): @@ -121,13 +130,27 @@ async def _start(self): self.new_peak_sem = asyncio.Semaphore(8) # create the store (db) and full node instance self.connection = await aiosqlite.connect(self.db_path) + await self.connection.execute("pragma journal_mode=wal") + await self.connection.execute("pragma synchronous=OFF") + if self.config.get("log_sqlite_cmds", False): + sql_log_path = path_from_root(self.root_path, "log/sql.log") + self.log.info(f"logging SQL commands to {sql_log_path}") + + def sql_trace_callback(req: str): + timestamp = datetime.now().strftime("%H:%M:%S.%f") + log = open(sql_log_path, "a") + log.write(timestamp + " " + req + "\n") + log.close() + + await self.connection.set_trace_callback(sql_trace_callback) self.db_wrapper = DBWrapper(self.connection) self.block_store = await BlockStore.create(self.db_wrapper) self.sync_store = await SyncStore.create() + self.hint_store = await HintStore.create(self.db_wrapper) self.coin_store = await CoinStore.create(self.db_wrapper) self.log.info("Initializing blockchain from disk") start_time = time.time() - self.blockchain = await Blockchain.create(self.coin_store, self.block_store, self.constants) + self.blockchain = await 
Blockchain.create(self.coin_store, self.block_store, self.constants, self.hint_store) self.mempool_manager = MempoolManager(self.coin_store, self.constants) self.weight_proof_handler = None self._init_weight_proof = asyncio.create_task(self.initialize_weight_proof()) @@ -152,7 +175,7 @@ async def _start(self): peak: Optional[BlockRecord] = self.blockchain.get_peak() if peak is not None: full_peak = await self.blockchain.get_full_peak() - await self.peak_post_processing(full_peak, peak, max(peak.height - 1, 0), None) + await self.peak_post_processing(full_peak, peak, max(peak.height - 1, 0), None, ([], {})) if self.config["send_uncompact_interval"] != 0: sanitize_weight_proof_only = False if "sanitize_weight_proof_only" in self.config: @@ -267,14 +290,16 @@ async def short_sync_batch(self, peer: ws.WSWheatConnection, start_height: uint3 if not response: raise ValueError(f"Error short batch syncing, invalid/no response for {height}-{end_height}") async with self.blockchain.lock: - success, advanced_peak, fork_height = await self.receive_block_batch(response.blocks, peer, None) + success, advanced_peak, fork_height, coin_changes = await self.receive_block_batch( + response.blocks, peer, None + ) if not success: raise ValueError(f"Error short batch syncing, failed to validate blocks {height}-{end_height}") if advanced_peak: peak = self.blockchain.get_peak() peak_fb: Optional[FullBlock] = await self.blockchain.get_full_peak() assert peak is not None and peak_fb is not None and fork_height is not None - await self.peak_post_processing(peak_fb, peak, fork_height, peer) + await self.peak_post_processing(peak_fb, peak, fork_height, peer, coin_changes) self.log.info(f"Added blocks {height}-{end_height}") except Exception: self.sync_store.batch_syncing.remove(peer.peer_node_id) @@ -546,6 +571,27 @@ def on_disconnect(self, connection: ws.WSWheatConnection): self._state_changed("sync_mode") if self.sync_store is not None: 
self.sync_store.peer_disconnected(connection.peer_node_id) + self.remove_subscriptions(connection) + + def remove_subscriptions(self, peer: ws.WSWheatConnection): + # Remove all ph | coin id subscription for this peer + node_id = peer.peer_node_id + if node_id in self.peer_puzzle_hash: + puzzle_hashes = self.peer_puzzle_hash[node_id] + for ph in puzzle_hashes: + if ph in self.ph_subscriptions: + if node_id in self.ph_subscriptions[ph]: + self.ph_subscriptions[ph].remove(node_id) + + if node_id in self.peer_coin_ids: + coin_ids = self.peer_coin_ids[node_id] + for coin_id in coin_ids: + if coin_id in self.coin_subscriptions: + if node_id in self.coin_subscriptions[coin_id]: + self.coin_subscriptions[coin_id].remove(node_id) + + if peer.peer_node_id in self.peer_sub_counter: + self.peer_sub_counter.pop(peer.peer_node_id) def _num_needed_peers(self) -> int: assert self.server is not None @@ -767,7 +813,7 @@ async def validate_block_batches(batch_queue): peer, blocks = res start_height = blocks[0].height end_height = blocks[-1].height - success, advanced_peak, _ = await self.receive_block_batch( + success, advanced_peak, fork_height, coin_states = await self.receive_block_batch( blocks, peer, None if advanced_peak else uint32(fork_point_height), summaries ) if success is False: @@ -777,6 +823,9 @@ async def validate_block_batches(batch_queue): raise ValueError(f"Failed to validate block batch {start_height} to {end_height}") self.log.info(f"Added blocks {start_height} to {end_height}") await self.send_peak_to_wallets() + peak = self.blockchain.get_peak() + if len(coin_states) > 0 and fork_height is not None: + await self.update_wallets(peak.height, fork_height, peak.header_hash, coin_states) self.blockchain.clean_block_record(end_height - self.constants.BLOCKS_CACHE_SIZE) loop = asyncio.get_event_loop() @@ -811,13 +860,56 @@ def get_peers_with_peak(self, peak_hash: bytes32) -> List: peers_with_peak: List = [c for c in self.server.all_connections.values() if 
c.peer_node_id in peer_ids] return peers_with_peak + async def update_wallets( + self, + height: uint32, + fork_height: uint32, + peak_hash: bytes32, + state_update: Tuple[List[CoinRecord], Dict[bytes, Dict[bytes32, CoinRecord]]], + ): + changes_for_peer: Dict[bytes32, Set[CoinState]] = {} + + states, hint_state = state_update + + for coin_record in states: + if coin_record.name in self.coin_subscriptions: + subscribed_peers = self.coin_subscriptions[coin_record.name] + for peer in subscribed_peers: + if peer not in changes_for_peer: + changes_for_peer[peer] = set() + changes_for_peer[peer].add(coin_record.coin_state) + + if coin_record.coin.puzzle_hash in self.ph_subscriptions: + subscribed_peers = self.ph_subscriptions[coin_record.coin.puzzle_hash] + for peer in subscribed_peers: + if peer not in changes_for_peer: + changes_for_peer[peer] = set() + changes_for_peer[peer].add(coin_record.coin_state) + + for hint, records in hint_state.items(): + if hint in self.ph_subscriptions: + subscribed_peers = self.ph_subscriptions[hint] + for peer in subscribed_peers: + if peer not in changes_for_peer: + changes_for_peer[peer] = set() + for record in records.values(): + changes_for_peer[peer].add(record.coin_state) + + for peer, changes in changes_for_peer.items(): + if peer not in self.server.all_connections: + continue + ws_peer: ws.WSWheatConnection = self.server.all_connections[peer] + state = CoinStateUpdate(height, fork_height, peak_hash, list(changes)) + msg = make_msg(ProtocolMessageTypes.coin_state_update, state) + await ws_peer.send_message(msg) + async def receive_block_batch( self, all_blocks: List[FullBlock], peer: ws.WSWheatConnection, fork_point: Optional[uint32], wp_summaries: Optional[List[SubEpochSummary]] = None, - ) -> Tuple[bool, bool, Optional[uint32]]: + ) -> Tuple[bool, bool, Optional[uint32], Tuple[List[CoinRecord], Dict[bytes, Dict[bytes, CoinRecord]]]]: advanced_peak = False fork_height: Optional[uint32] = uint32(0) @@ -827,33 +919,52 @@ async def 
receive_block_batch( blocks_to_validate = all_blocks[i:] break if len(blocks_to_validate) == 0: - return True, False, fork_height + return True, False, fork_height, ([], {}) pre_validate_start = time.time() pre_validation_results: Optional[ List[PreValidationResult] ] = await self.blockchain.pre_validate_blocks_multiprocessing(blocks_to_validate, {}, wp_summaries=wp_summaries) - self.log.debug(f"Block pre-validation time: {time.time() - pre_validate_start}") + pre_validate_end = time.time() + if pre_validate_end - pre_validate_start > 10: + self.log.warning(f"Block pre-validation time: {pre_validate_end - pre_validate_start:0.2f} seconds") + else: + self.log.debug(f"Block pre-validation time: {pre_validate_end - pre_validate_start:0.2f} seconds") if pre_validation_results is None: - return False, False, None + return False, False, None, ([], {}) for i, block in enumerate(blocks_to_validate): if pre_validation_results[i].error is not None: self.log.error( f"Invalid block from peer: {peer.get_peer_logging()} {Err(pre_validation_results[i].error)}" ) - return False, advanced_peak, fork_height + return False, advanced_peak, fork_height, ([], {}) + + # Dicts because deduping + all_coin_changes: Dict[bytes32, CoinRecord] = {} + all_hint_changes: Dict[bytes, Dict[bytes32, CoinRecord]] = {} for i, block in enumerate(blocks_to_validate): assert pre_validation_results[i].required_iters is not None - (result, error, fork_height,) = await self.blockchain.receive_block( + result, error, fork_height, coin_changes = await self.blockchain.receive_block( block, pre_validation_results[i], None if advanced_peak else fork_point ) + coin_record_list, hint_records = coin_changes + + # Update all changes + for record in coin_record_list: + all_coin_changes[record.name] = record + for hint, list_of_records in hint_records.items(): + if hint not in all_hint_changes: + all_hint_changes[hint] = {} + for record in list_of_records: + all_hint_changes[hint][record.name] = record + if result == 
ReceiveBlockResult.NEW_PEAK: advanced_peak = True elif result == ReceiveBlockResult.INVALID_BLOCK or result == ReceiveBlockResult.DISCONNECTED_BLOCK: if error is not None: self.log.error(f"Error: {error}, Invalid block from peer: {peer.get_peer_logging()} ") - return False, advanced_peak, fork_height + return False, advanced_peak, fork_height, ([], {}) block_record = self.blockchain.block_record(block.header_hash) if block_record.sub_epoch_summary_included is not None: if self.weight_proof_handler is not None: @@ -864,7 +975,7 @@ async def receive_block_batch( f"Total time for {len(blocks_to_validate)} blocks: {time.time() - pre_validate_start}, " f"advanced: {advanced_peak}" ) - return True, advanced_peak, fork_height + return True, advanced_peak, fork_height, (list(all_coin_changes.values()), all_hint_changes) async def _finish_sync(self): """ @@ -884,7 +995,7 @@ async def _finish_sync(self): peak_fb: FullBlock = await self.blockchain.get_full_peak() if peak is not None: - await self.peak_post_processing(peak_fb, peak, max(peak.height - 1, 0), None) + await self.peak_post_processing(peak_fb, peak, max(peak.height - 1, 0), None, ([], {})) if peak is not None and self.weight_proof_handler is not None: await self.weight_proof_handler.get_proof_of_weight(peak.header_hash) @@ -962,7 +1073,12 @@ async def signage_point_post_processing( await self.server.send_to_all([msg], NodeType.FARMER) async def peak_post_processing( - self, block: FullBlock, record: BlockRecord, fork_height: uint32, peer: Optional[ws.WSWheatConnection] + self, + block: FullBlock, + record: BlockRecord, + fork_height: uint32, + peer: Optional[ws.WSWheatConnection], + coin_changes: Tuple[List[CoinRecord], Dict[bytes, Dict[bytes32, CoinRecord]]], ): """ Must be called under self.blockchain.lock. 
This updates the internal state of the full node with the @@ -1097,6 +1213,7 @@ async def peak_post_processing( fork_height, ), ) + await self.update_wallets(record.height, fork_height, record.header_hash, coin_changes) await self.server.send_to_all([msg], NodeType.WALLET) # Check if we detected a spent transaction, to load up our generator cache @@ -1174,7 +1291,7 @@ async def respond_block( ) # This recursion ends here, we cannot recurse again because transactions_generator is not None return await self.respond_block(block_response, peer) - + coin_changes: Tuple[List[CoinRecord], Dict[bytes, Dict[bytes32, CoinRecord]]] = ([], {}) async with self.blockchain.lock: # After acquiring the lock, check again, because another asyncio thread might have added it if self.blockchain.contains_block(header_hash): @@ -1202,7 +1319,10 @@ async def respond_block( pre_validation_results[0] if pre_validation_result is None else pre_validation_result ) assert result_to_validate.required_iters == pre_validation_results[0].required_iters - added, error_code, fork_height = await self.blockchain.receive_block(block, result_to_validate, None) + added, error_code, fork_height, coin_changes = await self.blockchain.receive_block( + block, result_to_validate, None + ) + if ( self.full_node_store.previous_generator is not None and fork_height is not None @@ -1226,7 +1346,7 @@ async def respond_block( new_peak: Optional[BlockRecord] = self.blockchain.get_peak() assert new_peak is not None and fork_height is not None - await self.peak_post_processing(block, new_peak, fork_height, peer) + await self.peak_post_processing(block, new_peak, fork_height, peer, coin_changes) elif added == ReceiveBlockResult.ADDED_AS_ORPHAN: self.log.info( @@ -1244,10 +1364,11 @@ async def respond_block( if block.transactions_info is not None else "" ) - self.log.info( - f"Block validation time: {validation_time}, " + self.log.log( + logging.WARNING if validation_time > 2 else logging.DEBUG, + f"Block validation time: 
{validation_time:0.2f} seconds, " f"cost: {block.transactions_info.cost if block.transactions_info is not None else 'None'}" - f"{percent_full_str}" + f"{percent_full_str}", ) # This code path is reached if added == ADDED_AS_ORPHAN or NEW_TIP @@ -1362,7 +1483,7 @@ async def respond_unfinished_block( if farmed_block is True: self.log.info( f"🍀 ️Farmed unfinished_block {block_hash}, SP: {block.reward_chain_block.signage_point_index}, " - f"validation time: {validation_time}, " + f"validation time: {validation_time:0.4f} seconds, " f"cost: {block.transactions_info.cost if block.transactions_info else 'None'}" ) else: @@ -1378,9 +1499,9 @@ async def respond_unfinished_block( self.log.info( f"Added unfinished_block {block_hash}, not farmed by us," f" SP: {block.reward_chain_block.signage_point_index} farmer response time: " - f"{time.time() - self.signage_point_times[block.reward_chain_block.signage_point_index]}, " + f"{time.time() - self.signage_point_times[block.reward_chain_block.signage_point_index]:0.4f}, " f"Pool pk {encode_puzzle_hash(block.foliage.foliage_block_data.pool_target.puzzle_hash, 'wheat')}, " - f"validation time: {validation_time}, " + f"validation time: {validation_time:0.4f} seconds, " f"cost: {block.transactions_info.cost if block.transactions_info else 'None'}" f"{percent_full_str}" ) @@ -1637,7 +1758,7 @@ async def respond_transaction( self.mempool_manager.remove_seen(spend_name) else: try: - cost_result = await self.mempool_manager.pre_validate_spendbundle(transaction) + cost_result = await self.mempool_manager.pre_validate_spendbundle(transaction, spend_name) except Exception as e: self.mempool_manager.remove_seen(spend_name) raise e @@ -1819,6 +1940,7 @@ async def respond_compact_proof_of_time(self, request: timelord_protocol.Respond if not replaced: self.log.error(f"Could not replace compact proof: {request.height}") return None + self.log.info(f"Replaced compact proof at height {request.height}") msg = make_msg( 
ProtocolMessageTypes.new_compact_vdf, full_node_protocol.NewCompactVDF(request.height, request.header_hash, request.field_vdf, request.vdf_info), @@ -1919,29 +2041,17 @@ async def broadcast_uncompact_blocks( await asyncio.sleep(30) broadcast_list: List[timelord_protocol.RequestCompactProofOfTime] = [] - max_height = self.blockchain.get_peak_height() - if max_height is None: - await asyncio.sleep(30) - continue - assert max_height is not None - self.log.info("Getting minimum bluebox work height") - min_height = await self.block_store.get_first_not_compactified() - if min_height is None or min_height > max(0, max_height - 1000): - min_height = max(0, max_height - 1000) - assert min_height is not None - max_height = uint32(min(max_height, min_height + 2000)) - batches_finished = 0 - self.log.info(f"Scanning the blockchain for uncompact blocks. Range: {min_height}..{max_height}") - for h in range(min_height, max_height, 100): - # Got 10 times the target header count, sampling the target headers should contain - # enough randomness to split the work between blueboxes. 
- if len(broadcast_list) > target_uncompact_proofs * 10: - break - stop_height = min(h + 99, max_height) - headers = await self.blockchain.get_header_blocks_in_range(h, stop_height, tx_filter=False) + + self.log.info("Getting random heights for bluebox to compact") + heights = await self.block_store.get_random_not_compactified(target_uncompact_proofs) + self.log.info("Heights found for bluebox to compact: [%s]" % ", ".join(map(str, heights))) + + for h in heights: + + headers = await self.blockchain.get_header_blocks_in_range(h, h, tx_filter=False) records: Dict[bytes32, BlockRecord] = {} if sanitize_weight_proof_only: - records = await self.blockchain.get_block_records_in_range(h, stop_height) + records = await self.blockchain.get_block_records_in_range(h, h) for header in headers.values(): expected_header_hash = self.blockchain.height_to_hash(header.height) if header.header_hash != expected_header_hash: @@ -2007,21 +2117,17 @@ async def broadcast_uncompact_blocks( ) ) - # Small sleep between batches. 
- batches_finished += 1 - if batches_finished % 10 == 0: - await asyncio.sleep(1) - - # sample work randomly from the uncompact blocks we found if len(broadcast_list) > target_uncompact_proofs: - random.shuffle(broadcast_list) broadcast_list = broadcast_list[:target_uncompact_proofs] if self.sync_store.get_sync_mode(): continue if self.server is not None: + self.log.info(f"Broadcasting {len(broadcast_list)} items to the bluebox") + msgs = [] for new_pot in broadcast_list: msg = make_msg(ProtocolMessageTypes.request_compact_proof_of_time, new_pot) - await self.server.send_to_all([msg], NodeType.TIMELORD) + msgs.append(msg) + await self.server.send_to_all(msgs, NodeType.TIMELORD) await asyncio.sleep(uncompact_interval_scan) except Exception as e: error_stack = traceback.format_exc() diff --git a/wheat/full_node/full_node_api.py b/wheat/full_node/full_node_api.py index 1ea5e21..beab3bd 100644 --- a/wheat/full_node/full_node_api.py +++ b/wheat/full_node/full_node_api.py @@ -1,6 +1,7 @@ import asyncio import dataclasses import time +import traceback from secrets import token_bytes from typing import Callable, Dict, List, Optional, Tuple, Set @@ -18,12 +19,19 @@ from wheat.protocols import farmer_protocol, full_node_protocol, introducer_protocol, timelord_protocol, wallet_protocol from wheat.protocols.full_node_protocol import RejectBlock, RejectBlocks from wheat.protocols.protocol_message_types import ProtocolMessageTypes -from wheat.protocols.wallet_protocol import PuzzleSolutionResponse, RejectHeaderBlocks, RejectHeaderRequest +from wheat.protocols.wallet_protocol import ( + PuzzleSolutionResponse, + RejectHeaderBlocks, + RejectHeaderRequest, + CoinState, + RespondSESInfo, +) from wheat.server.outbound_message import Message, make_msg from wheat.types.blockchain_format.coin import Coin, hash_coin_list from wheat.types.blockchain_format.pool_target import PoolTarget from wheat.types.blockchain_format.program import Program from wheat.types.blockchain_format.sized_bytes 
import bytes32 +from wheat.types.blockchain_format.sub_epoch_summary import SubEpochSummary from wheat.types.coin_record import CoinRecord from wheat.types.end_of_slot_bundle import EndOfSubSlotBundle from wheat.types.full_block import FullBlock @@ -32,7 +40,7 @@ from wheat.types.mempool_item import MempoolItem from wheat.types.peer_info import PeerInfo from wheat.types.unfinished_block import UnfinishedBlock -from wheat.util.api_decorators import api_request, peer_required, bytes_required, execute_task +from wheat.util.api_decorators import api_request, peer_required, bytes_required, execute_task, reply_type from wheat.util.generator_tools import get_block_header from wheat.util.hash import std_hash from wheat.util.ints import uint8, uint32, uint64, uint128 @@ -62,6 +70,7 @@ def api_ready(self): @peer_required @api_request + @reply_type([ProtocolMessageTypes.respond_peers]) async def request_peers(self, _request: full_node_protocol.RequestPeers, peer: ws.WSWheatConnection): if peer.peer_server_port is None: return None @@ -189,6 +198,7 @@ async def tx_request_and_timeout(full_node: FullNode, transaction_id, task_id): return None @api_request + @reply_type([ProtocolMessageTypes.respond_transaction]) async def request_transaction(self, request: full_node_protocol.RequestTransaction) -> Optional[Message]: """Peer has requested a full transaction from us.""" # Ignore if syncing @@ -227,6 +237,7 @@ async def respond_transaction( return None @api_request + @reply_type([ProtocolMessageTypes.respond_proof_of_weight]) async def request_proof_of_weight(self, request: full_node_protocol.RequestProofOfWeight) -> Optional[Message]: if self.full_node.weight_proof_handler is None: return None @@ -272,6 +283,7 @@ async def respond_proof_of_weight(self, request: full_node_protocol.RespondProof return None @api_request + @reply_type([ProtocolMessageTypes.respond_block, ProtocolMessageTypes.reject_block]) async def request_block(self, request: full_node_protocol.RequestBlock) -> 
Optional[Message]: if not self.full_node.blockchain.contains_height(request.height): reject = RejectBlock(request.height) @@ -288,6 +300,7 @@ async def request_block(self, request: full_node_protocol.RequestBlock) -> Optio return msg @api_request + @reply_type([ProtocolMessageTypes.respond_blocks, ProtocolMessageTypes.reject_blocks]) async def request_blocks(self, request: full_node_protocol.RequestBlocks) -> Optional[Message]: if request.end_height < request.start_height or request.end_height - request.start_height > 32: reject = RejectBlocks(request.start_height, request.end_height) @@ -399,6 +412,7 @@ async def eventually_clear(): return msg @api_request + @reply_type([ProtocolMessageTypes.respond_unfinished_block]) async def request_unfinished_block( self, request_unfinished_block: full_node_protocol.RequestUnfinishedBlock ) -> Optional[Message]: @@ -509,6 +523,7 @@ async def new_signage_point_or_end_of_sub_slot( return make_msg(ProtocolMessageTypes.request_signage_point_or_end_of_sub_slot, full_node_request) @api_request + @reply_type([ProtocolMessageTypes.respond_signage_point, ProtocolMessageTypes.respond_end_of_sub_slot]) async def request_signage_point_or_end_of_sub_slot( self, request: full_node_protocol.RequestSignagePointOrEndOfSubSlot ) -> Optional[Message]: @@ -710,6 +725,7 @@ async def declare_proof_of_space( curr_l_tb.header_hash ) except Exception as e: + self.log.error(f"Traceback: {traceback.format_exc()}") self.full_node.log.error(f"Error making spend bundle {e} peak: {peak}") mempool_bundle = None if mempool_bundle is not None: @@ -1050,7 +1066,7 @@ async def request_block_header(self, request: wallet_protocol.RequestBlockHeader return msg block: Optional[FullBlock] = await self.full_node.block_store.get_full_block(header_hash) if block is not None: - tx_removals, tx_additions = await self.full_node.blockchain.get_tx_removals_and_additions(block) + tx_removals, tx_additions, _ = await 
self.full_node.blockchain.get_tx_removals_and_additions(block) header_block = get_block_header(block, tx_additions, tx_removals) msg = make_msg( ProtocolMessageTypes.respond_block_header, @@ -1300,6 +1316,7 @@ async def new_compact_vdf( @peer_required @api_request + @reply_type([ProtocolMessageTypes.respond_compact_vdf]) async def request_compact_vdf(self, request: full_node_protocol.RequestCompactVDF, peer: ws.WSWheatConnection): if self.full_node.sync_store.get_sync_mode(): return None @@ -1311,3 +1328,120 @@ async def respond_compact_vdf(self, request: full_node_protocol.RespondCompactVD if self.full_node.sync_store.get_sync_mode(): return None await self.full_node.respond_compact_vdf(request, peer) + + @peer_required + @api_request + async def register_interest_in_puzzle_hash( + self, request: wallet_protocol.RegisterForPhUpdates, peer: ws.WSWheatConnection + ): + if peer.peer_node_id not in self.full_node.peer_puzzle_hash: + self.full_node.peer_puzzle_hash[peer.peer_node_id] = set() + + if peer.peer_node_id not in self.full_node.peer_sub_counter: + self.full_node.peer_sub_counter[peer.peer_node_id] = 0 + + hint_coin_ids = [] + # Add peer to the "Subscribed" dictionary + for puzzle_hash in request.puzzle_hashes: + ph_hint_coins = await self.full_node.hint_store.get_coin_ids(puzzle_hash) + hint_coin_ids.extend(ph_hint_coins) + if puzzle_hash not in self.full_node.ph_subscriptions: + self.full_node.ph_subscriptions[puzzle_hash] = set() + if ( + peer.peer_node_id not in self.full_node.ph_subscriptions[puzzle_hash] + and self.full_node.peer_sub_counter[peer.peer_node_id] < 100000 + ): + self.full_node.ph_subscriptions[puzzle_hash].add(peer.peer_node_id) + self.full_node.peer_puzzle_hash[peer.peer_node_id].add(puzzle_hash) + self.full_node.peer_sub_counter[peer.peer_node_id] += 1 + + # Send all coins with requested puzzle hash that have been created after the specified height + states: List[CoinState] = await 
self.full_node.coin_store.get_coin_states_by_puzzle_hashes( + include_spent_coins=True, puzzle_hashes=request.puzzle_hashes, start_height=request.min_height + ) + + if len(hint_coin_ids) > 0: + hint_states = await self.full_node.coin_store.get_coin_state_by_ids( + include_spent_coins=True, coin_ids=hint_coin_ids, start_height=request.min_height + ) + states.extend(hint_states) + + response = wallet_protocol.RespondToPhUpdates(request.puzzle_hashes, request.min_height, states) + msg = make_msg(ProtocolMessageTypes.respond_to_ph_update, response) + return msg + + @peer_required + @api_request + async def register_interest_in_coin( + self, request: wallet_protocol.RegisterForCoinUpdates, peer: ws.WSWheatConnection + ): + if peer.peer_node_id not in self.full_node.peer_coin_ids: + self.full_node.peer_coin_ids[peer.peer_node_id] = set() + + if peer.peer_node_id not in self.full_node.peer_sub_counter: + self.full_node.peer_sub_counter[peer.peer_node_id] = 0 + + for coin_id in request.coin_ids: + if coin_id not in self.full_node.coin_subscriptions: + self.full_node.coin_subscriptions[coin_id] = set() + if ( + peer.peer_node_id not in self.full_node.coin_subscriptions[coin_id] + and self.full_node.peer_sub_counter[peer.peer_node_id] < 100000 + ): + self.full_node.coin_subscriptions[coin_id].add(peer.peer_node_id) + self.full_node.peer_coin_ids[peer.peer_node_id].add(coin_id) + self.full_node.peer_sub_counter[peer.peer_node_id] += 1 + + states: List[CoinState] = await self.full_node.coin_store.get_coin_state_by_ids( + include_spent_coins=True, coin_ids=request.coin_ids, start_height=request.min_height + ) + + response = wallet_protocol.RespondToCoinUpdates(request.coin_ids, request.min_height, states) + msg = make_msg(ProtocolMessageTypes.respond_to_coin_update, response) + return msg + + @api_request + async def request_children(self, request: wallet_protocol.RequestChildren) -> Optional[Message]: + coin_records: List[CoinRecord] = await 
self.full_node.coin_store.get_coin_records_by_parent_ids( + True, [request.coin_name] + ) + states = [record.coin_state for record in coin_records] + response = wallet_protocol.RespondChildren(states) + msg = make_msg(ProtocolMessageTypes.respond_children, response) + return msg + + @api_request + async def request_ses_hashes(self, request: wallet_protocol.RequestSESInfo): + """Returns the start and end height of a sub-epoch for the height specified in request""" + + ses_height = self.full_node.blockchain.get_ses_heights() + start_height = request.start_height + end_height = request.end_height + ses_hash_heights = [] + ses_reward_hashes = [] + + for idx, ses_start_height in enumerate(ses_height): + if idx == len(ses_height) - 1: + break + + next_ses_height = ses_height[idx + 1] + # start_ses_hash + if ses_start_height <= start_height < next_ses_height: + ses_hash_heights.append([ses_start_height, next_ses_height]) + ses: SubEpochSummary = self.full_node.blockchain.get_ses(ses_start_height) + ses_reward_hashes.append(ses.reward_chain_hash) + if ses_start_height < end_height < next_ses_height: + break + else: + if idx == len(ses_height) - 2: + break + # else add extra ses as request start <-> end spans two ses + next_next_height = ses_height[idx + 2] + ses_hash_heights.append([next_ses_height, next_next_height]) + nex_ses: SubEpochSummary = self.full_node.blockchain.get_ses(next_ses_height) + ses_reward_hashes.append(nex_ses.reward_chain_hash) + break + + response = RespondSESInfo(ses_reward_hashes, ses_hash_heights) + msg = make_msg(ProtocolMessageTypes.respond_ses_hashes, response) + return msg diff --git a/wheat/full_node/hint_store.py b/wheat/full_node/hint_store.py new file mode 100644 index 0000000..d2381e6 --- /dev/null +++ b/wheat/full_node/hint_store.py @@ -0,0 +1,40 @@ +from typing import List, Tuple +import aiosqlite +from wheat.types.blockchain_format.sized_bytes import bytes32 +from wheat.util.db_wrapper import DBWrapper +import logging + +log = 
logging.getLogger(__name__) + + +class HintStore: + coin_record_db: aiosqlite.Connection + db_wrapper: DBWrapper + + @classmethod + async def create(cls, db_wrapper: DBWrapper): + self = cls() + self.db_wrapper = db_wrapper + self.coin_record_db = db_wrapper.db + await self.coin_record_db.execute( + "CREATE TABLE IF NOT EXISTS hints(id INTEGER PRIMARY KEY AUTOINCREMENT, coin_id blob, hint blob)" + ) + await self.coin_record_db.execute("CREATE INDEX IF NOT EXISTS hint_index on hints(hint)") + await self.coin_record_db.commit() + return self + + async def get_coin_ids(self, hint: bytes) -> List[bytes32]: + cursor = await self.coin_record_db.execute("SELECT * from hints WHERE hint=?", (hint,)) + rows = await cursor.fetchall() + await cursor.close() + coin_ids = [] + for row in rows: + coin_ids.append(row[1]) + return coin_ids + + async def add_hints(self, coin_hint_list: List[Tuple[bytes32, bytes]]) -> None: + cursor = await self.coin_record_db.executemany( + "INSERT INTO hints VALUES(?, ?, ?)", + [(None,) + record for record in coin_hint_list], + ) + await cursor.close() diff --git a/wheat/full_node/mempool.py b/wheat/full_node/mempool.py index f395865..0f3371d 100644 --- a/wheat/full_node/mempool.py +++ b/wheat/full_node/mempool.py @@ -41,8 +41,8 @@ def remove_from_pool(self, item: MempoolItem): """ Removes an item from the mempool. 
""" - removals: List[Coin] = item.spend_bundle.removals() - additions: List[Coin] = item.spend_bundle.additions() + removals: List[Coin] = item.removals + additions: List[Coin] = item.additions for rem in removals: del self.removals[rem.name()] for add in additions: @@ -58,8 +58,6 @@ def remove_from_pool(self, item: MempoolItem): def add_to_pool( self, item: MempoolItem, - additions: List[Coin], - removals_dic: Dict[bytes32, Coin], ): """ Adds an item to the mempool by kicking out transactions (if it doesn't fit), in order of increasing fee per cost @@ -79,10 +77,10 @@ def add_to_pool( self.sorted_spends[item.fee_per_cost][item.name] = item - for add in additions: + for add in item.additions: self.additions[add.name()] = item - for key in removals_dic.keys(): - self.removals[key] = item + for coin in item.removals: + self.removals[coin.name()] = item self.total_mempool_cost += item.cost def at_full_capacity(self, cost: int) -> bool: diff --git a/wheat/full_node/mempool_check_conditions.py b/wheat/full_node/mempool_check_conditions.py index 1f7f6d9..f459bd8 100644 --- a/wheat/full_node/mempool_check_conditions.py +++ b/wheat/full_node/mempool_check_conditions.py @@ -1,22 +1,18 @@ import logging import time -from typing import Tuple, Dict, List, Optional, Set -from clvm import SExp +from typing import Dict, List, Optional from clvm_rs import STRICT_MODE from wheat.consensus.cost_calculator import NPCResult -from wheat.consensus.condition_costs import ConditionCost from wheat.full_node.generator import create_generator_args, setup_generator_args -from wheat.types.blockchain_format.coin import Coin from wheat.types.blockchain_format.program import NIL -from wheat.types.blockchain_format.sized_bytes import bytes32 from wheat.types.coin_record import CoinRecord from wheat.types.condition_with_args import ConditionWithArgs from wheat.types.generator_types import BlockGenerator from wheat.types.name_puzzle_condition import NPC -from wheat.util.clvm import int_from_bytes, 
int_to_bytes -from wheat.util.condition_tools import ConditionOpcode, conditions_by_opcode -from wheat.util.errors import Err, ValidationError +from wheat.util.clvm import int_from_bytes +from wheat.util.condition_tools import ConditionOpcode +from wheat.util.errors import Err from wheat.util.ints import uint32, uint64, uint16 from wheat.wallet.puzzles.generator_loader import GENERATOR_FOR_SINGLE_COIN_MOD from wheat.wallet.puzzles.rom_bootstrap_generator import get_generator @@ -24,31 +20,9 @@ GENERATOR_MOD = get_generator() -def mempool_assert_announcement(condition: ConditionWithArgs, announcements: Set[bytes32]) -> Optional[Err]: - """ - Check if an announcement is included in the list of announcements - """ - announcement_hash = bytes32(condition.vars[0]) - - if announcement_hash not in announcements: - return Err.ASSERT_ANNOUNCE_CONSUMED_FAILED - - return None - - log = logging.getLogger(__name__) -def mempool_assert_my_coin_id(condition: ConditionWithArgs, unspent: CoinRecord) -> Optional[Err]: - """ - Checks if CoinID matches the id from the condition - """ - if unspent.coin.name() != condition.vars[0]: - log.warning(f"My name: {unspent.coin.name()} got: {condition.vars[0].hex()}") - return Err.ASSERT_MY_COIN_ID_FAILED - return None - - def mempool_assert_absolute_block_height_exceeds( condition: ConditionWithArgs, prev_transaction_block_height: uint32 ) -> Optional[Err]: @@ -114,250 +88,7 @@ def mempool_assert_relative_time_exceeds( return None -def mempool_assert_my_parent_id(condition: ConditionWithArgs, unspent: CoinRecord) -> Optional[Err]: - """ - Checks if coin's parent ID matches the ID from the condition - """ - if unspent.coin.parent_coin_info != condition.vars[0]: - return Err.ASSERT_MY_PARENT_ID_FAILED - return None - - -def mempool_assert_my_puzzlehash(condition: ConditionWithArgs, unspent: CoinRecord) -> Optional[Err]: - """ - Checks if coin's puzzlehash matches the puzzlehash from the condition - """ - if unspent.coin.puzzle_hash != 
condition.vars[0]: - return Err.ASSERT_MY_PUZZLEHASH_FAILED - return None - - -def mempool_assert_my_amount(condition: ConditionWithArgs, unspent: CoinRecord) -> Optional[Err]: - """ - Checks if coin's amount matches the amount from the condition - """ - if unspent.coin.amount != int_from_bytes(condition.vars[0]): - return Err.ASSERT_MY_AMOUNT_FAILED - return None - - -def sanitize_int(n: SExp, safe_mode: bool) -> int: - buf = n.atom - if safe_mode and len(buf) > 2 and buf[0] == 0 and buf[1] == 0: - raise ValidationError(Err.INVALID_CONDITION) - return n.as_int() - - -def parse_aggsig(args: SExp) -> List[bytes]: - pubkey = args.first().atom - args = args.rest() - message = args.first().atom - if len(pubkey) != 48: - raise ValidationError(Err.INVALID_CONDITION) - if len(message) > 1024: - raise ValidationError(Err.INVALID_CONDITION) - # agg sig conditions only take 2 parameters - args = args.rest() - # the list is terminated by having a right-element that's not another pair, - # just like as_atom_list() (see wheat/types/blockchain_format/program.py) - if args.pair is not None: - raise ValidationError(Err.INVALID_CONDITION) - return [pubkey, message] - - -def parse_create_coin(args: SExp, safe_mode: bool) -> List[bytes]: - puzzle_hash = args.first().atom - args = args.rest() - if len(puzzle_hash) != 32: - raise ValidationError(Err.INVALID_CONDITION) - amount_int = sanitize_int(args.first(), safe_mode) - if amount_int >= 2 ** 64: - raise ValidationError(Err.COIN_AMOUNT_EXCEEDS_MAXIMUM) - if amount_int < 0: - raise ValidationError(Err.COIN_AMOUNT_NEGATIVE) - # note that this may change the representation of amount. 
If the original - # buffer had redundant leading zeroes, they will be stripped - return [puzzle_hash, int_to_bytes(amount_int)] - - -def parse_seconds(args: SExp, safe_mode: bool, error_code: Err) -> Optional[List[bytes]]: - seconds_int = sanitize_int(args.first(), safe_mode) - # this condition is inherently satisified, there is no need to keep it - if seconds_int <= 0: - return None - if seconds_int >= 2 ** 64: - raise ValidationError(error_code) - # note that this may change the representation of seconds. If the original - # buffer had redundant leading zeroes, they will be stripped - return [int_to_bytes(seconds_int)] - - -def parse_height(args: SExp, safe_mode: bool, error_code: Err) -> Optional[List[bytes]]: - height_int = sanitize_int(args.first(), safe_mode) - # this condition is inherently satisified, there is no need to keep it - if height_int < 0: - return None - if height_int >= 2 ** 32: - raise ValidationError(error_code) - # note that this may change the representation of the height. If the original - # buffer had redundant leading zeroes, they will be stripped - return [int_to_bytes(height_int)] - - -def parse_fee(args: SExp, safe_mode: bool) -> List[bytes]: - fee_int = sanitize_int(args.first(), safe_mode) - if fee_int >= 2 ** 64 or fee_int < 0: - raise ValidationError(Err.RESERVE_FEE_CONDITION_FAILED) - # note that this may change the representation of the fee. 
If the original - # buffer had redundant leading zeroes, they will be stripped - return [int_to_bytes(fee_int)] - - -def parse_hash(args: SExp, error_code: Err) -> List[bytes]: - h = args.first().atom - if len(h) != 32: - raise ValidationError(error_code) - return [h] - - -def parse_amount(args: SExp, safe_mode: bool) -> List[bytes]: - amount_int = sanitize_int(args.first(), safe_mode) - if amount_int < 0: - raise ValidationError(Err.ASSERT_MY_AMOUNT_FAILED) - if amount_int >= 2 ** 64: - raise ValidationError(Err.ASSERT_MY_AMOUNT_FAILED) - # note that this may change the representation of amount. If the original - # buffer had redundant leading zeroes, they will be stripped - return [int_to_bytes(amount_int)] - - -def parse_announcement(args: SExp) -> List[bytes]: - msg = args.first().atom - if len(msg) > 1024: - raise ValidationError(Err.INVALID_CONDITION) - return [msg] - - -def parse_condition_args(args: SExp, condition: ConditionOpcode, safe_mode: bool) -> Tuple[int, Optional[List[bytes]]]: - """ - Parse a list with exactly the expected args, given opcode, - from an SExp into a list of bytes. If there are fewer or more elements in - the list, raise a RuntimeError. 
If the condition is inherently true (such as - a time- or height lock with a negative time or height, the returned list is None - """ - op = ConditionOpcode - cc = ConditionCost - if condition is op.AGG_SIG_UNSAFE or condition is op.AGG_SIG_ME: - return cc.AGG_SIG.value, parse_aggsig(args) - elif condition is op.CREATE_COIN: - return cc.CREATE_COIN.value, parse_create_coin(args, safe_mode) - elif condition is op.ASSERT_SECONDS_ABSOLUTE: - return cc.ASSERT_SECONDS_ABSOLUTE.value, parse_seconds(args, safe_mode, Err.ASSERT_SECONDS_ABSOLUTE_FAILED) - elif condition is op.ASSERT_SECONDS_RELATIVE: - return cc.ASSERT_SECONDS_RELATIVE.value, parse_seconds(args, safe_mode, Err.ASSERT_SECONDS_RELATIVE_FAILED) - elif condition is op.ASSERT_HEIGHT_ABSOLUTE: - return cc.ASSERT_HEIGHT_ABSOLUTE.value, parse_height(args, safe_mode, Err.ASSERT_HEIGHT_ABSOLUTE_FAILED) - elif condition is op.ASSERT_HEIGHT_RELATIVE: - return cc.ASSERT_HEIGHT_RELATIVE.value, parse_height(args, safe_mode, Err.ASSERT_HEIGHT_RELATIVE_FAILED) - elif condition is op.ASSERT_MY_COIN_ID: - return cc.ASSERT_MY_COIN_ID.value, parse_hash(args, Err.ASSERT_MY_COIN_ID_FAILED) - elif condition is op.RESERVE_FEE: - return cc.RESERVE_FEE.value, parse_fee(args, safe_mode) - elif condition is op.CREATE_COIN_ANNOUNCEMENT: - return cc.CREATE_COIN_ANNOUNCEMENT.value, parse_announcement(args) - elif condition is op.ASSERT_COIN_ANNOUNCEMENT: - return cc.ASSERT_COIN_ANNOUNCEMENT.value, parse_hash(args, Err.ASSERT_ANNOUNCE_CONSUMED_FAILED) - elif condition is op.CREATE_PUZZLE_ANNOUNCEMENT: - return cc.CREATE_PUZZLE_ANNOUNCEMENT.value, parse_announcement(args) - elif condition is op.ASSERT_PUZZLE_ANNOUNCEMENT: - return cc.ASSERT_PUZZLE_ANNOUNCEMENT.value, parse_hash(args, Err.ASSERT_ANNOUNCE_CONSUMED_FAILED) - elif condition is op.ASSERT_MY_PARENT_ID: - return cc.ASSERT_MY_PARENT_ID.value, parse_hash(args, Err.ASSERT_MY_PARENT_ID_FAILED) - elif condition is op.ASSERT_MY_PUZZLEHASH: - return cc.ASSERT_MY_PUZZLEHASH.value, 
parse_hash(args, Err.ASSERT_MY_PUZZLEHASH_FAILED) - elif condition is op.ASSERT_MY_AMOUNT: - return cc.ASSERT_MY_AMOUNT.value, parse_amount(args, safe_mode) - else: - raise ValidationError(Err.INVALID_CONDITION) - - -CONDITION_OPCODES: Set[bytes] = set(item.value for item in ConditionOpcode) - - -def parse_condition(cond: SExp, safe_mode: bool) -> Tuple[int, Optional[ConditionWithArgs]]: - condition = cond.first().as_atom() - if condition in CONDITION_OPCODES: - opcode: ConditionOpcode = ConditionOpcode(condition) - cost, args = parse_condition_args(cond.rest(), opcode, safe_mode) - cvl = ConditionWithArgs(opcode, args) if args is not None else None - elif not safe_mode: - # we don't need to save unknown conditions. We can't do anything with them anyway - # safe_mode just tells us whether we can tolerate them or not - return 0, None - else: - raise ValidationError(Err.INVALID_CONDITION) - return cost, cvl - - -def get_name_puzzle_conditions_python( - generator: BlockGenerator, max_cost: int, *, cost_per_byte: int, safe_mode: bool -) -> NPCResult: - """ - This executes the generator program and returns the coins and their - conditions. If the cost of the program (size, CLVM execution and conditions) - exceed max_cost, the function fails. In order to accurately take the size - of the program into account when calculating cost, cost_per_byte must be - specified. - safe_mode determines whether the clvm program and conditions are executed in - strict mode or not. When in safe/strict mode, unknow operations or conditions - are considered failures. This is the mode when accepting transactions into - the mempool. 
- """ - block_program, block_program_args = setup_generator_args(generator) - max_cost -= len(bytes(generator.program)) * cost_per_byte - if max_cost < 0: - return NPCResult(uint16(Err.INVALID_BLOCK_COST.value), [], uint64(0)) - if safe_mode: - clvm_cost, result = GENERATOR_MOD.run_safe_with_cost(max_cost, block_program, block_program_args) - else: - clvm_cost, result = GENERATOR_MOD.run_with_cost(max_cost, block_program, block_program_args) - - max_cost -= clvm_cost - if max_cost < 0: - return NPCResult(uint16(Err.INVALID_BLOCK_COST.value), [], uint64(0)) - npc_list: List[NPC] = [] - - for res in result.first().as_iter(): - conditions_list: List[ConditionWithArgs] = [] - - if len(res.first().atom) != 32: - raise ValidationError(Err.INVALID_CONDITION) - spent_coin_parent_id: bytes32 = res.first().as_atom() - res = res.rest() - if len(res.first().atom) != 32: - raise ValidationError(Err.INVALID_CONDITION) - spent_coin_puzzle_hash: bytes32 = res.first().as_atom() - res = res.rest() - spent_coin_amount: uint64 = uint64(sanitize_int(res.first(), safe_mode)) - res = res.rest() - spent_coin: Coin = Coin(spent_coin_parent_id, spent_coin_puzzle_hash, spent_coin_amount) - - for cond in res.first().as_iter(): - cost, cvl = parse_condition(cond, safe_mode) - max_cost -= cost - if max_cost < 0: - return NPCResult(uint16(Err.INVALID_BLOCK_COST.value), [], uint64(0)) - if cvl is not None: - conditions_list.append(cvl) - - conditions_dict = conditions_by_opcode(conditions_list) - if conditions_dict is None: - conditions_dict = {} - npc_list.append(NPC(spent_coin.name(), spent_coin.puzzle_hash, [(a, b) for a, b in conditions_dict.items()])) - return NPCResult(None, npc_list, uint64(clvm_cost)) - - -def get_name_puzzle_conditions_rust( +def get_name_puzzle_conditions( generator: BlockGenerator, max_cost: int, *, cost_per_byte: int, safe_mode: bool ) -> NPCResult: block_program, block_program_args = setup_generator_args(generator) @@ -366,47 +97,21 @@ def 
get_name_puzzle_conditions_rust( return NPCResult(uint16(Err.INVALID_BLOCK_COST.value), [], uint64(0)) flags = STRICT_MODE if safe_mode else 0 - err, result, clvm_cost = GENERATOR_MOD.run_as_generator(max_cost, flags, block_program, block_program_args) - if err is not None: - return NPCResult(uint16(err), [], uint64(0)) - else: - npc_list = [] - for r in result: - conditions = [] - for c in r.conditions: - cwa = [] - for cond_list in c[1]: - cwa.append(ConditionWithArgs(ConditionOpcode(bytes([cond_list.opcode])), cond_list.vars)) - conditions.append((ConditionOpcode(bytes([c[0]])), cwa)) - npc_list.append(NPC(r.coin_name, r.puzzle_hash, conditions)) - return NPCResult(None, npc_list, uint64(clvm_cost)) - - -def get_name_puzzle_conditions( - generator: BlockGenerator, max_cost: int, *, cost_per_byte: int, safe_mode: bool, rust_checker: bool -) -> NPCResult: - """ - This executes the generator program and returns the coins and their - conditions. If the cost of the program (size, CLVM execution and conditions) - exceed max_cost, the function fails. In order to accurately take the size - of the program into account when calculating cost, cost_per_byte must be - specified. - safe_mode determines whether the clvm program and conditions are executed in - strict mode or not. When in safe/strict mode, unknow operations or conditions - are considered failures. This is the mode when accepting transactions into - the mempool. 
- """ try: - if rust_checker: - return get_name_puzzle_conditions_rust( - generator, max_cost, cost_per_byte=cost_per_byte, safe_mode=safe_mode - ) + err, result, clvm_cost = GENERATOR_MOD.run_as_generator(max_cost, flags, block_program, block_program_args) + if err is not None: + return NPCResult(uint16(err), [], uint64(0)) else: - return get_name_puzzle_conditions_python( - generator, max_cost, cost_per_byte=cost_per_byte, safe_mode=safe_mode - ) - except ValidationError as e: - return NPCResult(uint16(e.code.value), [], uint64(0)) + npc_list = [] + for r in result: + conditions = [] + for c in r.conditions: + cwa = [] + for cond_list in c[1]: + cwa.append(ConditionWithArgs(ConditionOpcode(bytes([cond_list.opcode])), cond_list.vars)) + conditions.append((ConditionOpcode(bytes([c[0]])), cwa)) + npc_list.append(NPC(r.coin_name, r.puzzle_hash, conditions)) + return NPCResult(None, npc_list, uint64(clvm_cost)) except Exception as e: log.debug(f"get_name_puzzle_condition failed: {e}") return NPCResult(uint16(Err.GENERATOR_RUNTIME_ERROR.value), [], uint64(0)) @@ -432,8 +137,6 @@ def get_puzzle_and_solution_for_coin(generator: BlockGenerator, coin_name: bytes def mempool_check_conditions_dict( unspent: CoinRecord, - coin_announcement_names: Set[bytes32], - puzzle_announcement_names: Set[bytes32], conditions_dict: Dict[ConditionOpcode, List[ConditionWithArgs]], prev_transaction_block_height: uint32, timestamp: uint64, @@ -445,13 +148,7 @@ def mempool_check_conditions_dict( cvp: ConditionWithArgs for cvp in con_list: error: Optional[Err] = None - if cvp.opcode is ConditionOpcode.ASSERT_MY_COIN_ID: - error = mempool_assert_my_coin_id(cvp, unspent) - elif cvp.opcode is ConditionOpcode.ASSERT_COIN_ANNOUNCEMENT: - error = mempool_assert_announcement(cvp, coin_announcement_names) - elif cvp.opcode is ConditionOpcode.ASSERT_PUZZLE_ANNOUNCEMENT: - error = mempool_assert_announcement(cvp, puzzle_announcement_names) - elif cvp.opcode is ConditionOpcode.ASSERT_HEIGHT_ABSOLUTE: + if 
cvp.opcode is ConditionOpcode.ASSERT_HEIGHT_ABSOLUTE: error = mempool_assert_absolute_block_height_exceeds(cvp, prev_transaction_block_height) elif cvp.opcode is ConditionOpcode.ASSERT_HEIGHT_RELATIVE: error = mempool_assert_relative_block_height_exceeds(cvp, unspent, prev_transaction_block_height) @@ -459,12 +156,18 @@ def mempool_check_conditions_dict( error = mempool_assert_absolute_time_exceeds(cvp, timestamp) elif cvp.opcode is ConditionOpcode.ASSERT_SECONDS_RELATIVE: error = mempool_assert_relative_time_exceeds(cvp, unspent, timestamp) + elif cvp.opcode is ConditionOpcode.ASSERT_MY_COIN_ID: + assert False + elif cvp.opcode is ConditionOpcode.ASSERT_COIN_ANNOUNCEMENT: + assert False + elif cvp.opcode is ConditionOpcode.ASSERT_PUZZLE_ANNOUNCEMENT: + assert False elif cvp.opcode is ConditionOpcode.ASSERT_MY_PARENT_ID: - error = mempool_assert_my_parent_id(cvp, unspent) + assert False elif cvp.opcode is ConditionOpcode.ASSERT_MY_PUZZLEHASH: - error = mempool_assert_my_puzzlehash(cvp, unspent) + assert False elif cvp.opcode is ConditionOpcode.ASSERT_MY_AMOUNT: - error = mempool_assert_my_amount(cvp, unspent) + assert False if error: return error diff --git a/wheat/full_node/mempool_manager.py b/wheat/full_node/mempool_manager.py index 80a17ce..99a832d 100644 --- a/wheat/full_node/mempool_manager.py +++ b/wheat/full_node/mempool_manager.py @@ -29,8 +29,6 @@ from wheat.util.clvm import int_from_bytes from wheat.util.condition_tools import ( pkm_pairs_for_conditions_dict, - coin_announcements_names_for_npc, - puzzle_announcements_names_for_npc, ) from wheat.util.errors import Err from wheat.util.generator_tools import additions_for_npc @@ -43,9 +41,7 @@ def get_npc_multiprocess(spend_bundle_bytes: bytes, max_cost: int, cost_per_byte: int) -> bytes: program = simple_solution_generator(SpendBundle.from_bytes(spend_bundle_bytes)) # npc contains names of the coins removed, puzzle_hashes and their spend conditions - return bytes( - get_name_puzzle_conditions(program, 
max_cost, cost_per_byte=cost_per_byte, safe_mode=True, rust_checker=True) - ) + return bytes(get_name_puzzle_conditions(program, max_cost, cost_per_byte=cost_per_byte, safe_mode=True)) class MempoolManager: @@ -119,7 +115,6 @@ async def create_bundle_from_mempool( f"full: {cost_sum / self.constants.MAX_BLOCK_COST_CLVM}" ) agg = SpendBundle.aggregate(spend_bundles) - assert set(agg.additions()) == set(additions) assert set(agg.removals()) == set(removals) return agg, additions, removals else: @@ -207,7 +202,7 @@ def can_replace( log.info(f"Replacing conflicting tx in mempool. New tx fee: {fees}, old tx fees: {conflicting_fees}") return True - async def pre_validate_spendbundle(self, new_spend: SpendBundle) -> NPCResult: + async def pre_validate_spendbundle(self, new_spend: SpendBundle, spend_name: bytes32) -> NPCResult: """ Errors are included within the cached_result. This runs in another process so we don't block the main thread @@ -220,9 +215,13 @@ async def pre_validate_spendbundle(self, new_spend: SpendBundle) -> NPCResult: int(self.limit_factor * self.constants.MAX_BLOCK_COST_CLVM), self.constants.COST_PER_BYTE, ) + ret = NPCResult.from_bytes(cached_result_bytes) end_time = time.time() - log.info(f"It took {end_time - start_time} to pre validate transaction") - return NPCResult.from_bytes(cached_result_bytes) + log.log( + logging.WARNING if end_time - start_time > 1 else logging.DEBUG, + f"pre_validate_spendbundle took {end_time - start_time:0.4f} seconds for {spend_name}", + ) + return ret async def add_spendbundle( self, @@ -256,6 +255,8 @@ async def add_spendbundle( return None, MempoolInclusionStatus.FAILED, Err(npc_result.error) # build removal list removal_names: List[bytes32] = [npc.coin_name for npc in npc_list] + if set(removal_names) != set([s.name() for s in new_spend.removals()]): + return None, MempoolInclusionStatus.FAILED, Err.INVALID_SPEND_BUNDLE additions = additions_for_npc(npc_list) @@ -383,8 +384,6 @@ async def add_spendbundle( pks: 
List[G1Element] = [] msgs: List[bytes32] = [] error: Optional[Err] = None - coin_announcements_in_spend: Set[bytes32] = coin_announcements_names_for_npc(npc_list) - puzzle_announcements_in_spend: Set[bytes32] = puzzle_announcements_names_for_npc(npc_list) for npc in npc_list: coin_record: CoinRecord = removal_record_dict[npc.coin_name] # Check that the revealed removal puzzles actually match the puzzle hash @@ -399,8 +398,6 @@ async def add_spendbundle( assert self.peak.timestamp is not None error = mempool_check_conditions_dict( coin_record, - coin_announcements_in_spend, - puzzle_announcements_in_spend, npc.condition_dict, uint32(wheatlisp_height), self.peak.timestamp, @@ -436,11 +433,14 @@ async def add_spendbundle( self.mempool.remove_from_pool(mempool_item) new_item = MempoolItem(new_spend, uint64(fees), npc_result, cost, spend_name, additions, removals, program) - self.mempool.add_to_pool(new_item, additions, removal_coin_dict) - log.info( - f"add_spendbundle took {time.time() - start_time} seconds, cost {cost} " - f"({round(100.0 * cost/self.constants.MAX_BLOCK_COST_CLVM, 3)}%)" + self.mempool.add_to_pool(new_item) + now = time.time() + log.log( + logging.WARNING if now - start_time > 1 else logging.DEBUG, + f"add_spendbundle {spend_name} took {now - start_time:0.2f} seconds. 
" + f"Cost: {cost} ({round(100.0 * cost/self.constants.MAX_BLOCK_COST_CLVM, 3)}% of max block cost)", ) + return uint64(cost), MempoolInclusionStatus.SUCCESS, None async def check_removals(self, removals: Dict[bytes32, CoinRecord]) -> Tuple[Optional[Err], List[Coin]]: diff --git a/wheat/plotting/check_plots.py b/wheat/plotting/check_plots.py index 7c9abf2..c4b1e84 100644 --- a/wheat/plotting/check_plots.py +++ b/wheat/plotting/check_plots.py @@ -29,7 +29,7 @@ def plot_refresh_callback(refresh_result: PlotRefreshResult): def check_plots(root_path, num, challenge_start, grep_string, list_duplicates, debug_show_memo): config = load_config(root_path, "config.yaml") - plot_refresh_parameter: PlotsRefreshParameter = PlotsRefreshParameter(100, 100, 1) + plot_refresh_parameter: PlotsRefreshParameter = PlotsRefreshParameter(batch_sleep_milliseconds=0) plot_manager: PlotManager = PlotManager( root_path, match_str=grep_string, diff --git a/wheat/plotting/manager.py b/wheat/plotting/manager.py index 1e18999..d3914a9 100644 --- a/wheat/plotting/manager.py +++ b/wheat/plotting/manager.py @@ -1,8 +1,8 @@ +from dataclasses import dataclass import logging import threading import time import traceback -from functools import reduce from pathlib import Path from typing import Any, Callable, Dict, List, Optional, Set, Tuple from concurrent.futures.thread import ThreadPoolExecutor @@ -15,18 +15,103 @@ PlotInfo, PlotRefreshResult, PlotsRefreshParameter, - get_plot_directories, get_plot_filenames, parse_plot_info, stream_plot_info_pk, stream_plot_info_ph, ) +from wheat.util.ints import uint16 +from wheat.util.path import mkdir +from wheat.util.streamable import Streamable, streamable from wheat.types.blockchain_format.proof_of_space import ProofOfSpace from wheat.types.blockchain_format.sized_bytes import bytes32 from wheat.wallet.derive_keys import master_sk_to_local_sk log = logging.getLogger(__name__) +CURRENT_VERSION: uint16 = uint16(0) + + +@dataclass(frozen=True) +@streamable +class 
CacheEntry(Streamable): + pool_public_key: Optional[G1Element] + pool_contract_puzzle_hash: Optional[bytes32] + plot_public_key: G1Element + + +@dataclass(frozen=True) +@streamable +class DiskCache(Streamable): + version: uint16 + data: List[Tuple[bytes32, CacheEntry]] + + +class Cache: + _changed: bool + _data: Dict[bytes32, CacheEntry] + + def __init__(self, path: Path): + self._changed = False + self._data = {} + self._path = path + if not path.parent.exists(): + mkdir(path.parent) + + def __len__(self): + return len(self._data) + + def update(self, plot_id: bytes32, entry: CacheEntry): + self._data[plot_id] = entry + self._changed = True + + def remove(self, cache_keys: List[bytes32]): + for key in cache_keys: + if key in self._data: + del self._data[key] + self._changed = True + + def save(self): + try: + disk_cache: DiskCache = DiskCache( + CURRENT_VERSION, [(plot_id, cache_entry) for plot_id, cache_entry in self.items()] + ) + serialized: bytes = bytes(disk_cache) + self._path.write_bytes(serialized) + self._changed = False + log.info(f"Saved {len(serialized)} bytes of cached data") + except Exception as e: + log.error(f"Failed to save cache: {e}, {traceback.format_exc()}") + + def load(self): + try: + serialized = self._path.read_bytes() + log.info(f"Loaded {len(serialized)} bytes of cached data") + stored_cache: DiskCache = DiskCache.from_bytes(serialized) + if stored_cache.version != CURRENT_VERSION: + # TODO, Migrate or drop current cache if the version changes. + raise ValueError(f"Invalid cache version {stored_cache.version}. 
Expected version {CURRENT_VERSION}.") + self._data = {plot_id: cache_entry for plot_id, cache_entry in stored_cache.data} + except FileNotFoundError: + log.debug(f"Cache {self._path} not found") + except Exception as e: + log.error(f"Failed to load cache: {e}, {traceback.format_exc()}") + + def keys(self): + return self._data.keys() + + def items(self): + return self._data.items() + + def get(self, plot_id): + return self._data.get(plot_id) + + def changed(self): + return self._changed + + def path(self): + return self._path + class PlotManager: plots: Dict[Path, PlotInfo] @@ -36,6 +121,7 @@ class PlotManager: no_key_filenames: Set[Path] farmer_public_keys: List[G1Element] pool_public_keys: List[G1Element] + cache: Cache match_str: Optional[str] show_memo: bool open_no_key_filenames: bool @@ -64,6 +150,7 @@ def __init__( self.no_key_filenames = set() self.farmer_public_keys = [] self.pool_public_keys = [] + self.cache = Cache(self.root_path.resolve() / "cache" / "plot_manager.dat") self.match_str = match_str self.show_memo = show_memo self.open_no_key_filenames = open_no_key_filenames @@ -101,6 +188,7 @@ def needs_refresh(self) -> bool: def start_refreshing(self): self._refreshing_enabled = True if self._refresh_thread is None or not self._refresh_thread.is_alive(): + self.cache.load() self._refresh_thread = threading.Thread(target=self._refresh_task) self._refresh_thread.start() @@ -120,18 +208,34 @@ def _refresh_task(self): while not self.needs_refresh() and self._refreshing_enabled: time.sleep(1) + plot_filenames: Dict[Path, List[Path]] = get_plot_filenames(self.root_path) + plot_directories: Set[Path] = set(plot_filenames.keys()) + plot_paths: List[Path] = [] + for paths in plot_filenames.values(): + plot_paths += paths + total_result: PlotRefreshResult = PlotRefreshResult() while self.needs_refresh() and self._refreshing_enabled: - batch_result: PlotRefreshResult = self.refresh_batch() + batch_result: PlotRefreshResult = self.refresh_batch(plot_paths, 
plot_directories) total_result += batch_result self._refresh_callback(batch_result) if batch_result.remaining_files == 0: - self.last_refresh_time = time.time() break batch_sleep = self.refresh_parameter.batch_sleep_milliseconds self.log.debug(f"refresh_plots: Sleep {batch_sleep} milliseconds") time.sleep(float(batch_sleep) / 1000.0) + # Cleanup unused cache + available_ids = set([plot_info.prover.get_id() for plot_info in self.plots.values()]) + invalid_cache_keys = [plot_id for plot_id in self.cache.keys() if plot_id not in available_ids] + self.cache.remove(invalid_cache_keys) + self.log.debug(f"_refresh_task: cached entries removed: {len(invalid_cache_keys)}") + + if self.cache.changed(): + self.cache.save() + + self.last_refresh_time = time.time() + self.log.debug( f"_refresh_task: total_result.loaded_plots {total_result.loaded_plots}, " f"total_result.removed_plots {total_result.removed_plots}, " @@ -139,73 +243,69 @@ def _refresh_task(self): f"total_duration {total_result.duration:.2f} seconds" ) - def refresh_batch(self) -> PlotRefreshResult: + def refresh_batch(self, plot_paths: List[Path], plot_directories: Set[Path]) -> PlotRefreshResult: start_time: float = time.time() - plot_filenames: Dict[Path, List[Path]] = get_plot_filenames(self.root_path) - all_filenames: List[Path] = [] - for paths in plot_filenames.values(): - all_filenames += paths - result: PlotRefreshResult = PlotRefreshResult() counter_lock = threading.Lock() - log.debug(f"refresh_batch: {len(all_filenames)} files in directories {get_plot_directories(self.root_path)}") + log.debug(f"refresh_batch: {len(plot_paths)} files in directories {plot_directories}") if self.match_str is not None: log.info(f'Only loading plots that contain "{self.match_str}" in the file or directory name') - def process_file(file_path: Path) -> Dict: - new_provers: Dict[Path, PlotInfo] = {} + def process_file(file_path: Path) -> Optional[PlotInfo]: filename_str = str(file_path) if self.match_str is not None and 
self.match_str not in filename_str: - return new_provers - if file_path.exists(): - if ( - file_path in self.failed_to_open_filenames - and (time.time() - self.failed_to_open_filenames[file_path]) - < self.refresh_parameter.retry_invalid_seconds - ): - # Try once every `refresh_parameter.retry_invalid_seconds` seconds to open the file - return new_provers - if file_path in self.plots: - try: - stat_info = file_path.stat() - except Exception as e: - log.error(f"Failed to open file {file_path}. {e}") - return new_provers - if stat_info.st_mtime == self.plots[file_path].time_modified: - new_provers[file_path] = self.plots[file_path] - return new_provers - entry: Optional[Tuple[str, Set[str]]] = self.plot_filename_paths.get(file_path.name) - if entry is not None: - loaded_parent, duplicates = entry - if str(file_path.parent) in duplicates: - log.debug(f"Skip duplicated plot {str(file_path)}") - return new_provers + return None + if not file_path.exists(): + return None + if ( + file_path in self.failed_to_open_filenames + and (time.time() - self.failed_to_open_filenames[file_path]) + < self.refresh_parameter.retry_invalid_seconds + ): + # Try once every `refresh_parameter.retry_invalid_seconds` seconds to open the file + return None + if file_path in self.plots: try: - with counter_lock: - if result.processed_files >= self.refresh_parameter.batch_size: - result.remaining_files += 1 - return new_provers - result.processed_files += 1 - - prover = DiskProver(str(file_path)) - - log.debug(f"process_file {str(file_path)}") - - expected_size = _expected_plot_size(prover.get_size()) * UI_ACTUAL_SPACE_CONSTANT_FACTOR stat_info = file_path.stat() + except Exception as e: + log.error(f"Failed to open file {file_path}. 
{e}") + return None + if stat_info.st_mtime == self.plots[file_path].time_modified: + return self.plots[file_path] + entry: Optional[Tuple[str, Set[str]]] = self.plot_filename_paths.get(file_path.name) + if entry is not None: + loaded_parent, duplicates = entry + if str(file_path.parent) in duplicates: + log.debug(f"Skip duplicated plot {str(file_path)}") + return None + try: + with counter_lock: + if result.processed_files >= self.refresh_parameter.batch_size: + result.remaining_files += 1 + return None + result.processed_files += 1 + + prover = DiskProver(str(file_path)) + + log.debug(f"process_file {str(file_path)}") + + expected_size = _expected_plot_size(prover.get_size()) * UI_ACTUAL_SPACE_CONSTANT_FACTOR + stat_info = file_path.stat() + + # TODO: consider checking if the file was just written to (which would mean that the file is still + # being copied). A segfault might happen in this edge case. + + if prover.get_size() >= 30 and stat_info.st_size < 0.98 * expected_size: + log.warning( + f"Not farming plot {file_path}. Size is {stat_info.st_size / (1024**3)} GiB, but expected" + f" at least: {expected_size / (1024 ** 3)} GiB. We assume the file is being copied." + ) + return None - # TODO: consider checking if the file was just written to (which would mean that the file is still - # being copied). A segfault might happen in this edge case. - - if prover.get_size() >= 30 and stat_info.st_size < 0.98 * expected_size: - log.warning( - f"Not farming plot {file_path}. Size is {stat_info.st_size / (1024**3)} GiB, but expected" - f" at least: {expected_size / (1024 ** 3)} GiB. We assume the file is being copied." 
- ) - return new_provers - + cache_entry = self.cache.get(prover.get_id()) + if cache_entry is None: ( pool_public_key_or_puzzle_hash, farmer_public_key, @@ -213,92 +313,86 @@ def process_file(file_path: Path) -> Dict: ) = parse_plot_info(prover.get_memo()) # Only use plots that correct keys associated with them - if self.farmer_public_keys is not None and farmer_public_key not in self.farmer_public_keys: + if farmer_public_key not in self.farmer_public_keys: log.warning(f"Plot {file_path} has a farmer public key that is not in the farmer's pk list.") self.no_key_filenames.add(file_path) if not self.open_no_key_filenames: - return new_provers + return None + pool_public_key: Optional[G1Element] = None + pool_contract_puzzle_hash: Optional[bytes32] = None if isinstance(pool_public_key_or_puzzle_hash, G1Element): pool_public_key = pool_public_key_or_puzzle_hash - pool_contract_puzzle_hash = None else: assert isinstance(pool_public_key_or_puzzle_hash, bytes32) - pool_public_key = None pool_contract_puzzle_hash = pool_public_key_or_puzzle_hash - if ( - self.pool_public_keys is not None - and pool_public_key is not None - and pool_public_key not in self.pool_public_keys - ): + if pool_public_key is not None and pool_public_key not in self.pool_public_keys: log.warning(f"Plot {file_path} has a pool public key that is not in the farmer's pool pk list.") self.no_key_filenames.add(file_path) if not self.open_no_key_filenames: - return new_provers + return None - stat_info = file_path.stat() local_sk = master_sk_to_local_sk(local_master_sk) plot_public_key: G1Element = ProofOfSpace.generate_plot_public_key( local_sk.get_g1(), farmer_public_key, pool_contract_puzzle_hash is not None ) - with self.plot_filename_paths_lock: - if file_path.name not in self.plot_filename_paths: - self.plot_filename_paths[file_path.name] = (str(Path(prover.get_filename()).parent), set()) - else: - self.plot_filename_paths[file_path.name][1].add(str(Path(prover.get_filename()).parent)) - if 
len(self.plot_filename_paths[file_path.name][1]) > 0: - log.warning( - f"Have multiple copies of the plot {file_path} in " - f"{self.plot_filename_paths[file_path.name][1]}." - ) - return new_provers - - new_provers[file_path] = PlotInfo( - prover, - pool_public_key, - pool_contract_puzzle_hash, - plot_public_key, - stat_info.st_size, - stat_info.st_mtime, - ) - - with counter_lock: - result.loaded_plots += 1 - result.loaded_size += stat_info.st_size + cache_entry = CacheEntry(pool_public_key, pool_contract_puzzle_hash, plot_public_key) + self.cache.update(prover.get_id(), cache_entry) - if file_path in self.failed_to_open_filenames: - del self.failed_to_open_filenames[file_path] - - except Exception as e: - tb = traceback.format_exc() - log.error(f"Failed to open file {file_path}. {e} {tb}") - self.failed_to_open_filenames[file_path] = int(time.time()) - return new_provers - log.info(f"Found plot {file_path} of size {new_provers[file_path].prover.get_size()}") - - if self.show_memo: - plot_memo: bytes32 - if pool_contract_puzzle_hash is None: - plot_memo = stream_plot_info_pk(pool_public_key, farmer_public_key, local_master_sk) + with self.plot_filename_paths_lock: + if file_path.name not in self.plot_filename_paths: + self.plot_filename_paths[file_path.name] = (str(Path(prover.get_filename()).parent), set()) else: - plot_memo = stream_plot_info_ph(pool_contract_puzzle_hash, farmer_public_key, local_master_sk) - plot_memo_str: str = plot_memo.hex() - log.info(f"Memo: {plot_memo_str}") - - return new_provers - return new_provers - - def reduce_function(x: Dict, y: Dict) -> Dict: - return {**x, **y} + self.plot_filename_paths[file_path.name][1].add(str(Path(prover.get_filename()).parent)) + if len(self.plot_filename_paths[file_path.name][1]) > 0: + log.warning( + f"Have multiple copies of the plot {file_path} in " + f"{self.plot_filename_paths[file_path.name][1]}." 
+ ) + return None + + new_plot_info: PlotInfo = PlotInfo( + prover, + cache_entry.pool_public_key, + cache_entry.pool_contract_puzzle_hash, + cache_entry.plot_public_key, + stat_info.st_size, + stat_info.st_mtime, + ) + + with counter_lock: + result.loaded_plots += 1 + result.loaded_size += stat_info.st_size + + if file_path in self.failed_to_open_filenames: + del self.failed_to_open_filenames[file_path] + + except Exception as e: + tb = traceback.format_exc() + log.error(f"Failed to open file {file_path}. {e} {tb}") + self.failed_to_open_filenames[file_path] = int(time.time()) + return None + log.info(f"Found plot {file_path} of size {new_plot_info.prover.get_size()}") + + if self.show_memo: + plot_memo: bytes32 + if pool_contract_puzzle_hash is None: + plot_memo = stream_plot_info_pk(pool_public_key, farmer_public_key, local_master_sk) + else: + plot_memo = stream_plot_info_ph(pool_contract_puzzle_hash, farmer_public_key, local_master_sk) + plot_memo_str: str = plot_memo.hex() + log.info(f"Memo: {plot_memo_str}") + + return new_plot_info with self, ThreadPoolExecutor() as executor: # First drop all plots we have in plot_filename_paths but not longer in the filesystem or set in config def plot_removed(test_path: Path): - return not test_path.exists() or test_path.parent not in plot_filenames + return not test_path.exists() or test_path.parent not in plot_directories with self.plot_filename_paths_lock: filenames_to_remove: List[str] = [] @@ -321,8 +415,11 @@ def plot_removed(test_path: Path): for filename in filenames_to_remove: del self.plot_filename_paths[filename] - initial_value: Dict[Path, PlotInfo] = {} - self.plots = reduce(reduce_function, executor.map(process_file, all_filenames), initial_value) + plots_refreshed: Dict[Path, PlotInfo] = {} + for new_plot in executor.map(process_file, plot_paths): + if new_plot is not None: + plots_refreshed[Path(new_plot.prover.get_filename())] = new_plot + self.plots = plots_refreshed result.duration = time.time() - 
start_time diff --git a/wheat/pools/pool_puzzles.py b/wheat/pools/pool_puzzles.py index 762084a..04a2491 100644 --- a/wheat/pools/pool_puzzles.py +++ b/wheat/pools/pool_puzzles.py @@ -367,7 +367,7 @@ def pool_state_from_extra_data(extra_data: Program) -> Optional[PoolState]: return None -def solution_to_extra_data(full_spend: CoinSpend) -> Optional[PoolState]: +def solution_to_pool_state(full_spend: CoinSpend) -> Optional[PoolState]: full_solution_ser: SerializedProgram = full_spend.solution full_solution: Program = Program.from_bytes(bytes(full_solution_ser)) diff --git a/wheat/pools/pool_wallet.py b/wheat/pools/pool_wallet.py index 14c7043..c8ff4aa 100644 --- a/wheat/pools/pool_wallet.py +++ b/wheat/pools/pool_wallet.py @@ -30,7 +30,7 @@ create_full_puzzle, SINGLETON_LAUNCHER, create_pooling_inner_puzzle, - solution_to_extra_data, + solution_to_pool_state, pool_state_to_inner_puzzle, get_most_recent_singleton_coin_from_coin_spend, launcher_id_to_p2_puzzle_hash, @@ -194,24 +194,24 @@ async def get_current_state(self) -> PoolWalletInfo: assert tip_singleton_coin is not None curr_spend_i = len(all_spends) - 1 - extra_data: Optional[PoolState] = None + pool_state: Optional[PoolState] = None last_singleton_spend_height = uint32(0) - while extra_data is None: + while pool_state is None: full_spend: CoinSpend = all_spends[curr_spend_i] - extra_data = solution_to_extra_data(full_spend) + pool_state = solution_to_pool_state(full_spend) last_singleton_spend_height = uint32(history[curr_spend_i][0]) curr_spend_i -= 1 - assert extra_data is not None + assert pool_state is not None current_inner = pool_state_to_inner_puzzle( - extra_data, + pool_state, launcher_coin.name(), self.wallet_state_manager.constants.GENESIS_CHALLENGE, delayed_seconds, delayed_puzhash, ) return PoolWalletInfo( - extra_data, + pool_state, self.target_state, launcher_coin, launcher_id, @@ -291,7 +291,7 @@ async def apply_state_transitions(self, block_spends: List[CoinSpend], block_hei # If we have 
reached the target state, resets it to None. Loops back to get current state for _, added_spend in reversed(self.wallet_state_manager.pool_store.get_spends_for_wallet(self.wallet_id)): - latest_state: Optional[PoolState] = solution_to_extra_data(added_spend) + latest_state: Optional[PoolState] = solution_to_pool_state(added_spend) if latest_state is not None: if self.target_state == latest_state: self.target_state = None @@ -623,9 +623,9 @@ async def generate_launcher_spend( full_pooling_puzzle: Program = create_full_puzzle(puzzle, launcher_id=launcher_coin.name()) puzzle_hash: bytes32 = full_pooling_puzzle.get_tree_hash() - extra_data_bytes = Program.to([("p", bytes(initial_target_state)), ("t", delay_time), ("h", delay_ph)]) + pool_state_bytes = Program.to([("p", bytes(initial_target_state)), ("t", delay_time), ("h", delay_ph)]) announcement_set: Set[Announcement] = set() - announcement_message = Program.to([puzzle_hash, amount, extra_data_bytes]).get_tree_hash() + announcement_message = Program.to([puzzle_hash, amount, pool_state_bytes]).get_tree_hash() announcement_set.add(Announcement(launcher_coin.name(), announcement_message).name()) create_launcher_tx_record: Optional[TransactionRecord] = await standard_wallet.generate_signed_transaction( @@ -640,7 +640,7 @@ async def generate_launcher_spend( ) assert create_launcher_tx_record is not None and create_launcher_tx_record.spend_bundle is not None - genesis_launcher_solution: Program = Program.to([puzzle_hash, amount, extra_data_bytes]) + genesis_launcher_solution: Program = Program.to([puzzle_hash, amount, pool_state_bytes]) launcher_cs: CoinSpend = CoinSpend( launcher_coin, diff --git a/wheat/protocols/protocol_message_types.py b/wheat/protocols/protocol_message_types.py index 93781a2..7596f45 100644 --- a/wheat/protocols/protocol_message_types.py +++ b/wheat/protocols/protocol_message_types.py @@ -86,3 +86,14 @@ class ProtocolMessageTypes(Enum): new_signage_point_harvester = 66 request_plots = 67 
respond_plots = 68 + + # More wallet protocol + coin_state_update = 69 + register_interest_in_puzzle_hash = 70 + respond_to_ph_update = 71 + register_interest_in_coin = 72 + respond_to_coin_update = 73 + request_children = 74 + respond_children = 75 + request_ses_hashes = 76 + respond_ses_hashes = 77 diff --git a/wheat/protocols/protocol_state_machine.py b/wheat/protocols/protocol_state_machine.py new file mode 100644 index 0000000..5fcd54c --- /dev/null +++ b/wheat/protocols/protocol_state_machine.py @@ -0,0 +1,64 @@ +from wheat.protocols.protocol_message_types import ProtocolMessageTypes as pmt, ProtocolMessageTypes + +NO_REPLY_EXPECTED = [ + # full_node -> full_node messages + pmt.new_peak, + pmt.new_transaction, + pmt.new_unfinished_block, + pmt.new_signage_point_or_end_of_sub_slot, + pmt.request_mempool_transactions, + pmt.new_compact_vdf, + pmt.request_mempool_transactions, +] + +""" +VAILD_REPLY_MESSAGE_MAP: +key: sent message type. +value: valid reply message types, from the view of the requester. +A state machine can be built from this message map. 
+""" + +VAILD_REPLY_MESSAGE_MAP = { + # messages for all services + # pmt.handshake is handled in WSWheatConnection.perform_handshake + # full_node -> full_node protocol messages + pmt.request_transaction: [pmt.respond_transaction], + pmt.request_proof_of_weight: [pmt.respond_proof_of_weight], + pmt.request_block: [pmt.respond_block, pmt.reject_block], + pmt.request_blocks: [pmt.respond_blocks, pmt.reject_blocks], + pmt.request_unfinished_block: [pmt.respond_unfinished_block], + pmt.request_signage_point_or_end_of_sub_slot: [pmt.respond_signage_point, pmt.respond_end_of_sub_slot], + pmt.request_compact_vdf: [pmt.respond_compact_vdf], + pmt.request_peers: [pmt.respond_peers], +} + + +def static_check_sent_message_response() -> None: + """Check that allowed message data structures VALID_REPLY_MESSAGE_MAP and NO_REPLY_EXPECTED are consistent.""" + # Reply and non-reply sets should not overlap: This check should be static + overlap = set(NO_REPLY_EXPECTED).intersection(set(VAILD_REPLY_MESSAGE_MAP.keys())) + if len(overlap) != 0: + raise AssertionError("Overlapping NO_REPLY_EXPECTED and VAILD_REPLY_MESSAGE_MAP values: {}") + + +def message_requires_reply(sent: ProtocolMessageTypes) -> bool: + """Return True if message has an entry in the full node P2P message map""" + # If we knew the peer NodeType is FULL_NODE, we could also check `sent not in NO_REPLY_EXPECTED` + return sent in VAILD_REPLY_MESSAGE_MAP + + +def message_response_ok(sent: ProtocolMessageTypes, received: ProtocolMessageTypes) -> bool: + """ + Check to see that peers respect protocol message types in reply. + Call with received == None to indicate that we do not expect a specific reply message type. 
+ """ + # Errors below are runtime protocol message mismatches from peers + if sent in VAILD_REPLY_MESSAGE_MAP: + if received not in VAILD_REPLY_MESSAGE_MAP[sent]: + return False + + return True + + +# Run `static_check_sent_message_response` to check this static invariant at import time +static_check_sent_message_response() diff --git a/wheat/protocols/protocol_timing.py b/wheat/protocols/protocol_timing.py new file mode 100644 index 0000000..7a9bfa6 --- /dev/null +++ b/wheat/protocols/protocol_timing.py @@ -0,0 +1,4 @@ +# These settings should not be end-user configurable +INVALID_PROTOCOL_BAN_SECONDS = 10 +API_EXCEPTION_BAN_SECONDS = 10 +INTERNAL_PROTOCOL_ERROR_BAN_SECONDS = 10 # Don't flap if our client is at fault diff --git a/wheat/protocols/shared_protocol.py b/wheat/protocols/shared_protocol.py index cbd96c1..91930a6 100644 --- a/wheat/protocols/shared_protocol.py +++ b/wheat/protocols/shared_protocol.py @@ -5,7 +5,7 @@ from wheat.util.ints import uint8, uint16 from wheat.util.streamable import Streamable, streamable -protocol_version = "0.0.32" +protocol_version = "0.0.33" """ Handshake when establishing a connection between two servers. 
diff --git a/wheat/protocols/wallet_protocol.py b/wheat/protocols/wallet_protocol.py index e8adfd7..39e19c2 100644 --- a/wheat/protocols/wallet_protocol.py +++ b/wheat/protocols/wallet_protocol.py @@ -113,7 +113,7 @@ class RejectRemovalsRequest(Streamable): @streamable class RequestAdditions(Streamable): height: uint32 - header_hash: bytes32 + header_hash: Optional[bytes32] puzzle_hashes: Optional[List[bytes32]] @@ -153,3 +153,76 @@ class RespondHeaderBlocks(Streamable): start_height: uint32 end_height: uint32 header_blocks: List[HeaderBlock] + + +@dataclass(frozen=True) +@streamable +class CoinState(Streamable): + coin: Coin + spent_height: Optional[uint32] + created_height: Optional[uint32] + + +@dataclass(frozen=True) +@streamable +class RegisterForPhUpdates(Streamable): + puzzle_hashes: List[bytes32] + min_height: uint32 + + +@dataclass(frozen=True) +@streamable +class RespondToPhUpdates(Streamable): + puzzle_hashes: List[bytes32] + min_height: uint32 + coin_states: List[CoinState] + + +@dataclass(frozen=True) +@streamable +class RegisterForCoinUpdates(Streamable): + coin_ids: List[bytes32] + min_height: uint32 + + +@dataclass(frozen=True) +@streamable +class RespondToCoinUpdates(Streamable): + coin_ids: List[bytes32] + min_height: uint32 + coin_states: List[CoinState] + + +@dataclass(frozen=True) +@streamable +class CoinStateUpdate(Streamable): + height: uint32 + fork_height: uint32 + peak_hash: bytes32 + items: List[CoinState] + + +@dataclass(frozen=True) +@streamable +class RequestChildren(Streamable): + coin_name: bytes32 + + +@dataclass(frozen=True) +@streamable +class RespondChildren(Streamable): + coin_states: List[CoinState] + + +@dataclass(frozen=True) +@streamable +class RequestSESInfo(Streamable): + start_height: uint32 + end_height: uint32 + + +@dataclass(frozen=True) +@streamable +class RespondSESInfo(Streamable): + reward_chain_hash: List[bytes32] + heights: List[List[uint32]] diff --git a/wheat/rpc/rpc_client.py b/wheat/rpc/rpc_client.py index 
29cb0b1..57bf6f6 100644 --- a/wheat/rpc/rpc_client.py +++ b/wheat/rpc/rpc_client.py @@ -16,7 +16,7 @@ class RpcClient: Client to Wheat RPC, connects to a local service. Uses HTTP/JSON, and converts back from JSON into native python objects before returning. All api calls use POST requests. Note that this is not the same as the peer protocol, or wallet protocol (which run Wheat's - protocol on top of TCP), it's a separate protocol on top of HTTP thats provides easy access + protocol on top of TCP), it's a separate protocol on top of HTTP that provides easy access to the full node. """ @@ -24,10 +24,14 @@ class RpcClient: session: aiohttp.ClientSession closing_task: Optional[asyncio.Task] ssl_context: Optional[SSLContext] + hostname: str + port: uint16 @classmethod async def create(cls, self_hostname: str, port: uint16, root_path, net_config): self = cls() + self.hostname = self_hostname + self.port = port self.url = f"https://{self_hostname}:{str(port)}/" self.session = aiohttp.ClientSession() ca_crt_path, ca_key_path = private_ssl_ca_paths(root_path, net_config) diff --git a/wheat/rpc/wallet_rpc_api.py b/wheat/rpc/wallet_rpc_api.py index b56031b..a571304 100644 --- a/wheat/rpc/wallet_rpc_api.py +++ b/wheat/rpc/wallet_rpc_api.py @@ -95,7 +95,6 @@ def get_routes(self) -> Dict[str, Callable]: "/cancel_trade": self.cancel_trade, # DID Wallet "/did_update_recovery_ids": self.did_update_recovery_ids, - "/did_spend": self.did_spend, "/did_get_pubkey": self.did_get_pubkey, "/did_get_did": self.did_get_did, "/did_recovery_spend": self.did_recovery_spend, @@ -112,6 +111,7 @@ def get_routes(self) -> Dict[str, Callable]: "/pw_self_pool": self.pw_self_pool, "/pw_absorb_rewards": self.pw_absorb_rewards, "/pw_status": self.pw_status, + "/recover_pool_nft": self.recover_pool_nft, } async def _state_changed(self, *args) -> List[WsRpcMessage]: @@ -453,7 +453,7 @@ async def create_new_wallet(self, request: Dict): if request["mode"] == "new": async with 
self.service.wallet_state_manager.lock: cc_wallet: CCWallet = await CCWallet.create_new_cc( - wallet_state_manager, main_wallet, request["amount"] + wallet_state_manager, main_wallet, uint64(request["amount"]) ) colour = cc_wallet.get_colour() asyncio.create_task(self._create_backup_and_upload(host)) @@ -523,7 +523,7 @@ async def create_new_wallet(self, request: Dict): did_wallet: DIDWallet = await DIDWallet.create_new_did_wallet( wallet_state_manager, main_wallet, - int(request["amount"]), + uint64(request["amount"]), backup_dids, uint64(num_needed), ) @@ -965,21 +965,11 @@ async def did_update_recovery_ids(self, request): async with self.service.wallet_state_manager.lock: update_success = await wallet.update_recovery_list(recovery_list, new_amount_verifications_required) # Update coin with new ID info - updated_puz = await wallet.get_new_puzzle() - spend_bundle = await wallet.create_spend(updated_puz.get_tree_hash()) + spend_bundle = await wallet.create_update_spend() success = spend_bundle is not None and update_success return {"success": success} - async def did_spend(self, request): - wallet_id = int(request["wallet_id"]) - async with self.service.wallet_state_manager.lock: - wallet: DIDWallet = self.service.wallet_state_manager.wallets[wallet_id] - spend_bundle = await wallet.create_spend(request["puzzlehash"]) - - success = spend_bundle is not None - return {"success": success} - async def did_get_did(self, request): wallet_id = int(request["wallet_id"]) wallet: DIDWallet = self.service.wallet_state_manager.wallets[wallet_id] diff --git a/wheat/rpc/wallet_rpc_client.py b/wheat/rpc/wallet_rpc_client.py index 4f8aeb8..855fc3b 100644 --- a/wheat/rpc/wallet_rpc_client.py +++ b/wheat/rpc/wallet_rpc_client.py @@ -175,6 +175,47 @@ async def create_signed_transaction( response = await self.fetch("create_signed_transaction", {"additions": additions_hex, "fee": fee}) return TransactionRecord.from_json_dict(response["signed_tx"]) + async def create_new_did_wallet(self, 
amount): + request: Dict[str, Any] = { + "wallet_type": "did_wallet", + "did_type": "new", + "backup_dids": [], + "num_of_backup_ids_needed": 0, + "amount": amount, + "host": f"{self.hostname}:{self.port}", + } + response = await self.fetch("create_new_wallet", request) + return response + + async def create_new_did_wallet_from_recovery(self, filename): + request: Dict[str, Any] = { + "wallet_type": "did_wallet", + "did_type": "recovery", + "filename": filename, + "host": f"{self.hostname}:{self.port}", + } + response = await self.fetch("create_new_wallet", request) + return response + + async def did_create_attest(self, wallet_id, coin_name, pubkey, puzhash, file_name): + request: Dict[str, Any] = { + "wallet_id": wallet_id, + "coin_name": coin_name, + "pubkey": pubkey, + "puzhash": puzhash, + "filename": file_name, + } + response = await self.fetch("did_create_attest", request) + return response + + async def did_recovery_spend(self, wallet_id, attest_filenames): + request: Dict[str, Any] = { + "wallet_id": wallet_id, + "attest_filenames": attest_filenames, + } + response = await self.fetch("did_recovery_spend", request) + return response + async def create_new_pool_wallet( self, target_puzzlehash: Optional[bytes32], @@ -238,4 +279,4 @@ async def recover_pool_nft(self, contract_hash: str, launcher_hash: str, coins: "launcher_hash": launcher_hash, "contract_hash": contract_hash, "coins": coins, - }) \ No newline at end of file + }) diff --git a/wheat/server/address_manager_store.py b/wheat/server/address_manager_store.py index a0d9426..064acdd 100644 --- a/wheat/server/address_manager_store.py +++ b/wheat/server/address_manager_store.py @@ -38,8 +38,6 @@ async def create(cls, connection) -> "AddressManagerStore": self = cls() self.db = connection await self.db.commit() - await self.db.execute("pragma journal_mode=wal") - await self.db.execute("pragma synchronous=2") await self.db.execute("CREATE TABLE IF NOT EXISTS peer_metadata(key text,value text)") await 
self.db.commit() diff --git a/wheat/server/node_discovery.py b/wheat/server/node_discovery.py index 2110a52..067f865 100644 --- a/wheat/server/node_discovery.py +++ b/wheat/server/node_discovery.py @@ -27,6 +27,7 @@ MAX_CONCURRENT_OUTBOUND_CONNECTIONS = 70 NETWORK_ID_DEFAULT_PORTS = { "mainnet": 21333, + "testnet0": 23333, "testnet7": 23333, } @@ -91,6 +92,8 @@ def __init__( async def initialize_address_manager(self) -> None: mkdir(self.peer_db_path.parent) self.connection = await aiosqlite.connect(self.peer_db_path) + await self.connection.execute("pragma journal_mode=wal") + await self.connection.execute("pragma synchronous=OFF") self.address_manager_store = await AddressManagerStore.create(self.connection) if not await self.address_manager_store.is_empty(): self.address_manager = await self.address_manager_store.deserialize() @@ -383,9 +386,7 @@ async def _connect_to_peers(self, random) -> None: if time.time() - last_timestamp_local_info > 1800 or local_peerinfo is None: local_peerinfo = await self.server.get_peer_info() last_timestamp_local_info = uint64(int(time.time())) - if local_peerinfo is not None and addr == local_peerinfo or ( - addr is not None and addr.port != self.default_port - ): + if local_peerinfo is not None and addr == local_peerinfo: continue got_peer = True self.log.debug(f"Addrman selected address: {addr}.") diff --git a/wheat/server/rate_limits.py b/wheat/server/rate_limits.py index 8e257a9..33d37e8 100644 --- a/wheat/server/rate_limits.py +++ b/wheat/server/rate_limits.py @@ -76,9 +76,9 @@ class RLSettings: ProtocolMessageTypes.new_compact_vdf: RLSettings(100, 1024), ProtocolMessageTypes.request_peers: RLSettings(10, 100), ProtocolMessageTypes.respond_peers: RLSettings(10, 1 * 1024 * 1024), - ProtocolMessageTypes.request_puzzle_solution: RLSettings(100, 100), - ProtocolMessageTypes.respond_puzzle_solution: RLSettings(100, 1024 * 1024), - ProtocolMessageTypes.reject_puzzle_solution: RLSettings(100, 100), + 
ProtocolMessageTypes.request_puzzle_solution: RLSettings(1000, 100), + ProtocolMessageTypes.respond_puzzle_solution: RLSettings(1000, 1024 * 1024), + ProtocolMessageTypes.reject_puzzle_solution: RLSettings(1000, 100), ProtocolMessageTypes.new_peak_wallet: RLSettings(200, 300), ProtocolMessageTypes.request_block_header: RLSettings(500, 100), ProtocolMessageTypes.respond_block_header: RLSettings(500, 500 * 1024), @@ -97,6 +97,11 @@ class RLSettings: ProtocolMessageTypes.farm_new_block: RLSettings(200, 200), ProtocolMessageTypes.request_plots: RLSettings(10, 10 * 1024 * 1024), ProtocolMessageTypes.respond_plots: RLSettings(10, 100 * 1024 * 1024), + ProtocolMessageTypes.coin_state_update: RLSettings(1000, 100 * 1024 * 1024), + ProtocolMessageTypes.register_interest_in_puzzle_hash: RLSettings(1000, 100 * 1024 * 1024), + ProtocolMessageTypes.respond_to_ph_update: RLSettings(1000, 100 * 1024 * 1024), + ProtocolMessageTypes.register_interest_in_coin: RLSettings(1000, 100 * 1024 * 1024), + ProtocolMessageTypes.respond_to_coin_update: RLSettings(1000, 100 * 1024 * 1024), } diff --git a/wheat/server/server.py b/wheat/server/server.py index c3f732c..cc1bdd8 100644 --- a/wheat/server/server.py +++ b/wheat/server/server.py @@ -16,6 +16,8 @@ from cryptography.hazmat.primitives import hashes, serialization from wheat.protocols.protocol_message_types import ProtocolMessageTypes +from wheat.protocols.protocol_state_machine import message_requires_reply +from wheat.protocols.protocol_timing import INVALID_PROTOCOL_BAN_SECONDS, API_EXCEPTION_BAN_SECONDS from wheat.protocols.shared_protocol import protocol_version from wheat.server.introducer_peers import IntroducerPeers from wheat.server.outbound_message import Message, NodeType @@ -159,8 +161,8 @@ def __init__( self.tasks_from_peer: Dict[bytes32, Set[bytes32]] = {} self.banned_peers: Dict[str, float] = {} - self.invalid_protocol_ban_seconds = 10 - self.api_exception_ban_seconds = 10 + self.invalid_protocol_ban_seconds = 
INVALID_PROTOCOL_BAN_SECONDS + self.api_exception_ban_seconds = API_EXCEPTION_BAN_SECONDS self.exempt_peer_networks: List[Union[IPv4Network, IPv6Network]] = [ ip_network(net, strict=False) for net in config.get("exempt_peer_networks", []) ] @@ -275,12 +277,8 @@ async def incoming_connection(self, request): ) assert handshake is True - if connection.connection_type == NodeType.FULL_NODE and connection.peer_server_port != self._port: - self.log.info(f" {connection.peer_server_port} PORT NOT MATCH FULL_NODE PORT {self._port} ") - await connection.close() - close_event.set() # Limit inbound connections to config's specifications. - elif not self.accept_inbound_connections(connection.connection_type) and not is_in_network( + if not self.accept_inbound_connections(connection.connection_type) and not is_in_network( connection.peer_host, self.exempt_peer_networks ): self.log.info( @@ -376,7 +374,11 @@ async def start_client( session = None connection: Optional[WSWheatConnection] = None try: - timeout = ClientTimeout(total=30) + # Crawler/DNS introducer usually uses a lower timeout than the default + timeout_value = ( + 30 if "peer_connect_timeout" not in self.config else float(self.config["peer_connect_timeout"]) + ) + timeout = ClientTimeout(total=timeout_value) session = ClientSession(timeout=timeout) try: @@ -611,13 +613,29 @@ async def send_to_others( for message in messages: await connection.send_message(message) + async def validate_broadcast_message_type(self, messages: List[Message], node_type: NodeType): + for message in messages: + if message_requires_reply(ProtocolMessageTypes(message.type)): + # Internal protocol logic error - we will raise, blocking messages to all peers + self.log.error(f"Attempt to broadcast message requiring protocol response: {message.type}") + for _, connection in self.all_connections.items(): + if connection.connection_type is node_type: + await connection.close( + self.invalid_protocol_ban_seconds, + WSCloseCode.INTERNAL_ERROR, + 
Err.INTERNAL_PROTOCOL_ERROR, + ) + raise ProtocolError(Err.INTERNAL_PROTOCOL_ERROR, [message.type]) + async def send_to_all(self, messages: List[Message], node_type: NodeType): + await self.validate_broadcast_message_type(messages, node_type) for _, connection in self.all_connections.items(): if connection.connection_type is node_type: for message in messages: await connection.send_message(message) async def send_to_all_except(self, messages: List[Message], node_type: NodeType, exclude: bytes32): + await self.validate_broadcast_message_type(messages, node_type) for _, connection in self.all_connections.items(): if connection.connection_type is node_type and connection.peer_node_id != exclude: for message in messages: @@ -692,7 +710,7 @@ async def get_peer_info(self) -> Optional[PeerInfo]: ip = None port = self._port - # Use wheat's service first. + # Use chia's service first. try: timeout = ClientTimeout(total=15) async with ClientSession(timeout=timeout) as session: diff --git a/wheat/server/upnp.py b/wheat/server/upnp.py index e203cd6..106ea01 100644 --- a/wheat/server/upnp.py +++ b/wheat/server/upnp.py @@ -65,7 +65,7 @@ def shutdown(self): return self.queue.put(("shutdown",)) log.info("UPnP, shutting down thread") - self.thread.join() + self.thread.join(5) self.thread = None # this is here just in case the UPnP object is destroyed non-gracefully, diff --git a/wheat/server/ws_connection.py b/wheat/server/ws_connection.py index 9b5857a..fb49a1d 100644 --- a/wheat/server/ws_connection.py +++ b/wheat/server/ws_connection.py @@ -8,6 +8,8 @@ from wheat.cmds.init_funcs import wheat_full_version_str from wheat.protocols.protocol_message_types import ProtocolMessageTypes +from wheat.protocols.protocol_state_machine import message_response_ok +from wheat.protocols.protocol_timing import INTERNAL_PROTOCOL_ERROR_BAN_SECONDS from wheat.protocols.shared_protocol import Capability, Handshake from wheat.server.outbound_message import Message, NodeType, make_msg from 
wheat.server.rate_limits import RateLimiter @@ -103,6 +105,9 @@ def __init__( self.outbound_rate_limiter = RateLimiter(incoming=False, percentage_of_limit=outbound_rate_limit_percent) self.inbound_rate_limiter = RateLimiter(incoming=True, percentage_of_limit=inbound_rate_limit_percent) + # Used by crawler/dns introducer + self.version = None + async def perform_handshake(self, network_id: str, protocol_version: str, server_port: int, local_type: NodeType): if self.is_outbound: outbound_handshake = make_msg( @@ -135,6 +140,8 @@ async def perform_handshake(self, network_id: str, protocol_version: str, server if inbound_handshake.network_id != network_id: raise ProtocolError(Err.INCOMPATIBLE_NETWORK_ID) + self.version = inbound_handshake.software_version + self.peer_server_port = inbound_handshake.server_port self.connection_type = NodeType(inbound_handshake.node_type) @@ -212,6 +219,12 @@ async def close(self, ban_time: int = 0, ws_close_code: WSCloseCode = WSCloseCod raise self.close_callback(self, ban_time) + async def ban_peer_bad_protocol(self, log_err_msg: str): + """Ban peer for protocol violation""" + ban_seconds = INTERNAL_PROTOCOL_ERROR_BAN_SECONDS + self.log.error(f"Banning peer for {ban_seconds} seconds: {self.peer_host} {log_err_msg}") + await self.close(ban_seconds, WSCloseCode.PROTOCOL_ERROR, Err.INVALID_PROTOCOL_MESSAGE) + def cancel_pending_timeouts(self): for _, task in self.pending_timeouts.items(): task.cancel() @@ -269,14 +282,22 @@ async def invoke(*args, **kwargs): if attribute is None: raise AttributeError(f"Node type {self.connection_type} does not have method {attr_name}") - msg = Message(uint8(getattr(ProtocolMessageTypes, attr_name).value), None, args[0]) + msg: Message = Message(uint8(getattr(ProtocolMessageTypes, attr_name).value), None, args[0]) request_start_t = time.time() - result = await self.create_request(msg, timeout) + result = await self.send_request(msg, timeout) self.log.debug( f"Time for request {attr_name}: 
{self.get_peer_logging()} = {time.time() - request_start_t}, " f"None? {result is None}" ) if result is not None: + sent_message_type = ProtocolMessageTypes(msg.type) + recv_message_type = ProtocolMessageTypes(result.type) + if not message_response_ok(sent_message_type, recv_message_type): + # peer protocol violation + error_message = f"WSConnection.invoke sent message {sent_message_type.name} " + f"but received {recv_message_type.name}" + await self.ban_peer_bad_protocol(self.error_message) + raise ProtocolError(Err.INVALID_PROTOCOL_MESSAGE, [error_message]) ret_attr = getattr(class_for_type(self.local_type), ProtocolMessageTypes(result.type).name, None) req_annotations = ret_attr.__annotations__ @@ -292,7 +313,7 @@ async def invoke(*args, **kwargs): return invoke - async def create_request(self, message_no_id: Message, timeout: int) -> Optional[Message]: + async def send_request(self, message_no_id: Message, timeout: int) -> Optional[Message]: """Sends a message and waits for a response.""" if self.closed: return None @@ -461,6 +482,10 @@ async def _read_one_message(self) -> Optional[Message]: await asyncio.sleep(3) return None + # Used by crawler/dns introducer + def get_version(self): + return self.version + def get_peer_info(self) -> Optional[PeerInfo]: result = self.ws._writer.transport.get_extra_info("peername") if result is None: diff --git a/wheat/ssl/create_ssl.py b/wheat/ssl/create_ssl.py index c5e8c67..b2fd0a0 100644 --- a/wheat/ssl/create_ssl.py +++ b/wheat/ssl/create_ssl.py @@ -68,7 +68,7 @@ def generate_ca_signed_cert(ca_crt: bytes, ca_key: bytes, cert_out: Path, key_ou .not_valid_before(datetime.datetime.today() - one_day) .not_valid_after(datetime.datetime(2100, 8, 2)) .add_extension( - x509.SubjectAlternativeName([x509.DNSName("wheat.net")]), + x509.SubjectAlternativeName([x509.DNSName("wheat.network")]), critical=False, ) .sign(root_key, hashes.SHA256(), default_backend()) diff --git a/wheat/timelord/timelord_launcher.py 
b/wheat/timelord/timelord_launcher.py index e67a448..ac0e770 100644 --- a/wheat/timelord/timelord_launcher.py +++ b/wheat/timelord/timelord_launcher.py @@ -33,7 +33,7 @@ async def kill_processes(): def find_vdf_client() -> pathlib.Path: - p = pathlib.Path(pkg_resources.get_distribution("wheatvdf").location) / "vdf_client" + p = pathlib.Path(pkg_resources.get_distribution("chiavdf").location) / "vdf_client" if p.is_file(): return p raise FileNotFoundError("can't find vdf_client binary") @@ -79,9 +79,10 @@ async def spawn_process(host: str, port: int, counter: int): async def spawn_all_processes(config: Dict, net_config: Dict): await asyncio.sleep(5) + hostname = net_config["self_hostname"] if "host" not in config else config["host"] port = config["port"] process_count = config["process_count"] - awaitables = [spawn_process(net_config["self_hostname"], port, i) for i in range(process_count)] + awaitables = [spawn_process(hostname, port, i) for i in range(process_count)] await asyncio.gather(*awaitables) diff --git a/wheat/types/coin_record.py b/wheat/types/coin_record.py index 33c25fa..ba7b1f8 100644 --- a/wheat/types/coin_record.py +++ b/wheat/types/coin_record.py @@ -1,5 +1,7 @@ from dataclasses import dataclass +from typing import Optional +from wheat.protocols.wallet_protocol import CoinState from wheat.types.blockchain_format.coin import Coin from wheat.types.blockchain_format.sized_bytes import bytes32 from wheat.util.ints import uint32, uint64 @@ -24,3 +26,13 @@ class CoinRecord(Streamable): @property def name(self) -> bytes32: return self.coin.name() + + @property + def coin_state(self) -> CoinState: + spent_h = None + if self.spent: + spent_h = self.spent_block_index + confirmed_height: Optional[uint32] = self.confirmed_block_index + if self.confirmed_block_index == 0 and self.timestamp == 0: + confirmed_height = None + return CoinState(self.coin, spent_h, confirmed_height) diff --git a/wheat/types/coin_spend.py b/wheat/types/coin_spend.py index 
2654749..45664e1 100644 --- a/wheat/types/coin_spend.py +++ b/wheat/types/coin_spend.py @@ -3,7 +3,7 @@ from wheat.types.blockchain_format.coin import Coin from wheat.types.blockchain_format.program import SerializedProgram, INFINITE_COST -from wheat.util.chain_utils import additions_for_solution +from wheat.util.chain_utils import additions_for_solution, fee_for_solution from wheat.util.streamable import Streamable, streamable @@ -22,3 +22,6 @@ class CoinSpend(Streamable): def additions(self) -> List[Coin]: return additions_for_solution(self.coin.name(), self.puzzle_reveal, self.solution, INFINITE_COST) + + def reserved_fee(self) -> int: + return fee_for_solution(self.puzzle_reveal, self.solution, INFINITE_COST) diff --git a/wheat/types/spend_bundle.py b/wheat/types/spend_bundle.py index 46e0471..15857f3 100644 --- a/wheat/types/spend_bundle.py +++ b/wheat/types/spend_bundle.py @@ -6,6 +6,7 @@ from blspy import AugSchemeMPL, G2Element +from wheat.consensus.default_constants import DEFAULT_CONSTANTS from wheat.types.blockchain_format.coin import Coin from wheat.types.blockchain_format.sized_bytes import bytes32 from wheat.util.streamable import Streamable, dataclass_from_dict, recurse_jsonify, streamable @@ -61,7 +62,7 @@ def fees(self) -> int: def name(self) -> bytes32: return self.get_hash() - def debug(self, agg_sig_additional_data=bytes([3] * 32)): + def debug(self, agg_sig_additional_data=DEFAULT_CONSTANTS.AGG_SIG_ME_ADDITIONAL_DATA): debug_spend_bundle(self, agg_sig_additional_data) def not_ephemeral_additions(self): diff --git a/wheat/util/api_decorators.py b/wheat/util/api_decorators.py index 4a7ec0d..fa9a4df 100644 --- a/wheat/util/api_decorators.py +++ b/wheat/util/api_decorators.py @@ -59,3 +59,14 @@ def inner(): return func return inner() + + +def reply_type(type): + def wrap(func): + def inner(): + setattr(func, "reply_type", type) + return func + + return inner() + + return wrap diff --git a/wheat/util/chain_utils.py b/wheat/util/chain_utils.py index 
99802d6..15e3ed2 100644 --- a/wheat/util/chain_utils.py +++ b/wheat/util/chain_utils.py @@ -1,8 +1,11 @@ from typing import List +from clvm.casts import int_from_bytes + from wheat.types.blockchain_format.coin import Coin from wheat.types.blockchain_format.program import SerializedProgram from wheat.types.blockchain_format.sized_bytes import bytes32 +from wheat.types.condition_opcodes import ConditionOpcode from wheat.util.condition_tools import ( conditions_dict_for_solution, created_outputs_for_conditions_dict, @@ -19,3 +22,16 @@ def additions_for_solution( if err or dic is None: return [] return created_outputs_for_conditions_dict(dic, coin_name) + + +def fee_for_solution(puzzle_reveal: SerializedProgram, solution: SerializedProgram, max_cost: int) -> int: + err, dic, cost = conditions_dict_for_solution(puzzle_reveal, solution, max_cost) + if err or dic is None: + return 0 + + total = 0 + for cvp in dic.get(ConditionOpcode.RESERVE_FEE, []): + amount_bin = cvp.vars[0] + amount = int_from_bytes(amount_bin) + total += amount + return total diff --git a/wheat/util/condition_tools.py b/wheat/util/condition_tools.py index 6ce0e51..9e5d62d 100644 --- a/wheat/util/condition_tools.py +++ b/wheat/util/condition_tools.py @@ -124,32 +124,6 @@ def puzzle_announcements_for_conditions_dict( return output_announcements -def coin_announcements_names_for_npc(npc_list) -> Set[bytes32]: - output_announcements: Set[bytes32] = set() - for npc in npc_list: - for condition, cvp_list in npc.conditions: - if condition == ConditionOpcode.CREATE_COIN_ANNOUNCEMENT: - for cvp in cvp_list: - message = cvp.vars[0] - assert len(message) <= 1024 - announcement = Announcement(npc.coin_name, message) - output_announcements.add(announcement.name()) - return output_announcements - - -def puzzle_announcements_names_for_npc(npc_list) -> Set[bytes32]: - output_announcements: Set[bytes32] = set() - for npc in npc_list: - for condition, cvp_list in npc.conditions: - if condition == 
ConditionOpcode.CREATE_PUZZLE_ANNOUNCEMENT: - for cvp in cvp_list: - message = cvp.vars[0] - assert len(message) <= 1024 - announcement = Announcement(npc.puzzle_hash, message) - output_announcements.add(announcement.name()) - return output_announcements - - def coin_announcement_names_for_conditions_dict( conditions_dict: Dict[ConditionOpcode, List[ConditionWithArgs]], input_coin: Coin, diff --git a/wheat/util/config.py b/wheat/util/config.py index 3135327..f91eb4a 100644 --- a/wheat/util/config.py +++ b/wheat/util/config.py @@ -17,11 +17,16 @@ def initial_config_file(filename: Union[str, Path]) -> str: def create_default_wheat_config(root_path: Path, filenames=["config.yaml"]) -> None: for filename in filenames: - default_config_file_data = initial_config_file(filename) - path = config_path_for_filename(root_path, filename) + default_config_file_data: str = initial_config_file(filename) + path: Path = config_path_for_filename(root_path, filename) + tmp_path: Path = path.with_suffix("." + str(os.getpid())) mkdir(path.parent) - with open(path, "w") as f: + with open(tmp_path, "w") as f: f.write(default_config_file_data) + try: + os.replace(str(tmp_path), str(path)) + except PermissionError: + shutil.move(str(tmp_path), str(path)) def config_path_for_filename(root_path: Path, filename: Union[str, Path]) -> Path: @@ -32,10 +37,14 @@ def config_path_for_filename(root_path: Path, filename: Union[str, Path]) -> Pat def save_config(root_path: Path, filename: Union[str, Path], config_data: Any): - path = config_path_for_filename(root_path, filename) - with open(path.with_suffix("." + str(os.getpid())), "w") as f: + path: Path = config_path_for_filename(root_path, filename) + tmp_path: Path = path.with_suffix("." + str(os.getpid())) + with open(tmp_path, "w") as f: yaml.safe_dump(config_data, f) - shutil.move(str(path.with_suffix("." 
+ str(os.getpid()))), path) + try: + os.replace(str(tmp_path), path) + except PermissionError: + shutil.move(str(tmp_path), str(path)) def load_config( diff --git a/wheat/util/dump_keyring.py b/wheat/util/dump_keyring.py new file mode 100644 index 0000000..8f93f78 --- /dev/null +++ b/wheat/util/dump_keyring.py @@ -0,0 +1,95 @@ +#!/usr/bin/env python3 + +import click +import colorama +import threading +import yaml + +from wheat.cmds.passphrase_funcs import read_passphrase_from_file +from wheat.util.default_root import DEFAULT_KEYS_ROOT_PATH +from wheat.util.file_keyring import FileKeyring +from wheat.util.keyring_wrapper import DEFAULT_PASSPHRASE_IF_NO_MASTER_PASSPHRASE +from cryptography.exceptions import InvalidTag +from getpass import getpass +from io import TextIOWrapper +from pathlib import Path +from typing import Any, Dict, Optional + +DEFAULT_KEYRING_YAML = DEFAULT_KEYS_ROOT_PATH / "keyring.yaml" + + +class DumpKeyring(FileKeyring): # lgtm [py/missing-call-to-init] + def __init__(self, keyring_file: Path): + self.keyring_path = keyring_file + self.payload_cache = {} + self.load_keyring_lock = threading.RLock() + # We don't call super().__init__() to avoid side-effects + + +def get_passphrase_prompt(keyring_file: str) -> str: + prompt = ( + colorama.Fore.YELLOW + + colorama.Style.BRIGHT + + "(Unlock Keyring: " + + colorama.Fore.MAGENTA + + keyring_file + + colorama.Style.RESET_ALL + + colorama.Fore.YELLOW + + colorama.Style.BRIGHT + + ")" + + colorama.Style.RESET_ALL + + " Passphrase: " + ) # noqa: E501 + return prompt + + +@click.command() +@click.argument("keyring_file", nargs=1, default=DEFAULT_KEYRING_YAML) +@click.option( + "--full-payload", is_flag=True, default=False, help="Print the full keyring contents, including plaintext" +) +@click.option("--passphrase-file", type=click.File("r"), help="File or descriptor to read the passphrase from") +@click.option("--pretty-print", is_flag=True, default=False) +def dump(keyring_file, full_payload: bool, 
passphrase_file: Optional[TextIOWrapper], pretty_print: bool): + passphrase: str = DEFAULT_PASSPHRASE_IF_NO_MASTER_PASSPHRASE + prompt: str = get_passphrase_prompt(str(keyring_file)) + data: Dict[str, Any] = {} + + print(f"Attempting to dump contents of keyring file: {keyring_file}\n") + + if passphrase_file is not None: + passphrase = read_passphrase_from_file(passphrase_file) + + keyring = DumpKeyring(keyring_file) + + if full_payload: + keyring.load_outer_payload() + data = keyring.outer_payload_cache + + for i in range(5): + try: + keyring.load_keyring(passphrase) + if len(data) > 0: + data["data"] = keyring.payload_cache + else: + data = keyring.payload_cache + + if pretty_print: + print(yaml.dump(data)) + else: + print(data) + break + except (ValueError, InvalidTag): + passphrase = getpass(prompt) + except Exception as e: + print(f"Unhandled exception: {e}") + break + + +def main(): + colorama.init() + dump() # pylint: disable=no-value-for-parameter + + +if __name__ == "__main__": + main() diff --git a/wheat/util/errors.py b/wheat/util/errors.py index 0afbd0b..ca35127 100644 --- a/wheat/util/errors.py +++ b/wheat/util/errors.py @@ -7,7 +7,7 @@ class Err(Enum): DOES_NOT_EXTEND = -1 BAD_HEADER_SIGNATURE = -2 MISSING_FROM_STORAGE = -3 - INVALID_PROTOCOL_MESSAGE = -4 + INVALID_PROTOCOL_MESSAGE = -4 # We WILL ban for a protocol violation. 
SELF_CONNECTION = -5 INVALID_HANDSHAKE = -6 INVALID_ACK = -7 @@ -129,8 +129,8 @@ class Err(Enum): INVALID_PREFARM = 104 ASSERT_SECONDS_RELATIVE_FAILED = 105 BAD_COINBASE_SIGNATURE = 106 - # removed - # INITIAL_TRANSACTION_FREEZE = 107 + + # INITIAL_TRANSACTION_FREEZE = 107 # removed NO_TRANSACTIONS_WHILE_SYNCING = 108 ALREADY_INCLUDING_TRANSACTION = 109 INCOMPATIBLE_NETWORK_ID = 110 @@ -151,6 +151,8 @@ class Err(Enum): INVALID_FEE_TOO_CLOSE_TO_ZERO = 123 COIN_AMOUNT_NEGATIVE = 124 + INTERNAL_PROTOCOL_ERROR = 125 + INVALID_SPEND_BUNDLE = 126 class ValidationError(Exception): diff --git a/wheat/util/file_keyring.py b/wheat/util/file_keyring.py index 9b1dd38..b56a7a0 100644 --- a/wheat/util/file_keyring.py +++ b/wheat/util/file_keyring.py @@ -57,10 +57,6 @@ def acquire_writer_lock(lock_path: Path, timeout=5, max_iters=6): if lock.acquire_write_lock(timeout=timeout): yield # <---- lock.release_write_lock() - try: - os.remove(lock_path) - except Exception: - pass break else: print(f"Failed to acquire keyring writer lock after {timeout} seconds.", end="") @@ -80,10 +76,6 @@ def acquire_reader_lock(lock_path: Path, timeout=5, max_iters=6): if lock.acquire_read_lock(timeout=timeout): yield # <---- lock.release_read_lock() - try: - os.remove(lock_path) - except Exception: - pass break else: print(f"Failed to acquire keyring reader lock after {timeout} seconds.", end="") @@ -123,7 +115,7 @@ class FileKeyring(FileSystemEventHandler): The salt is updated each time the master passphrase is changed. """ - keyring_path: Optional[Path] = None + keyring_path: Path keyring_lock_path: Path keyring_observer: Observer = None load_keyring_lock: threading.RLock # Guards access to needs_load_keyring @@ -142,7 +134,12 @@ def keyring_path_from_root(keys_root_path: Path) -> Path: @staticmethod def lockfile_path_for_file_path(file_path: Path) -> Path: - return file_path.with_suffix(".lock") + """ + Returns a path suitable for creating a lockfile derived from the input path. 
+ Currently used to provide a lockfile path to be used by + fasteners.InterProcessReaderWriterLock when guarding access to keyring.yaml + """ + return file_path.with_name(f".{file_path.name}.lock") def __init__(self, keys_root_path: Path = DEFAULT_KEYS_ROOT_PATH): """ @@ -173,16 +170,24 @@ def setup_keyring_file_watcher(self): self.keyring_observer = Observer() + def cleanup_keyring_file_watcher(self): + if getattr(self, "keyring_observer"): + self.keyring_observer.unschedule_all() + def on_modified(self, event): self.check_if_keyring_file_modified() def check_if_keyring_file_modified(self): if self.keyring_path.exists(): - last_modified = os.stat(self.keyring_path).st_mtime - if not self.keyring_last_mod_time or self.keyring_last_mod_time < last_modified: - self.keyring_last_mod_time = last_modified - with self.load_keyring_lock: - self.needs_load_keyring = True + try: + last_modified = os.stat(self.keyring_path).st_mtime + if not self.keyring_last_mod_time or self.keyring_last_mod_time < last_modified: + self.keyring_last_mod_time = last_modified + with self.load_keyring_lock: + self.needs_load_keyring = True + except FileNotFoundError: + # Shouldn't happen, but if the file doesn't exist there's nothing to do... + pass @staticmethod def default_outer_payload() -> dict: @@ -425,10 +430,13 @@ def write_keyring(self, fresh_salt: bool = False): def write_data_to_keyring(self, data): os.makedirs(os.path.dirname(self.keyring_path), 0o700, True) - temp_path = self.keyring_path.with_suffix("." + str(os.getpid())) + temp_path: Path = self.keyring_path.with_suffix("." 
+ str(os.getpid())) with open(os.open(str(temp_path), os.O_CREAT | os.O_EXCL | os.O_WRONLY, 0o600), "w") as f: _ = yaml.safe_dump(data, f) - shutil.move(str(temp_path), self.keyring_path) + try: + os.replace(str(temp_path), self.keyring_path) + except PermissionError: + shutil.move(str(temp_path), str(self.keyring_path)) def prepare_for_migration(self): if not self.payload_cache: diff --git a/wheat/util/initial-config.yaml b/wheat/util/initial-config.yaml index 0f132e1..e792134 100644 --- a/wheat/util/initial-config.yaml +++ b/wheat/util/initial-config.yaml @@ -4,6 +4,7 @@ min_mainnet_k_size: 32 ping_interval: 120 self_hostname: &self_hostname "localhost" daemon_port: 21200 +daemon_max_message_size: 50000000 # maximum size of RPC message in bytes inbound_rate_limit_percent: 100 outbound_rate_limit_percent: 30 @@ -30,7 +31,6 @@ network_overrides: &network_overrides MEMPOOL_BLOCK_BUFFER: 10 EPOCH_BLOCKS: 768 DIFFICULTY_STARTING: 30 - RUST_CONDITION_CHECKER: 0 config: mainnet: address_prefix: "wheat" @@ -43,7 +43,7 @@ network_overrides: &network_overrides selected_network: &selected_network "mainnet" ALERTS_URL: https://download.wheat.network/notify/mainnet_alert.txt -WHEAT_ALERTS_PUBKEY: 1a287fd87cb56e926ecefb879a29aae308be01f31980569f6a75a69d2a9a69daefd71fb778d865f7c50d6c967e3025937 +WHEAT_ALERTS_PUBKEY: 89b7fd87cb56e926ecefb879a29aae308be01f31980569f6a75a69d2a9a69daefd71fb778d865f7c50d6c967e3025937 # public ssl ca is included in source code # Private ssl ca is used for trusted connections between machines user owns @@ -154,7 +154,8 @@ farmer: # Don't run this unless you want to run VDF clients on the local machine. timelord_launcher: # The server where the VDF clients will connect to. - port: 8000 + host: *self_hostname + port: 21000 # Number of VDF client processes to keep alive in the local machine. 
process_count: 3 logging: *logging @@ -162,7 +163,7 @@ timelord_launcher: timelord: # The timelord server (if run) will run on this port - port: 21000 + port: 21446 # Provides a list of VDF clients expected to connect to this timelord. # For each client, an IP is provided, together with the estimated iterations per second. vdf_clients: @@ -232,6 +233,8 @@ full_node: # How often to initiate outbound connections to other full nodes. peer_connect_interval: 30 + # How long to wait for a peer connection + peer_connect_timeout: 30 # Accept peers until this number of connections target_peer_count: 80 # Initiate outbound connections until this number is hit. @@ -261,6 +264,10 @@ full_node: # analyze with wheat/utils/profiler.py enable_profiler: False + # this is a debug and profiling facility that logs all SQLite commands to a + # separate log file (under logging/sql.log). + log_sqlite_cmds: False + # List of trusted DNS seeders to bootstrap from. # If you modify this, please change the hardcode as well from FullNode.set_server() dns_servers: diff --git a/wheat/util/ints.py b/wheat/util/ints.py index a1ebf0a..80ce41d 100644 --- a/wheat/util/ints.py +++ b/wheat/util/ints.py @@ -39,7 +39,7 @@ class uint128(int): def __new__(cls: Any, value: int): value = int(value) if value > (2 ** 128) - 1 or value < 0: - raise ValueError(f"Value {value} of does not fit into uin128") + raise ValueError(f"Value {value} of does not fit into uint128") return int.__new__(cls, value) # type: ignore @classmethod diff --git a/wheat/util/keychain.py b/wheat/util/keychain.py index f4b5cb4..69f0173 100644 --- a/wheat/util/keychain.py +++ b/wheat/util/keychain.py @@ -12,15 +12,19 @@ from pathlib import Path from secrets import token_bytes from time import sleep -from typing import List, Optional, Tuple +from typing import Any, Dict, List, Optional, Tuple +CURRENT_KEY_VERSION = "1.8" +DEFAULT_USER = f"user-wheat-{CURRENT_KEY_VERSION}" # e.g. 
user-wheat-1.8 +DEFAULT_SERVICE = f"wheat-{DEFAULT_USER}" # e.g. wheat-user-wheat-1.8 DEFAULT_PASSPHRASE_PROMPT = ( colorama.Fore.YELLOW + colorama.Style.BRIGHT + "(Unlock Keyring)" + colorama.Style.RESET_ALL + " Passphrase: " ) # noqa: E501 FAILED_ATTEMPT_DELAY = 0.5 MAX_KEYS = 100 MAX_RETRIES = 3 +MIN_PASSPHRASE_LEN = 8 class KeyringIsLocked(Exception): @@ -31,7 +35,7 @@ class KeyringRequiresMigration(Exception): pass -class KeyringCurrentPassphaseIsInvalid(Exception): +class KeyringCurrentPassphraseIsInvalid(Exception): pass @@ -47,6 +51,20 @@ def supports_keyring_passphrase() -> bool: # return platform == "linux" +def supports_os_passphrase_storage() -> bool: + return sys.platform in ["darwin"] + + +def passphrase_requirements() -> Dict[str, Any]: + """ + Returns a dictionary specifying current passphrase requirements + """ + if not supports_keyring_passphrase: + return {} + + return {"is_optional": True, "min_length": MIN_PASSPHRASE_LEN} # lgtm [py/clear-text-logging-sensitive-data] + + def set_keys_root_path(keys_root_path: Path) -> None: """ Used to set the keys_root_path prior to instantiating the KeyringWrapper shared instance. @@ -192,6 +210,21 @@ def mnemonic_to_seed(mnemonic: str, passphrase: str) -> bytes: return seed +def default_keychain_user() -> str: + return DEFAULT_USER + + +def default_keychain_service() -> str: + return DEFAULT_SERVICE + + +def get_private_key_user(user: str, index: int) -> str: + """ + Returns the keychain user string for a key index. + """ + return f"wallet-{user}-{index}" + + class Keychain: """ The keychain stores two types of keys: private keys, which are PrivateKeys from blspy, @@ -203,24 +236,11 @@ class Keychain: list of all keys. 
""" - testing: bool - keyring_wrapper: KeyringWrapper - user: str - - def __init__(self, user: str = "user-wheat-1.8", testing: bool = False): - self.user = user - self.testing = testing + def __init__(self, user: Optional[str] = None, service: Optional[str] = None): + self.user = user if user is not None else default_keychain_user() + self.service = service if service is not None else default_keychain_service() self.keyring_wrapper = KeyringWrapper.get_shared_instance() - def _get_service(self) -> str: - """ - The keychain stores keys under a different name for tests. - """ - if self.testing: - return f"wheat-{self.user}-test" - else: - return f"wheat-{self.user}" - @unlocks_keyring(use_passphrase_cache=True) def _get_pk_and_entropy(self, user: str) -> Optional[Tuple[G1Element, bytes]]: """ @@ -228,7 +248,7 @@ def _get_pk_and_entropy(self, user: str) -> Optional[Tuple[G1Element, bytes]]: include an G1Element and the entropy required to generate the private key. Note that generating the actual private key also requires the passphrase. """ - read_str = self.keyring_wrapper.get_passphrase(self._get_service(), user) + read_str = self.keyring_wrapper.get_passphrase(self.service, user) if read_str is None or len(read_str) == 0: return None str_bytes = bytes.fromhex(read_str) @@ -237,22 +257,13 @@ def _get_pk_and_entropy(self, user: str) -> Optional[Tuple[G1Element, bytes]]: str_bytes[G1Element.SIZE :], # flake8: noqa ) - def _get_private_key_user(self, index: int) -> str: - """ - Returns the keychain user string for a key index. - """ - if self.testing: - return f"wallet-{self.user}-test-{index}" - else: - return f"wallet-{self.user}-{index}" - def _get_free_private_key_index(self) -> int: """ Get the index of the first free spot in the keychain. 
""" index = 0 while True: - pk = self._get_private_key_user(index) + pk = get_private_key_user(self.user, index) pkent = self._get_pk_and_entropy(pk) if pkent is None: return index @@ -276,8 +287,8 @@ def add_private_key(self, mnemonic: str, passphrase: str) -> PrivateKey: return key self.keyring_wrapper.set_passphrase( - self._get_service(), - self._get_private_key_user(index), + self.service, + get_private_key_user(self.user, index), bytes(key.get_g1()).hex() + entropy.hex(), ) return key @@ -287,7 +298,7 @@ def get_first_private_key(self, passphrases: List[str] = [""]) -> Optional[Tuple Returns the first key in the keychain that has one of the passed in passphrases. """ index = 0 - pkent = self._get_pk_and_entropy(self._get_private_key_user(index)) + pkent = self._get_pk_and_entropy(get_private_key_user(self.user, index)) while index <= MAX_KEYS: if pkent is not None: pk, ent = pkent @@ -298,7 +309,7 @@ def get_first_private_key(self, passphrases: List[str] = [""]) -> Optional[Tuple if key.get_g1() == pk: return (key, ent) index += 1 - pkent = self._get_pk_and_entropy(self._get_private_key_user(index)) + pkent = self._get_pk_and_entropy(get_private_key_user(self.user, index)) return None def get_private_key_by_fingerprint( @@ -308,7 +319,7 @@ def get_private_key_by_fingerprint( Return first private key which have the given public key fingerprint. 
""" index = 0 - pkent = self._get_pk_and_entropy(self._get_private_key_user(index)) + pkent = self._get_pk_and_entropy(get_private_key_user(self.user, index)) while index <= MAX_KEYS: if pkent is not None: pk, ent = pkent @@ -319,7 +330,7 @@ def get_private_key_by_fingerprint( if pk.get_fingerprint() == fingerprint: return (key, ent) index += 1 - pkent = self._get_pk_and_entropy(self._get_private_key_user(index)) + pkent = self._get_pk_and_entropy(get_private_key_user(self.user, index)) return None def get_all_private_keys(self, passphrases: List[str] = [""]) -> List[Tuple[PrivateKey, bytes]]: @@ -330,7 +341,7 @@ def get_all_private_keys(self, passphrases: List[str] = [""]) -> List[Tuple[Priv all_keys: List[Tuple[PrivateKey, bytes]] = [] index = 0 - pkent = self._get_pk_and_entropy(self._get_private_key_user(index)) + pkent = self._get_pk_and_entropy(get_private_key_user(self.user, index)) while index <= MAX_KEYS: if pkent is not None: pk, ent = pkent @@ -341,7 +352,7 @@ def get_all_private_keys(self, passphrases: List[str] = [""]) -> List[Tuple[Priv if key.get_g1() == pk: all_keys.append((key, ent)) index += 1 - pkent = self._get_pk_and_entropy(self._get_private_key_user(index)) + pkent = self._get_pk_and_entropy(get_private_key_user(self.user, index)) return all_keys def get_all_public_keys(self) -> List[G1Element]: @@ -351,13 +362,13 @@ def get_all_public_keys(self) -> List[G1Element]: all_keys: List[Tuple[G1Element, bytes]] = [] index = 0 - pkent = self._get_pk_and_entropy(self._get_private_key_user(index)) + pkent = self._get_pk_and_entropy(get_private_key_user(self.user, index)) while index <= MAX_KEYS: if pkent is not None: pk, ent = pkent all_keys.append(pk) index += 1 - pkent = self._get_pk_and_entropy(self._get_private_key_user(index)) + pkent = self._get_pk_and_entropy(get_private_key_user(self.user, index)) return all_keys def get_first_public_key(self) -> Optional[G1Element]: @@ -365,13 +376,13 @@ def get_first_public_key(self) -> Optional[G1Element]: 
Returns the first public key. """ index = 0 - pkent = self._get_pk_and_entropy(self._get_private_key_user(index)) + pkent = self._get_pk_and_entropy(get_private_key_user(self.user, index)) while index <= MAX_KEYS: if pkent is not None: pk, ent = pkent return pk index += 1 - pkent = self._get_pk_and_entropy(self._get_private_key_user(index)) + pkent = self._get_pk_and_entropy(get_private_key_user(self.user, index)) return None def delete_key_by_fingerprint(self, fingerprint: int): @@ -380,14 +391,14 @@ def delete_key_by_fingerprint(self, fingerprint: int): """ index = 0 - pkent = self._get_pk_and_entropy(self._get_private_key_user(index)) + pkent = self._get_pk_and_entropy(get_private_key_user(self.user, index)) while index <= MAX_KEYS: if pkent is not None: pk, ent = pkent if pk.get_fingerprint() == fingerprint: - self.keyring_wrapper.delete_passphrase(self._get_service(), self._get_private_key_user(index)) + self.keyring_wrapper.delete_passphrase(self.service, get_private_key_user(self.user, index)) index += 1 - pkent = self._get_pk_and_entropy(self._get_private_key_user(index)) + pkent = self._get_pk_and_entropy(get_private_key_user(self.user, index)) def delete_all_keys(self): """ @@ -399,8 +410,8 @@ def delete_all_keys(self): pkent = None while True: try: - pkent = self._get_pk_and_entropy(self._get_private_key_user(index)) - self.keyring_wrapper.delete_passphrase(self._get_service(), self._get_private_key_user(index)) + pkent = self._get_pk_and_entropy(get_private_key_user(self.user, index)) + self.keyring_wrapper.delete_passphrase(self.service, get_private_key_user(self.user, index)) except Exception: # Some platforms might throw on no existing key delete_exception = True @@ -416,9 +427,9 @@ def delete_all_keys(self): while True: try: pkent = self._get_pk_and_entropy( - self._get_private_key_user(index) + get_private_key_user(self.user, index) ) # changed from _get_fingerprint_and_entropy to _get_pk_and_entropy - GH - 
self.keyring_wrapper.delete_passphrase(self._get_service(), self._get_private_key_user(index)) + self.keyring_wrapper.delete_passphrase(self.service, get_private_key_user(self.user, index)) except Exception: # Some platforms might throw on no existing key delete_exception = True @@ -452,6 +463,56 @@ def needs_migration() -> bool: """ return KeyringWrapper.get_shared_instance().using_legacy_keyring() + @staticmethod + def handle_migration_completed(): + """ + When migration completes outside of the current process, we rely on a notification to inform + the current process that it needs to reset/refresh its keyring. This allows us to stop using + the legacy keyring in an already-running daemon if migration is completed using the CLI. + """ + KeyringWrapper.get_shared_instance().refresh_keyrings() + + @staticmethod + def migrate_legacy_keyring(passphrase: Optional[str] = None, cleanup_legacy_keyring: bool = False) -> None: + """ + Begins legacy keyring migration in a non-interactive manner + """ + if passphrase is not None and passphrase != "": + KeyringWrapper.get_shared_instance().set_master_passphrase( + current_passphrase=None, new_passphrase=passphrase, write_to_keyring=False, allow_migration=False + ) + + KeyringWrapper.get_shared_instance().migrate_legacy_keyring(cleanup_legacy_keyring=cleanup_legacy_keyring) + + @staticmethod + def passphrase_is_optional() -> bool: + """ + Returns whether a user-supplied passphrase is optional, as specified by the passphrase requirements. + """ + return passphrase_requirements().get("is_optional", False) + + @staticmethod + def minimum_passphrase_length() -> int: + """ + Returns the minimum passphrase length, as specified by the passphrase requirements. + """ + return passphrase_requirements().get("min_length", 0) + + @staticmethod + def passphrase_meets_requirements(passphrase: Optional[str]) -> bool: + """ + Returns whether the provided passphrase satisfies the passphrase requirements. 
+ """ + # Passphrase is not required and None was provided + if (passphrase is None or passphrase == "") and Keychain.passphrase_is_optional(): + return True + + # Passphrase meets the minimum length requirement + if passphrase is not None and len(passphrase) >= Keychain.minimum_passphrase_length(): + return True + + return False + @staticmethod def has_master_passphrase() -> bool: """ @@ -493,14 +554,18 @@ def set_cached_master_passphrase(passphrase: Optional[str]) -> None: @staticmethod def set_master_passphrase( - current_passphrase: Optional[str], new_passphrase: str, allow_migration: bool = True + current_passphrase: Optional[str], + new_passphrase: str, + *, + allow_migration: bool = True, + save_passphrase: bool = False, ) -> None: """ Encrypts the keyring contents to new passphrase, provided that the current passphrase can decrypt the contents """ KeyringWrapper.get_shared_instance().set_master_passphrase( - current_passphrase, new_passphrase, allow_migration=allow_migration + current_passphrase, new_passphrase, allow_migration=allow_migration, save_passphrase=save_passphrase ) @staticmethod diff --git a/wheat/util/keyring_wrapper.py b/wheat/util/keyring_wrapper.py index a30219c..d474814 100644 --- a/wheat/util/keyring_wrapper.py +++ b/wheat/util/keyring_wrapper.py @@ -1,17 +1,58 @@ +import asyncio import keyring as keyring_main +from blspy import PrivateKey # pyright: reportMissingImports=false from wheat.util.default_root import DEFAULT_KEYS_ROOT_PATH from wheat.util.file_keyring import FileKeyring from wheat.util.misc import prompt_yes_no from keyrings.cryptfile.cryptfile import CryptFileKeyring # pyright: reportMissingImports=false +from keyring.backends.macOS import Keyring as MacKeyring +from keyring.errors import KeyringError from pathlib import Path from sys import exit, platform -from typing import Any, Optional, Tuple, Union +from typing import Any, List, Optional, Tuple, Type, Union # We want to protect the keyring, even if a user-specified 
master passphrase isn't provided +# +# WARNING: Changing the default passphrase will prevent passphrase-less users from accessing +# their existing keys. Using a new default passphrase requires migrating existing users to +# the new passphrase. DEFAULT_PASSPHRASE_IF_NO_MASTER_PASSPHRASE = "$ wheat passphrase set # all the cool kids are doing it!" +MAC_KEYCHAIN_MASTER_PASSPHRASE_SERVICE = "Wheat Passphrase" +MAC_KEYCHAIN_MASTER_PASSPHRASE_USER = "Wheat Passphrase" + + +def check_macos_keychain_keys_present(mac_keychain: MacKeyring) -> bool: + from keyring.credentials import SimpleCredential + from wheat.util.keychain import default_keychain_user, default_keychain_service, get_private_key_user, MAX_KEYS + + keychain_user: str = default_keychain_user() + keychain_service: str = default_keychain_service() + + for index in range(0, MAX_KEYS): + current_user: str = get_private_key_user(keychain_user, index) + credential: Optional[SimpleCredential] = mac_keychain.get_credential(keychain_service, current_user) + if credential is not None: + return True + return False + + +def warn_if_macos_errSecInteractionNotAllowed(error: KeyringError): + """ + Check if the macOS Keychain error is errSecInteractionNotAllowed. This commonly + occurs when the keychain is accessed while headless (such as remoting into a Mac + via SSH). Because macOS Keychain operations may require prompting for login creds, + a connection to the WindowServer is required. + """ + + if "-25308" in str(error): + print( + "WARNING: Unable to access the macOS Keychain (-25308 errSecInteractionNotAllowed). " + "Are you logged-in remotely?" 
+ ) + class KeyringWrapper: """ @@ -31,8 +72,8 @@ class KeyringWrapper: # Instance members keys_root_path: Path keyring: Union[Any, FileKeyring] = None - cached_passphase: Optional[str] = DEFAULT_PASSPHRASE_IF_NO_MASTER_PASSPHRASE - cached_passphase_is_validated: bool = False + cached_passphrase: Optional[str] = None + cached_passphrase_is_validated: bool = False legacy_keyring = None def __init__(self, keys_root_path: Path = DEFAULT_KEYS_ROOT_PATH): @@ -42,11 +83,18 @@ def __init__(self, keys_root_path: Path = DEFAULT_KEYS_ROOT_PATH): the data from the legacy CryptFileKeyring (on write). """ self.keys_root_path = keys_root_path + self.refresh_keyrings() + + def refresh_keyrings(self): + self.keyring = None self.keyring = self._configure_backend() # Configure the legacy keyring if keyring passphrases are supported to support migration (if necessary) self.legacy_keyring = self._configure_legacy_backend() + # Initialize the cached_passphrase + self.cached_passphrase = self._get_initial_cached_passphrase() + def _configure_backend(self) -> Union[Any, FileKeyring]: from wheat.util.keychain import supports_keyring_passphrase @@ -57,18 +105,20 @@ def _configure_backend(self) -> Union[Any, FileKeyring]: import keyring.backends.Windows keyring.set_keyring(keyring.backends.Windows.WinVaultKeyring()) - elif platform == "darwin": - import keyring.backends.macOS - - keyring.set_keyring(keyring.backends.macOS.Keyring()) - # TODO: New keyring + passphrase support can be enabled for macOS by updating + # TODO: New keyring + passphrase support can be enabled for Windows by updating # supports_keyring_passphrase() and uncommenting the lines below. Leaving the # lines below in place for testing. 
# # if supports_keyring_passphrase(): # keyring = FileKeyring(keys_root_path=self.keys_root_path) # type: ignore # else: - # keyring.set_keyring(keyring.backends.macOS.Keyring()) + # keyring.set_keyring(keyring.backends.Windows.WinVaultKeyring()) + elif platform == "darwin": + if supports_keyring_passphrase(): + keyring = FileKeyring(keys_root_path=self.keys_root_path) # type: ignore + else: + keyring = MacKeyring() # type: ignore + keyring_main.set_keyring(keyring) elif platform == "linux": if supports_keyring_passphrase(): keyring = FileKeyring(keys_root_path=self.keys_root_path) # type: ignore @@ -80,18 +130,36 @@ def _configure_backend(self) -> Union[Any, FileKeyring]: return keyring - def _configure_legacy_backend(self) -> CryptFileKeyring: - # If keyring.yaml isn't found or is empty, check if we're using CryptFileKeyring + def _configure_legacy_backend(self) -> Union[CryptFileKeyring, MacKeyring]: + # If keyring.yaml isn't found or is empty, check if we're using CryptFileKeyring or the Mac Keychain filekeyring = self.keyring if type(self.keyring) == FileKeyring else None if filekeyring and not filekeyring.has_content(): - old_keyring = CryptFileKeyring() - if Path(old_keyring.file_path).is_file(): - # After migrating content from legacy_keyring, we'll prompt to clear those keys - old_keyring.keyring_key = "your keyring password" # type: ignore - return old_keyring + if platform == "linux": + old_keyring = CryptFileKeyring() + if Path(old_keyring.file_path).is_file(): + # After migrating content from legacy_keyring, we'll prompt to clear those keys + old_keyring.keyring_key = "your keyring password" # type: ignore + return old_keyring + elif platform == "darwin": + mac_keychain: MacKeyring = MacKeyring() + if check_macos_keychain_keys_present(mac_keychain): + return mac_keychain return None + def _get_initial_cached_passphrase(self) -> str: + from wheat.util.keychain import supports_os_passphrase_storage + + passphrase: Optional[str] = None + + if 
supports_os_passphrase_storage(): + passphrase = self.get_master_passphrase_from_credential_store() + + if passphrase is None: + passphrase = DEFAULT_PASSPHRASE_IF_NO_MASTER_PASSPHRASE + + return passphrase + @staticmethod def set_keys_root_path(keys_root_path: Path): """ @@ -129,15 +197,15 @@ def get_cached_master_passphrase(self) -> Tuple[Optional[str], bool]: Returns a tuple including the currently cached passphrase and a bool indicating whether the passphrase has been previously validated. """ - return self.cached_passphase, self.cached_passphase_is_validated + return self.cached_passphrase, self.cached_passphrase_is_validated def set_cached_master_passphrase(self, passphrase: Optional[str], validated=False) -> None: """ Cache the provided passphrase and optionally indicate whether the passphrase has been validated. """ - self.cached_passphase = passphrase - self.cached_passphase_is_validated = validated + self.cached_passphrase = passphrase + self.cached_passphrase_is_validated = validated def has_cached_master_passphrase(self) -> bool: passphrase = self.get_cached_master_passphrase() @@ -157,14 +225,20 @@ def set_master_passphrase( self, current_passphrase: Optional[str], new_passphrase: str, + *, write_to_keyring: bool = True, allow_migration: bool = True, + save_passphrase: bool = False, ) -> None: """ Sets a new master passphrase for the keyring """ - from wheat.util.keychain import KeyringCurrentPassphaseIsInvalid, KeyringRequiresMigration + from wheat.util.keychain import ( + KeyringCurrentPassphraseIsInvalid, + KeyringRequiresMigration, + supports_os_passphrase_storage, + ) # Require a valid current_passphrase if ( @@ -172,7 +246,7 @@ def set_master_passphrase( and current_passphrase is not None and not self.master_passphrase_is_valid(current_passphrase) ): - raise KeyringCurrentPassphaseIsInvalid("invalid current passphrase") + raise KeyringCurrentPassphraseIsInvalid("invalid current passphrase") self.set_cached_master_passphrase(new_passphrase, 
validated=True) @@ -182,13 +256,19 @@ def set_master_passphrase( if not allow_migration: raise KeyringRequiresMigration("keyring requires migration") - self.migrate_legacy_keyring() + self.migrate_legacy_keyring_interactive() else: # We're reencrypting the keyring contents using the new passphrase. Ensure that the # payload has been decrypted by calling load_keyring with the current passphrase. self.keyring.load_keyring(passphrase=current_passphrase) self.keyring.write_keyring(fresh_salt=True) # Create a new salt since we're changing the passphrase + if supports_os_passphrase_storage(): + if save_passphrase: + self.save_master_passphrase_to_credential_store(new_passphrase) + else: + self.remove_master_passphrase_from_credential_store() + def remove_master_passphrase(self, current_passphrase: Optional[str]) -> None: """ Remove the user-specific master passphrase. We still keep the keyring contents encrypted @@ -196,8 +276,54 @@ def remove_master_passphrase(self, current_passphrase: Optional[str]) -> None: """ self.set_master_passphrase(current_passphrase, DEFAULT_PASSPHRASE_IF_NO_MASTER_PASSPHRASE) + def save_master_passphrase_to_credential_store(self, passphrase: str) -> None: + if platform == "darwin": + mac_keychain = MacKeyring() + try: + mac_keychain.set_password( + MAC_KEYCHAIN_MASTER_PASSPHRASE_SERVICE, MAC_KEYCHAIN_MASTER_PASSPHRASE_USER, passphrase + ) + except KeyringError as e: + warn_if_macos_errSecInteractionNotAllowed(e) + return None + + def remove_master_passphrase_from_credential_store(self) -> None: + if platform == "darwin": + mac_keychain = MacKeyring() + try: + mac_keychain.delete_password( + MAC_KEYCHAIN_MASTER_PASSPHRASE_SERVICE, MAC_KEYCHAIN_MASTER_PASSPHRASE_USER + ) + except KeyringError as e: + warn_if_macos_errSecInteractionNotAllowed(e) + return None + + def get_master_passphrase_from_credential_store(self) -> Optional[str]: + if platform == "darwin": + mac_keychain = MacKeyring() + try: + return mac_keychain.get_password( + 
MAC_KEYCHAIN_MASTER_PASSPHRASE_SERVICE, MAC_KEYCHAIN_MASTER_PASSPHRASE_USER + ) + except KeyringError as e: + warn_if_macos_errSecInteractionNotAllowed(e) + return None + # Legacy keyring migration + class MigrationResults: + def __init__( + self, + original_private_keys: List[Tuple[PrivateKey, bytes]], + legacy_keyring: Any, + keychain_service: str, + keychain_users: List[str], + ): + self.original_private_keys = original_private_keys + self.legacy_keyring = legacy_keyring + self.keychain_service = keychain_service + self.keychain_users = keychain_users + def confirm_migration(self) -> bool: """ Before beginning migration, we'll notify the user that the legacy keyring needs to be @@ -222,9 +348,12 @@ def confirm_migration(self) -> bool: from wheat.cmds.passphrase_funcs import prompt_for_new_passphrase # Prompt for a master passphrase and cache it - new_passphrase = prompt_for_new_passphrase() + new_passphrase, save_passphrase = prompt_for_new_passphrase() self.set_master_passphrase( - current_passphrase=None, new_passphrase=new_passphrase, write_to_keyring=False + current_passphrase=None, + new_passphrase=new_passphrase, + write_to_keyring=False, + save_passphrase=save_passphrase, ) else: print( @@ -244,34 +373,19 @@ def confirm_migration(self) -> bool: return prompt_yes_no("Begin keyring migration? (y/n) ") - def migrate_legacy_keyring(self): - """ - Handle importing keys from the legacy keyring into the new keyring. - - Prior to beginning, we'll ensure that we at least suggest setting a master passphrase - and backing up mnemonic seeds. After importing keys from the legacy keyring, we'll - perform a before/after comparison of the keyring contents, and on success we'll prompt - to cleanup the legacy keyring. - """ - - from wheat.util.keychain import Keychain, MAX_KEYS - - # Make sure the user is ready to begin migration. We want to ensure that - response = self.confirm_migration() - if not response: - print("Skipping migration. 
Unable to proceed") - exit(0) + def migrate_legacy_keys(self) -> MigrationResults: + from wheat.util.keychain import get_private_key_user, Keychain, MAX_KEYS print("Migrating contents from legacy keyring") - keychain = Keychain() + keychain: Keychain = Keychain() # Obtain contents from the legacy keyring. When using the Keychain interface # to read, the legacy keyring will be preferred over the new keyring. original_private_keys = keychain.get_all_private_keys() - service = keychain._get_service() + service = keychain.service user_passphrase_pairs = [] index = 0 - user = keychain._get_private_key_user(index) + user = get_private_key_user(keychain.user, index) while index <= MAX_KEYS: # Build up a list of user/passphrase tuples from the legacy keyring contents if user is not None: @@ -281,51 +395,122 @@ def migrate_legacy_keyring(self): user_passphrase_pairs.append((user, passphrase)) index += 1 - user = keychain._get_private_key_user(index) + user = get_private_key_user(keychain.user, index) # Write the keys directly to the new keyring (self.keyring) for (user, passphrase) in user_passphrase_pairs: self.keyring.set_password(service, user, passphrase) + return KeyringWrapper.MigrationResults( + original_private_keys, self.legacy_keyring, service, [user for (user, _) in user_passphrase_pairs] + ) + + def verify_migration_results(self, migration_results: MigrationResults) -> bool: + from wheat.util.keychain import Keychain + # Stop using the legacy keyring. This will direct subsequent reads to the new keyring. 
- old_keyring = self.legacy_keyring self.legacy_keyring = None + success: bool = False print("Verifying migration results...", end="") # Compare the original keyring contents with the new try: + keychain: Keychain = Keychain() + original_private_keys = migration_results.original_private_keys post_migration_private_keys = keychain.get_all_private_keys() + # Sort the key collections prior to comparing + original_private_keys.sort(key=lambda e: str(e[0])) + post_migration_private_keys.sort(key=lambda e: str(e[0])) + if post_migration_private_keys == original_private_keys: + success = True print(" Verified") + else: + print(" Failed") + raise ValueError("Migrated keys don't match original keys") except Exception as e: print(f"\nMigration failed: {e}") print("Leaving legacy keyring intact") - exit(1) + self.legacy_keyring = migration_results.legacy_keyring # Restore the legacy keyring + raise e - print(f"Keyring migration completed successfully ({str(self.keyring.keyring_path)})\n") + return success - # Ask if we should clean up the legacy keyring - self.confirm_legacy_keyring_cleanup(old_keyring, service, [user for (user, _) in user_passphrase_pairs]) + def confirm_legacy_keyring_cleanup(self, migration_results) -> bool: + """ + Ask the user whether we should remove keys from the legacy keyring. In the case + of CryptFileKeyring, we can't just delete the file because other python processes + might use the same keyring file. + """ + keyring_name: str = "" + legacy_keyring_type: Type = type(migration_results.legacy_keyring) + + if legacy_keyring_type is CryptFileKeyring: + keyring_name = str(migration_results.legacy_keyring.file_path) + elif legacy_keyring_type is MacKeyring: + keyring_name = "macOS Keychain" + # leaving this here for when Windows migration is supported + # elif legacy_keyring_type is Win32Keyring: + # keyring_name = "Windows Credential Manager" + + prompt = "Remove keys from old keyring" + if len(keyring_name) > 0: + prompt += f" ({keyring_name})?" 
+ else: + prompt += "?" + prompt += " (y/n) " + return prompt_yes_no(prompt) + + def cleanup_legacy_keyring(self, migration_results: MigrationResults): + for user in migration_results.keychain_users: + migration_results.legacy_keyring.delete_password(migration_results.keychain_service, user) - def confirm_legacy_keyring_cleanup(self, legacy_keyring, service, users): + def migrate_legacy_keyring(self, cleanup_legacy_keyring: bool = False): + results = self.migrate_legacy_keys() + success = self.verify_migration_results(results) + + if success and cleanup_legacy_keyring: + self.cleanup_legacy_keyring(results) + + def migrate_legacy_keyring_interactive(self): """ - Ask the user whether we should remove keys from the legacy keyring. We can't just - delete the file because other python processes might use the same keyring file. + Handle importing keys from the legacy keyring into the new keyring. + + Prior to beginning, we'll ensure that we at least suggest setting a master passphrase + and backing up mnemonic seeds. After importing keys from the legacy keyring, we'll + perform a before/after comparison of the keyring contents, and on success we'll prompt + to cleanup the legacy keyring. """ + from wheat.cmds.passphrase_funcs import async_update_daemon_migration_completed_if_running + + # Make sure the user is ready to begin migration. + response = self.confirm_migration() + if not response: + print("Skipping migration. Unable to proceed") + exit(0) + + try: + results = self.migrate_legacy_keys() + success = self.verify_migration_results(results) - response = prompt_yes_no(f"Remove keys from old keyring ({str(legacy_keyring.file_path)})? 
(y/n) ") + if success: + print(f"Keyring migration completed successfully ({str(self.keyring.keyring_path)})\n") + except Exception as e: + print(f"\nMigration failed: {e}") + print("Leaving legacy keyring intact") + exit(1) - if response: - for user in users: - legacy_keyring.delete_password(service, user) + # Ask if we should clean up the legacy keyring + if self.confirm_legacy_keyring_cleanup(results): + self.cleanup_legacy_keyring(results) print("Removed keys from old keyring") else: print("Keys in old keyring left intact") - # TODO: CryptFileKeyring doesn't cleanup section headers - # [wheat_2Duser_2Dwheat_2D1_2E8] is left behind + # Notify the daemon (if running) that migration has completed + asyncio.get_event_loop().run_until_complete(async_update_daemon_migration_completed_if_running()) # Keyring interface @@ -340,13 +525,13 @@ def get_passphrase(self, service: str, user: str) -> str: def set_passphrase(self, service: str, user: str, passphrase: str): # On the first write while using the legacy keyring, we'll start migration if self.using_legacy_keyring() and self.has_cached_master_passphrase(): - self.migrate_legacy_keyring() + self.migrate_legacy_keyring_interactive() self.get_keyring().set_password(service, user, passphrase) def delete_passphrase(self, service: str, user: str): # On the first write while using the legacy keyring, we'll start migration if self.using_legacy_keyring() and self.has_cached_master_passphrase(): - self.migrate_legacy_keyring() + self.migrate_legacy_keyring_interactive() self.get_keyring().delete_password(service, user) diff --git a/wheat/util/significant_bits.py b/wheat/util/significant_bits.py index b24b88b..25547e6 100644 --- a/wheat/util/significant_bits.py +++ b/wheat/util/significant_bits.py @@ -6,7 +6,7 @@ def truncate_to_significant_bits(input_x: int, num_significant_bits: int) -> int """ x = abs(input_x) if num_significant_bits > x.bit_length(): - return x + return input_x lower = x.bit_length() - num_significant_bits 
mask = (1 << (x.bit_length())) - 1 - ((1 << lower) - 1) if input_x < 0: diff --git a/wheat/util/ssl.py b/wheat/util/ssl.py index bd8c33d..666f9eb 100644 --- a/wheat/util/ssl.py +++ b/wheat/util/ssl.py @@ -161,7 +161,7 @@ def check_and_fix_permissions_for_ssl_file(file: Path, mask: int, updated_mode: if not good_perms: valid = False print( - f"Attempting to set permissions {octal_mode_string(mode)} on " + f"Attempting to set permissions {octal_mode_string(updated_mode)} on " f"{file}" # lgtm [py/clear-text-logging-sensitive-data] ) os.chmod(str(file), updated_mode) diff --git a/wheat/util/streamable.py b/wheat/util/streamable.py index e9c87ae..ec41698 100644 --- a/wheat/util/streamable.py +++ b/wheat/util/streamable.py @@ -11,7 +11,6 @@ from blspy import G1Element, G2Element, PrivateKey -from wheat.types.blockchain_format.program import Program, SerializedProgram from wheat.types.blockchain_format.sized_bytes import bytes32 from wheat.util.byte_types import hexstr_to_bytes from wheat.util.hash import std_hash @@ -39,11 +38,11 @@ def get_args(t: Type[Any]) -> Tuple[Any, ...]: "ConditionOpcode": 1, } unhashable_types = [ - PrivateKey, - G1Element, - G2Element, - Program, - SerializedProgram, + "PrivateKey", + "G1Element", + "G2Element", + "Program", + "SerializedProgram", ] # JSON does not support big ints, so these types must be serialized differently in JSON big_ints = [uint64, int64, uint128, int512] @@ -77,7 +76,7 @@ def dataclass_from_dict(klass, d): elif issubclass(klass, bytes): # Type is bytes, data is a hex string return klass(hexstr_to_bytes(d)) - elif klass in unhashable_types: + elif klass.__name__ in unhashable_types: # Type is unhashable (bls type), so cast from hex string return klass.from_bytes(hexstr_to_bytes(d)) else: @@ -93,7 +92,7 @@ def recurse_jsonify(d): if isinstance(d, list) or isinstance(d, tuple): new_list = [] for item in d: - if type(item) in unhashable_types or issubclass(type(item), bytes): + if type(item).__name__ in unhashable_types or 
issubclass(type(item), bytes): item = f"0x{bytes(item).hex()}" if isinstance(item, dict): item = recurse_jsonify(item) @@ -110,7 +109,7 @@ def recurse_jsonify(d): else: for key, value in d.items(): - if type(value) in unhashable_types or issubclass(type(value), bytes): + if type(value).__name__ in unhashable_types or issubclass(type(value), bytes): d[key] = f"0x{bytes(value).hex()}" if isinstance(value, dict): d[key] = recurse_jsonify(value) diff --git a/wheat/wallet/cc_wallet/cc_wallet.py b/wheat/wallet/cc_wallet/cc_wallet.py index a6af69f..e62530d 100644 --- a/wheat/wallet/cc_wallet/cc_wallet.py +++ b/wheat/wallet/cc_wallet/cc_wallet.py @@ -74,7 +74,10 @@ async def create_new_cc( self.base_inner_puzzle_hash = None self.standard_wallet = wallet self.log = logging.getLogger(__name__) - + std_wallet_id = self.standard_wallet.wallet_id + bal = await wallet_state_manager.get_confirmed_balance_for_wallet(std_wallet_id, None) + if amount > bal: + raise ValueError("Not enough balance") self.wallet_state_manager = wallet_state_manager self.cc_info = CCInfo(None, []) @@ -90,6 +93,9 @@ async def create_new_cc( except Exception: await wallet_state_manager.user_store.delete_wallet(self.id()) raise + if spend_bundle is None: + await wallet_state_manager.user_store.delete_wallet(self.id()) + raise ValueError("Failed to create spend.") await self.wallet_state_manager.add_new_wallet(self, self.id()) @@ -250,7 +256,6 @@ async def get_max_send_amount(self, records=None): self.wallet_state_manager.constants.MAX_BLOCK_COST_CLVM, cost_per_byte=self.wallet_state_manager.constants.COST_PER_BYTE, safe_mode=True, - rust_checker=True, ) cost_result: uint64 = calculate_cost_of_program( program.program, result, self.wallet_state_manager.constants.COST_PER_BYTE diff --git a/wheat/wallet/derive_keys.py b/wheat/wallet/derive_keys.py index 49f220f..8fb6def 100644 --- a/wheat/wallet/derive_keys.py +++ b/wheat/wallet/derive_keys.py @@ -7,7 +7,7 @@ # EIP 2334 bls key derivation # 
https://eips.ethereum.org/EIPS/eip-2334 # 12381 = bls spec number -# 8444 = Wheat blockchain number and port number +# 21333 = Wheat blockchain number and port number # 0, 1, 2, 3, 4, 5, 6 farmer, pool, wallet, local, backup key, singleton, pooling authentication key numbers diff --git a/wheat/wallet/did_wallet/did_info.py b/wheat/wallet/did_wallet/did_info.py index 482863b..62c1fb6 100644 --- a/wheat/wallet/did_wallet/did_info.py +++ b/wheat/wallet/did_wallet/did_info.py @@ -12,7 +12,7 @@ @dataclass(frozen=True) @streamable class DIDInfo(Streamable): - origin_coin: Optional[Coin] # puzzlehash of this coin is our DID + origin_coin: Optional[Coin] # Coin ID of this coin is our DID backup_ids: List[bytes] num_of_backup_ids_needed: uint64 parent_info: List[Tuple[bytes32, Optional[LineageProof]]] # {coin.name(): LineageProof} @@ -20,3 +20,4 @@ class DIDInfo(Streamable): temp_coin: Optional[Coin] # partially recovered wallet uses these to hold info temp_puzhash: Optional[bytes32] temp_pubkey: Optional[bytes] + sent_recovery_transaction: bool diff --git a/wheat/wallet/did_wallet/did_wallet.py b/wheat/wallet/did_wallet/did_wallet.py index d2edaca..2b45f3b 100644 --- a/wheat/wallet/did_wallet/did_wallet.py +++ b/wheat/wallet/did_wallet/did_wallet.py @@ -44,7 +44,7 @@ class DIDWallet: async def create_new_did_wallet( wallet_state_manager: Any, wallet: Wallet, - amount: int, + amount: uint64, backups_ids: List = [], num_of_backup_ids_needed: uint64 = None, name: str = None, @@ -57,7 +57,10 @@ async def create_new_did_wallet( self.base_inner_puzzle_hash = None self.standard_wallet = wallet self.log = logging.getLogger(name if name else __name__) - + std_wallet_id = self.standard_wallet.wallet_id + bal = await wallet_state_manager.get_confirmed_balance_for_wallet_already_locked(std_wallet_id) + if amount > bal: + raise ValueError("Not enough balance") if amount & 1 == 0: raise ValueError("DID amount must be odd number") self.wallet_state_manager = wallet_state_manager @@ -65,7 
+68,7 @@ async def create_new_did_wallet( num_of_backup_ids_needed = uint64(len(backups_ids)) if num_of_backup_ids_needed > len(backups_ids): raise ValueError("Cannot require more IDs than are known.") - self.did_info = DIDInfo(None, backups_ids, num_of_backup_ids_needed, [], None, None, None, None) + self.did_info = DIDInfo(None, backups_ids, num_of_backup_ids_needed, [], None, None, None, None, False) info_as_string = json.dumps(self.did_info.to_json_dict()) self.wallet_info = await wallet_state_manager.user_store.create_wallet( "DID Wallet", WalletType.DISTRIBUTED_ID.value, info_as_string @@ -73,15 +76,23 @@ async def create_new_did_wallet( if self.wallet_info is None: raise ValueError("Internal Error") self.wallet_id = self.wallet_info.id - bal = await self.wallet_state_manager.get_confirmed_balance_for_wallet(self.standard_wallet.id()) + std_wallet_id = self.standard_wallet.wallet_id + bal = await wallet_state_manager.get_confirmed_balance_for_wallet_already_locked(std_wallet_id) if amount > bal: raise ValueError("Not enough balance") - spend_bundle = await self.generate_new_decentralised_id(uint64(amount)) + try: + spend_bundle = await self.generate_new_decentralised_id(uint64(amount)) + except Exception: + await wallet_state_manager.user_store.delete_wallet(self.id(), False) + raise + if spend_bundle is None: - raise ValueError("failed to generate ID for wallet") + await wallet_state_manager.user_store.delete_wallet(self.id(), False) + raise ValueError("Failed to create spend.") await self.wallet_state_manager.add_new_wallet(self, self.wallet_info.id) assert self.did_info.origin_coin is not None + assert self.did_info.current_inner is not None did_puzzle_hash = did_wallet_puzzles.create_fullpuz( self.did_info.current_inner, self.did_info.origin_coin.name() ).get_tree_hash() @@ -137,7 +148,7 @@ async def create_new_did_wallet_from_recovery( self.standard_wallet = wallet self.log = logging.getLogger(name if name else __name__) self.wallet_state_manager = 
wallet_state_manager - self.did_info = DIDInfo(None, [], uint64(0), [], None, None, None, None) + self.did_info = DIDInfo(None, [], uint64(0), [], None, None, None, None, False) info_as_string = json.dumps(self.did_info.to_json_dict()) self.wallet_info = await wallet_state_manager.user_store.create_wallet( "DID Wallet", WalletType.DISTRIBUTED_ID.value, info_as_string @@ -212,22 +223,7 @@ async def get_pending_change_balance(self) -> uint64: async def get_unconfirmed_balance(self, record_list=None) -> uint64: confirmed = await self.get_confirmed_balance(record_list) - unconfirmed_tx: List[TransactionRecord] = await self.wallet_state_manager.tx_store.get_unconfirmed_for_wallet( - self.wallet_info.id - ) - addition_amount = 0 - removal_amount = 0 - - for record in unconfirmed_tx: - if record.type == TransactionType.INCOMING_TX: - addition_amount += record.amount - else: - removal_amount += record.amount - - result = confirmed - removal_amount + addition_amount - - self.log.info(f"Unconfirmed balance for did wallet is {result}") - return uint64(result) + return await self.wallet_state_manager._get_unconfirmed_balance(self.id(), confirmed) async def select_coins(self, amount, exclude: List[Coin] = None) -> Optional[Set[Coin]]: """Returns a set of coins that can be used for generating a new transaction.""" @@ -279,6 +275,8 @@ async def coin_added(self, coin: Coin, _: uint32): """Notification from wallet state manager that wallet has been received.""" self.log.info("DID wallet has been notified that coin was added") inner_puzzle = await self.inner_puzzle_for_did_puzzle(coin.puzzle_hash) + if self.did_info.temp_coin is not None: + self.wallet_state_manager.state_changed("did_coin_added", self.wallet_info.id) new_info = DIDInfo( self.did_info.origin_coin, self.did_info.backup_ids, @@ -288,6 +286,7 @@ async def coin_added(self, coin: Coin, _: uint32): None, None, None, + False, ) await self.save_info(new_info, True) @@ -331,8 +330,8 @@ async def load_backup(self, filename: 
str): num_of_backup_ids_needed = uint64(int(details[5])) if num_of_backup_ids_needed > len(backup_ids): raise Exception - innerpuz = Program.from_bytes(bytes.fromhex(details[4])) - did_info = DIDInfo( + innerpuz: Program = Program.from_bytes(bytes.fromhex(details[4])) + did_info: DIDInfo = DIDInfo( origin, backup_ids, num_of_backup_ids_needed, @@ -341,6 +340,7 @@ async def load_backup(self, filename: str): None, None, None, + False, ) await self.save_info(did_info, False) await self.wallet_state_manager.update_wallet_puzzle_hashes(self.wallet_info.id) @@ -366,7 +366,7 @@ async def load_backup(self, filename: str): assert additions is not None assert isinstance(additions, RespondAdditions) # All additions in this block here: - new_puzhash = (await self.get_new_puzzle()).get_tree_hash() + new_puzhash = await self.get_new_inner_hash() new_pubkey = bytes( (await self.wallet_state_manager.get_unused_derivation_record(self.wallet_info.id)).pubkey ) @@ -376,6 +376,7 @@ async def load_backup(self, filename: str): puzzle_hash, coins = puzzle_list_coin for coin in coins: all_parents.add(coin.parent_coin_info) + parent_info = None for puzzle_list_coin in additions.coins: puzzle_hash, coins = puzzle_list_coin if puzzle_hash == full_puzzle_hash: @@ -387,20 +388,40 @@ async def load_backup(self, filename: str): coin.amount, ) await self.add_parent(coin.name(), future_parent, False) - if coin.name() in all_parents: - continue - did_info = DIDInfo( - origin, - backup_ids, - num_of_backup_ids_needed, - self.did_info.parent_info, - innerpuz, - coin, - new_puzhash, - new_pubkey, - ) - await self.save_info(did_info, False) - + if coin.name() not in all_parents: + did_info = DIDInfo( + origin, + backup_ids, + num_of_backup_ids_needed, + self.did_info.parent_info, + innerpuz, + coin, + new_puzhash, + new_pubkey, + False, + ) + await self.save_info(did_info, False) + removal_request = wallet_protocol.RequestRemovals(sub_height, header_hash, None) + removals_response = await 
node.request_removals(removal_request) + for coin_tuple in removals_response.coins: + if coin_tuple[0] == coin.parent_coin_info: + puzzle_solution_request = wallet_protocol.RequestPuzzleSolution( + coin.parent_coin_info, sub_height + ) + response = await node.request_puzzle_solution(puzzle_solution_request) + req_puz_sol = response.response + assert req_puz_sol.puzzle is not None + parent_innerpuz = did_wallet_puzzles.get_innerpuzzle_from_puzzle(req_puz_sol.puzzle) + assert parent_innerpuz is not None + parent_info = LineageProof( + coin_tuple[1].parent_coin_info, + parent_innerpuz.get_tree_hash(), + coin_tuple[1].amount, + ) + await self.add_parent(coin.parent_coin_info, parent_info, False) + break + + assert parent_info is not None return None except Exception as e: raise e @@ -491,7 +512,7 @@ async def create_update_spend(self): return spend_bundle # The message spend can send messages and also change your innerpuz - async def create_message_spend(self, messages: List[bytes], new_innerpuzhash: Optional[bytes32] = None): + async def create_message_spend(self, messages: List[Tuple[int, bytes]], new_innerpuzhash: Optional[bytes32] = None): assert self.did_info.current_inner is not None assert self.did_info.origin_coin is not None coins = await self.select_coins(1) @@ -638,7 +659,8 @@ async def create_attestment( innermessage = message.get_tree_hash() innerpuz: Program = self.did_info.current_inner # innerpuz solution is (mode, amount, message, new_inner_puzhash) - innersol = Program.to([1, coin.amount, [innermessage], innerpuz.get_tree_hash()]) + messages = [(0, innermessage)] + innersol = Program.to([1, coin.amount, messages, innerpuz.get_tree_hash()]) # full solution is (corehash parent_info my_amount innerpuz_reveal solution) full_puzzle: Program = did_wallet_puzzles.create_fullpuz( @@ -663,7 +685,7 @@ async def create_attestment( message_spend = did_wallet_puzzles.create_spend_for_message(coin.name(), recovering_coin_name, newpuz, pubkey) message_spend_bundle = 
SpendBundle([message_spend], AugSchemeMPL.aggregate([])) # sign for AGG_SIG_ME - to_sign = Program.to([innerpuz.get_tree_hash(), coin.amount, [innermessage]]).get_tree_hash() + to_sign = Program.to([innerpuz.get_tree_hash(), coin.amount, messages]).get_tree_hash() message = to_sign + coin.name() + self.wallet_state_manager.constants.AGG_SIG_ME_ADDITIONAL_DATA pubkey = did_wallet_puzzles.get_pubkey_from_innerpuz(innerpuz) index = await self.wallet_state_manager.puzzle_store.index_for_pubkey(pubkey) @@ -755,15 +777,15 @@ async def load_attest_files_for_recovery_spend(self, filenames): async def recovery_spend( self, coin: Coin, - puzhash: bytes, + puzhash: bytes32, parent_innerpuzhash_amounts_for_recovery_ids: List[Tuple[bytes, bytes, int]], pubkey: G1Element, spend_bundle: SpendBundle, ) -> SpendBundle: assert self.did_info.origin_coin is not None - # innersol is (mode amount new_puz my_puzhash parent_innerpuzhash_amounts_for_recovery_ids pubkey recovery_list_reveal) # noqa - innersol = Program.to( + # innersol is mode new_amount message new_inner_puzhash parent_innerpuzhash_amounts_for_recovery_ids pubkey recovery_list_reveal) # noqa + innersol: Program = Program.to( [ 2, coin.amount, @@ -775,7 +797,8 @@ async def recovery_spend( ] ) # full solution is (parent_info my_amount solution) - innerpuz = self.did_info.current_inner + assert self.did_info.current_inner is not None + innerpuz: Program = self.did_info.current_inner full_puzzle: Program = did_wallet_puzzles.create_fullpuz( innerpuz, self.did_info.origin_coin.name(), @@ -828,6 +851,18 @@ async def recovery_spend( name=token_bytes(), ) await self.standard_wallet.push_transaction(did_record) + new_did_info = DIDInfo( + self.did_info.origin_coin, + self.did_info.backup_ids, + self.did_info.num_of_backup_ids_needed, + self.did_info.parent_info, + self.did_info.current_inner, + self.did_info.temp_coin, + self.did_info.temp_puzhash, + self.did_info.temp_pubkey, + True, + ) + await self.save_info(new_did_info, True) 
return spend_bundle async def get_new_innerpuz(self) -> Program: @@ -929,6 +964,7 @@ async def generate_new_decentralised_id(self, amount: uint64) -> Optional[SpendB None, None, None, + False, ) await self.save_info(did_info, False) eve_spend = await self.generate_eve_spend(eve_coin, did_full_puz, did_inner) @@ -939,7 +975,7 @@ async def generate_eve_spend(self, coin: Coin, full_puzzle: Program, innerpuz: P assert self.did_info.origin_coin is not None # innerpuz solution is (mode amount message new_puzhash) innersol = Program.to([1, coin.amount, [], innerpuz.get_tree_hash()]) - # full solution is (parent_info my_amount innersolution) + # full solution is (lineage_proof my_amount inner_solution) fullsol = Program.to( [ [self.did_info.origin_coin.parent_coin_info, self.did_info.origin_coin.amount], @@ -990,6 +1026,7 @@ async def add_parent(self, name: bytes32, parent: Optional[LineageProof], in_tra self.did_info.temp_coin, self.did_info.temp_puzhash, self.did_info.temp_pubkey, + self.did_info.sent_recovery_transaction, ) await self.save_info(did_info, in_transaction) @@ -1005,6 +1042,7 @@ async def update_recovery_list(self, recover_list: List[bytes], num_of_backup_id self.did_info.temp_coin, self.did_info.temp_puzhash, self.did_info.temp_pubkey, + self.did_info.sent_recovery_transaction, ) await self.save_info(did_info, False) await self.wallet_state_manager.update_wallet_puzzle_hashes(self.wallet_info.id) diff --git a/wheat/wallet/did_wallet/did_wallet_puzzles.py b/wheat/wallet/did_wallet/did_wallet_puzzles.py index 52875d2..dc12060 100644 --- a/wheat/wallet/did_wallet/did_wallet_puzzles.py +++ b/wheat/wallet/did_wallet/did_wallet_puzzles.py @@ -22,9 +22,11 @@ def create_innerpuz(pubkey: bytes, identities: List[bytes], num_of_backup_ids_ne return DID_INNERPUZ_MOD.curry(pubkey, backup_ids_hash, num_of_backup_ids_needed) -def create_fullpuz(innerpuz, genesis_id) -> Program: +def create_fullpuz(innerpuz: Program, genesis_id: bytes32) -> Program: mod_hash = 
SINGLETON_TOP_LAYER_MOD.get_tree_hash() - return SINGLETON_TOP_LAYER_MOD.curry(mod_hash, genesis_id, LAUNCHER_PUZZLE.get_tree_hash(), innerpuz) + # singleton_struct = (MOD_HASH . (LAUNCHER_ID . LAUNCHER_PUZZLE_HASH)) + singleton_struct = Program.to((mod_hash, (genesis_id, LAUNCHER_PUZZLE.get_tree_hash()))) + return SINGLETON_TOP_LAYER_MOD.curry(singleton_struct, innerpuz) def get_pubkey_from_innerpuz(innerpuz: Program) -> G1Element: @@ -71,8 +73,8 @@ def get_innerpuzzle_from_puzzle(puzzle: Program) -> Optional[Program]: inner_f, args = r if not is_did_core(inner_f): return None - mod_hash, genesis_id, inner_puzzle = list(args.as_iter()) - return inner_puzzle + SINGLETON_STRUCT, INNER_PUZZLE = list(args.as_iter()) + return INNER_PUZZLE def create_recovery_message_puzzle(recovering_coin_id: bytes32, newpuz: bytes32, pubkey: G1Element): diff --git a/wheat/wallet/key_val_store.py b/wheat/wallet/key_val_store.py index 47711d3..e739659 100644 --- a/wheat/wallet/key_val_store.py +++ b/wheat/wallet/key_val_store.py @@ -20,9 +20,6 @@ async def create(cls, db_wrapper: DBWrapper): self = cls() self.db_wrapper = db_wrapper self.db_connection = db_wrapper.db - await self.db_connection.execute("pragma journal_mode=wal") - await self.db_connection.execute("pragma synchronous=2") - await self.db_connection.execute( ("CREATE TABLE IF NOT EXISTS key_val_store(" " key text PRIMARY KEY," " value text)") ) diff --git a/wheat/wallet/puzzles/did_innerpuz.clvm b/wheat/wallet/puzzles/did_innerpuz.clvm index c409796..4671de0 100644 --- a/wheat/wallet/puzzles/did_innerpuz.clvm +++ b/wheat/wallet/puzzles/did_innerpuz.clvm @@ -1,16 +1,21 @@ +; The DID innerpuzzle is designed to sit inside the singleton layer and provide functionality related to being an identity. +; At the moment the two pieces of functionality are recovery and message creation. 
+; A DID's ID is it's Singleton ID +; Recovery is based around having a list of known other DIDs which can send messages approving you change the innerpuzzle of your DID singleton + (mod ( - MY_PUBKEY - RECOVERY_DID_LIST_HASH - NUM_VERIFICATIONS_REQUIRED - Truths - mode - amount - message - new_inner_puzhash - parent_innerpuzhash_amounts_for_recovery_ids - pubkey - recovery_list_reveal + MY_PUBKEY ; the public key of the owner used for signing transactions + RECOVERY_DID_LIST_HASH ; the list of DIDs that can send messages to you for recovery we store only the hash so that we don't have to reveal every time we make a message spend + NUM_VERIFICATIONS_REQUIRED ; how many of the above list are required for a recovery + Truths ; Truths are sent from the singleton layer + mode ; this indicates which spend mode we want. Create message, recover, or self-destruct + new_amount ; DIDs can receive payments so when we recreate ourselves sometimes we want to change our amount + message ; this is a list of messages when creating a message spend, or a new puzhash when recovering or self destructing + new_inner_puzhash ; this is used during the message creation spend to optionally give ourselves a new inner puzzle - this is useful for updating our recovery list + parent_innerpuzhash_amounts_for_recovery_ids ; during a recovery we need extra information about our recovery list coins + pubkey ; this is the new pubkey used for a recovery + recovery_list_reveal ; this is the reveal of the stored list of DIDs approved for recovery ) ;message is the new puzzle in the recovery and standard spend cases @@ -21,17 +26,6 @@ (include curry-and-treehash.clinc) (include singleton_truths.clib) - (defun is-in-list (atom items) - ;; returns 1 iff `atom` is in the list of `items` - (if items - (if (= atom (f items)) - 1 - (is-in-list atom (r items)) - ) - 0 - ) - ) - ; takes a lisp tree and returns the hash of it (defun sha256tree1 (TREE) (if (l TREE) @@ -40,36 +34,45 @@ ) ) - ;recovery message 
module - gets values curried in to make the puzzle - ;TODO - this should probably be imported + ; recovery message module - gets values curried in to make the puzzle (defun make_message_puzzle (recovering_coin newpuz pubkey) (qq (q . (((unquote CREATE_COIN_ANNOUNCEMENT) (unquote recovering_coin)) ((unquote AGG_SIG_UNSAFE) (unquote pubkey) (unquote newpuz))))) ) + ; this function creates the assert announcement for each message coin approving a recovery (defun-inline create_consume_message (coin_id my_id new_innerpuz pubkey) (list ASSERT_COIN_ANNOUNCEMENT (sha256 (sha256 coin_id (sha256tree1 (make_message_puzzle my_id new_innerpuz pubkey))) my_id)) ) -(defun-inline create_coin_ID_for_recovery (MOD_HASH LAUNCHER_ID LAUNCHER_PUZZLE_HASH parent innerpuzhash amount) - (sha256 parent (calculate_full_puzzle_hash MOD_HASH LAUNCHER_ID LAUNCHER_PUZZLE_HASH innerpuzhash) amount) + ; this function calculates a coin ID given the inner puzzle and singleton information + (defun create_coin_ID_for_recovery (SINGLETON_STRUCT launcher_id parent innerpuzhash amount) + (sha256 parent (calculate_full_puzzle_hash (c (f SINGLETON_STRUCT) (c launcher_id (r (r SINGLETON_STRUCT)))) innerpuzhash) amount) ) + ;; return the full puzzlehash for a singleton with the innerpuzzle curried in ; puzzle-hash-of-curried-function is imported from curry-and-treehash.clinc - (defun-inline calculate_full_puzzle_hash (MOD_HASH LAUNCHER_ID LAUNCHER_PUZZLE_HASH inner_puzzle_hash) - (puzzle-hash-of-curried-function MOD_HASH inner_puzzle_hash (sha256 1 LAUNCHER_PUZZLE_HASH) (sha256 1 LAUNCHER_ID) (sha256 1 MOD_HASH)) + (defun-inline calculate_full_puzzle_hash (SINGLETON_STRUCT inner_puzzle_hash) + (puzzle-hash-of-curried-function (f SINGLETON_STRUCT) + inner_puzzle_hash + (sha256tree1 SINGLETON_STRUCT) + ) ) (defmacro create_new_coin (amount new_puz) (qq (c CREATE_COIN (c (unquote new_puz) (c (unquote amount) ())))) ) - (defun check_messages_from_identities (MOD_HASH LAUNCHER_PUZZLE_HASH 
num_verifications_required identities my_id output new_puz parent_innerpuzhash_amounts_for_recovery_ids pubkey num_verifications) + ; this loops over our identities to check list, and checks if we have been given parent information for this identity + ; the reason for this is because we might only require 3/5 of the IDs give approval messages for a recovery + ; if we have the information for an identity then we create a consume message using that information + + (defun check_messages_from_identities (SINGLETON_STRUCT num_verifications_required identities my_id output new_puz parent_innerpuzhash_amounts_for_recovery_ids pubkey num_verifications) (if identities (if (f parent_innerpuzhash_amounts_for_recovery_ids) + ; if we have parent information then we should create a consume coin condition (check_messages_from_identities - MOD_HASH - LAUNCHER_PUZZLE_HASH + SINGLETON_STRUCT num_verifications_required (r identities) my_id @@ -77,9 +80,8 @@ (create_consume_message ; create coin_id from DID (create_coin_ID_for_recovery - MOD_HASH + SINGLETON_STRUCT (f identities) - LAUNCHER_PUZZLE_HASH (f (f parent_innerpuzhash_amounts_for_recovery_ids)) (f (r (f parent_innerpuzhash_amounts_for_recovery_ids))) (f (r (r (f parent_innerpuzhash_amounts_for_recovery_ids))))) @@ -92,9 +94,9 @@ pubkey (+ num_verifications 1) ) + ; if no parent information found for this identity, move on to next in list (check_messages_from_identities - MOD_HASH - LAUNCHER_PUZZLE_HASH + SINGLETON_STRUCT (r identities) my_id output @@ -109,15 +111,25 @@ (c (list AGG_SIG_UNSAFE pubkey new_puz) output) (if (= num_verifications num_verifications_required) (c (list AGG_SIG_UNSAFE pubkey new_puz) output) - (x "not enough verifications") + (x) ) ) ) ) + ; for a list of messages in the format (type . 
message) create a message + ; type 0 is 0 value coin + ; type 1 is coin announcement + ; type 2 is puzzle announcement (defun create_messages (messages) (if messages - (c (list CREATE_COIN (f messages) 0) (create_messages (r messages))) + (c + (if (f (f messages)) + (list (if (= (f (f messages)) 1) CREATE_COIN_ANNOUNCEMENT CREATE_PUZZLE_ANNOUNCEMENT) (r (f messages))) + (list CREATE_COIN (r (f messages)) 0) + ) + (create_messages (r messages)) + ) () ) ) @@ -132,19 +144,19 @@ (if mode (if (= mode 1) ; mode one - create messages and recreate singleton - (c (list CREATE_COIN new_inner_puzhash amount) (c (list AGG_SIG_ME MY_PUBKEY (sha256tree1 (list new_inner_puzhash amount message))) (create_messages message))) + (c (list CREATE_COIN new_inner_puzhash new_amount) (c (list AGG_SIG_ME MY_PUBKEY (sha256tree1 (list new_inner_puzhash new_amount message))) (create_messages message))) ; mode two - recovery ; check that recovery list is not empty (if recovery_list_reveal (if (= (sha256tree1 recovery_list_reveal) RECOVERY_DID_LIST_HASH) - (check_messages_from_identities (singleton_mod_hash_truth Truths) (singleton_launcher_puzzle_hash_truth Truths) NUM_VERIFICATIONS_REQUIRED recovery_list_reveal (my_id_truth Truths) (list (create_new_coin amount message)) message parent_innerpuzhash_amounts_for_recovery_ids pubkey 0) + (check_messages_from_identities (singleton_struct_truth Truths) NUM_VERIFICATIONS_REQUIRED recovery_list_reveal (my_id_truth Truths) (list (create_new_coin new_amount message)) message parent_innerpuzhash_amounts_for_recovery_ids pubkey 0) (x) ) (x) ) ) ; mode zero - exit spend - (list (list CREATE_COIN 0x00 -113) (list CREATE_COIN message amount) (list AGG_SIG_ME MY_PUBKEY (sha256tree1 (list amount message)))) + (list (list CREATE_COIN 0x00 -113) (list CREATE_COIN message new_amount) (list AGG_SIG_ME MY_PUBKEY (sha256tree1 (list new_amount message)))) ) ) diff --git a/wheat/wallet/puzzles/did_innerpuz.clvm.hex b/wheat/wallet/puzzles/did_innerpuz.clvm.hex index 
968888f..57e1b6d 100644 --- a/wheat/wallet/puzzles/did_innerpuz.clvm.hex +++ b/wheat/wallet/puzzles/did_innerpuz.clvm.hex @@ -1 +1 @@ -ff02ffff01ff02ffff03ff5fffff01ff02ffff03ffff09ff5fffff010180ffff01ff04ffff04ff24ffff04ff8202ffffff04ff81bfff80808080ffff04ffff04ff20ffff04ff05ffff04ffff02ff3effff04ff02ffff04ffff04ff8202ffffff04ff81bfffff04ff82017fff80808080ff80808080ff80808080ffff02ff26ffff04ff02ffff04ff82017fff808080808080ffff01ff02ffff03ff8217ffffff01ff02ffff03ffff09ffff02ff3effff04ff02ffff04ff8217ffff80808080ff0b80ffff01ff02ff3affff04ff02ffff04ff8202efffff04ff820befffff04ff17ffff04ff8217ffffff04ff818fffff04ffff04ffff04ff24ffff04ff82017fffff04ff81bfff80808080ff8080ffff04ff82017fffff04ff8205ffffff04ff820bffffff01ff80808080808080808080808080ffff01ff088080ff0180ffff01ff088080ff018080ff0180ffff01ff04ffff04ff24ffff01ff00ff818f8080ffff04ffff04ff24ffff04ff82017fffff04ff81bfff80808080ffff04ffff04ff20ffff04ff05ffff04ffff02ff3effff04ff02ffff04ffff04ff81bfffff04ff82017fff808080ff80808080ff80808080ff8080808080ff0180ffff04ffff01ffffffff3231ff3d02ffff333cff0401ffffff0102ffff02ffff03ff05ffff01ff02ff2affff04ff02ffff04ff0dffff04ffff0bff32ffff0bff3cff2c80ffff0bff32ffff0bff32ffff0bff3cff2280ff0980ffff0bff32ff0bffff0bff3cff8080808080ff8080808080ffff010b80ff0180ff02ffff03ff2fffff01ff02ffff03ff8204ffffff01ff02ff3affff04ff02ffff04ff05ffff04ff0bffff04ff17ffff04ff6fffff04ff5fffff04ffff04ffff04ff28ffff04ffff0bffff0bffff0bff8208ffffff02ff2effff04ff02ffff04ff05ffff04ff8214ffffff04ffff0bffff0101ff0b80ffff04ffff0bffff0101ff4f80ffff04ffff0bffff0101ff0580ff8080808080808080ff822cff80ffff02ff3effff04ff02ffff04ffff02ff36ffff04ff02ffff04ff5fffff04ff82017fffff04ff8205ffff808080808080ff8080808080ff5f80ff808080ff81bf80ffff04ff82017fffff04ff8206ffffff04ff8205ffffff04ffff10ff820bffffff010180ff80808080808080808080808080ffff01ff02ff3affff04ff02ffff04ff05ffff04ff0bffff04ff6fffff04ff5fffff04ff81bfffff04ff82017fffff04ff8206ffffff04ff8205ffffff04ff820bffff80808080808080808080808080ff0180ffff01ff02ffff03ffff15f
f820bffff1780ffff01ff04ffff04ff30ffff04ff8205ffffff04ff82017fff80808080ff81bf80ffff01ff02ffff03ffff09ff820bffff1780ffff01ff04ffff04ff30ffff04ff8205ffffff04ff82017fff80808080ff81bf80ffff01ff08ffff01986e6f7420656e6f75676820766572696669636174696f6e738080ff018080ff018080ff0180ffffff02ffff03ff05ffff01ff04ffff04ff24ffff04ff09ffff01ff80808080ffff02ff26ffff04ff02ffff04ff0dff8080808080ff8080ff0180ff04ffff0101ffff04ffff04ff34ffff04ff05ff808080ffff04ffff04ff30ffff04ff17ffff04ff0bff80808080ff80808080ffff0bff32ffff0bff3cff3880ffff0bff32ffff0bff32ffff0bff3cff2280ff0580ffff0bff32ffff02ff2affff04ff02ffff04ff07ffff04ffff0bff3cff3c80ff8080808080ffff0bff3cff8080808080ff02ffff03ffff07ff0580ffff01ff0bffff0102ffff02ff3effff04ff02ffff04ff09ff80808080ffff02ff3effff04ff02ffff04ff0dff8080808080ffff01ff0bffff0101ff058080ff0180ff018080 \ No newline at end of file +ff02ffff01ff02ffff03ff5fffff01ff02ffff03ffff09ff5fffff010180ffff01ff04ffff04ff24ffff04ff8202ffffff04ff81bfff80808080ffff04ffff04ff20ffff04ff05ffff04ffff02ff7effff04ff02ffff04ffff04ff8202ffffff04ff81bfffff04ff82017fff80808080ff80808080ff80808080ffff02ff36ffff04ff02ffff04ff82017fff808080808080ffff01ff02ffff03ff8217ffffff01ff02ffff03ffff09ffff02ff7effff04ff02ffff04ff8217ffff80808080ff0b80ffff01ff02ff3affff04ff02ffff04ff8201efffff04ff17ffff04ff8217ffffff04ff818fffff04ffff04ffff04ff24ffff04ff82017fffff04ff81bfff80808080ff8080ffff04ff82017fffff04ff8205ffffff04ff820bffffff01ff808080808080808080808080ffff01ff088080ff0180ffff01ff088080ff018080ff0180ffff01ff04ffff04ff24ffff01ff00ff818f8080ffff04ffff04ff24ffff04ff82017fffff04ff81bfff80808080ffff04ffff04ff20ffff04ff05ffff04ffff02ff7effff04ff02ffff04ffff04ff81bfffff04ff82017fff808080ff80808080ff80808080ff8080808080ff0180ffff04ffff01ffffffff3231ff3d02ffff333cff3eff0401ffffff0102ffff02ffff03ff05ffff01ff02ff2affff04ff02ffff04ff0dffff04ffff0bff32ffff0bff7cff5c80ffff0bff32ffff0bff32ffff0bff7cff2280ff0980ffff0bff32ff0bffff0bff7cff8080808080ff8080808080ffff010b80ff0180ff02ffff03ff17ffff01ff02ffff03ff820
27fffff01ff02ff3affff04ff02ffff04ff05ffff04ff0bffff04ff37ffff04ff2fffff04ffff04ffff04ff28ffff04ffff0bffff0bffff02ff26ffff04ff02ffff04ff05ffff04ff27ffff04ff82047fffff04ff820a7fffff04ff82167fff8080808080808080ffff02ff7effff04ff02ffff04ffff02ff2effff04ff02ffff04ff2fffff04ff81bfffff04ff8202ffff808080808080ff8080808080ff2f80ff808080ff5f80ffff04ff81bfffff04ff82037fffff04ff8202ffffff04ffff10ff8205ffffff010180ff808080808080808080808080ffff01ff02ff3affff04ff02ffff04ff05ffff04ff37ffff04ff2fffff04ff5fffff04ff81bfffff04ff82037fffff04ff8202ffffff04ff8205ffff808080808080808080808080ff0180ffff01ff02ffff03ffff15ff8205ffff0b80ffff01ff04ffff04ff30ffff04ff8202ffffff04ff81bfff80808080ff5f80ffff01ff02ffff03ffff09ff8205ffff0b80ffff01ff04ffff04ff30ffff04ff8202ffffff04ff81bfff80808080ff5f80ffff01ff088080ff018080ff018080ff0180ffffff0bff17ffff02ff5effff04ff02ffff04ff09ffff04ff2fffff04ffff02ff7effff04ff02ffff04ffff04ff09ffff04ff0bff1d8080ff80808080ff808080808080ff5f80ff02ffff03ff05ffff01ff04ffff02ffff03ff11ffff01ff04ffff02ffff03ffff09ff11ffff010180ffff0134ffff012c80ff0180ffff04ff19ff808080ffff01ff04ff24ffff04ff19ffff01ff8080808080ff0180ffff02ff36ffff04ff02ffff04ff0dff8080808080ff8080ff0180ffff04ffff0101ffff04ffff04ff34ffff04ff05ff808080ffff04ffff04ff30ffff04ff17ffff04ff0bff80808080ff80808080ffff0bff32ffff0bff7cff3880ffff0bff32ffff0bff32ffff0bff7cff2280ff0580ffff0bff32ffff02ff2affff04ff02ffff04ff07ffff04ffff0bff7cff7c80ff8080808080ffff0bff7cff8080808080ff02ffff03ffff07ff0580ffff01ff0bffff0102ffff02ff7effff04ff02ffff04ff09ff80808080ffff02ff7effff04ff02ffff04ff0dff8080808080ffff01ff0bffff0101ff058080ff0180ff018080 \ No newline at end of file diff --git a/wheat/wallet/puzzles/did_innerpuz.clvm.hex.sha256tree b/wheat/wallet/puzzles/did_innerpuz.clvm.hex.sha256tree index 87025ac..552facb 100644 --- a/wheat/wallet/puzzles/did_innerpuz.clvm.hex.sha256tree +++ b/wheat/wallet/puzzles/did_innerpuz.clvm.hex.sha256tree @@ -1 +1 @@ -f2356bc00a27abf46c72b809ba7d1d53bde533d94a7a3da8954155afe54304c4 
+ef41902d9964f6050f87de98b5c4e34512b7d2abded3fe700f7850ff20323bf2 diff --git a/wheat/wallet/puzzles/pool_member_innerpuz.clvm b/wheat/wallet/puzzles/pool_member_innerpuz.clvm index 182d62f..1d5e84d 100644 --- a/wheat/wallet/puzzles/pool_member_innerpuz.clvm +++ b/wheat/wallet/puzzles/pool_member_innerpuz.clvm @@ -19,7 +19,7 @@ ; Escaping if pool_reward_height is () ; p1 is pool_reward_amount if absorbing money - ; p1 is key_value_list if escaping + ; p1 is extra_data key_value_list if escaping ; pool_reward_amount is the value of the coin reward - this is passed in so that this puzzle will still work after halvenings ; pool_reward_height is the block height that the reward was generated at. This is used to calculate the coin ID. diff --git a/wheat/wallet/puzzles/pool_waitingroom_innerpuz.clvm b/wheat/wallet/puzzles/pool_waitingroom_innerpuz.clvm index 545c1f3..609b201 100644 --- a/wheat/wallet/puzzles/pool_waitingroom_innerpuz.clvm +++ b/wheat/wallet/puzzles/pool_waitingroom_innerpuz.clvm @@ -20,7 +20,7 @@ ; p1 is pool_reward_amount - the value of the coin reward - this is passed in so that this puzzle will still work after halvenings ; p2 is pool_reward_height - the block height that the reward was generated at. This is used to calculate the coin ID. 
; if spend_type is 1 - ; p1 is key_value_list - signed extra data that the wallet may want to publicly announce for syncing purposes + ; p1 is extra_data key_value_list - signed extra data that the wallet may want to publicly announce for syncing purposes ; p2 is destination_puzhash - the location that the escape spend wants to create itself to (include condition_codes.clvm) diff --git a/wheat/wallet/trading/trade_store.py b/wheat/wallet/trading/trade_store.py index ff91639..196f67a 100644 --- a/wheat/wallet/trading/trade_store.py +++ b/wheat/wallet/trading/trade_store.py @@ -27,9 +27,6 @@ async def create(cls, db_wrapper: DBWrapper, cache_size: uint32 = uint32(600000) self.cache_size = cache_size self.db_wrapper = db_wrapper self.db_connection = db_wrapper.db - - await self.db_connection.execute("pragma journal_mode=wal") - await self.db_connection.execute("pragma synchronous=2") await self.db_connection.execute( ( "CREATE TABLE IF NOT EXISTS trade_records(" diff --git a/wheat/wallet/util/debug_spend_bundle.py b/wheat/wallet/util/debug_spend_bundle.py index 9232f50..e317931 100644 --- a/wheat/wallet/util/debug_spend_bundle.py +++ b/wheat/wallet/util/debug_spend_bundle.py @@ -7,6 +7,7 @@ from wheat.types.blockchain_format.coin import Coin from wheat.types.blockchain_format.program import Program, INFINITE_COST from wheat.types.blockchain_format.sized_bytes import bytes32 +from wheat.consensus.default_constants import DEFAULT_CONSTANTS from wheat.types.condition_opcodes import ConditionOpcode from wheat.util.condition_tools import conditions_dict_for_solution, pkm_pairs_for_conditions_dict from wheat.util.hash import std_hash @@ -40,7 +41,7 @@ def dump_coin(coin: Coin) -> str: return disassemble(coin_as_program(coin)) -def debug_spend_bundle(spend_bundle, agg_sig_additional_data=bytes([3] * 32)) -> None: +def debug_spend_bundle(spend_bundle, agg_sig_additional_data=DEFAULT_CONSTANTS.AGG_SIG_ME_ADDITIONAL_DATA) -> None: """ Print a lot of useful information about a 
`SpendBundle` that might help with debugging its clvm. diff --git a/wheat/wallet/wallet.py b/wheat/wallet/wallet.py index 209ac09..dc18b20 100644 --- a/wheat/wallet/wallet.py +++ b/wheat/wallet/wallet.py @@ -82,7 +82,6 @@ async def get_max_send_amount(self, records=None): self.wallet_state_manager.constants.MAX_BLOCK_COST_CLVM, cost_per_byte=self.wallet_state_manager.constants.COST_PER_BYTE, safe_mode=True, - rust_checker=True, ) cost_result: uint64 = calculate_cost_of_program( program.program, result, self.wallet_state_manager.constants.COST_PER_BYTE diff --git a/wheat/wallet/wallet_block_store.py b/wheat/wallet/wallet_block_store.py index 3f2cade..544215a 100644 --- a/wheat/wallet/wallet_block_store.py +++ b/wheat/wallet/wallet_block_store.py @@ -36,9 +36,6 @@ async def create(cls, db_wrapper: DBWrapper): self.db_wrapper = db_wrapper self.db = db_wrapper.db - await self.db.execute("pragma journal_mode=wal") - await self.db.execute("pragma synchronous=2") - await self.db.execute( "CREATE TABLE IF NOT EXISTS header_blocks(header_hash text PRIMARY KEY, height int," " timestamp int, block blob)" diff --git a/wheat/wallet/wallet_coin_store.py b/wheat/wallet/wallet_coin_store.py index a81911a..28d9787 100644 --- a/wheat/wallet/wallet_coin_store.py +++ b/wheat/wallet/wallet_coin_store.py @@ -29,9 +29,6 @@ async def create(cls, wrapper: DBWrapper): self.db_connection = wrapper.db self.db_wrapper = wrapper - await self.db_connection.execute("pragma journal_mode=wal") - await self.db_connection.execute("pragma synchronous=2") - await self.db_connection.execute( ( "CREATE TABLE IF NOT EXISTS coin_record(" diff --git a/wheat/wallet/wallet_interested_store.py b/wheat/wallet/wallet_interested_store.py index 8cc08ec..b4eab40 100644 --- a/wheat/wallet/wallet_interested_store.py +++ b/wheat/wallet/wallet_interested_store.py @@ -20,8 +20,6 @@ async def create(cls, wrapper: DBWrapper): self.db_connection = wrapper.db self.db_wrapper = wrapper - await 
self.db_connection.execute("pragma journal_mode=wal") - await self.db_connection.execute("pragma synchronous=2") await self.db_connection.execute("CREATE TABLE IF NOT EXISTS interested_coins(coin_name text PRIMARY KEY)") diff --git a/wheat/wallet/wallet_pool_store.py b/wheat/wallet/wallet_pool_store.py index 65c50ed..93727b2 100644 --- a/wheat/wallet/wallet_pool_store.py +++ b/wheat/wallet/wallet_pool_store.py @@ -21,8 +21,6 @@ async def create(cls, wrapper: DBWrapper): self.db_connection = wrapper.db self.db_wrapper = wrapper - await self.db_connection.execute("pragma journal_mode=wal") - await self.db_connection.execute("pragma synchronous=2") await self.db_connection.execute( "CREATE TABLE IF NOT EXISTS pool_state_transitions(transition_index integer, wallet_id integer, " diff --git a/wheat/wallet/wallet_puzzle_store.py b/wheat/wallet/wallet_puzzle_store.py index 7ffeaa5..81f6752 100644 --- a/wheat/wallet/wallet_puzzle_store.py +++ b/wheat/wallet/wallet_puzzle_store.py @@ -35,8 +35,6 @@ async def create(cls, db_wrapper: DBWrapper, cache_size: uint32 = uint32(600000) self.db_wrapper = db_wrapper self.db_connection = self.db_wrapper.db - await self.db_connection.execute("pragma journal_mode=wal") - await self.db_connection.execute("pragma synchronous=2") await self.db_connection.execute( ( "CREATE TABLE IF NOT EXISTS derivation_paths(" diff --git a/wheat/wallet/wallet_state_manager.py b/wheat/wallet/wallet_state_manager.py index 3f6dbc5..0d828eb 100644 --- a/wheat/wallet/wallet_state_manager.py +++ b/wheat/wallet/wallet_state_manager.py @@ -18,7 +18,7 @@ from wheat.consensus.constants import ConsensusConstants from wheat.consensus.find_fork_point import find_fork_point_in_chain from wheat.full_node.weight_proof import WeightProofHandler -from wheat.pools.pool_puzzles import SINGLETON_LAUNCHER_HASH, solution_to_extra_data +from wheat.pools.pool_puzzles import SINGLETON_LAUNCHER_HASH, solution_to_pool_state from wheat.pools.pool_wallet import PoolWallet from 
wheat.protocols.wallet_protocol import PuzzleSolutionResponse, RespondPuzzleSolution from wheat.types.blockchain_format.coin import Coin @@ -137,6 +137,9 @@ async def create( self.lock = asyncio.Lock() self.log.debug(f"Starting in db path: {db_path}") self.db_connection = await aiosqlite.connect(db_path) + await self.db_connection.execute("pragma journal_mode=wal") + await self.db_connection.execute("pragma synchronous=OFF") + self.db_wrapper = DBWrapper(self.db_connection) self.coin_store = await WalletCoinStore.create(self.db_wrapper) self.tx_store = await WalletTransactionStore.create(self.db_wrapper) @@ -509,12 +512,15 @@ async def does_coin_belong_to_wallet(self, coin: Coin, wallet_id: int) -> bool: async def get_confirmed_balance_for_wallet_already_locked(self, wallet_id: int) -> uint128: # This is a workaround to be able to call la locking operation when already locked # for example, in the create method of DID wallet - assert self.lock.locked() is False + if self.lock.locked() is False: + raise AssertionError("expected wallet_state_manager to be locked") unspent_coin_records = await self.coin_store.get_unspent_coins_for_wallet(wallet_id) return get_balance_from_coin_records(unspent_coin_records) async def get_confirmed_balance_for_wallet( - self, wallet_id: int, unspent_coin_records: Optional[Set[WalletCoinRecord]] = None + self, + wallet_id: int, + unspent_coin_records: Optional[Set[WalletCoinRecord]] = None, ) -> uint128: """ Returns the confirmed balance, including coinbase rewards that are not spendable. 
@@ -527,6 +533,9 @@ async def get_confirmed_balance_for_wallet( return get_balance_from_coin_records(unspent_coin_records) async def get_confirmed_balance_for_wallet_with_lock(self, wallet_id: int) -> Set[WalletCoinRecord]: + if self.lock.locked() is True: + # raise AssertionError("expected wallet_state_manager to be unlocked") + pass async with self.lock: return await self.coin_store.get_unspent_coins_for_wallet(wallet_id) @@ -539,20 +548,28 @@ async def get_unconfirmed_balance( """ # This API should change so that get_balance_from_coin_records is called for Set[WalletCoinRecord] # and this method is called only for the unspent_coin_records==None case. - confirmed = await self.get_confirmed_balance_for_wallet(wallet_id, unspent_coin_records) + confirmed_amount = await self.get_confirmed_balance_for_wallet(wallet_id, unspent_coin_records) + return await self._get_unconfirmed_balance(wallet_id, confirmed_amount) + + async def get_unconfirmed_balance_already_locked(self, wallet_id) -> uint128: + confirmed_amount = await self.get_confirmed_balance_for_wallet_already_locked(wallet_id) + return await self._get_unconfirmed_balance(wallet_id, confirmed_amount) + + async def _get_unconfirmed_balance(self, wallet_id, confirmed: uint128) -> uint128: unconfirmed_tx: List[TransactionRecord] = await self.tx_store.get_unconfirmed_for_wallet(wallet_id) removal_amount: int = 0 addition_amount: int = 0 for record in unconfirmed_tx: for removal in record.removals: - removal_amount += removal.amount + if await self.does_coin_belong_to_wallet(removal, wallet_id): + removal_amount += removal.amount for addition in record.additions: # This change or a self transaction if await self.does_coin_belong_to_wallet(addition, wallet_id): addition_amount += addition.amount - result = confirmed - removal_amount + addition_amount + result = (confirmed + addition_amount) - removal_amount return uint128(result) async def unconfirmed_additions_for_wallet(self, wallet_id: int) -> Dict[bytes32, Coin]: 
@@ -599,6 +616,7 @@ async def new_transaction_block_callback( for cs in additional_coin_spends: if cs.coin.puzzle_hash == SINGLETON_LAUNCHER_HASH: already_have = False + pool_state = None for wallet_id, wallet in self.wallets.items(): if ( wallet.type() == WalletType.POOLING_WALLET @@ -608,10 +626,13 @@ async def new_transaction_block_callback( already_have = True if not already_have: try: - solution_to_extra_data(cs) + pool_state = solution_to_pool_state(cs) except Exception as e: self.log.debug(f"Not a pool wallet launcher {e}") continue + if pool_state is None: + self.log.debug("Not a pool wallet launcher") + continue self.log.info("Found created launcher. Creating pool wallet") pool_wallet = await PoolWallet.create( self, self.main_wallet, cs.coin.name(), additional_coin_spends, height, True, "pool_wallet" diff --git a/wheat/wallet/wallet_transaction_store.py b/wheat/wallet/wallet_transaction_store.py index ed34031..e43520c 100644 --- a/wheat/wallet/wallet_transaction_store.py +++ b/wheat/wallet/wallet_transaction_store.py @@ -29,9 +29,6 @@ async def create(cls, db_wrapper: DBWrapper): self.db_wrapper = db_wrapper self.db_connection = self.db_wrapper.db - - await self.db_connection.execute("pragma journal_mode=wal") - await self.db_connection.execute("pragma synchronous=2") await self.db_connection.execute( ( "CREATE TABLE IF NOT EXISTS transaction_record(" @@ -151,6 +148,8 @@ async def set_confirmed(self, tx_id: bytes32, height: uint32): current: Optional[TransactionRecord] = await self.get_transaction_record(tx_id) if current is None: return None + if current.confirmed_at_height == height: + return tx: TransactionRecord = TransactionRecord( confirmed_at_height=height, created_at_time=current.created_at_time, diff --git a/wheat/wallet/wallet_user_store.py b/wheat/wallet/wallet_user_store.py index 92778a1..28c318c 100644 --- a/wheat/wallet/wallet_user_store.py +++ b/wheat/wallet/wallet_user_store.py @@ -23,8 +23,6 @@ async def create(cls, db_wrapper: 
DBWrapper): self.db_wrapper = db_wrapper self.db_connection = db_wrapper.db - await self.db_connection.execute("pragma journal_mode=wal") - await self.db_connection.execute("pragma synchronous=2") await self.db_connection.execute( ( "CREATE TABLE IF NOT EXISTS users_wallets("