From 337dbd4e5c3d1d097c8a59759dad5377b50681e7 Mon Sep 17 00:00:00 2001 From: Chris Marslender Date: Tue, 19 Apr 2022 13:53:59 -0500 Subject: [PATCH 01/55] Revert "Revert "Pin mac intel installer to 10.15 (#11209)" (#11210)" (#11211) This reverts commit d3e73a75ab44c799f8b6f9a76fab550ad6d7824a. --- .github/workflows/build-macos-installer.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/build-macos-installer.yml b/.github/workflows/build-macos-installer.yml index 09597f3ef29d..e7cd7e6399f7 100644 --- a/.github/workflows/build-macos-installer.yml +++ b/.github/workflows/build-macos-installer.yml @@ -25,7 +25,7 @@ jobs: max-parallel: 4 matrix: python-version: [3.9] - os: [macOS-latest] + os: [macos-10.15] steps: - name: Checkout Code From 7a123afddb1bb1c5b431ca6c419696e205be6fc5 Mon Sep 17 00:00:00 2001 From: Chris Marslender Date: Tue, 19 Apr 2022 17:43:06 -0500 Subject: [PATCH 02/55] Fix install.sh test for bookworm (#11227) --- .github/workflows/test-install-scripts.yml | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/.github/workflows/test-install-scripts.yml b/.github/workflows/test-install-scripts.yml index 0a79447b35a9..7d27cec94d1a 100644 --- a/.github/workflows/test-install-scripts.yml +++ b/.github/workflows/test-install-scripts.yml @@ -156,6 +156,16 @@ jobs: apt-get --yes update apt-get install --yes git lsb-release sudo + # @TODO this step can be removed once Python 3.10 is supported + # Python 3.10 is now the default in bookworm, so install 3.9 specifically so install does not fail + - name: Prepare debian:bookworm + if: ${{ matrix.distribution.name == 'debian:bookworm' }} + env: + DEBIAN_FRONTEND: noninteractive + run: | + apt-get update -y + apt-get install -y python3.9-venv + - name: Prepare Fedora if: ${{ matrix.distribution.type == 'fedora' }} run: | From 83a740571b3be4eadc23d9def5640b14ebd6d3de Mon Sep 17 00:00:00 2001 From: Kyle Altendorf Date: Wed, 20 Apr 2022 11:02:45 -0700 Subject: [PATCH 03/55] 
aiohttp==3.8.1 for Python 3.10 (#11129) --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 2ac24927f492..adfadae7077e 100644 --- a/setup.py +++ b/setup.py @@ -11,7 +11,7 @@ "clvm_tools==0.4.4", # Currying, Program.to, other conveniences "chia_rs==0.1.1", "clvm-tools-rs==0.1.7", # Rust implementation of clvm_tools - "aiohttp==3.7.4", # HTTP server for full node rpc + "aiohttp==3.8.1", # HTTP server for full node rpc "aiosqlite==0.17.0", # asyncio wrapper for sqlite, to store blocks "bitstring==3.1.9", # Binary data management library "colorama==0.4.4", # Colorizes terminal output From 0aaa3436910dd86cf381dbba6305b2b312d4158b Mon Sep 17 00:00:00 2001 From: Kyle Altendorf Date: Wed, 20 Apr 2022 11:03:31 -0700 Subject: [PATCH 04/55] replace some asyncio.get_event_loop().run_until_complete() with asyncio.run() (#11131) --- tests/core/util/test_config.py | 2 +- tests/util/alert_server.py | 2 +- tests/wallet/test_singleton_lifecycle.py | 3 +-- 3 files changed, 3 insertions(+), 4 deletions(-) diff --git a/tests/core/util/test_config.py b/tests/core/util/test_config.py index e2493f8c861a..c1c80ce51dc6 100644 --- a/tests/core/util/test_config.py +++ b/tests/core/util/test_config.py @@ -138,7 +138,7 @@ def run_reader_and_writer_tasks(root_path: Path, default_config: Dict): Subprocess entry point. This function spins-off threads to perform read/write tasks concurrently, possibly leading to synchronization issues accessing config data. 
""" - asyncio.get_event_loop().run_until_complete(create_reader_and_writer_tasks(root_path, default_config)) + asyncio.run(create_reader_and_writer_tasks(root_path, default_config)) @pytest.fixture(scope="function") diff --git a/tests/util/alert_server.py b/tests/util/alert_server.py index fbf25bf50d2c..4f0f2d7ad786 100644 --- a/tests/util/alert_server.py +++ b/tests/util/alert_server.py @@ -77,7 +77,7 @@ def main(): ) quit() - return asyncio.get_event_loop().run_until_complete(run_and_wait(file_path, port)) + return asyncio.run(run_and_wait(file_path, port)) if __name__ == "__main__": diff --git a/tests/wallet/test_singleton_lifecycle.py b/tests/wallet/test_singleton_lifecycle.py index d332a20c1417..1cf8a8c18476 100644 --- a/tests/wallet/test_singleton_lifecycle.py +++ b/tests/wallet/test_singleton_lifecycle.py @@ -113,8 +113,7 @@ def test_only_odd_coins_0(): conditions = Program.to(condition_list) coin_spend = CoinSpend(farmed_coin, ANYONE_CAN_SPEND_PUZZLE, conditions) spend_bundle = SpendBundle.aggregate([launcher_spend_bundle, SpendBundle([coin_spend], G2Element())]) - run = asyncio.get_event_loop().run_until_complete - coins_added, coins_removed = run(check_spend_bundle_validity(bt.constants, blocks, spend_bundle)) + coins_added, coins_removed = asyncio.run(check_spend_bundle_validity(bt.constants, blocks, spend_bundle)) coin_set_added = set([_.coin for _ in coins_added]) coin_set_removed = set([_.coin for _ in coins_removed]) From a663ece4c3259efde7c175611d33603ced8be9d9 Mon Sep 17 00:00:00 2001 From: Kyle Altendorf Date: Wed, 20 Apr 2022 11:05:52 -0700 Subject: [PATCH 05/55] fix ssl context creation for server vs. 
client side (#11134) --- chia/rpc/rpc_server.py | 7 +++++-- chia/server/server.py | 2 +- tests/block_tools.py | 4 ++-- 3 files changed, 8 insertions(+), 5 deletions(-) diff --git a/chia/rpc/rpc_server.py b/chia/rpc/rpc_server.py index 4abf16969275..aff8aecaf6c8 100644 --- a/chia/rpc/rpc_server.py +++ b/chia/rpc/rpc_server.py @@ -9,7 +9,7 @@ from chia.rpc.util import wrap_http_handler from chia.server.outbound_message import NodeType -from chia.server.server import ssl_context_for_server +from chia.server.server import ssl_context_for_client, ssl_context_for_server from chia.types.peer_info import PeerInfo from chia.util.byte_types import hexstr_to_bytes from chia.util.ints import uint16 @@ -42,6 +42,9 @@ def __init__(self, rpc_api: Any, service_name: str, stop_cb: Callable, root_path self.ssl_context = ssl_context_for_server( self.ca_cert_path, self.ca_key_path, self.crt_path, self.key_path, log=self.log ) + self.ssl_client_context = ssl_context_for_client( + self.ca_cert_path, self.ca_key_path, self.crt_path, self.key_path, log=self.log + ) async def stop(self): self.shut_down = True @@ -278,7 +281,7 @@ async def connect_to_daemon(self, self_hostname: str, daemon_port: uint16): autoclose=True, autoping=True, heartbeat=60, - ssl_context=self.ssl_context, + ssl_context=self.ssl_client_context, max_msg_size=max_message_size, ) await self.connection(self.websocket) diff --git a/chia/server/server.py b/chia/server/server.py index 9edac98fc118..896906ce1b7a 100644 --- a/chia/server/server.py +++ b/chia/server/server.py @@ -48,7 +48,7 @@ def ssl_context_for_server( if check_permissions: verify_ssl_certs_and_keys([ca_cert, private_cert_path], [ca_key, private_key_path], log) - ssl_context = ssl._create_unverified_context(purpose=ssl.Purpose.SERVER_AUTH, cafile=str(ca_cert)) + ssl_context = ssl._create_unverified_context(purpose=ssl.Purpose.CLIENT_AUTH, cafile=str(ca_cert)) ssl_context.check_hostname = False ssl_context.minimum_version = ssl.TLSVersion.TLSv1_2 
ssl_context.set_ciphers( diff --git a/tests/block_tools.py b/tests/block_tools.py index 9a7317297394..6c573dffe364 100644 --- a/tests/block_tools.py +++ b/tests/block_tools.py @@ -50,7 +50,7 @@ from chia.full_node.signage_point import SignagePoint from chia.plotting.util import PlotsRefreshParameter, PlotRefreshResult, PlotRefreshEvents, parse_plot_info from chia.plotting.manager import PlotManager -from chia.server.server import ssl_context_for_server +from chia.server.server import ssl_context_for_client from chia.types.blockchain_format.classgroup import ClassgroupElement from chia.types.blockchain_format.coin import Coin, hash_coin_list from chia.types.blockchain_format.foliage import Foliage, FoliageBlockData, FoliageTransactionBlock, TransactionsInfo @@ -364,7 +364,7 @@ def get_daemon_ssl_context(self) -> ssl.SSLContext: key_path = self.root_path / self.config["daemon_ssl"]["private_key"] ca_cert_path = self.root_path / self.config["private_ssl_ca"]["crt"] ca_key_path = self.root_path / self.config["private_ssl_ca"]["key"] - return ssl_context_for_server(ca_cert_path, ca_key_path, crt_path, key_path) + return ssl_context_for_client(ca_cert_path, ca_key_path, crt_path, key_path) def get_plot_signature(self, m: bytes32, plot_pk: G1Element) -> G2Element: """ From dbedf1d8b659e02eae4bbe259dd9648ca40fc05d Mon Sep 17 00:00:00 2001 From: Florin Chirica Date: Wed, 20 Apr 2022 21:10:05 +0300 Subject: [PATCH 06/55] Fix timelord closing. 
(#10630) --- chia/timelord/timelord.py | 23 ++++++++++------------- 1 file changed, 10 insertions(+), 13 deletions(-) diff --git a/chia/timelord/timelord.py b/chia/timelord/timelord.py index 456b6641d15a..732d90f85c57 100644 --- a/chia/timelord/timelord.py +++ b/chia/timelord/timelord.py @@ -183,25 +183,22 @@ async def _handle_client(self, reader: asyncio.StreamReader, writer: asyncio.Str async def _stop_chain(self, chain: Chain): try: - while chain not in self.allows_iters: - self.lock.release() - await asyncio.sleep(0.05) - log.error(f"Trying to stop {chain} before its initialization.") - await self.lock.acquire() - if chain not in self.chain_type_to_stream: - log.warning(f"Trying to stop a crashed chain: {chain}.") - return None - stop_ip, _, stop_writer = self.chain_type_to_stream[chain] - stop_writer.write(b"010") - await stop_writer.drain() + _, _, stop_writer = self.chain_type_to_stream[chain] if chain in self.allows_iters: + stop_writer.write(b"010") + await stop_writer.drain() self.allows_iters.remove(chain) + else: + log.error(f"Trying to stop {chain} before its initialization.") + stop_writer.close() + await stop_writer.wait_closed() if chain not in self.unspawned_chains: self.unspawned_chains.append(chain) - if chain in self.chain_type_to_stream: - del self.chain_type_to_stream[chain] + del self.chain_type_to_stream[chain] except ConnectionResetError as e: log.error(f"{e}") + except Exception as e: + log.error(f"Exception in stop chain: {type(e)} {e}") def _can_infuse_unfinished_block(self, block: timelord_protocol.NewUnfinishedBlockTimelord) -> Optional[uint64]: assert self.last_state is not None From 6207da3278235304517004d60077427e8a7ed866 Mon Sep 17 00:00:00 2001 From: Kyle Altendorf Date: Wed, 20 Apr 2022 11:14:20 -0700 Subject: [PATCH 07/55] Up Windows installer build timeout to 50 minutes (#11107) Recent runs are mostly low 30s but some are close to 40 and one just timed out. 
--- .github/workflows/build-windows-installer.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/build-windows-installer.yml b/.github/workflows/build-windows-installer.yml index 22abcf919476..5ae996af6d84 100644 --- a/.github/workflows/build-windows-installer.yml +++ b/.github/workflows/build-windows-installer.yml @@ -19,7 +19,7 @@ jobs: build: name: Windows Installer on Windows 10 and Python 3.9 runs-on: [windows-2019] - timeout-minutes: 40 + timeout-minutes: 50 steps: - name: Checkout Code From ddab10c7d61b882ed9d6974d399b355e2ef29456 Mon Sep 17 00:00:00 2001 From: Kyle Altendorf Date: Wed, 20 Apr 2022 11:16:53 -0700 Subject: [PATCH 08/55] locate pyinstaller in dev deps rather than build scripts (#11118) --- .github/workflows/build-linux-arm64-installer.yml | 2 +- .github/workflows/build-linux-installer-deb.yml | 2 +- .github/workflows/build-linux-installer-rpm.yml | 2 +- .github/workflows/build-macos-installer.yml | 2 +- .github/workflows/build-macos-m1-installer.yml | 2 +- build_scripts/build_linux_deb.sh | 1 - build_scripts/build_linux_rpm.sh | 1 - build_scripts/build_macos.sh | 1 - build_scripts/build_macos_m1.sh | 3 --- setup.py | 1 + 10 files changed, 6 insertions(+), 11 deletions(-) diff --git a/.github/workflows/build-linux-arm64-installer.yml b/.github/workflows/build-linux-arm64-installer.yml index 57d3275c309a..5cb8cf4cacbe 100644 --- a/.github/workflows/build-linux-arm64-installer.yml +++ b/.github/workflows/build-linux-arm64-installer.yml @@ -106,7 +106,7 @@ jobs: env: INSTALL_PYTHON_VERSION: ${{ matrix.python-version }} run: | - sh install.sh + sh install.sh -d - name: Build arm64 .deb package env: diff --git a/.github/workflows/build-linux-installer-deb.yml b/.github/workflows/build-linux-installer-deb.yml index 2efa7e5176ac..43d5fe7e5e55 100644 --- a/.github/workflows/build-linux-installer-deb.yml +++ b/.github/workflows/build-linux-installer-deb.yml @@ -140,7 +140,7 @@ jobs: env: INSTALL_PYTHON_VERSION: ${{ 
matrix.python-version }} run: | - sh install.sh + sh install.sh -d - name: Setup Node 16.x uses: actions/setup-node@v3 diff --git a/.github/workflows/build-linux-installer-rpm.yml b/.github/workflows/build-linux-installer-rpm.yml index 16d8378fb6f3..64309818e78e 100644 --- a/.github/workflows/build-linux-installer-rpm.yml +++ b/.github/workflows/build-linux-installer-rpm.yml @@ -109,7 +109,7 @@ jobs: env: INSTALL_PYTHON_VERSION: ${{ matrix.python-version }} run: | - sh install.sh + sh install.sh -d - name: Build .rpm package env: diff --git a/.github/workflows/build-macos-installer.yml b/.github/workflows/build-macos-installer.yml index e7cd7e6399f7..a2b590b9c8aa 100644 --- a/.github/workflows/build-macos-installer.yml +++ b/.github/workflows/build-macos-installer.yml @@ -128,7 +128,7 @@ jobs: env: INSTALL_PYTHON_VERSION: ${{ matrix.python-version }} run: | - sh install.sh + sh install.sh -d - name: Setup Node 16.x uses: actions/setup-node@v3 diff --git a/.github/workflows/build-macos-m1-installer.yml b/.github/workflows/build-macos-m1-installer.yml index 92c118120e4f..0f5870ff148b 100644 --- a/.github/workflows/build-macos-m1-installer.yml +++ b/.github/workflows/build-macos-m1-installer.yml @@ -102,7 +102,7 @@ jobs: env: INSTALL_PYTHON_VERSION: ${{ matrix.python-version }} run: | - arch -arm64 sh install.sh + arch -arm64 sh install.sh -d - name: Install node 16.x run: | diff --git a/build_scripts/build_linux_deb.sh b/build_scripts/build_linux_deb.sh index 62834020104d..ba74d7b46313 100644 --- a/build_scripts/build_linux_deb.sh +++ b/build_scripts/build_linux_deb.sh @@ -35,7 +35,6 @@ rm -rf dist mkdir dist echo "Create executables with pyinstaller" -pip install pyinstaller==4.9 SPEC_FILE=$(python -c 'import chia; print(chia.PYINSTALLER_SPEC_PATH)') pyinstaller --log-level=INFO "$SPEC_FILE" LAST_EXIT_CODE=$? 
diff --git a/build_scripts/build_linux_rpm.sh b/build_scripts/build_linux_rpm.sh index 7ec656eeef8e..21db74f43c41 100644 --- a/build_scripts/build_linux_rpm.sh +++ b/build_scripts/build_linux_rpm.sh @@ -35,7 +35,6 @@ rm -rf dist mkdir dist echo "Create executables with pyinstaller" -pip install pyinstaller==4.9 SPEC_FILE=$(python -c 'import chia; print(chia.PYINSTALLER_SPEC_PATH)') pyinstaller --log-level=INFO "$SPEC_FILE" LAST_EXIT_CODE=$? diff --git a/build_scripts/build_macos.sh b/build_scripts/build_macos.sh index 2cacc4d9fb3c..35aedaef97eb 100644 --- a/build_scripts/build_macos.sh +++ b/build_scripts/build_macos.sh @@ -22,7 +22,6 @@ sudo rm -rf dist mkdir dist echo "Create executables with pyinstaller" -pip install pyinstaller==4.9 SPEC_FILE=$(python -c 'import chia; print(chia.PYINSTALLER_SPEC_PATH)') pyinstaller --log-level=INFO "$SPEC_FILE" LAST_EXIT_CODE=$? diff --git a/build_scripts/build_macos_m1.sh b/build_scripts/build_macos_m1.sh index 45f063373ba9..95582cb9fc24 100644 --- a/build_scripts/build_macos_m1.sh +++ b/build_scripts/build_macos_m1.sh @@ -21,9 +21,6 @@ echo "Create dist/" sudo rm -rf dist mkdir dist -echo "Install pyinstaller and build bootloaders for M1" -pip install pyinstaller==4.9 - echo "Create executables with pyinstaller" SPEC_FILE=$(python -c 'import chia; print(chia.PYINSTALLER_SPEC_PATH)') pyinstaller --log-level=INFO "$SPEC_FILE" diff --git a/setup.py b/setup.py index adfadae7077e..6ddd5137b751 100644 --- a/setup.py +++ b/setup.py @@ -57,6 +57,7 @@ "black==21.12b0", "aiohttp_cors", # For blackd "ipython", # For asyncio debugging + "pyinstaller==4.9", "types-aiofiles", "types-click", "types-cryptography", From e741fe14bfaaf46f9e2ff69be0024abffa59d05d Mon Sep 17 00:00:00 2001 From: Kyle Altendorf Date: Wed, 20 Apr 2022 11:17:51 -0700 Subject: [PATCH 09/55] remove setting of SCM_VERSION in install-version.py (#11119) --- build_scripts/installer-version.py | 2 -- 1 file changed, 2 deletions(-) diff --git 
a/build_scripts/installer-version.py b/build_scripts/installer-version.py index d9d71e02e7cf..12e13b0d934e 100644 --- a/build_scripts/installer-version.py +++ b/build_scripts/installer-version.py @@ -1,4 +1,3 @@ -import os import sys from setuptools_scm import get_version @@ -10,7 +9,6 @@ def main(): scm_full_version = get_version(root="..", relative_to=__file__) # scm_full_version = "1.0.5.dev22" - os.environ["SCM_VERSION"] = scm_full_version left_full_version = scm_full_version.split("+") version = left_full_version[0].split(".") From 068b68fb88d2a9d11f7c7586eef38e7e79288ef9 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 20 Apr 2022 11:40:51 -0700 Subject: [PATCH 10/55] Bump actions/upload-artifact from 2 to 3 (#11144) * Bump actions/upload-artifact from 2 to 3 Bumps [actions/upload-artifact](https://github.com/actions/upload-artifact) from 2 to 3. - [Release notes](https://github.com/actions/upload-artifact/releases) - [Commits](https://github.com/actions/upload-artifact/compare/v2...v3) --- updated-dependencies: - dependency-name: actions/upload-artifact dependency-type: direct:production update-type: version-update:semver-major ... 
Signed-off-by: dependabot[bot] * Also update runner_templates * Mark the github workspace as safe (#11159) * Mark the github workspace as safe * Move the git config step after git is installed in the test containers Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Gene Hoffman Co-authored-by: Chris Marslender --- .github/workflows/build-linux-arm64-installer.yml | 2 +- .github/workflows/build-linux-installer-deb.yml | 2 +- .github/workflows/build-linux-installer-rpm.yml | 2 +- .github/workflows/build-macos-installer.yml | 2 +- .github/workflows/build-macos-m1-installer.yml | 2 +- .github/workflows/build-test-macos-blockchain.yml | 2 +- .github/workflows/build-test-macos-clvm.yml | 2 +- .github/workflows/build-test-macos-core-cmds.yml | 2 +- .github/workflows/build-test-macos-core-consensus.yml | 2 +- .github/workflows/build-test-macos-core-custom_types.yml | 2 +- .github/workflows/build-test-macos-core-daemon.yml | 2 +- .../workflows/build-test-macos-core-full_node-full_sync.yml | 2 +- .github/workflows/build-test-macos-core-full_node-stores.yml | 2 +- .github/workflows/build-test-macos-core-full_node.yml | 2 +- .github/workflows/build-test-macos-core-server.yml | 2 +- .github/workflows/build-test-macos-core-ssl.yml | 2 +- .github/workflows/build-test-macos-core-util.yml | 2 +- .github/workflows/build-test-macos-core.yml | 2 +- .github/workflows/build-test-macos-farmer_harvester.yml | 2 +- .github/workflows/build-test-macos-generator.yml | 2 +- .github/workflows/build-test-macos-plot_sync.yml | 2 +- .github/workflows/build-test-macos-plotting.yml | 2 +- .github/workflows/build-test-macos-pools.yml | 2 +- .github/workflows/build-test-macos-simulation.yml | 2 +- .github/workflows/build-test-macos-tools.yml | 2 +- .github/workflows/build-test-macos-util.yml | 2 +- .github/workflows/build-test-macos-wallet-cat_wallet.yml | 2 +- .github/workflows/build-test-macos-wallet-did_wallet.yml | 2 +- 
.github/workflows/build-test-macos-wallet-rl_wallet.yml | 2 +- .github/workflows/build-test-macos-wallet-rpc.yml | 2 +- .github/workflows/build-test-macos-wallet-simple_sync.yml | 2 +- .github/workflows/build-test-macos-wallet-sync.yml | 2 +- .github/workflows/build-test-macos-wallet.yml | 2 +- .github/workflows/build-test-macos-weight_proof.yml | 2 +- .github/workflows/build-test-ubuntu-blockchain.yml | 2 +- .github/workflows/build-test-ubuntu-clvm.yml | 2 +- .github/workflows/build-test-ubuntu-core-cmds.yml | 2 +- .github/workflows/build-test-ubuntu-core-consensus.yml | 2 +- .github/workflows/build-test-ubuntu-core-custom_types.yml | 2 +- .github/workflows/build-test-ubuntu-core-daemon.yml | 2 +- .../workflows/build-test-ubuntu-core-full_node-full_sync.yml | 2 +- .github/workflows/build-test-ubuntu-core-full_node-stores.yml | 2 +- .github/workflows/build-test-ubuntu-core-full_node.yml | 2 +- .github/workflows/build-test-ubuntu-core-server.yml | 2 +- .github/workflows/build-test-ubuntu-core-ssl.yml | 2 +- .github/workflows/build-test-ubuntu-core-util.yml | 2 +- .github/workflows/build-test-ubuntu-core.yml | 2 +- .github/workflows/build-test-ubuntu-farmer_harvester.yml | 2 +- .github/workflows/build-test-ubuntu-generator.yml | 2 +- .github/workflows/build-test-ubuntu-plot_sync.yml | 2 +- .github/workflows/build-test-ubuntu-plotting.yml | 2 +- .github/workflows/build-test-ubuntu-pools.yml | 2 +- .github/workflows/build-test-ubuntu-simulation.yml | 2 +- .github/workflows/build-test-ubuntu-tools.yml | 2 +- .github/workflows/build-test-ubuntu-util.yml | 2 +- .github/workflows/build-test-ubuntu-wallet-cat_wallet.yml | 2 +- .github/workflows/build-test-ubuntu-wallet-did_wallet.yml | 2 +- .github/workflows/build-test-ubuntu-wallet-rl_wallet.yml | 2 +- .github/workflows/build-test-ubuntu-wallet-rpc.yml | 2 +- .github/workflows/build-test-ubuntu-wallet-simple_sync.yml | 2 +- .github/workflows/build-test-ubuntu-wallet-sync.yml | 2 +- 
.github/workflows/build-test-ubuntu-wallet.yml | 2 +- .github/workflows/build-test-ubuntu-weight_proof.yml | 2 +- .github/workflows/build-windows-installer.yml | 4 ++-- .github/workflows/upload-pypi-source.yml | 2 +- tests/runner_templates/build-test-macos | 2 +- tests/runner_templates/build-test-ubuntu | 2 +- 67 files changed, 68 insertions(+), 68 deletions(-) diff --git a/.github/workflows/build-linux-arm64-installer.yml b/.github/workflows/build-linux-arm64-installer.yml index 5cb8cf4cacbe..56a3b5e7f0d8 100644 --- a/.github/workflows/build-linux-arm64-installer.yml +++ b/.github/workflows/build-linux-arm64-installer.yml @@ -119,7 +119,7 @@ jobs: sh build_linux_deb.sh arm64 - name: Upload Linux artifacts - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v3 with: name: chia-installers-linux-deb-arm64 path: ${{ github.workspace }}/build_scripts/final_installer/ diff --git a/.github/workflows/build-linux-installer-deb.yml b/.github/workflows/build-linux-installer-deb.yml index 43d5fe7e5e55..e1ff9de5670b 100644 --- a/.github/workflows/build-linux-installer-deb.yml +++ b/.github/workflows/build-linux-installer-deb.yml @@ -162,7 +162,7 @@ jobs: sh build_linux_deb.sh amd64 - name: Upload Linux artifacts - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v3 with: name: chia-installers-linux-deb-intel path: ${{ github.workspace }}/build_scripts/final_installer/ diff --git a/.github/workflows/build-linux-installer-rpm.yml b/.github/workflows/build-linux-installer-rpm.yml index 64309818e78e..b12a702ee64d 100644 --- a/.github/workflows/build-linux-installer-rpm.yml +++ b/.github/workflows/build-linux-installer-rpm.yml @@ -122,7 +122,7 @@ jobs: sh build_linux_rpm.sh amd64 - name: Upload Linux artifacts - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v3 with: name: chia-installers-linux-rpm-intel path: ${{ github.workspace }}/build_scripts/final_installer/ diff --git a/.github/workflows/build-macos-installer.yml 
b/.github/workflows/build-macos-installer.yml index a2b590b9c8aa..ef3b41c58a83 100644 --- a/.github/workflows/build-macos-installer.yml +++ b/.github/workflows/build-macos-installer.yml @@ -148,7 +148,7 @@ jobs: sh build_macos.sh - name: Upload MacOS artifacts - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v3 with: name: chia-installers-macos-dmg-intel path: ${{ github.workspace }}/build_scripts/final_installer/ diff --git a/.github/workflows/build-macos-m1-installer.yml b/.github/workflows/build-macos-m1-installer.yml index 0f5870ff148b..17cc3b70cb76 100644 --- a/.github/workflows/build-macos-m1-installer.yml +++ b/.github/workflows/build-macos-m1-installer.yml @@ -122,7 +122,7 @@ jobs: arch -arm64 sh build_macos_m1.sh - name: Upload MacOS artifacts - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v3 with: name: chia-installers-macos-dmg-arm64 path: ${{ github.workspace }}/build_scripts/final_installer/ diff --git a/.github/workflows/build-test-macos-blockchain.yml b/.github/workflows/build-test-macos-blockchain.yml index a816459ff7b7..7d718ba11b39 100644 --- a/.github/workflows/build-test-macos-blockchain.yml +++ b/.github/workflows/build-test-macos-blockchain.yml @@ -105,7 +105,7 @@ jobs: venv/bin/coverage report --rcfile=.coveragerc --show-missing - name: Publish coverage - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v3 with: name: coverage path: coverage_reports/* diff --git a/.github/workflows/build-test-macos-clvm.yml b/.github/workflows/build-test-macos-clvm.yml index f2d496da9384..19955c44d476 100644 --- a/.github/workflows/build-test-macos-clvm.yml +++ b/.github/workflows/build-test-macos-clvm.yml @@ -91,7 +91,7 @@ jobs: venv/bin/coverage report --rcfile=.coveragerc --show-missing - name: Publish coverage - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v3 with: name: coverage path: coverage_reports/* diff --git a/.github/workflows/build-test-macos-core-cmds.yml 
b/.github/workflows/build-test-macos-core-cmds.yml index 687ae6c94900..5a3580742a12 100644 --- a/.github/workflows/build-test-macos-core-cmds.yml +++ b/.github/workflows/build-test-macos-core-cmds.yml @@ -91,7 +91,7 @@ jobs: venv/bin/coverage report --rcfile=.coveragerc --show-missing - name: Publish coverage - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v3 with: name: coverage path: coverage_reports/* diff --git a/.github/workflows/build-test-macos-core-consensus.yml b/.github/workflows/build-test-macos-core-consensus.yml index f5cd798982e3..d9ffc2c928a7 100644 --- a/.github/workflows/build-test-macos-core-consensus.yml +++ b/.github/workflows/build-test-macos-core-consensus.yml @@ -91,7 +91,7 @@ jobs: venv/bin/coverage report --rcfile=.coveragerc --show-missing - name: Publish coverage - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v3 with: name: coverage path: coverage_reports/* diff --git a/.github/workflows/build-test-macos-core-custom_types.yml b/.github/workflows/build-test-macos-core-custom_types.yml index 130d15420c70..a3715506c4d9 100644 --- a/.github/workflows/build-test-macos-core-custom_types.yml +++ b/.github/workflows/build-test-macos-core-custom_types.yml @@ -91,7 +91,7 @@ jobs: venv/bin/coverage report --rcfile=.coveragerc --show-missing - name: Publish coverage - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v3 with: name: coverage path: coverage_reports/* diff --git a/.github/workflows/build-test-macos-core-daemon.yml b/.github/workflows/build-test-macos-core-daemon.yml index 7ab9dc765a25..332c43d71065 100644 --- a/.github/workflows/build-test-macos-core-daemon.yml +++ b/.github/workflows/build-test-macos-core-daemon.yml @@ -109,7 +109,7 @@ jobs: venv/bin/coverage report --rcfile=.coveragerc --show-missing - name: Publish coverage - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v3 with: name: coverage path: coverage_reports/* diff --git 
a/.github/workflows/build-test-macos-core-full_node-full_sync.yml b/.github/workflows/build-test-macos-core-full_node-full_sync.yml index fd8e5da8d28d..62c2c952db8d 100644 --- a/.github/workflows/build-test-macos-core-full_node-full_sync.yml +++ b/.github/workflows/build-test-macos-core-full_node-full_sync.yml @@ -105,7 +105,7 @@ jobs: venv/bin/coverage report --rcfile=.coveragerc --show-missing - name: Publish coverage - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v3 with: name: coverage path: coverage_reports/* diff --git a/.github/workflows/build-test-macos-core-full_node-stores.yml b/.github/workflows/build-test-macos-core-full_node-stores.yml index 72a06b6ca0b7..a92d2c90de49 100644 --- a/.github/workflows/build-test-macos-core-full_node-stores.yml +++ b/.github/workflows/build-test-macos-core-full_node-stores.yml @@ -105,7 +105,7 @@ jobs: venv/bin/coverage report --rcfile=.coveragerc --show-missing - name: Publish coverage - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v3 with: name: coverage path: coverage_reports/* diff --git a/.github/workflows/build-test-macos-core-full_node.yml b/.github/workflows/build-test-macos-core-full_node.yml index 83d6cb01568c..ab1a41ae62d9 100644 --- a/.github/workflows/build-test-macos-core-full_node.yml +++ b/.github/workflows/build-test-macos-core-full_node.yml @@ -105,7 +105,7 @@ jobs: venv/bin/coverage report --rcfile=.coveragerc --show-missing - name: Publish coverage - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v3 with: name: coverage path: coverage_reports/* diff --git a/.github/workflows/build-test-macos-core-server.yml b/.github/workflows/build-test-macos-core-server.yml index e93f23b2f8b9..b4c37663c2b3 100644 --- a/.github/workflows/build-test-macos-core-server.yml +++ b/.github/workflows/build-test-macos-core-server.yml @@ -105,7 +105,7 @@ jobs: venv/bin/coverage report --rcfile=.coveragerc --show-missing - name: Publish coverage - uses: 
actions/upload-artifact@v2 + uses: actions/upload-artifact@v3 with: name: coverage path: coverage_reports/* diff --git a/.github/workflows/build-test-macos-core-ssl.yml b/.github/workflows/build-test-macos-core-ssl.yml index db9903634ac1..61711d49ab29 100644 --- a/.github/workflows/build-test-macos-core-ssl.yml +++ b/.github/workflows/build-test-macos-core-ssl.yml @@ -105,7 +105,7 @@ jobs: venv/bin/coverage report --rcfile=.coveragerc --show-missing - name: Publish coverage - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v3 with: name: coverage path: coverage_reports/* diff --git a/.github/workflows/build-test-macos-core-util.yml b/.github/workflows/build-test-macos-core-util.yml index 0935d2d546b4..53744bf26dfc 100644 --- a/.github/workflows/build-test-macos-core-util.yml +++ b/.github/workflows/build-test-macos-core-util.yml @@ -105,7 +105,7 @@ jobs: venv/bin/coverage report --rcfile=.coveragerc --show-missing - name: Publish coverage - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v3 with: name: coverage path: coverage_reports/* diff --git a/.github/workflows/build-test-macos-core.yml b/.github/workflows/build-test-macos-core.yml index 4fe7cab2494f..812f9bcda113 100644 --- a/.github/workflows/build-test-macos-core.yml +++ b/.github/workflows/build-test-macos-core.yml @@ -105,7 +105,7 @@ jobs: venv/bin/coverage report --rcfile=.coveragerc --show-missing - name: Publish coverage - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v3 with: name: coverage path: coverage_reports/* diff --git a/.github/workflows/build-test-macos-farmer_harvester.yml b/.github/workflows/build-test-macos-farmer_harvester.yml index 9c9f2b4736a2..38b76a042753 100644 --- a/.github/workflows/build-test-macos-farmer_harvester.yml +++ b/.github/workflows/build-test-macos-farmer_harvester.yml @@ -105,7 +105,7 @@ jobs: venv/bin/coverage report --rcfile=.coveragerc --show-missing - name: Publish coverage - uses: actions/upload-artifact@v2 + 
uses: actions/upload-artifact@v3 with: name: coverage path: coverage_reports/* diff --git a/.github/workflows/build-test-macos-generator.yml b/.github/workflows/build-test-macos-generator.yml index e695c65e96c8..897d03d4cd3f 100644 --- a/.github/workflows/build-test-macos-generator.yml +++ b/.github/workflows/build-test-macos-generator.yml @@ -91,7 +91,7 @@ jobs: venv/bin/coverage report --rcfile=.coveragerc --show-missing - name: Publish coverage - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v3 with: name: coverage path: coverage_reports/* diff --git a/.github/workflows/build-test-macos-plot_sync.yml b/.github/workflows/build-test-macos-plot_sync.yml index 315c909bb476..3b72cfaff3c9 100644 --- a/.github/workflows/build-test-macos-plot_sync.yml +++ b/.github/workflows/build-test-macos-plot_sync.yml @@ -105,7 +105,7 @@ jobs: venv/bin/coverage report --rcfile=.coveragerc --show-missing - name: Publish coverage - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v3 with: name: coverage path: coverage_reports/* diff --git a/.github/workflows/build-test-macos-plotting.yml b/.github/workflows/build-test-macos-plotting.yml index 30d44782fd4b..4fa5344d0461 100644 --- a/.github/workflows/build-test-macos-plotting.yml +++ b/.github/workflows/build-test-macos-plotting.yml @@ -105,7 +105,7 @@ jobs: venv/bin/coverage report --rcfile=.coveragerc --show-missing - name: Publish coverage - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v3 with: name: coverage path: coverage_reports/* diff --git a/.github/workflows/build-test-macos-pools.yml b/.github/workflows/build-test-macos-pools.yml index 9279455daaf5..00e6e6fd8775 100644 --- a/.github/workflows/build-test-macos-pools.yml +++ b/.github/workflows/build-test-macos-pools.yml @@ -105,7 +105,7 @@ jobs: venv/bin/coverage report --rcfile=.coveragerc --show-missing - name: Publish coverage - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v3 with: name: coverage 
path: coverage_reports/* diff --git a/.github/workflows/build-test-macos-simulation.yml b/.github/workflows/build-test-macos-simulation.yml index 9d8e1f57eb7c..91d0df91d07b 100644 --- a/.github/workflows/build-test-macos-simulation.yml +++ b/.github/workflows/build-test-macos-simulation.yml @@ -109,7 +109,7 @@ jobs: venv/bin/coverage report --rcfile=.coveragerc --show-missing - name: Publish coverage - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v3 with: name: coverage path: coverage_reports/* diff --git a/.github/workflows/build-test-macos-tools.yml b/.github/workflows/build-test-macos-tools.yml index e0a4396b6e1e..8c4bf3268a31 100644 --- a/.github/workflows/build-test-macos-tools.yml +++ b/.github/workflows/build-test-macos-tools.yml @@ -91,7 +91,7 @@ jobs: venv/bin/coverage report --rcfile=.coveragerc --show-missing - name: Publish coverage - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v3 with: name: coverage path: coverage_reports/* diff --git a/.github/workflows/build-test-macos-util.yml b/.github/workflows/build-test-macos-util.yml index f0f2334a0d0c..a1c0603ad058 100644 --- a/.github/workflows/build-test-macos-util.yml +++ b/.github/workflows/build-test-macos-util.yml @@ -91,7 +91,7 @@ jobs: venv/bin/coverage report --rcfile=.coveragerc --show-missing - name: Publish coverage - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v3 with: name: coverage path: coverage_reports/* diff --git a/.github/workflows/build-test-macos-wallet-cat_wallet.yml b/.github/workflows/build-test-macos-wallet-cat_wallet.yml index 70858e93b721..ddf94b384859 100644 --- a/.github/workflows/build-test-macos-wallet-cat_wallet.yml +++ b/.github/workflows/build-test-macos-wallet-cat_wallet.yml @@ -105,7 +105,7 @@ jobs: venv/bin/coverage report --rcfile=.coveragerc --show-missing - name: Publish coverage - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v3 with: name: coverage path: coverage_reports/* diff --git 
a/.github/workflows/build-test-macos-wallet-did_wallet.yml b/.github/workflows/build-test-macos-wallet-did_wallet.yml index 4f97d0ea2f19..9a36c893eb68 100644 --- a/.github/workflows/build-test-macos-wallet-did_wallet.yml +++ b/.github/workflows/build-test-macos-wallet-did_wallet.yml @@ -91,7 +91,7 @@ jobs: venv/bin/coverage report --rcfile=.coveragerc --show-missing - name: Publish coverage - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v3 with: name: coverage path: coverage_reports/* diff --git a/.github/workflows/build-test-macos-wallet-rl_wallet.yml b/.github/workflows/build-test-macos-wallet-rl_wallet.yml index 1e19516260a4..73aa78a65a91 100644 --- a/.github/workflows/build-test-macos-wallet-rl_wallet.yml +++ b/.github/workflows/build-test-macos-wallet-rl_wallet.yml @@ -91,7 +91,7 @@ jobs: venv/bin/coverage report --rcfile=.coveragerc --show-missing - name: Publish coverage - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v3 with: name: coverage path: coverage_reports/* diff --git a/.github/workflows/build-test-macos-wallet-rpc.yml b/.github/workflows/build-test-macos-wallet-rpc.yml index 68a95ea7a772..1fac91c10ad3 100644 --- a/.github/workflows/build-test-macos-wallet-rpc.yml +++ b/.github/workflows/build-test-macos-wallet-rpc.yml @@ -105,7 +105,7 @@ jobs: venv/bin/coverage report --rcfile=.coveragerc --show-missing - name: Publish coverage - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v3 with: name: coverage path: coverage_reports/* diff --git a/.github/workflows/build-test-macos-wallet-simple_sync.yml b/.github/workflows/build-test-macos-wallet-simple_sync.yml index 0a1a359b3621..13b2340fdf2e 100644 --- a/.github/workflows/build-test-macos-wallet-simple_sync.yml +++ b/.github/workflows/build-test-macos-wallet-simple_sync.yml @@ -105,7 +105,7 @@ jobs: venv/bin/coverage report --rcfile=.coveragerc --show-missing - name: Publish coverage - uses: actions/upload-artifact@v2 + uses: 
actions/upload-artifact@v3 with: name: coverage path: coverage_reports/* diff --git a/.github/workflows/build-test-macos-wallet-sync.yml b/.github/workflows/build-test-macos-wallet-sync.yml index c92ca8e21efe..210934a9c776 100644 --- a/.github/workflows/build-test-macos-wallet-sync.yml +++ b/.github/workflows/build-test-macos-wallet-sync.yml @@ -105,7 +105,7 @@ jobs: venv/bin/coverage report --rcfile=.coveragerc --show-missing - name: Publish coverage - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v3 with: name: coverage path: coverage_reports/* diff --git a/.github/workflows/build-test-macos-wallet.yml b/.github/workflows/build-test-macos-wallet.yml index 0a71248808d7..45c63cf94fe2 100644 --- a/.github/workflows/build-test-macos-wallet.yml +++ b/.github/workflows/build-test-macos-wallet.yml @@ -105,7 +105,7 @@ jobs: venv/bin/coverage report --rcfile=.coveragerc --show-missing - name: Publish coverage - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v3 with: name: coverage path: coverage_reports/* diff --git a/.github/workflows/build-test-macos-weight_proof.yml b/.github/workflows/build-test-macos-weight_proof.yml index 9b737c31e4c3..c2c8235ea65e 100644 --- a/.github/workflows/build-test-macos-weight_proof.yml +++ b/.github/workflows/build-test-macos-weight_proof.yml @@ -105,7 +105,7 @@ jobs: venv/bin/coverage report --rcfile=.coveragerc --show-missing - name: Publish coverage - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v3 with: name: coverage path: coverage_reports/* diff --git a/.github/workflows/build-test-ubuntu-blockchain.yml b/.github/workflows/build-test-ubuntu-blockchain.yml index 60dbb451694e..c669bb12a985 100644 --- a/.github/workflows/build-test-ubuntu-blockchain.yml +++ b/.github/workflows/build-test-ubuntu-blockchain.yml @@ -104,7 +104,7 @@ jobs: venv/bin/coverage report --rcfile=.coveragerc --show-missing - name: Publish coverage - uses: actions/upload-artifact@v2 + uses: 
actions/upload-artifact@v3 with: name: coverage path: coverage_reports/* diff --git a/.github/workflows/build-test-ubuntu-clvm.yml b/.github/workflows/build-test-ubuntu-clvm.yml index 35275d29e874..518babfae31a 100644 --- a/.github/workflows/build-test-ubuntu-clvm.yml +++ b/.github/workflows/build-test-ubuntu-clvm.yml @@ -90,7 +90,7 @@ jobs: venv/bin/coverage report --rcfile=.coveragerc --show-missing - name: Publish coverage - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v3 with: name: coverage path: coverage_reports/* diff --git a/.github/workflows/build-test-ubuntu-core-cmds.yml b/.github/workflows/build-test-ubuntu-core-cmds.yml index f5f8ed214dbc..f047165522d8 100644 --- a/.github/workflows/build-test-ubuntu-core-cmds.yml +++ b/.github/workflows/build-test-ubuntu-core-cmds.yml @@ -90,7 +90,7 @@ jobs: venv/bin/coverage report --rcfile=.coveragerc --show-missing - name: Publish coverage - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v3 with: name: coverage path: coverage_reports/* diff --git a/.github/workflows/build-test-ubuntu-core-consensus.yml b/.github/workflows/build-test-ubuntu-core-consensus.yml index f9045c31ba30..069ca515d97a 100644 --- a/.github/workflows/build-test-ubuntu-core-consensus.yml +++ b/.github/workflows/build-test-ubuntu-core-consensus.yml @@ -90,7 +90,7 @@ jobs: venv/bin/coverage report --rcfile=.coveragerc --show-missing - name: Publish coverage - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v3 with: name: coverage path: coverage_reports/* diff --git a/.github/workflows/build-test-ubuntu-core-custom_types.yml b/.github/workflows/build-test-ubuntu-core-custom_types.yml index 447a28627335..134e25da6f2f 100644 --- a/.github/workflows/build-test-ubuntu-core-custom_types.yml +++ b/.github/workflows/build-test-ubuntu-core-custom_types.yml @@ -90,7 +90,7 @@ jobs: venv/bin/coverage report --rcfile=.coveragerc --show-missing - name: Publish coverage - uses: actions/upload-artifact@v2 + 
uses: actions/upload-artifact@v3 with: name: coverage path: coverage_reports/* diff --git a/.github/workflows/build-test-ubuntu-core-daemon.yml b/.github/workflows/build-test-ubuntu-core-daemon.yml index 1992c859f152..59dc31a98c45 100644 --- a/.github/workflows/build-test-ubuntu-core-daemon.yml +++ b/.github/workflows/build-test-ubuntu-core-daemon.yml @@ -108,7 +108,7 @@ jobs: venv/bin/coverage report --rcfile=.coveragerc --show-missing - name: Publish coverage - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v3 with: name: coverage path: coverage_reports/* diff --git a/.github/workflows/build-test-ubuntu-core-full_node-full_sync.yml b/.github/workflows/build-test-ubuntu-core-full_node-full_sync.yml index a2295de7ebb7..b22406b0b3f3 100644 --- a/.github/workflows/build-test-ubuntu-core-full_node-full_sync.yml +++ b/.github/workflows/build-test-ubuntu-core-full_node-full_sync.yml @@ -104,7 +104,7 @@ jobs: venv/bin/coverage report --rcfile=.coveragerc --show-missing - name: Publish coverage - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v3 with: name: coverage path: coverage_reports/* diff --git a/.github/workflows/build-test-ubuntu-core-full_node-stores.yml b/.github/workflows/build-test-ubuntu-core-full_node-stores.yml index fbcf34703727..229dabe72435 100644 --- a/.github/workflows/build-test-ubuntu-core-full_node-stores.yml +++ b/.github/workflows/build-test-ubuntu-core-full_node-stores.yml @@ -104,7 +104,7 @@ jobs: venv/bin/coverage report --rcfile=.coveragerc --show-missing - name: Publish coverage - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v3 with: name: coverage path: coverage_reports/* diff --git a/.github/workflows/build-test-ubuntu-core-full_node.yml b/.github/workflows/build-test-ubuntu-core-full_node.yml index 97208996a1fa..743abc39eae5 100644 --- a/.github/workflows/build-test-ubuntu-core-full_node.yml +++ b/.github/workflows/build-test-ubuntu-core-full_node.yml @@ -104,7 +104,7 @@ jobs: 
venv/bin/coverage report --rcfile=.coveragerc --show-missing - name: Publish coverage - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v3 with: name: coverage path: coverage_reports/* diff --git a/.github/workflows/build-test-ubuntu-core-server.yml b/.github/workflows/build-test-ubuntu-core-server.yml index c96f2e13f107..1857c57280e5 100644 --- a/.github/workflows/build-test-ubuntu-core-server.yml +++ b/.github/workflows/build-test-ubuntu-core-server.yml @@ -104,7 +104,7 @@ jobs: venv/bin/coverage report --rcfile=.coveragerc --show-missing - name: Publish coverage - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v3 with: name: coverage path: coverage_reports/* diff --git a/.github/workflows/build-test-ubuntu-core-ssl.yml b/.github/workflows/build-test-ubuntu-core-ssl.yml index 0edc22b104e5..f5b9ecbcae27 100644 --- a/.github/workflows/build-test-ubuntu-core-ssl.yml +++ b/.github/workflows/build-test-ubuntu-core-ssl.yml @@ -104,7 +104,7 @@ jobs: venv/bin/coverage report --rcfile=.coveragerc --show-missing - name: Publish coverage - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v3 with: name: coverage path: coverage_reports/* diff --git a/.github/workflows/build-test-ubuntu-core-util.yml b/.github/workflows/build-test-ubuntu-core-util.yml index 2e5c699cb98d..1c4de1baefcc 100644 --- a/.github/workflows/build-test-ubuntu-core-util.yml +++ b/.github/workflows/build-test-ubuntu-core-util.yml @@ -104,7 +104,7 @@ jobs: venv/bin/coverage report --rcfile=.coveragerc --show-missing - name: Publish coverage - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v3 with: name: coverage path: coverage_reports/* diff --git a/.github/workflows/build-test-ubuntu-core.yml b/.github/workflows/build-test-ubuntu-core.yml index ed717b529b72..1e7d0749dcd9 100644 --- a/.github/workflows/build-test-ubuntu-core.yml +++ b/.github/workflows/build-test-ubuntu-core.yml @@ -104,7 +104,7 @@ jobs: venv/bin/coverage report 
--rcfile=.coveragerc --show-missing - name: Publish coverage - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v3 with: name: coverage path: coverage_reports/* diff --git a/.github/workflows/build-test-ubuntu-farmer_harvester.yml b/.github/workflows/build-test-ubuntu-farmer_harvester.yml index 03a3050232a7..9248f0d27124 100644 --- a/.github/workflows/build-test-ubuntu-farmer_harvester.yml +++ b/.github/workflows/build-test-ubuntu-farmer_harvester.yml @@ -104,7 +104,7 @@ jobs: venv/bin/coverage report --rcfile=.coveragerc --show-missing - name: Publish coverage - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v3 with: name: coverage path: coverage_reports/* diff --git a/.github/workflows/build-test-ubuntu-generator.yml b/.github/workflows/build-test-ubuntu-generator.yml index 3f928e2c0443..f1ea334f6ace 100644 --- a/.github/workflows/build-test-ubuntu-generator.yml +++ b/.github/workflows/build-test-ubuntu-generator.yml @@ -90,7 +90,7 @@ jobs: venv/bin/coverage report --rcfile=.coveragerc --show-missing - name: Publish coverage - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v3 with: name: coverage path: coverage_reports/* diff --git a/.github/workflows/build-test-ubuntu-plot_sync.yml b/.github/workflows/build-test-ubuntu-plot_sync.yml index fddf0cd9c2ab..8be06ca85fdd 100644 --- a/.github/workflows/build-test-ubuntu-plot_sync.yml +++ b/.github/workflows/build-test-ubuntu-plot_sync.yml @@ -104,7 +104,7 @@ jobs: venv/bin/coverage report --rcfile=.coveragerc --show-missing - name: Publish coverage - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v3 with: name: coverage path: coverage_reports/* diff --git a/.github/workflows/build-test-ubuntu-plotting.yml b/.github/workflows/build-test-ubuntu-plotting.yml index 882d0b0c113a..45ed4bdb38bc 100644 --- a/.github/workflows/build-test-ubuntu-plotting.yml +++ b/.github/workflows/build-test-ubuntu-plotting.yml @@ -104,7 +104,7 @@ jobs: venv/bin/coverage 
report --rcfile=.coveragerc --show-missing - name: Publish coverage - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v3 with: name: coverage path: coverage_reports/* diff --git a/.github/workflows/build-test-ubuntu-pools.yml b/.github/workflows/build-test-ubuntu-pools.yml index d2671e3a18de..0ce08c056fcc 100644 --- a/.github/workflows/build-test-ubuntu-pools.yml +++ b/.github/workflows/build-test-ubuntu-pools.yml @@ -104,7 +104,7 @@ jobs: venv/bin/coverage report --rcfile=.coveragerc --show-missing - name: Publish coverage - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v3 with: name: coverage path: coverage_reports/* diff --git a/.github/workflows/build-test-ubuntu-simulation.yml b/.github/workflows/build-test-ubuntu-simulation.yml index 06435f947ea9..078670f96bd0 100644 --- a/.github/workflows/build-test-ubuntu-simulation.yml +++ b/.github/workflows/build-test-ubuntu-simulation.yml @@ -108,7 +108,7 @@ jobs: venv/bin/coverage report --rcfile=.coveragerc --show-missing - name: Publish coverage - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v3 with: name: coverage path: coverage_reports/* diff --git a/.github/workflows/build-test-ubuntu-tools.yml b/.github/workflows/build-test-ubuntu-tools.yml index 97877660be2d..b9758af50e17 100644 --- a/.github/workflows/build-test-ubuntu-tools.yml +++ b/.github/workflows/build-test-ubuntu-tools.yml @@ -90,7 +90,7 @@ jobs: venv/bin/coverage report --rcfile=.coveragerc --show-missing - name: Publish coverage - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v3 with: name: coverage path: coverage_reports/* diff --git a/.github/workflows/build-test-ubuntu-util.yml b/.github/workflows/build-test-ubuntu-util.yml index 1d0567e628d2..5af0aaedde20 100644 --- a/.github/workflows/build-test-ubuntu-util.yml +++ b/.github/workflows/build-test-ubuntu-util.yml @@ -90,7 +90,7 @@ jobs: venv/bin/coverage report --rcfile=.coveragerc --show-missing - name: Publish coverage - 
uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v3 with: name: coverage path: coverage_reports/* diff --git a/.github/workflows/build-test-ubuntu-wallet-cat_wallet.yml b/.github/workflows/build-test-ubuntu-wallet-cat_wallet.yml index af979fc0fd9c..d06f9b90b7b7 100644 --- a/.github/workflows/build-test-ubuntu-wallet-cat_wallet.yml +++ b/.github/workflows/build-test-ubuntu-wallet-cat_wallet.yml @@ -104,7 +104,7 @@ jobs: venv/bin/coverage report --rcfile=.coveragerc --show-missing - name: Publish coverage - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v3 with: name: coverage path: coverage_reports/* diff --git a/.github/workflows/build-test-ubuntu-wallet-did_wallet.yml b/.github/workflows/build-test-ubuntu-wallet-did_wallet.yml index 06bf1ad15d55..fd44748cea34 100644 --- a/.github/workflows/build-test-ubuntu-wallet-did_wallet.yml +++ b/.github/workflows/build-test-ubuntu-wallet-did_wallet.yml @@ -90,7 +90,7 @@ jobs: venv/bin/coverage report --rcfile=.coveragerc --show-missing - name: Publish coverage - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v3 with: name: coverage path: coverage_reports/* diff --git a/.github/workflows/build-test-ubuntu-wallet-rl_wallet.yml b/.github/workflows/build-test-ubuntu-wallet-rl_wallet.yml index c791e3f32c1e..92ed61663d2f 100644 --- a/.github/workflows/build-test-ubuntu-wallet-rl_wallet.yml +++ b/.github/workflows/build-test-ubuntu-wallet-rl_wallet.yml @@ -90,7 +90,7 @@ jobs: venv/bin/coverage report --rcfile=.coveragerc --show-missing - name: Publish coverage - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v3 with: name: coverage path: coverage_reports/* diff --git a/.github/workflows/build-test-ubuntu-wallet-rpc.yml b/.github/workflows/build-test-ubuntu-wallet-rpc.yml index bdeb794df439..eb70761dfa84 100644 --- a/.github/workflows/build-test-ubuntu-wallet-rpc.yml +++ b/.github/workflows/build-test-ubuntu-wallet-rpc.yml @@ -104,7 +104,7 @@ jobs: 
venv/bin/coverage report --rcfile=.coveragerc --show-missing - name: Publish coverage - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v3 with: name: coverage path: coverage_reports/* diff --git a/.github/workflows/build-test-ubuntu-wallet-simple_sync.yml b/.github/workflows/build-test-ubuntu-wallet-simple_sync.yml index 7a9585c713fd..00c16db514a3 100644 --- a/.github/workflows/build-test-ubuntu-wallet-simple_sync.yml +++ b/.github/workflows/build-test-ubuntu-wallet-simple_sync.yml @@ -104,7 +104,7 @@ jobs: venv/bin/coverage report --rcfile=.coveragerc --show-missing - name: Publish coverage - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v3 with: name: coverage path: coverage_reports/* diff --git a/.github/workflows/build-test-ubuntu-wallet-sync.yml b/.github/workflows/build-test-ubuntu-wallet-sync.yml index ceb86d8b4234..d9d098edad01 100644 --- a/.github/workflows/build-test-ubuntu-wallet-sync.yml +++ b/.github/workflows/build-test-ubuntu-wallet-sync.yml @@ -104,7 +104,7 @@ jobs: venv/bin/coverage report --rcfile=.coveragerc --show-missing - name: Publish coverage - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v3 with: name: coverage path: coverage_reports/* diff --git a/.github/workflows/build-test-ubuntu-wallet.yml b/.github/workflows/build-test-ubuntu-wallet.yml index 0e88ee66d539..71b6ef1ba306 100644 --- a/.github/workflows/build-test-ubuntu-wallet.yml +++ b/.github/workflows/build-test-ubuntu-wallet.yml @@ -104,7 +104,7 @@ jobs: venv/bin/coverage report --rcfile=.coveragerc --show-missing - name: Publish coverage - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v3 with: name: coverage path: coverage_reports/* diff --git a/.github/workflows/build-test-ubuntu-weight_proof.yml b/.github/workflows/build-test-ubuntu-weight_proof.yml index 041aaef86bfc..60fe28bdfc74 100644 --- a/.github/workflows/build-test-ubuntu-weight_proof.yml +++ b/.github/workflows/build-test-ubuntu-weight_proof.yml 
@@ -104,7 +104,7 @@ jobs: venv/bin/coverage report --rcfile=.coveragerc --show-missing - name: Publish coverage - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v3 with: name: coverage path: coverage_reports/* diff --git a/.github/workflows/build-windows-installer.yml b/.github/workflows/build-windows-installer.yml index 5ae996af6d84..5467d23a48a4 100644 --- a/.github/workflows/build-windows-installer.yml +++ b/.github/workflows/build-windows-installer.yml @@ -155,13 +155,13 @@ jobs: .\build_scripts\build_windows.ps1 - name: Upload Windows exe's to artifacts - uses: actions/upload-artifact@v2.2.2 + uses: actions/upload-artifact@v3 with: name: chia-installers-windows-exe-intel path: ${{ github.workspace }}\chia-blockchain-gui\Chia-win32-x64\ - name: Upload Installer to artifacts - uses: actions/upload-artifact@v2.2.2 + uses: actions/upload-artifact@v3 with: name: Windows-Installers path: ${{ github.workspace }}\chia-blockchain-gui\release-builds\ diff --git a/.github/workflows/upload-pypi-source.yml b/.github/workflows/upload-pypi-source.yml index 9114d3498665..c059199495d6 100644 --- a/.github/workflows/upload-pypi-source.yml +++ b/.github/workflows/upload-pypi-source.yml @@ -65,7 +65,7 @@ jobs: python -m build --sdist --outdir dist . 
- name: Upload artifacts - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v3 with: name: dist path: ./dist diff --git a/tests/runner_templates/build-test-macos b/tests/runner_templates/build-test-macos index 536ab47fcfa5..b5c8cbc00972 100644 --- a/tests/runner_templates/build-test-macos +++ b/tests/runner_templates/build-test-macos @@ -91,7 +91,7 @@ INSTALL_TIMELORD venv/bin/coverage report --rcfile=.coveragerc --show-missing - name: Publish coverage - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v3 with: name: coverage path: coverage_reports/* diff --git a/tests/runner_templates/build-test-ubuntu b/tests/runner_templates/build-test-ubuntu index bc587a89aef1..52fe6a587e58 100644 --- a/tests/runner_templates/build-test-ubuntu +++ b/tests/runner_templates/build-test-ubuntu @@ -90,7 +90,7 @@ INSTALL_TIMELORD venv/bin/coverage report --rcfile=.coveragerc --show-missing - name: Publish coverage - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v3 with: name: coverage path: coverage_reports/* From 547219b91657680fbc2819fa3087f320597c2eb5 Mon Sep 17 00:00:00 2001 From: Gene Hoffman <30377676+hoffmang9@users.noreply.github.com> Date: Wed, 20 Apr 2022 11:44:52 -0700 Subject: [PATCH 11/55] Remove 3.9.11 for Windows Installer (#11226) And replace with just 3.9.x --- .github/workflows/build-windows-installer.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/build-windows-installer.yml b/.github/workflows/build-windows-installer.yml index 5467d23a48a4..b74fefd1bd10 100644 --- a/.github/workflows/build-windows-installer.yml +++ b/.github/workflows/build-windows-installer.yml @@ -62,7 +62,7 @@ jobs: - uses: actions/setup-python@v2 name: Install Python 3.9 with: - python-version: "3.9.11" + python-version: "3.9" - name: Setup Node 16.x uses: actions/setup-node@v3 From b5a8ac378397b5a9b1619ae6af66b3b477368a1d Mon Sep 17 00:00:00 2001 From: Chris Marslender Date: Wed, 20 Apr 2022 23:39:51 
-0500 Subject: [PATCH 12/55] Add cli only rpm (#11236) * Add cli only rpm * Ensure rvm (fpm) is loaded before running fpm * Use full path to fpm, since GHA seems to mess up the PATH in the container * Add back source and add use ruby-3 * Call rpm script with bash, to see if the rvm script works * Add --depends for libcrypt.so.1 --- .../workflows/build-linux-installer-rpm.yml | 2 +- build_scripts/build_linux_rpm.sh | 28 +++++++++++++++++++ 2 files changed, 29 insertions(+), 1 deletion(-) diff --git a/.github/workflows/build-linux-installer-rpm.yml b/.github/workflows/build-linux-installer-rpm.yml index b12a702ee64d..36c5795aae70 100644 --- a/.github/workflows/build-linux-installer-rpm.yml +++ b/.github/workflows/build-linux-installer-rpm.yml @@ -119,7 +119,7 @@ jobs: git -C ./chia-blockchain-gui status . ./activate cd ./build_scripts - sh build_linux_rpm.sh amd64 + bash build_linux_rpm.sh amd64 - name: Upload Linux artifacts uses: actions/upload-artifact@v3 diff --git a/build_scripts/build_linux_rpm.sh b/build_scripts/build_linux_rpm.sh index 21db74f43c41..60829b193737 100644 --- a/build_scripts/build_linux_rpm.sh +++ b/build_scripts/build_linux_rpm.sh @@ -43,6 +43,31 @@ if [ "$LAST_EXIT_CODE" -ne 0 ]; then exit $LAST_EXIT_CODE fi +# Builds CLI only rpm +CLI_RPM_BASE="chia-blockchain-cli-$CHIA_INSTALLER_VERSION-1.$REDHAT_PLATFORM" +mkdir -p "dist/$CLI_RPM_BASE/opt/chia" +mkdir -p "dist/$CLI_RPM_BASE/usr/bin" +cp -r dist/daemon/* "dist/$CLI_RPM_BASE/opt/chia/" +ln -s ../../opt/chia/chia "dist/$CLI_RPM_BASE/usr/bin/chia" +# This is built into the base build image +# shellcheck disable=SC1091 +. 
/etc/profile.d/rvm.sh +rvm use ruby-3 +# /usr/lib64/libcrypt.so.1 is marked as a dependency specifically because newer versions of fedora bundle +# libcrypt.so.2 by default, and the libxcrypt-compat package needs to be installed for the other version +# Marking as a dependency allows yum/dnf to automatically install the libxcrypt-compat package as well +fpm -s dir -t rpm \ + -C "dist/$CLI_RPM_BASE" \ + -p "dist/$CLI_RPM_BASE.rpm" \ + --name chia-blockchain-cli \ + --license Apache-2.0 \ + --version "$CHIA_INSTALLER_VERSION" \ + --architecture "$REDHAT_PLATFORM" \ + --description "Chia is a modern cryptocurrency built from scratch, designed to be efficient, decentralized, and secure." \ + --depends /usr/lib64/libcrypt.so.1 \ + . +# CLI only rpm done + cp -r dist/daemon ../chia-blockchain-gui/packages/gui cd .. || exit cd chia-blockchain-gui || exit @@ -109,4 +134,7 @@ if [ "$REDHAT_PLATFORM" = "x86_64" ]; then fi fi +# Move the cli only rpm into final installers as well, so it gets uploaded as an artifact +mv "dist/$CLI_RPM_BASE.rpm" final_installer/ + ls final_installer/ From 2e422332897f6c0610302324346961de0c9c41b1 Mon Sep 17 00:00:00 2001 From: Chris Marslender Date: Thu, 21 Apr 2022 18:59:27 -0500 Subject: [PATCH 13/55] Testing postinst/prerm scripts with the UI .deb (#11258) --- build_scripts/assets/deb/postinst.sh | 7 +++++++ build_scripts/assets/deb/prerm.sh | 7 +++++++ build_scripts/build_linux_deb.sh | 6 ++++-- build_scripts/deb-options.json | 9 +++++++++ 4 files changed, 27 insertions(+), 2 deletions(-) create mode 100644 build_scripts/assets/deb/postinst.sh create mode 100644 build_scripts/assets/deb/prerm.sh create mode 100644 build_scripts/deb-options.json diff --git a/build_scripts/assets/deb/postinst.sh b/build_scripts/assets/deb/postinst.sh new file mode 100644 index 000000000000..01be3411582a --- /dev/null +++ b/build_scripts/assets/deb/postinst.sh @@ -0,0 +1,7 @@ +#!/usr/bin/env bash +# Post install script for the UI .deb to place symlinks in 
places to allow the CLI to work similarly in both versions + +set -e + +ln -s /usr/lib/chia-blockchain/resources/app.asar.unpacked/daemon/chia /usr/bin/chia || true +ln -s /usr/lib/chia-blockchain/resources/app.asar.unpacked/daemon /opt/chia || true diff --git a/build_scripts/assets/deb/prerm.sh b/build_scripts/assets/deb/prerm.sh new file mode 100644 index 000000000000..9e34e2602897 --- /dev/null +++ b/build_scripts/assets/deb/prerm.sh @@ -0,0 +1,7 @@ +#!/usr/bin/env bash +# Pre remove script for the UI .deb to clean up the symlinks from the installer + +set -e + +unlink /usr/bin/chia || true +unlink /opt/chia || true diff --git a/build_scripts/build_linux_deb.sh b/build_scripts/build_linux_deb.sh index ba74d7b46313..27e24ba386ad 100644 --- a/build_scripts/build_linux_deb.sh +++ b/build_scripts/build_linux_deb.sh @@ -98,8 +98,10 @@ cd ../../../build_scripts || exit echo "Create chia-$CHIA_INSTALLER_VERSION.deb" rm -rf final_installer mkdir final_installer -electron-installer-debian --src dist/$DIR_NAME/ --dest final_installer/ \ ---arch "$PLATFORM" --options.version $CHIA_INSTALLER_VERSION --options.bin chia-blockchain --options.name chia-blockchain +electron-installer-debian --src "dist/$DIR_NAME/" \ + --arch "$PLATFORM" \ + --options.version "$CHIA_INSTALLER_VERSION" \ + --config deb-options.json LAST_EXIT_CODE=$? if [ "$LAST_EXIT_CODE" -ne 0 ]; then echo >&2 "electron-installer-debian failed!" 
diff --git a/build_scripts/deb-options.json b/build_scripts/deb-options.json new file mode 100644 index 000000000000..da5ef86f06e6 --- /dev/null +++ b/build_scripts/deb-options.json @@ -0,0 +1,9 @@ +{ + "dest": "final_installer/", + "bin": "chia-blockchain", + "name": "chia-blockchain", + "scripts": { + "postinst": "assets/deb/postinst.sh", + "prerm": "assets/deb/prerm.sh" + } +} From ea8cc18a86373556f552b17197b52cdb898f5cb6 Mon Sep 17 00:00:00 2001 From: Kyle Altendorf Date: Thu, 21 Apr 2022 17:01:17 -0700 Subject: [PATCH 14/55] have pyinstaller check platlib for dll's, not ROOT (#11120) --- chia/pyinstaller.spec | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/chia/pyinstaller.spec b/chia/pyinstaller.spec index 6d81c943a30d..2e4bc0be2528 100644 --- a/chia/pyinstaller.spec +++ b/chia/pyinstaller.spec @@ -2,6 +2,7 @@ import importlib import pathlib import platform +import sysconfig from pkg_resources import get_distribution @@ -98,7 +99,7 @@ if THIS_IS_WINDOWS: if THIS_IS_WINDOWS: chia_mod = importlib.import_module("chia") - dll_paths = ROOT / "*.dll" + dll_paths = pathlib.Path(sysconfig.get_path("platlib")) / "*.dll" binaries = [ ( From 3bff0764c91adf6693e5426de873cead53eeb71c Mon Sep 17 00:00:00 2001 From: dustinface <35775977+xdustinface@users.noreply.github.com> Date: Wed, 20 Apr 2022 20:43:31 +0200 Subject: [PATCH 15/55] harvester: Use a set instead of a list to speed up availability checks (#11204) --- chia/plotting/manager.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/chia/plotting/manager.py b/chia/plotting/manager.py index aa309795e95a..a5c14372ace1 100644 --- a/chia/plotting/manager.py +++ b/chia/plotting/manager.py @@ -232,9 +232,9 @@ def _refresh_task(self, sleep_interval_ms: int): plot_filenames: Dict[Path, List[Path]] = get_plot_filenames(self.root_path) plot_directories: Set[Path] = set(plot_filenames.keys()) - plot_paths: List[Path] = [] + plot_paths: Set[Path] = set() for paths in 
plot_filenames.values(): - plot_paths += paths + plot_paths.update(paths) total_result: PlotRefreshResult = PlotRefreshResult() total_size = len(plot_paths) @@ -274,7 +274,7 @@ def _refresh_task(self, sleep_interval_ms: int): for filename in filenames_to_remove: del self.plot_filename_paths[filename] - for remaining, batch in list_to_batches(plot_paths, self.refresh_parameter.batch_size): + for remaining, batch in list_to_batches(list(plot_paths), self.refresh_parameter.batch_size): batch_result: PlotRefreshResult = self.refresh_batch(batch, plot_directories) if not self._refreshing_enabled: self.log.debug("refresh_plots: Aborted") From 398d3672d70f2c869650da6027c12e4324647069 Mon Sep 17 00:00:00 2001 From: dustinface <35775977+xdustinface@users.noreply.github.com> Date: Wed, 20 Apr 2022 20:43:31 +0200 Subject: [PATCH 16/55] harvester: Use a set instead of a list to speed up availability checks (#11204) From 4d6dd2f29eb8a8241b934eead02b82e35a7846eb Mon Sep 17 00:00:00 2001 From: Kyle Altendorf Date: Thu, 21 Apr 2022 17:00:28 -0700 Subject: [PATCH 17/55] check dependency artifacts (#11243) * check dependency artifacts * remove commented out code * find root path relative to script * fixup stringy pathy mixup * Update check_dependency_artifacts.py --- .../workflows/check_wheel_availability.yaml | 38 ++++++++++ build_scripts/check_dependency_artifacts.py | 70 +++++++++++++++++++ 2 files changed, 108 insertions(+) create mode 100644 .github/workflows/check_wheel_availability.yaml create mode 100644 build_scripts/check_dependency_artifacts.py diff --git a/.github/workflows/check_wheel_availability.yaml b/.github/workflows/check_wheel_availability.yaml new file mode 100644 index 000000000000..c051bd485046 --- /dev/null +++ b/.github/workflows/check_wheel_availability.yaml @@ -0,0 +1,38 @@ +name: Check Dependency Artifacts + +on: + push: + branches: + - main + tags: + - '**' + pull_request: + branches: + - '**' + +concurrency: + # SHA is added to the end if on `main` to 
let all main workflows run + group: ${{ github.ref }}-${{ github.workflow }}-${{ github.event_name }}-${{ github.ref == 'refs/heads/main' && github.sha || '' }} + cancel-in-progress: true + +jobs: + check_dependency_artifacts: + name: Check Dependency Artifacts + runs-on: ${{ matrix.os }} + strategy: + fail-fast: false + matrix: + python-version: ['3.7', '3.8', '3.9'] + os: [macOS-latest, ubuntu-latest] + + steps: + - name: Checkout Code + uses: actions/checkout@v3 + + - name: Setup Python environment + uses: actions/setup-python@v2 + with: + python-version: ${{ matrix.python-version }} + + - name: Check Wheel Availability + run: python build_scripts/check_dependency_artifacts.py diff --git a/build_scripts/check_dependency_artifacts.py b/build_scripts/check_dependency_artifacts.py new file mode 100644 index 000000000000..2c34a4c54f45 --- /dev/null +++ b/build_scripts/check_dependency_artifacts.py @@ -0,0 +1,70 @@ +import os +import pathlib +import subprocess +import sys +import tempfile + +excepted_packages = { + "keyrings.cryptfile", # pure python + "dnslib", # pure python +} + + +def excepted(path: pathlib.Path) -> bool: + # TODO: This should be implemented with a real file name parser though i'm + # uncertain at the moment what package that would be. 
+ + name, dash, rest = path.name.partition("-") + return name in excepted_packages + + +def main() -> int: + with tempfile.TemporaryDirectory() as directory_string: + directory_path = pathlib.Path(directory_string) + + extras = ["upnp"] + package_path_string = os.fspath(pathlib.Path(__file__).parent.parent) + + if len(extras) > 0: + package_and_extras = f"{package_path_string}[{','.join(extras)}]" + else: + package_and_extras = package_path_string + + subprocess.run( + [ + sys.executable, + "-m", + "pip", + "download", + "--dest", + os.fspath(directory_path), + "--extra-index", + "https://pypi.chia.net/simple/", + package_and_extras, + ], + check=True, + ) + + failed_artifacts = [] + + for artifact in directory_path.iterdir(): + if artifact.suffix == ".whl": + # everything being a wheel is the target + continue + + if excepted(artifact): + continue + + failed_artifacts.append(artifact) + + if len(failed_artifacts) > 0: + print("The following unacceptable artifacts were downloaded by pip:") + for artifact in failed_artifacts: + print(f" {artifact.name}") + + return 1 + + return 0 + + +sys.exit(main()) From 9f6f5f6090af5fc7d19215c1ddfda52c70020852 Mon Sep 17 00:00:00 2001 From: Kyle Altendorf Date: Sat, 23 Apr 2022 20:11:09 -0700 Subject: [PATCH 18/55] Debian bookworm now uses 3.10, update the comment (#11278) --- .github/workflows/test-install-scripts.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/test-install-scripts.yml b/.github/workflows/test-install-scripts.yml index 7d27cec94d1a..05e1d419e3d0 100644 --- a/.github/workflows/test-install-scripts.yml +++ b/.github/workflows/test-install-scripts.yml @@ -80,7 +80,7 @@ jobs: url: "docker://debian:bullseye" - name: debian:bookworm type: debian - # https://packages.debian.org/bookworm/python/python3 (3.9) + # https://packages.debian.org/bookworm/python/python3 (3.10) url: "docker://debian:bookworm" - name: fedora:33 type: fedora From b6370b2515b2bc79fbd6750384f6bd839b8f6fc7 Mon 
Sep 17 00:00:00 2001 From: Earle Lowe <30607889+emlowe@users.noreply.github.com> Date: Sat, 23 Apr 2022 20:11:31 -0700 Subject: [PATCH 19/55] Update setproctitle to 1.2.3 for python 3.10 (#11274) --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 6ddd5137b751..9148cc15eaab 100644 --- a/setup.py +++ b/setup.py @@ -25,7 +25,7 @@ # "keyrings.cryptfile==1.3.8", # Secure storage for keys on Linux (Will be replaced) # See https://github.com/frispete/keyrings.cryptfile/issues/15 "PyYAML==5.4.1", # Used for config file format - "setproctitle==1.2.2", # Gives the chia processes readable names + "setproctitle==1.2.3", # Gives the chia processes readable names "sortedcontainers==2.4.0", # For maintaining sorted mempools # TODO: when moving to click 8 remove the pinning of black noted below "click==7.1.2", # For the CLI From 728bba0e1a7ce0075d260e37389c53ab08639f8d Mon Sep 17 00:00:00 2001 From: Earle Lowe <30607889+emlowe@users.noreply.github.com> Date: Sat, 23 Apr 2022 20:11:56 -0700 Subject: [PATCH 20/55] Remove multidict from setup.py for python 3.10 (#11272) * Testing multidict 6.0.2 * Stop specifying multidict directly --- setup.py | 1 - 1 file changed, 1 deletion(-) diff --git a/setup.py b/setup.py index 9148cc15eaab..e68f121fb440 100644 --- a/setup.py +++ b/setup.py @@ -1,7 +1,6 @@ from setuptools import setup dependencies = [ - "multidict==5.1.0", # Avoid 5.2.0 due to Avast "aiofiles==0.7.0", # Async IO for files "blspy==1.0.9", # Signature library "chiavdf==1.0.5", # timelord and vdf verification From 086de8d406bd37401bd26e3c6fdb0322c909db9e Mon Sep 17 00:00:00 2001 From: Earle Lowe <30607889+emlowe@users.noreply.github.com> Date: Sat, 23 Apr 2022 20:13:06 -0700 Subject: [PATCH 21/55] Update PyYAML to 6.0 for python 3.10 (#11273) --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index e68f121fb440..587cfbdff412 100644 --- a/setup.py +++ b/setup.py @@ -23,7 +23,7 @@ 
"keyrings.cryptfile==1.3.4", # Secure storage for keys on Linux (Will be replaced) # "keyrings.cryptfile==1.3.8", # Secure storage for keys on Linux (Will be replaced) # See https://github.com/frispete/keyrings.cryptfile/issues/15 - "PyYAML==5.4.1", # Used for config file format + "PyYAML==6.0", # Used for config file format "setproctitle==1.2.3", # Gives the chia processes readable names "sortedcontainers==2.4.0", # For maintaining sorted mempools # TODO: when moving to click 8 remove the pinning of black noted below From eb92556546d6b3b4196c4ae4a9d1787763c22758 Mon Sep 17 00:00:00 2001 From: Kyle Altendorf Date: Mon, 25 Apr 2022 09:20:08 -0700 Subject: [PATCH 22/55] Add Linux Mint to test matrix (#11295) * add Linux Mint to test matrix * : -> / for linux mint dockers * Prepare Linux Mint * stop testing linux mint 21 since it reports 20.3 * names, comment, and add 19.2 * mint * manually install requests * trailing whitespace --- .github/workflows/test-install-scripts.yml | 54 ++++++++++++++++++++++ 1 file changed, 54 insertions(+) diff --git a/.github/workflows/test-install-scripts.yml b/.github/workflows/test-install-scripts.yml index 05e1d419e3d0..6ad2cfc3852e 100644 --- a/.github/workflows/test-install-scripts.yml +++ b/.github/workflows/test-install-scripts.yml @@ -113,6 +113,41 @@ jobs: type: ubuntu # https://packages.ubuntu.com/impish/python3 (21.10, 3.9) url: "docker://ubuntu:impish" + - name: linuxmintd/mint19.1-amd64 (Tessa) + type: mint + # 3.6 default with an option for 3.7 + url: "docker://linuxmintd/mint19.1-amd64" + - name: linuxmintd/mint19.2-amd64 (Tina) + type: mint + # 3.6 default with an option for 3.7 + url: "docker://linuxmintd/mint19.2-amd64" + - name: linuxmintd/mint19.3-amd64 (Tricia) + type: mint + # 3.6 default with an option for 3.7 + url: "docker://linuxmintd/mint19.3-amd64" + - name: linuxmintd/mint20-amd64 (Ulyana) + type: mint + # 3.8 + url: "docker://linuxmintd/mint20-amd64" + - name: linuxmintd/mint20.1-amd64 (Ulyssa) + type: mint 
+ # 3.8 + url: "docker://linuxmintd/mint20.1-amd64" + - name: linuxmintd/mint20.2-amd64 (Uma) + type: mint + # 3.8 + url: "docker://linuxmintd/mint20.2-amd64" + - name: linuxmintd/mint20.3-amd64 (Una) + type: mint + # 3.8 + url: "docker://linuxmintd/mint20.3-amd64" +# The Linux Mint 21 docker image reports as 20.3 but has different Python. +# Uncomment after adapting to handle this or upstream fixing it. +# Also, Linux Mint 21 is not released as of this change. +# - name: linuxmintd/mint21-amd64 +# type: linuxmint +# # 3.10 default with an option for 3.9 +# url: "docker://linuxmintd/mint21-amd64" steps: - name: Prepare Amazon Linux @@ -188,6 +223,25 @@ jobs: apt-get --yes update apt-get install --yes git lsb-release sudo + - name: Prepare Linux Mint + if: ${{ matrix.distribution.type == 'mint' }} + env: + DEBIAN_FRONTEND: noninteractive + run: | + # for 19.* + apt-get --yes update + # for 19.3 to avoid + # Setting up software-properties-common (2.0.0.2) ... + # Traceback (most recent call last): + # File "/usr/lib/linuxmint/mintSources/mintSources.py", line 11, in + # import requests + # ModuleNotFoundError: No module named 'requests' + apt-get install --yes python3-requests + apt-get install --yes software-properties-common + add-apt-repository --yes ppa:git-core/ppa + apt-get --yes update + apt-get install --yes git lsb-release sudo + - name: Add safe git directory run: git config --global --add safe.directory $GITHUB_WORKSPACE From 74f3ecd3d8444acf177d7cd2bc63756e188f7d73 Mon Sep 17 00:00:00 2001 From: Kyle Altendorf Date: Mon, 25 Apr 2022 11:08:26 -0700 Subject: [PATCH 23/55] simplify install.sh ubuntu version tracking (#11288) * simplify install.sh ubuntu version tracking * quotes for bash * undo some unintended changes --- install.sh | 20 ++++++++++++++------ 1 file changed, 14 insertions(+), 6 deletions(-) diff --git a/install.sh b/install.sh index 5ce4095fed60..8e2b97131e9e 100755 --- a/install.sh +++ b/install.sh @@ -56,7 +56,10 @@ fi # Get submodules git 
submodule update --init mozilla-ca -UBUNTU_PRE_2004=false +UBUNTU_PRE_2004=0 +UBUNTU_2000=0 +UBUNTU_2100=0 + if $UBUNTU; then LSB_RELEASE=$(lsb_release -rs) # In case Ubuntu minimal does not come with bc @@ -64,8 +67,13 @@ if $UBUNTU; then sudo apt install bc -y fi # Mint 20.04 responds with 20 here so 20 instead of 20.04 - UBUNTU_PRE_2004=$(echo "$LSB_RELEASE<20" | bc) - UBUNTU_2100=$(echo "$LSB_RELEASE>=21" | bc) + if [ "$(echo "$LSB_RELEASE<20" | bc)" = "1" ]; then + UBUNTU_PRE_2004=1 + elif [ "$(echo "$LSB_RELEASE<21" | bc)" = "1" ]; then + UBUNTU_2000=1 + else + UBUNTU_2100=1 + fi fi install_python3_and_sqlite3_from_source_with_yum() { @@ -114,16 +122,16 @@ install_python3_and_sqlite3_from_source_with_yum() { # Manage npm and other install requirements on an OS specific basis if [ "$(uname)" = "Linux" ]; then #LINUX=1 - if [ "$UBUNTU" = "true" ] && [ "$UBUNTU_PRE_2004" = "1" ]; then + if [ "$UBUNTU_PRE_2004" = "1" ]; then # Ubuntu echo "Installing on Ubuntu pre 20.04 LTS." sudo apt-get update sudo apt-get install -y python3.7-venv python3.7-distutils openssl - elif [ "$UBUNTU" = "true" ] && [ "$UBUNTU_PRE_2004" = "0" ] && [ "$UBUNTU_2100" = "0" ]; then + elif [ "$UBUNTU_2000" = "1" ]; then echo "Installing on Ubuntu 20.04 LTS." sudo apt-get update sudo apt-get install -y python3.8-venv python3-distutils openssl - elif [ "$UBUNTU" = "true" ] && [ "$UBUNTU_2100" = "1" ]; then + elif [ "$UBUNTU_2100" = "1" ]; then echo "Installing on Ubuntu 21.04 or newer." 
sudo apt-get update sudo apt-get install -y python3.9-venv python3-distutils openssl From 38be31b187a2042fd0ad8b0decdb6b6e9938c0b0 Mon Sep 17 00:00:00 2001 From: Chris Marslender Date: Mon, 25 Apr 2022 16:50:38 -0500 Subject: [PATCH 24/55] Fix targeting for arm64 to not land on native arm64 mac runners (#11309) --- .github/workflows/build-linux-arm64-installer.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/build-linux-arm64-installer.yml b/.github/workflows/build-linux-arm64-installer.yml index 56a3b5e7f0d8..5e17da8df7ea 100644 --- a/.github/workflows/build-linux-arm64-installer.yml +++ b/.github/workflows/build-linux-arm64-installer.yml @@ -18,7 +18,7 @@ concurrency: jobs: build: name: Linux ARM64 installer on Python 3.8 - runs-on: [ARM64] + runs-on: [Linux, ARM64] container: chianetwork/ubuntu-18.04-builder:latest timeout-minutes: 120 strategy: From 9dcc0221f7c6c1a3bb74933200ccdb6d00c225f1 Mon Sep 17 00:00:00 2001 From: Chris Marslender Date: Tue, 26 Apr 2022 10:01:21 -0500 Subject: [PATCH 25/55] Update names of installer workflows so they get grouped together in the workflow listing (#11310) --- .github/workflows/build-linux-arm64-installer.yml | 4 ++-- .github/workflows/build-linux-installer-deb.yml | 4 ++-- .github/workflows/build-linux-installer-rpm.yml | 4 ++-- .github/workflows/build-macos-installer.yml | 4 ++-- .github/workflows/build-macos-m1-installer.yml | 4 ++-- .github/workflows/build-windows-installer.yml | 4 ++-- 6 files changed, 12 insertions(+), 12 deletions(-) diff --git a/.github/workflows/build-linux-arm64-installer.yml b/.github/workflows/build-linux-arm64-installer.yml index 5e17da8df7ea..418cebd2827c 100644 --- a/.github/workflows/build-linux-arm64-installer.yml +++ b/.github/workflows/build-linux-arm64-installer.yml @@ -1,4 +1,4 @@ -name: Linux ARM64 installer on Python 3.8 +name: Build Installer - Linux DEB ARM64 on: push: @@ -17,7 +17,7 @@ concurrency: jobs: build: - name: Linux ARM64 installer on 
Python 3.8 + name: Linux arm64 DEB Installer runs-on: [Linux, ARM64] container: chianetwork/ubuntu-18.04-builder:latest timeout-minutes: 120 diff --git a/.github/workflows/build-linux-installer-deb.yml b/.github/workflows/build-linux-installer-deb.yml index e1ff9de5670b..3147ffc05387 100644 --- a/.github/workflows/build-linux-installer-deb.yml +++ b/.github/workflows/build-linux-installer-deb.yml @@ -1,4 +1,4 @@ -name: Linux .deb installer on Python 3.8 +name: Build Installer - Linux DEB AMD64 on: workflow_dispatch: @@ -18,7 +18,7 @@ concurrency: jobs: build: - name: Linux .deb installer on Python 3.8 + name: Linux amd64 DEB Installer runs-on: ${{ matrix.os }} timeout-minutes: 40 strategy: diff --git a/.github/workflows/build-linux-installer-rpm.yml b/.github/workflows/build-linux-installer-rpm.yml index 36c5795aae70..be74457ca73e 100644 --- a/.github/workflows/build-linux-installer-rpm.yml +++ b/.github/workflows/build-linux-installer-rpm.yml @@ -1,4 +1,4 @@ -name: Linux .rpm installer on Python 3.9 +name: Build Installer - Linux RPM AMD64 on: workflow_dispatch: @@ -18,7 +18,7 @@ concurrency: jobs: build: - name: Linux .rpm installer on Python 3.9 + name: Linux amd64 RPM Installer runs-on: ubuntu-latest container: image: chianetwork/centos7-builder:latest diff --git a/.github/workflows/build-macos-installer.yml b/.github/workflows/build-macos-installer.yml index ef3b41c58a83..6748cc037967 100644 --- a/.github/workflows/build-macos-installer.yml +++ b/.github/workflows/build-macos-installer.yml @@ -1,4 +1,4 @@ -name: MacOS Intel installer on Python 3.9 +name: Build Installer - MacOS Intel on: push: @@ -17,7 +17,7 @@ concurrency: jobs: build: - name: MacOS Intel Installer on Python 3.9 + name: MacOS Intel Installer runs-on: ${{ matrix.os }} timeout-minutes: 40 strategy: diff --git a/.github/workflows/build-macos-m1-installer.yml b/.github/workflows/build-macos-m1-installer.yml index 17cc3b70cb76..d234c1514ddc 100644 --- 
a/.github/workflows/build-macos-m1-installer.yml +++ b/.github/workflows/build-macos-m1-installer.yml @@ -1,4 +1,4 @@ -name: MacOS M1 installer on Python 3.9 +name: Build Installer - MacOS arm64 on: push: @@ -17,7 +17,7 @@ concurrency: jobs: build: - name: MacOS M1 installer on Python 3.9 + name: MacOS arm64 installer runs-on: [m1] timeout-minutes: 40 strategy: diff --git a/.github/workflows/build-windows-installer.yml b/.github/workflows/build-windows-installer.yml index b74fefd1bd10..15fa47090edb 100644 --- a/.github/workflows/build-windows-installer.yml +++ b/.github/workflows/build-windows-installer.yml @@ -1,4 +1,4 @@ -name: Windows Installer on Windows 10 and Python 3.9 +name: Build Installer - Windows 10 on: push: @@ -17,7 +17,7 @@ concurrency: jobs: build: - name: Windows Installer on Windows 10 and Python 3.9 + name: Windows 10 Installer runs-on: [windows-2019] timeout-minutes: 50 From 0b0b82cb8cbcd5f264581b04ff81299f231c9a25 Mon Sep 17 00:00:00 2001 From: Kyle Altendorf Date: Tue, 26 Apr 2022 10:48:04 -0700 Subject: [PATCH 26/55] also m1 and arm64 for wheel checks (#11277) * also m1 and arm64 for wheel checks * account for self-hosted and pre-setup-python of m1 and arm64 runners * && * python3 * report python version * use docker on arm64 to get multiple python versions * flush * report more system information * except pycryptodome for now * more variables, simpler logic * corrections * switch to [macos, arm64] * add python version to job name * separate os and arch matrix axes * reorder matrixing * drop workflow name from job name * oops * skip python setup in docker cases * drop the containers * Update check_dependency_artifacts.py --- .../workflows/check_wheel_availability.yaml | 35 ++++++++++++++++--- build_scripts/check_dependency_artifacts.py | 14 ++++++++ 2 files changed, 44 insertions(+), 5 deletions(-) diff --git a/.github/workflows/check_wheel_availability.yaml b/.github/workflows/check_wheel_availability.yaml index c051bd485046..4563629cfd27 
100644 --- a/.github/workflows/check_wheel_availability.yaml +++ b/.github/workflows/check_wheel_availability.yaml @@ -17,20 +17,45 @@ concurrency: jobs: check_dependency_artifacts: - name: Check Dependency Artifacts - runs-on: ${{ matrix.os }} + name: ${{ matrix.os.name }} ${{ matrix.arch.name }} ${{ matrix.python-version }} + runs-on: ${{ matrix.os.runs-on[matrix.arch.matrix] }} strategy: fail-fast: false matrix: + os: + - name: Linux + matrix: linux + runs-on: + intel: ubuntu-latest + arm: [linux, arm64] + - name: macOS + matrix: macos + runs-on: + intel: macos-latest + arm: [macos, arm64] + arch: + - name: ARM64 + matrix: arm + - name: Intel + matrix: intel python-version: ['3.7', '3.8', '3.9'] - os: [macOS-latest, ubuntu-latest] + exclude: + - os: + matrix: macos + python-version: '3.7' + - os: + matrix: macos + arch: + matrix: arm + python-version: '3.8' steps: + - uses: Chia-Network/actions/clean-workspace@main + - name: Checkout Code uses: actions/checkout@v3 - - name: Setup Python environment - uses: actions/setup-python@v2 + - uses: Chia-Network/actions/setup-python@main with: python-version: ${{ matrix.python-version }} diff --git a/build_scripts/check_dependency_artifacts.py b/build_scripts/check_dependency_artifacts.py index 2c34a4c54f45..07fe57b834bc 100644 --- a/build_scripts/check_dependency_artifacts.py +++ b/build_scripts/check_dependency_artifacts.py @@ -1,5 +1,6 @@ import os import pathlib +import platform import subprocess import sys import tempfile @@ -20,6 +21,8 @@ def excepted(path: pathlib.Path) -> bool: def main() -> int: with tempfile.TemporaryDirectory() as directory_string: + print(f"Working in: {directory_string}") + print() directory_path = pathlib.Path(directory_string) extras = ["upnp"] @@ -30,6 +33,17 @@ def main() -> int: else: package_and_extras = package_path_string + print("Downloading packages for Python version:") + lines = [ + *sys.version.splitlines(), + "", + f"machine: {platform.machine()}", + f"platform: 
{platform.platform()}", + ] + for line in lines: + print(f" {line}") + print(flush=True) + subprocess.run( [ sys.executable, From e0deccdfbfdfb0ab604e352c8b22f80bdceb25eb Mon Sep 17 00:00:00 2001 From: Chris Marslender Date: Tue, 26 Apr 2022 13:27:14 -0500 Subject: [PATCH 27/55] Upload CLI RPMS to s3, create checksums, etc (#11316) --- .../workflows/build-linux-installer-rpm.yml | 24 ++++++++++++------- 1 file changed, 16 insertions(+), 8 deletions(-) diff --git a/.github/workflows/build-linux-installer-rpm.yml b/.github/workflows/build-linux-installer-rpm.yml index be74457ca73e..38f6d548627d 100644 --- a/.github/workflows/build-linux-installer-rpm.yml +++ b/.github/workflows/build-linux-installer-rpm.yml @@ -145,15 +145,17 @@ jobs: echo "CHIA_DEV_BUILD=$CHIA_DEV_BUILD" >>$GITHUB_ENV ls $GITHUB_WORKSPACE/build_scripts/final_installer/ aws s3 cp $GITHUB_WORKSPACE/build_scripts/final_installer/chia-blockchain-${CHIA_INSTALLER_VERSION}-1.x86_64.rpm s3://download.chia.net/dev/chia-blockchain-${CHIA_DEV_BUILD}-1.x86_64.rpm + aws s3 cp $GITHUB_WORKSPACE/build_scripts/final_installer/chia-blockchain-cli-${CHIA_INSTALLER_VERSION}-1.x86_64.rpm s3://download.chia.net/dev/chia-blockchain-cli-${CHIA_DEV_BUILD}-1.x86_64.rpm - name: Create Checksums if: startsWith(github.ref, 'refs/tags/') || github.ref == 'refs/heads/main' env: CHIA_INSTALLER_VERSION: ${{ steps.version_number.outputs.CHIA_INSTALLER_VERSION }} run: | - ls $GITHUB_WORKSPACE/build_scripts/final_installer/ - sha256sum $GITHUB_WORKSPACE/build_scripts/final_installer/chia-blockchain-${CHIA_INSTALLER_VERSION}-1.x86_64.rpm > $GITHUB_WORKSPACE/build_scripts/final_installer/chia-blockchain-${CHIA_INSTALLER_VERSION}-1.x86_64.rpm.sha256 - ls $GITHUB_WORKSPACE/build_scripts/final_installer/ + ls $GITHUB_WORKSPACE/build_scripts/final_installer/ + sha256sum $GITHUB_WORKSPACE/build_scripts/final_installer/chia-blockchain-${CHIA_INSTALLER_VERSION}-1.x86_64.rpm > 
$GITHUB_WORKSPACE/build_scripts/final_installer/chia-blockchain-${CHIA_INSTALLER_VERSION}-1.x86_64.rpm.sha256 + sha256sum $GITHUB_WORKSPACE/build_scripts/final_installer/chia-blockchain-cli-${CHIA_INSTALLER_VERSION}-1.x86_64.rpm > $GITHUB_WORKSPACE/build_scripts/final_installer/chia-blockchain-cli-${CHIA_INSTALLER_VERSION}-1.x86_64.rpm.sha256 + ls $GITHUB_WORKSPACE/build_scripts/final_installer/ - name: Install py3createtorrent if: startsWith(github.ref, 'refs/tags/') || github.ref == 'refs/heads/main' @@ -165,8 +167,9 @@ jobs: env: CHIA_INSTALLER_VERSION: ${{ steps.version_number.outputs.CHIA_INSTALLER_VERSION }} run: | - py3createtorrent -f -t udp://tracker.opentrackr.org:1337/announce $GITHUB_WORKSPACE/build_scripts/final_installer/chia-blockchain-${CHIA_INSTALLER_VERSION}-1.x86_64.rpm -o $GITHUB_WORKSPACE/build_scripts/final_installer/chia-blockchain-${CHIA_INSTALLER_VERSION}-1.x86_64.rpm.torrent --webseed https://download.chia.net/install/chia-blockchain-${CHIA_INSTALLER_VERSION}-1.x86_64.rpm - ls + py3createtorrent -f -t udp://tracker.opentrackr.org:1337/announce $GITHUB_WORKSPACE/build_scripts/final_installer/chia-blockchain-${CHIA_INSTALLER_VERSION}-1.x86_64.rpm -o $GITHUB_WORKSPACE/build_scripts/final_installer/chia-blockchain-${CHIA_INSTALLER_VERSION}-1.x86_64.rpm.torrent --webseed https://download.chia.net/install/chia-blockchain-${CHIA_INSTALLER_VERSION}-1.x86_64.rpm + py3createtorrent -f -t udp://tracker.opentrackr.org:1337/announce $GITHUB_WORKSPACE/build_scripts/final_installer/chia-blockchain-cli-${CHIA_INSTALLER_VERSION}-1.x86_64.rpm -o $GITHUB_WORKSPACE/build_scripts/final_installer/chia-blockchain-cli-${CHIA_INSTALLER_VERSION}-1.x86_64.rpm.torrent --webseed https://download.chia.net/install/chia-blockchain-cli-${CHIA_INSTALLER_VERSION}-1.x86_64.rpm + ls - name: Upload Beta Installer if: steps.check_secrets.outputs.HAS_SECRET && github.ref == 'refs/heads/main' @@ -175,15 +178,20 @@ jobs: run: | aws s3 cp 
$GITHUB_WORKSPACE/build_scripts/final_installer/chia-blockchain-${CHIA_INSTALLER_VERSION}-1.x86_64.rpm s3://download.chia.net/beta/chia-blockchain-1.x86_64_latest_beta.rpm aws s3 cp $GITHUB_WORKSPACE/build_scripts/final_installer/chia-blockchain-${CHIA_INSTALLER_VERSION}-1.x86_64.rpm.sha256 s3://download.chia.net/beta/chia-blockchain-1.x86_64_latest_beta.rpm.sha256 + aws s3 cp $GITHUB_WORKSPACE/build_scripts/final_installer/chia-blockchain-cli-${CHIA_INSTALLER_VERSION}-1.x86_64.rpm s3://download.chia.net/beta/chia-blockchain-cli-1.x86_64_latest_beta.rpm + aws s3 cp $GITHUB_WORKSPACE/build_scripts/final_installer/chia-blockchain-cli-${CHIA_INSTALLER_VERSION}-1.x86_64.rpm.sha256 s3://download.chia.net/beta/chia-blockchain-cli-1.x86_64_latest_beta.rpm.sha256 - name: Upload Release Files if: steps.check_secrets.outputs.HAS_SECRET && startsWith(github.ref, 'refs/tags/') env: CHIA_INSTALLER_VERSION: ${{ steps.version_number.outputs.CHIA_INSTALLER_VERSION }} run: | - aws s3 cp $GITHUB_WORKSPACE/build_scripts/final_installer/chia-blockchain-${CHIA_INSTALLER_VERSION}-1.x86_64.rpm s3://download.chia.net/install/ - aws s3 cp $GITHUB_WORKSPACE/build_scripts/final_installer/chia-blockchain-${CHIA_INSTALLER_VERSION}-1.x86_64.rpm.sha256 s3://download.chia.net/install/ - aws s3 cp $GITHUB_WORKSPACE/build_scripts/final_installer/chia-blockchain-${CHIA_INSTALLER_VERSION}-1.x86_64.rpm.torrent s3://download.chia.net/torrents/ + aws s3 cp $GITHUB_WORKSPACE/build_scripts/final_installer/chia-blockchain-${CHIA_INSTALLER_VERSION}-1.x86_64.rpm s3://download.chia.net/install/ + aws s3 cp $GITHUB_WORKSPACE/build_scripts/final_installer/chia-blockchain-${CHIA_INSTALLER_VERSION}-1.x86_64.rpm.sha256 s3://download.chia.net/install/ + aws s3 cp $GITHUB_WORKSPACE/build_scripts/final_installer/chia-blockchain-${CHIA_INSTALLER_VERSION}-1.x86_64.rpm.torrent s3://download.chia.net/torrents/ + aws s3 cp 
$GITHUB_WORKSPACE/build_scripts/final_installer/chia-blockchain-cli-${CHIA_INSTALLER_VERSION}-1.x86_64.rpm s3://download.chia.net/install/ + aws s3 cp $GITHUB_WORKSPACE/build_scripts/final_installer/chia-blockchain-cli-${CHIA_INSTALLER_VERSION}-1.x86_64.rpm.sha256 s3://download.chia.net/install/ + aws s3 cp $GITHUB_WORKSPACE/build_scripts/final_installer/chia-blockchain-cli-${CHIA_INSTALLER_VERSION}-1.x86_64.rpm.torrent s3://download.chia.net/torrents/ - name: Get tag name if: startsWith(github.ref, 'refs/tags/') From ceaca43b35ecbc496ace1e241fd879ea0613fcbd Mon Sep 17 00:00:00 2001 From: Kyle Altendorf Date: Tue, 26 Apr 2022 12:37:01 -0700 Subject: [PATCH 28/55] Support for Python 3.10 (#9930) * Support for Python 3.10 * Update install.sh to block Python 3.11 * websockets to 10.1 * Update workflows for Python 3.10 * single quote 3.10 * Enable fedora:35 (py3.10) installer script testing * rebuild workflows * fixup test-install-scripts.yml * add ignore for distutils deprecation in tests for now * asyncio.get_event_loop().run_until_complete() -> asyncio.run() * aiohttp==3.8.1 for python 3.10 support * use ssl.Purpose.CLIENT_AUTH for ssl_context_for_server() * rebuild workflows * use ssl_context_for_client() in BlockTools.get_daemon_ssl_context() * create a client context for the RpcServer to connect to the daemon * go back to asyncio.get_event_loop().run_until_complete() for now to recover 3.7 * ignore:There is no current event loop:DeprecationWarning * Ms.plot load perf2 (#10978) * 2.7 seconds -> 0.45 seconds * Merge * Work on create_plots refactor * Try to fix tests * Try to fix tests * Use new functions * Fix block_tools by adding dir * Extra argument * Try to fix cyclic import * isort * Drop warning * Some cleanups around `exclude_final_dir` and directory adding * Cleanup `min_mainnet_k_size` checks * Drop unrelated changes * Fixes after rebase * Fix cyclic import * Update tests/block_tools.py Co-authored-by: dustinface 
<35775977+xdustinface@users.noreply.github.com> * Update tests/block_tools.py Co-authored-by: dustinface <35775977+xdustinface@users.noreply.github.com> Co-authored-by: xdustinface Co-authored-by: dustinface <35775977+xdustinface@users.noreply.github.com> * remove 3.10 avoidance step from debian:bookworm installer testing * add 3.10 to wheel availability check workflow * add 3.10 to Install.ps1 supported Python versions for Windows * add jammy jellyfish to the install script test matrix * correct ubuntu:jammy job name * add 22.04 with Python 3.10 to install.sh Co-authored-by: Gene Hoffman Co-authored-by: Yostra Co-authored-by: Mariano Sorgente <3069354+mariano54@users.noreply.github.com> Co-authored-by: xdustinface Co-authored-by: dustinface <35775977+xdustinface@users.noreply.github.com> --- .../workflows/build-test-macos-blockchain.yml | 2 +- .github/workflows/build-test-macos-clvm.yml | 2 +- .../workflows/build-test-macos-core-cmds.yml | 2 +- .../build-test-macos-core-consensus.yml | 2 +- .../build-test-macos-core-custom_types.yml | 2 +- .../build-test-macos-core-daemon.yml | 2 +- ...ld-test-macos-core-full_node-full_sync.yml | 2 +- ...build-test-macos-core-full_node-stores.yml | 2 +- .../build-test-macos-core-full_node.yml | 2 +- .../build-test-macos-core-server.yml | 2 +- .../workflows/build-test-macos-core-ssl.yml | 2 +- .../workflows/build-test-macos-core-util.yml | 2 +- .github/workflows/build-test-macos-core.yml | 2 +- .../build-test-macos-farmer_harvester.yml | 2 +- .../workflows/build-test-macos-generator.yml | 2 +- .../workflows/build-test-macos-plot_sync.yml | 2 +- .../workflows/build-test-macos-plotting.yml | 2 +- .github/workflows/build-test-macos-pools.yml | 2 +- .../workflows/build-test-macos-simulation.yml | 2 +- .github/workflows/build-test-macos-tools.yml | 2 +- .github/workflows/build-test-macos-util.yml | 2 +- .../build-test-macos-wallet-cat_wallet.yml | 2 +- .../build-test-macos-wallet-did_wallet.yml | 2 +- 
.../build-test-macos-wallet-rl_wallet.yml | 2 +- .../workflows/build-test-macos-wallet-rpc.yml | 2 +- .../build-test-macos-wallet-simple_sync.yml | 2 +- .../build-test-macos-wallet-sync.yml | 2 +- .github/workflows/build-test-macos-wallet.yml | 2 +- .../build-test-macos-weight_proof.yml | 2 +- .../build-test-ubuntu-blockchain.yml | 2 +- .github/workflows/build-test-ubuntu-clvm.yml | 2 +- .../workflows/build-test-ubuntu-core-cmds.yml | 2 +- .../build-test-ubuntu-core-consensus.yml | 2 +- .../build-test-ubuntu-core-custom_types.yml | 2 +- .../build-test-ubuntu-core-daemon.yml | 2 +- ...d-test-ubuntu-core-full_node-full_sync.yml | 2 +- ...uild-test-ubuntu-core-full_node-stores.yml | 2 +- .../build-test-ubuntu-core-full_node.yml | 2 +- .../build-test-ubuntu-core-server.yml | 2 +- .../workflows/build-test-ubuntu-core-ssl.yml | 2 +- .../workflows/build-test-ubuntu-core-util.yml | 2 +- .github/workflows/build-test-ubuntu-core.yml | 2 +- .../build-test-ubuntu-farmer_harvester.yml | 2 +- .../workflows/build-test-ubuntu-generator.yml | 2 +- .../workflows/build-test-ubuntu-plot_sync.yml | 2 +- .../workflows/build-test-ubuntu-plotting.yml | 2 +- .github/workflows/build-test-ubuntu-pools.yml | 2 +- .../build-test-ubuntu-simulation.yml | 2 +- .github/workflows/build-test-ubuntu-tools.yml | 2 +- .github/workflows/build-test-ubuntu-util.yml | 2 +- .../build-test-ubuntu-wallet-cat_wallet.yml | 2 +- .../build-test-ubuntu-wallet-did_wallet.yml | 2 +- .../build-test-ubuntu-wallet-rl_wallet.yml | 2 +- .../build-test-ubuntu-wallet-rpc.yml | 2 +- .../build-test-ubuntu-wallet-simple_sync.yml | 2 +- .../build-test-ubuntu-wallet-sync.yml | 2 +- .../workflows/build-test-ubuntu-wallet.yml | 2 +- .../build-test-ubuntu-weight_proof.yml | 2 +- .../workflows/check_wheel_availability.yaml | 2 +- .github/workflows/test-install-scripts.yml | 22 +++++++------------ Install.ps1 | 2 +- install.sh | 17 +++++++++----- pytest.ini | 2 ++ tests/runner_templates/build-test-macos | 2 +- 
tests/runner_templates/build-test-ubuntu | 2 +- 65 files changed, 84 insertions(+), 81 deletions(-) diff --git a/.github/workflows/build-test-macos-blockchain.yml b/.github/workflows/build-test-macos-blockchain.yml index 7d718ba11b39..0fa679ad40f8 100644 --- a/.github/workflows/build-test-macos-blockchain.yml +++ b/.github/workflows/build-test-macos-blockchain.yml @@ -27,7 +27,7 @@ jobs: fail-fast: false max-parallel: 4 matrix: - python-version: [3.8, 3.9] + python-version: ['3.9', '3.10'] os: [macOS-latest] env: CHIA_ROOT: ${{ github.workspace }}/.chia/mainnet diff --git a/.github/workflows/build-test-macos-clvm.yml b/.github/workflows/build-test-macos-clvm.yml index 19955c44d476..534a45fb388d 100644 --- a/.github/workflows/build-test-macos-clvm.yml +++ b/.github/workflows/build-test-macos-clvm.yml @@ -27,7 +27,7 @@ jobs: fail-fast: false max-parallel: 4 matrix: - python-version: [3.8, 3.9] + python-version: ['3.9', '3.10'] os: [macOS-latest] env: CHIA_ROOT: ${{ github.workspace }}/.chia/mainnet diff --git a/.github/workflows/build-test-macos-core-cmds.yml b/.github/workflows/build-test-macos-core-cmds.yml index 5a3580742a12..8455e4ec7d40 100644 --- a/.github/workflows/build-test-macos-core-cmds.yml +++ b/.github/workflows/build-test-macos-core-cmds.yml @@ -27,7 +27,7 @@ jobs: fail-fast: false max-parallel: 4 matrix: - python-version: [3.8, 3.9] + python-version: ['3.9', '3.10'] os: [macOS-latest] env: CHIA_ROOT: ${{ github.workspace }}/.chia/mainnet diff --git a/.github/workflows/build-test-macos-core-consensus.yml b/.github/workflows/build-test-macos-core-consensus.yml index d9ffc2c928a7..90553fe7c657 100644 --- a/.github/workflows/build-test-macos-core-consensus.yml +++ b/.github/workflows/build-test-macos-core-consensus.yml @@ -27,7 +27,7 @@ jobs: fail-fast: false max-parallel: 4 matrix: - python-version: [3.8, 3.9] + python-version: ['3.9', '3.10'] os: [macOS-latest] env: CHIA_ROOT: ${{ github.workspace }}/.chia/mainnet diff --git 
a/.github/workflows/build-test-macos-core-custom_types.yml b/.github/workflows/build-test-macos-core-custom_types.yml index a3715506c4d9..420f07ce2e46 100644 --- a/.github/workflows/build-test-macos-core-custom_types.yml +++ b/.github/workflows/build-test-macos-core-custom_types.yml @@ -27,7 +27,7 @@ jobs: fail-fast: false max-parallel: 4 matrix: - python-version: [3.8, 3.9] + python-version: ['3.9', '3.10'] os: [macOS-latest] env: CHIA_ROOT: ${{ github.workspace }}/.chia/mainnet diff --git a/.github/workflows/build-test-macos-core-daemon.yml b/.github/workflows/build-test-macos-core-daemon.yml index 332c43d71065..baf4158ca31f 100644 --- a/.github/workflows/build-test-macos-core-daemon.yml +++ b/.github/workflows/build-test-macos-core-daemon.yml @@ -27,7 +27,7 @@ jobs: fail-fast: false max-parallel: 4 matrix: - python-version: [3.8, 3.9] + python-version: ['3.9', '3.10'] os: [macOS-latest] env: CHIA_ROOT: ${{ github.workspace }}/.chia/mainnet diff --git a/.github/workflows/build-test-macos-core-full_node-full_sync.yml b/.github/workflows/build-test-macos-core-full_node-full_sync.yml index 62c2c952db8d..535e43a1760d 100644 --- a/.github/workflows/build-test-macos-core-full_node-full_sync.yml +++ b/.github/workflows/build-test-macos-core-full_node-full_sync.yml @@ -27,7 +27,7 @@ jobs: fail-fast: false max-parallel: 4 matrix: - python-version: [3.8, 3.9] + python-version: ['3.9', '3.10'] os: [macOS-latest] env: CHIA_ROOT: ${{ github.workspace }}/.chia/mainnet diff --git a/.github/workflows/build-test-macos-core-full_node-stores.yml b/.github/workflows/build-test-macos-core-full_node-stores.yml index a92d2c90de49..f3dd00536dc3 100644 --- a/.github/workflows/build-test-macos-core-full_node-stores.yml +++ b/.github/workflows/build-test-macos-core-full_node-stores.yml @@ -27,7 +27,7 @@ jobs: fail-fast: false max-parallel: 4 matrix: - python-version: [3.8, 3.9] + python-version: ['3.9', '3.10'] os: [macOS-latest] env: CHIA_ROOT: ${{ github.workspace }}/.chia/mainnet diff 
--git a/.github/workflows/build-test-macos-core-full_node.yml b/.github/workflows/build-test-macos-core-full_node.yml index ab1a41ae62d9..ce8cefecf2b1 100644 --- a/.github/workflows/build-test-macos-core-full_node.yml +++ b/.github/workflows/build-test-macos-core-full_node.yml @@ -27,7 +27,7 @@ jobs: fail-fast: false max-parallel: 4 matrix: - python-version: [3.8, 3.9] + python-version: ['3.9', '3.10'] os: [macOS-latest] env: CHIA_ROOT: ${{ github.workspace }}/.chia/mainnet diff --git a/.github/workflows/build-test-macos-core-server.yml b/.github/workflows/build-test-macos-core-server.yml index b4c37663c2b3..1dea72b1d955 100644 --- a/.github/workflows/build-test-macos-core-server.yml +++ b/.github/workflows/build-test-macos-core-server.yml @@ -27,7 +27,7 @@ jobs: fail-fast: false max-parallel: 4 matrix: - python-version: [3.8, 3.9] + python-version: ['3.9', '3.10'] os: [macOS-latest] env: CHIA_ROOT: ${{ github.workspace }}/.chia/mainnet diff --git a/.github/workflows/build-test-macos-core-ssl.yml b/.github/workflows/build-test-macos-core-ssl.yml index 61711d49ab29..650f65381ec1 100644 --- a/.github/workflows/build-test-macos-core-ssl.yml +++ b/.github/workflows/build-test-macos-core-ssl.yml @@ -27,7 +27,7 @@ jobs: fail-fast: false max-parallel: 4 matrix: - python-version: [3.8, 3.9] + python-version: ['3.9', '3.10'] os: [macOS-latest] env: CHIA_ROOT: ${{ github.workspace }}/.chia/mainnet diff --git a/.github/workflows/build-test-macos-core-util.yml b/.github/workflows/build-test-macos-core-util.yml index 53744bf26dfc..7f7ae0c58c92 100644 --- a/.github/workflows/build-test-macos-core-util.yml +++ b/.github/workflows/build-test-macos-core-util.yml @@ -27,7 +27,7 @@ jobs: fail-fast: false max-parallel: 4 matrix: - python-version: [3.8, 3.9] + python-version: ['3.9', '3.10'] os: [macOS-latest] env: CHIA_ROOT: ${{ github.workspace }}/.chia/mainnet diff --git a/.github/workflows/build-test-macos-core.yml b/.github/workflows/build-test-macos-core.yml index 
812f9bcda113..a5cc05473496 100644 --- a/.github/workflows/build-test-macos-core.yml +++ b/.github/workflows/build-test-macos-core.yml @@ -27,7 +27,7 @@ jobs: fail-fast: false max-parallel: 4 matrix: - python-version: [3.8, 3.9] + python-version: ['3.9', '3.10'] os: [macOS-latest] env: CHIA_ROOT: ${{ github.workspace }}/.chia/mainnet diff --git a/.github/workflows/build-test-macos-farmer_harvester.yml b/.github/workflows/build-test-macos-farmer_harvester.yml index 38b76a042753..bc39c34161a0 100644 --- a/.github/workflows/build-test-macos-farmer_harvester.yml +++ b/.github/workflows/build-test-macos-farmer_harvester.yml @@ -27,7 +27,7 @@ jobs: fail-fast: false max-parallel: 4 matrix: - python-version: [3.8, 3.9] + python-version: ['3.9', '3.10'] os: [macOS-latest] env: CHIA_ROOT: ${{ github.workspace }}/.chia/mainnet diff --git a/.github/workflows/build-test-macos-generator.yml b/.github/workflows/build-test-macos-generator.yml index 897d03d4cd3f..00270f31f54b 100644 --- a/.github/workflows/build-test-macos-generator.yml +++ b/.github/workflows/build-test-macos-generator.yml @@ -27,7 +27,7 @@ jobs: fail-fast: false max-parallel: 4 matrix: - python-version: [3.8, 3.9] + python-version: ['3.9', '3.10'] os: [macOS-latest] env: CHIA_ROOT: ${{ github.workspace }}/.chia/mainnet diff --git a/.github/workflows/build-test-macos-plot_sync.yml b/.github/workflows/build-test-macos-plot_sync.yml index 3b72cfaff3c9..e398470845ee 100644 --- a/.github/workflows/build-test-macos-plot_sync.yml +++ b/.github/workflows/build-test-macos-plot_sync.yml @@ -27,7 +27,7 @@ jobs: fail-fast: false max-parallel: 4 matrix: - python-version: [3.8, 3.9] + python-version: ['3.9', '3.10'] os: [macOS-latest] env: CHIA_ROOT: ${{ github.workspace }}/.chia/mainnet diff --git a/.github/workflows/build-test-macos-plotting.yml b/.github/workflows/build-test-macos-plotting.yml index 4fa5344d0461..9092712bdacc 100644 --- a/.github/workflows/build-test-macos-plotting.yml +++ 
b/.github/workflows/build-test-macos-plotting.yml @@ -27,7 +27,7 @@ jobs: fail-fast: false max-parallel: 4 matrix: - python-version: [3.8, 3.9] + python-version: ['3.9', '3.10'] os: [macOS-latest] env: CHIA_ROOT: ${{ github.workspace }}/.chia/mainnet diff --git a/.github/workflows/build-test-macos-pools.yml b/.github/workflows/build-test-macos-pools.yml index 00e6e6fd8775..86410e7a9f74 100644 --- a/.github/workflows/build-test-macos-pools.yml +++ b/.github/workflows/build-test-macos-pools.yml @@ -27,7 +27,7 @@ jobs: fail-fast: false max-parallel: 4 matrix: - python-version: [3.8, 3.9] + python-version: ['3.9', '3.10'] os: [macOS-latest] env: CHIA_ROOT: ${{ github.workspace }}/.chia/mainnet diff --git a/.github/workflows/build-test-macos-simulation.yml b/.github/workflows/build-test-macos-simulation.yml index 91d0df91d07b..24f1f4580aee 100644 --- a/.github/workflows/build-test-macos-simulation.yml +++ b/.github/workflows/build-test-macos-simulation.yml @@ -27,7 +27,7 @@ jobs: fail-fast: false max-parallel: 4 matrix: - python-version: [3.8, 3.9] + python-version: ['3.9', '3.10'] os: [macOS-latest] env: CHIA_ROOT: ${{ github.workspace }}/.chia/mainnet diff --git a/.github/workflows/build-test-macos-tools.yml b/.github/workflows/build-test-macos-tools.yml index 8c4bf3268a31..677418fbfd58 100644 --- a/.github/workflows/build-test-macos-tools.yml +++ b/.github/workflows/build-test-macos-tools.yml @@ -27,7 +27,7 @@ jobs: fail-fast: false max-parallel: 4 matrix: - python-version: [3.8, 3.9] + python-version: ['3.9', '3.10'] os: [macOS-latest] env: CHIA_ROOT: ${{ github.workspace }}/.chia/mainnet diff --git a/.github/workflows/build-test-macos-util.yml b/.github/workflows/build-test-macos-util.yml index a1c0603ad058..dccb4318b52f 100644 --- a/.github/workflows/build-test-macos-util.yml +++ b/.github/workflows/build-test-macos-util.yml @@ -27,7 +27,7 @@ jobs: fail-fast: false max-parallel: 4 matrix: - python-version: [3.8, 3.9] + python-version: ['3.9', '3.10'] os: 
[macOS-latest] env: CHIA_ROOT: ${{ github.workspace }}/.chia/mainnet diff --git a/.github/workflows/build-test-macos-wallet-cat_wallet.yml b/.github/workflows/build-test-macos-wallet-cat_wallet.yml index ddf94b384859..110a20fba3e6 100644 --- a/.github/workflows/build-test-macos-wallet-cat_wallet.yml +++ b/.github/workflows/build-test-macos-wallet-cat_wallet.yml @@ -27,7 +27,7 @@ jobs: fail-fast: false max-parallel: 4 matrix: - python-version: [3.8, 3.9] + python-version: ['3.9', '3.10'] os: [macOS-latest] env: CHIA_ROOT: ${{ github.workspace }}/.chia/mainnet diff --git a/.github/workflows/build-test-macos-wallet-did_wallet.yml b/.github/workflows/build-test-macos-wallet-did_wallet.yml index 9a36c893eb68..425ab585b1c0 100644 --- a/.github/workflows/build-test-macos-wallet-did_wallet.yml +++ b/.github/workflows/build-test-macos-wallet-did_wallet.yml @@ -27,7 +27,7 @@ jobs: fail-fast: false max-parallel: 4 matrix: - python-version: [3.8, 3.9] + python-version: ['3.9', '3.10'] os: [macOS-latest] env: CHIA_ROOT: ${{ github.workspace }}/.chia/mainnet diff --git a/.github/workflows/build-test-macos-wallet-rl_wallet.yml b/.github/workflows/build-test-macos-wallet-rl_wallet.yml index 73aa78a65a91..a01f4dc5967f 100644 --- a/.github/workflows/build-test-macos-wallet-rl_wallet.yml +++ b/.github/workflows/build-test-macos-wallet-rl_wallet.yml @@ -27,7 +27,7 @@ jobs: fail-fast: false max-parallel: 4 matrix: - python-version: [3.8, 3.9] + python-version: ['3.9', '3.10'] os: [macOS-latest] env: CHIA_ROOT: ${{ github.workspace }}/.chia/mainnet diff --git a/.github/workflows/build-test-macos-wallet-rpc.yml b/.github/workflows/build-test-macos-wallet-rpc.yml index 1fac91c10ad3..603215dca66c 100644 --- a/.github/workflows/build-test-macos-wallet-rpc.yml +++ b/.github/workflows/build-test-macos-wallet-rpc.yml @@ -27,7 +27,7 @@ jobs: fail-fast: false max-parallel: 4 matrix: - python-version: [3.8, 3.9] + python-version: ['3.9', '3.10'] os: [macOS-latest] env: CHIA_ROOT: ${{ 
github.workspace }}/.chia/mainnet diff --git a/.github/workflows/build-test-macos-wallet-simple_sync.yml b/.github/workflows/build-test-macos-wallet-simple_sync.yml index 13b2340fdf2e..8462b3a071db 100644 --- a/.github/workflows/build-test-macos-wallet-simple_sync.yml +++ b/.github/workflows/build-test-macos-wallet-simple_sync.yml @@ -27,7 +27,7 @@ jobs: fail-fast: false max-parallel: 4 matrix: - python-version: [3.8, 3.9] + python-version: ['3.9', '3.10'] os: [macOS-latest] env: CHIA_ROOT: ${{ github.workspace }}/.chia/mainnet diff --git a/.github/workflows/build-test-macos-wallet-sync.yml b/.github/workflows/build-test-macos-wallet-sync.yml index 210934a9c776..705cd2b22f69 100644 --- a/.github/workflows/build-test-macos-wallet-sync.yml +++ b/.github/workflows/build-test-macos-wallet-sync.yml @@ -27,7 +27,7 @@ jobs: fail-fast: false max-parallel: 4 matrix: - python-version: [3.8, 3.9] + python-version: ['3.9', '3.10'] os: [macOS-latest] env: CHIA_ROOT: ${{ github.workspace }}/.chia/mainnet diff --git a/.github/workflows/build-test-macos-wallet.yml b/.github/workflows/build-test-macos-wallet.yml index 45c63cf94fe2..5ff85a333629 100644 --- a/.github/workflows/build-test-macos-wallet.yml +++ b/.github/workflows/build-test-macos-wallet.yml @@ -27,7 +27,7 @@ jobs: fail-fast: false max-parallel: 4 matrix: - python-version: [3.8, 3.9] + python-version: ['3.9', '3.10'] os: [macOS-latest] env: CHIA_ROOT: ${{ github.workspace }}/.chia/mainnet diff --git a/.github/workflows/build-test-macos-weight_proof.yml b/.github/workflows/build-test-macos-weight_proof.yml index c2c8235ea65e..61f867ac9ed9 100644 --- a/.github/workflows/build-test-macos-weight_proof.yml +++ b/.github/workflows/build-test-macos-weight_proof.yml @@ -27,7 +27,7 @@ jobs: fail-fast: false max-parallel: 4 matrix: - python-version: [3.8, 3.9] + python-version: ['3.9', '3.10'] os: [macOS-latest] env: CHIA_ROOT: ${{ github.workspace }}/.chia/mainnet diff --git a/.github/workflows/build-test-ubuntu-blockchain.yml 
b/.github/workflows/build-test-ubuntu-blockchain.yml index c669bb12a985..8e59898412df 100644 --- a/.github/workflows/build-test-ubuntu-blockchain.yml +++ b/.github/workflows/build-test-ubuntu-blockchain.yml @@ -27,7 +27,7 @@ jobs: fail-fast: false max-parallel: 4 matrix: - python-version: [3.7, 3.8, 3.9] + python-version: ['3.7', '3.8', '3.9', '3.10'] os: [ubuntu-latest] env: CHIA_ROOT: ${{ github.workspace }}/.chia/mainnet diff --git a/.github/workflows/build-test-ubuntu-clvm.yml b/.github/workflows/build-test-ubuntu-clvm.yml index 518babfae31a..03cf06b85cbf 100644 --- a/.github/workflows/build-test-ubuntu-clvm.yml +++ b/.github/workflows/build-test-ubuntu-clvm.yml @@ -27,7 +27,7 @@ jobs: fail-fast: false max-parallel: 4 matrix: - python-version: [3.7, 3.8, 3.9] + python-version: ['3.7', '3.8', '3.9', '3.10'] os: [ubuntu-latest] env: CHIA_ROOT: ${{ github.workspace }}/.chia/mainnet diff --git a/.github/workflows/build-test-ubuntu-core-cmds.yml b/.github/workflows/build-test-ubuntu-core-cmds.yml index f047165522d8..47497b8e609d 100644 --- a/.github/workflows/build-test-ubuntu-core-cmds.yml +++ b/.github/workflows/build-test-ubuntu-core-cmds.yml @@ -27,7 +27,7 @@ jobs: fail-fast: false max-parallel: 4 matrix: - python-version: [3.7, 3.8, 3.9] + python-version: ['3.7', '3.8', '3.9', '3.10'] os: [ubuntu-latest] env: CHIA_ROOT: ${{ github.workspace }}/.chia/mainnet diff --git a/.github/workflows/build-test-ubuntu-core-consensus.yml b/.github/workflows/build-test-ubuntu-core-consensus.yml index 069ca515d97a..808f94d7b86a 100644 --- a/.github/workflows/build-test-ubuntu-core-consensus.yml +++ b/.github/workflows/build-test-ubuntu-core-consensus.yml @@ -27,7 +27,7 @@ jobs: fail-fast: false max-parallel: 4 matrix: - python-version: [3.7, 3.8, 3.9] + python-version: ['3.7', '3.8', '3.9', '3.10'] os: [ubuntu-latest] env: CHIA_ROOT: ${{ github.workspace }}/.chia/mainnet diff --git a/.github/workflows/build-test-ubuntu-core-custom_types.yml 
b/.github/workflows/build-test-ubuntu-core-custom_types.yml index 134e25da6f2f..0657a19ae415 100644 --- a/.github/workflows/build-test-ubuntu-core-custom_types.yml +++ b/.github/workflows/build-test-ubuntu-core-custom_types.yml @@ -27,7 +27,7 @@ jobs: fail-fast: false max-parallel: 4 matrix: - python-version: [3.7, 3.8, 3.9] + python-version: ['3.7', '3.8', '3.9', '3.10'] os: [ubuntu-latest] env: CHIA_ROOT: ${{ github.workspace }}/.chia/mainnet diff --git a/.github/workflows/build-test-ubuntu-core-daemon.yml b/.github/workflows/build-test-ubuntu-core-daemon.yml index 59dc31a98c45..0554c79b7e2d 100644 --- a/.github/workflows/build-test-ubuntu-core-daemon.yml +++ b/.github/workflows/build-test-ubuntu-core-daemon.yml @@ -27,7 +27,7 @@ jobs: fail-fast: false max-parallel: 4 matrix: - python-version: [3.7, 3.8, 3.9] + python-version: ['3.7', '3.8', '3.9', '3.10'] os: [ubuntu-latest] env: CHIA_ROOT: ${{ github.workspace }}/.chia/mainnet diff --git a/.github/workflows/build-test-ubuntu-core-full_node-full_sync.yml b/.github/workflows/build-test-ubuntu-core-full_node-full_sync.yml index b22406b0b3f3..a21784797f40 100644 --- a/.github/workflows/build-test-ubuntu-core-full_node-full_sync.yml +++ b/.github/workflows/build-test-ubuntu-core-full_node-full_sync.yml @@ -27,7 +27,7 @@ jobs: fail-fast: false max-parallel: 4 matrix: - python-version: [3.7, 3.8, 3.9] + python-version: ['3.7', '3.8', '3.9', '3.10'] os: [ubuntu-latest] env: CHIA_ROOT: ${{ github.workspace }}/.chia/mainnet diff --git a/.github/workflows/build-test-ubuntu-core-full_node-stores.yml b/.github/workflows/build-test-ubuntu-core-full_node-stores.yml index 229dabe72435..cac0138e8fb2 100644 --- a/.github/workflows/build-test-ubuntu-core-full_node-stores.yml +++ b/.github/workflows/build-test-ubuntu-core-full_node-stores.yml @@ -27,7 +27,7 @@ jobs: fail-fast: false max-parallel: 4 matrix: - python-version: [3.7, 3.8, 3.9] + python-version: ['3.7', '3.8', '3.9', '3.10'] os: [ubuntu-latest] env: CHIA_ROOT: ${{ 
github.workspace }}/.chia/mainnet diff --git a/.github/workflows/build-test-ubuntu-core-full_node.yml b/.github/workflows/build-test-ubuntu-core-full_node.yml index 743abc39eae5..54cc927bb684 100644 --- a/.github/workflows/build-test-ubuntu-core-full_node.yml +++ b/.github/workflows/build-test-ubuntu-core-full_node.yml @@ -27,7 +27,7 @@ jobs: fail-fast: false max-parallel: 4 matrix: - python-version: [3.7, 3.8, 3.9] + python-version: ['3.7', '3.8', '3.9', '3.10'] os: [ubuntu-latest] env: CHIA_ROOT: ${{ github.workspace }}/.chia/mainnet diff --git a/.github/workflows/build-test-ubuntu-core-server.yml b/.github/workflows/build-test-ubuntu-core-server.yml index 1857c57280e5..d2f4578c1dd0 100644 --- a/.github/workflows/build-test-ubuntu-core-server.yml +++ b/.github/workflows/build-test-ubuntu-core-server.yml @@ -27,7 +27,7 @@ jobs: fail-fast: false max-parallel: 4 matrix: - python-version: [3.7, 3.8, 3.9] + python-version: ['3.7', '3.8', '3.9', '3.10'] os: [ubuntu-latest] env: CHIA_ROOT: ${{ github.workspace }}/.chia/mainnet diff --git a/.github/workflows/build-test-ubuntu-core-ssl.yml b/.github/workflows/build-test-ubuntu-core-ssl.yml index f5b9ecbcae27..c648a426b5cb 100644 --- a/.github/workflows/build-test-ubuntu-core-ssl.yml +++ b/.github/workflows/build-test-ubuntu-core-ssl.yml @@ -27,7 +27,7 @@ jobs: fail-fast: false max-parallel: 4 matrix: - python-version: [3.7, 3.8, 3.9] + python-version: ['3.7', '3.8', '3.9', '3.10'] os: [ubuntu-latest] env: CHIA_ROOT: ${{ github.workspace }}/.chia/mainnet diff --git a/.github/workflows/build-test-ubuntu-core-util.yml b/.github/workflows/build-test-ubuntu-core-util.yml index 1c4de1baefcc..0859d6799c41 100644 --- a/.github/workflows/build-test-ubuntu-core-util.yml +++ b/.github/workflows/build-test-ubuntu-core-util.yml @@ -27,7 +27,7 @@ jobs: fail-fast: false max-parallel: 4 matrix: - python-version: [3.7, 3.8, 3.9] + python-version: ['3.7', '3.8', '3.9', '3.10'] os: [ubuntu-latest] env: CHIA_ROOT: ${{ github.workspace 
}}/.chia/mainnet diff --git a/.github/workflows/build-test-ubuntu-core.yml b/.github/workflows/build-test-ubuntu-core.yml index 1e7d0749dcd9..da6a7be77dd8 100644 --- a/.github/workflows/build-test-ubuntu-core.yml +++ b/.github/workflows/build-test-ubuntu-core.yml @@ -27,7 +27,7 @@ jobs: fail-fast: false max-parallel: 4 matrix: - python-version: [3.7, 3.8, 3.9] + python-version: ['3.7', '3.8', '3.9', '3.10'] os: [ubuntu-latest] env: CHIA_ROOT: ${{ github.workspace }}/.chia/mainnet diff --git a/.github/workflows/build-test-ubuntu-farmer_harvester.yml b/.github/workflows/build-test-ubuntu-farmer_harvester.yml index 9248f0d27124..4e63e8643cf6 100644 --- a/.github/workflows/build-test-ubuntu-farmer_harvester.yml +++ b/.github/workflows/build-test-ubuntu-farmer_harvester.yml @@ -27,7 +27,7 @@ jobs: fail-fast: false max-parallel: 4 matrix: - python-version: [3.7, 3.8, 3.9] + python-version: ['3.7', '3.8', '3.9', '3.10'] os: [ubuntu-latest] env: CHIA_ROOT: ${{ github.workspace }}/.chia/mainnet diff --git a/.github/workflows/build-test-ubuntu-generator.yml b/.github/workflows/build-test-ubuntu-generator.yml index f1ea334f6ace..09faa6f945e3 100644 --- a/.github/workflows/build-test-ubuntu-generator.yml +++ b/.github/workflows/build-test-ubuntu-generator.yml @@ -27,7 +27,7 @@ jobs: fail-fast: false max-parallel: 4 matrix: - python-version: [3.7, 3.8, 3.9] + python-version: ['3.7', '3.8', '3.9', '3.10'] os: [ubuntu-latest] env: CHIA_ROOT: ${{ github.workspace }}/.chia/mainnet diff --git a/.github/workflows/build-test-ubuntu-plot_sync.yml b/.github/workflows/build-test-ubuntu-plot_sync.yml index 8be06ca85fdd..80647fe59f17 100644 --- a/.github/workflows/build-test-ubuntu-plot_sync.yml +++ b/.github/workflows/build-test-ubuntu-plot_sync.yml @@ -27,7 +27,7 @@ jobs: fail-fast: false max-parallel: 4 matrix: - python-version: [3.7, 3.8, 3.9] + python-version: ['3.7', '3.8', '3.9', '3.10'] os: [ubuntu-latest] env: CHIA_ROOT: ${{ github.workspace }}/.chia/mainnet diff --git 
a/.github/workflows/build-test-ubuntu-plotting.yml b/.github/workflows/build-test-ubuntu-plotting.yml index 45ed4bdb38bc..4fca372fb3e0 100644 --- a/.github/workflows/build-test-ubuntu-plotting.yml +++ b/.github/workflows/build-test-ubuntu-plotting.yml @@ -27,7 +27,7 @@ jobs: fail-fast: false max-parallel: 4 matrix: - python-version: [3.7, 3.8, 3.9] + python-version: ['3.7', '3.8', '3.9', '3.10'] os: [ubuntu-latest] env: CHIA_ROOT: ${{ github.workspace }}/.chia/mainnet diff --git a/.github/workflows/build-test-ubuntu-pools.yml b/.github/workflows/build-test-ubuntu-pools.yml index 0ce08c056fcc..a26e9585eadd 100644 --- a/.github/workflows/build-test-ubuntu-pools.yml +++ b/.github/workflows/build-test-ubuntu-pools.yml @@ -27,7 +27,7 @@ jobs: fail-fast: false max-parallel: 4 matrix: - python-version: [3.7, 3.8, 3.9] + python-version: ['3.7', '3.8', '3.9', '3.10'] os: [ubuntu-latest] env: CHIA_ROOT: ${{ github.workspace }}/.chia/mainnet diff --git a/.github/workflows/build-test-ubuntu-simulation.yml b/.github/workflows/build-test-ubuntu-simulation.yml index 078670f96bd0..6f6642a01070 100644 --- a/.github/workflows/build-test-ubuntu-simulation.yml +++ b/.github/workflows/build-test-ubuntu-simulation.yml @@ -27,7 +27,7 @@ jobs: fail-fast: false max-parallel: 4 matrix: - python-version: [3.7, 3.8, 3.9] + python-version: ['3.7', '3.8', '3.9', '3.10'] os: [ubuntu-latest] env: CHIA_ROOT: ${{ github.workspace }}/.chia/mainnet diff --git a/.github/workflows/build-test-ubuntu-tools.yml b/.github/workflows/build-test-ubuntu-tools.yml index b9758af50e17..b22ce9c392ab 100644 --- a/.github/workflows/build-test-ubuntu-tools.yml +++ b/.github/workflows/build-test-ubuntu-tools.yml @@ -27,7 +27,7 @@ jobs: fail-fast: false max-parallel: 4 matrix: - python-version: [3.7, 3.8, 3.9] + python-version: ['3.7', '3.8', '3.9', '3.10'] os: [ubuntu-latest] env: CHIA_ROOT: ${{ github.workspace }}/.chia/mainnet diff --git a/.github/workflows/build-test-ubuntu-util.yml 
b/.github/workflows/build-test-ubuntu-util.yml index 5af0aaedde20..4d178e5b3220 100644 --- a/.github/workflows/build-test-ubuntu-util.yml +++ b/.github/workflows/build-test-ubuntu-util.yml @@ -27,7 +27,7 @@ jobs: fail-fast: false max-parallel: 4 matrix: - python-version: [3.7, 3.8, 3.9] + python-version: ['3.7', '3.8', '3.9', '3.10'] os: [ubuntu-latest] env: CHIA_ROOT: ${{ github.workspace }}/.chia/mainnet diff --git a/.github/workflows/build-test-ubuntu-wallet-cat_wallet.yml b/.github/workflows/build-test-ubuntu-wallet-cat_wallet.yml index d06f9b90b7b7..2df63f20c748 100644 --- a/.github/workflows/build-test-ubuntu-wallet-cat_wallet.yml +++ b/.github/workflows/build-test-ubuntu-wallet-cat_wallet.yml @@ -27,7 +27,7 @@ jobs: fail-fast: false max-parallel: 4 matrix: - python-version: [3.7, 3.8, 3.9] + python-version: ['3.7', '3.8', '3.9', '3.10'] os: [ubuntu-latest] env: CHIA_ROOT: ${{ github.workspace }}/.chia/mainnet diff --git a/.github/workflows/build-test-ubuntu-wallet-did_wallet.yml b/.github/workflows/build-test-ubuntu-wallet-did_wallet.yml index fd44748cea34..61601566bd53 100644 --- a/.github/workflows/build-test-ubuntu-wallet-did_wallet.yml +++ b/.github/workflows/build-test-ubuntu-wallet-did_wallet.yml @@ -27,7 +27,7 @@ jobs: fail-fast: false max-parallel: 4 matrix: - python-version: [3.7, 3.8, 3.9] + python-version: ['3.7', '3.8', '3.9', '3.10'] os: [ubuntu-latest] env: CHIA_ROOT: ${{ github.workspace }}/.chia/mainnet diff --git a/.github/workflows/build-test-ubuntu-wallet-rl_wallet.yml b/.github/workflows/build-test-ubuntu-wallet-rl_wallet.yml index 92ed61663d2f..7216abf63518 100644 --- a/.github/workflows/build-test-ubuntu-wallet-rl_wallet.yml +++ b/.github/workflows/build-test-ubuntu-wallet-rl_wallet.yml @@ -27,7 +27,7 @@ jobs: fail-fast: false max-parallel: 4 matrix: - python-version: [3.7, 3.8, 3.9] + python-version: ['3.7', '3.8', '3.9', '3.10'] os: [ubuntu-latest] env: CHIA_ROOT: ${{ github.workspace }}/.chia/mainnet diff --git 
a/.github/workflows/build-test-ubuntu-wallet-rpc.yml b/.github/workflows/build-test-ubuntu-wallet-rpc.yml index eb70761dfa84..4f08f05421ea 100644 --- a/.github/workflows/build-test-ubuntu-wallet-rpc.yml +++ b/.github/workflows/build-test-ubuntu-wallet-rpc.yml @@ -27,7 +27,7 @@ jobs: fail-fast: false max-parallel: 4 matrix: - python-version: [3.7, 3.8, 3.9] + python-version: ['3.7', '3.8', '3.9', '3.10'] os: [ubuntu-latest] env: CHIA_ROOT: ${{ github.workspace }}/.chia/mainnet diff --git a/.github/workflows/build-test-ubuntu-wallet-simple_sync.yml b/.github/workflows/build-test-ubuntu-wallet-simple_sync.yml index 00c16db514a3..adf75586d6b4 100644 --- a/.github/workflows/build-test-ubuntu-wallet-simple_sync.yml +++ b/.github/workflows/build-test-ubuntu-wallet-simple_sync.yml @@ -27,7 +27,7 @@ jobs: fail-fast: false max-parallel: 4 matrix: - python-version: [3.7, 3.8, 3.9] + python-version: ['3.7', '3.8', '3.9', '3.10'] os: [ubuntu-latest] env: CHIA_ROOT: ${{ github.workspace }}/.chia/mainnet diff --git a/.github/workflows/build-test-ubuntu-wallet-sync.yml b/.github/workflows/build-test-ubuntu-wallet-sync.yml index d9d098edad01..1aecaa36c138 100644 --- a/.github/workflows/build-test-ubuntu-wallet-sync.yml +++ b/.github/workflows/build-test-ubuntu-wallet-sync.yml @@ -27,7 +27,7 @@ jobs: fail-fast: false max-parallel: 4 matrix: - python-version: [3.7, 3.8, 3.9] + python-version: ['3.7', '3.8', '3.9', '3.10'] os: [ubuntu-latest] env: CHIA_ROOT: ${{ github.workspace }}/.chia/mainnet diff --git a/.github/workflows/build-test-ubuntu-wallet.yml b/.github/workflows/build-test-ubuntu-wallet.yml index 71b6ef1ba306..dffddc492cc1 100644 --- a/.github/workflows/build-test-ubuntu-wallet.yml +++ b/.github/workflows/build-test-ubuntu-wallet.yml @@ -27,7 +27,7 @@ jobs: fail-fast: false max-parallel: 4 matrix: - python-version: [3.7, 3.8, 3.9] + python-version: ['3.7', '3.8', '3.9', '3.10'] os: [ubuntu-latest] env: CHIA_ROOT: ${{ github.workspace }}/.chia/mainnet diff --git 
a/.github/workflows/build-test-ubuntu-weight_proof.yml b/.github/workflows/build-test-ubuntu-weight_proof.yml index 60fe28bdfc74..a80a04475c6e 100644 --- a/.github/workflows/build-test-ubuntu-weight_proof.yml +++ b/.github/workflows/build-test-ubuntu-weight_proof.yml @@ -27,7 +27,7 @@ jobs: fail-fast: false max-parallel: 4 matrix: - python-version: [3.7, 3.8, 3.9] + python-version: ['3.7', '3.8', '3.9', '3.10'] os: [ubuntu-latest] env: CHIA_ROOT: ${{ github.workspace }}/.chia/mainnet diff --git a/.github/workflows/check_wheel_availability.yaml b/.github/workflows/check_wheel_availability.yaml index 4563629cfd27..7aea5be5ee25 100644 --- a/.github/workflows/check_wheel_availability.yaml +++ b/.github/workflows/check_wheel_availability.yaml @@ -38,7 +38,7 @@ jobs: matrix: arm - name: Intel matrix: intel - python-version: ['3.7', '3.8', '3.9'] + python-version: ['3.7', '3.8', '3.9', '3.10'] exclude: - os: matrix: macos diff --git a/.github/workflows/test-install-scripts.yml b/.github/workflows/test-install-scripts.yml index 6ad2cfc3852e..021409c6a170 100644 --- a/.github/workflows/test-install-scripts.yml +++ b/.github/workflows/test-install-scripts.yml @@ -90,10 +90,10 @@ jobs: type: fedora # (34, 3.9) https://packages.fedoraproject.org/search?query=python3&releases=Fedora+34&start=0 url: "docker://fedora:34" -# - name: fedora:35 -# type: fedora -# # (35, 3.10) https://packages.fedoraproject.org/search?query=python3&releases=Fedora+35&start=0 -# url: "docker://fedora:35" + - name: fedora:35 + type: fedora + # (35, 3.10) https://packages.fedoraproject.org/search?query=python3&releases=Fedora+35&start=0 + url: "docker://fedora:35" - name: rockylinux:8 type: rocky url: "docker://rockylinux:8" @@ -113,6 +113,10 @@ jobs: type: ubuntu # https://packages.ubuntu.com/impish/python3 (21.10, 3.9) url: "docker://ubuntu:impish" + - name: ubuntu:jammy (22.04) + type: ubuntu + # https://packages.ubuntu.com/jammy/python3 (22.04, 3.10) + url: "docker://ubuntu:jammy" - name: 
linuxmintd/mint19.1-amd64 (Tessa) type: mint # 3.6 default with an option for 3.7 @@ -191,16 +195,6 @@ jobs: apt-get --yes update apt-get install --yes git lsb-release sudo - # @TODO this step can be removed once Python 3.10 is supported - # Python 3.10 is now the default in bookworm, so install 3.9 specifically so install does not fail - - name: Prepare debian:bookworm - if: ${{ matrix.distribution.name == 'debian:bookworm' }} - env: - DEBIAN_FRONTEND: noninteractive - run: | - apt-get update -y - apt-get install -y python3.9-venv - - name: Prepare Fedora if: ${{ matrix.distribution.type == 'fedora' }} run: | diff --git a/Install.ps1 b/Install.ps1 index 8350d2082c93..89acb6928c4f 100644 --- a/Install.ps1 +++ b/Install.ps1 @@ -43,7 +43,7 @@ if ($null -eq (Get-Command py -ErrorAction SilentlyContinue)) Exit 1 } -$supportedPythonVersions = "3.9", "3.8", "3.7" +$supportedPythonVersions = "3.10", "3.9", "3.8", "3.7" if (Test-Path env:INSTALL_PYTHON_VERSION) { $pythonVersion = $env:INSTALL_PYTHON_VERSION diff --git a/install.sh b/install.sh index 8e2b97131e9e..ced5ad92c17f 100755 --- a/install.sh +++ b/install.sh @@ -59,6 +59,7 @@ git submodule update --init mozilla-ca UBUNTU_PRE_2004=0 UBUNTU_2000=0 UBUNTU_2100=0 +UBUNTU_2200=0 if $UBUNTU; then LSB_RELEASE=$(lsb_release -rs) @@ -71,8 +72,10 @@ if $UBUNTU; then UBUNTU_PRE_2004=1 elif [ "$(echo "$LSB_RELEASE<21" | bc)" = "1" ]; then UBUNTU_2000=1 - else + elif [ "$(echo "$LSB_RELEASE<22" | bc)" = "1" ]; then UBUNTU_2100=1 + else + UBUNTU_2200=1 fi fi @@ -132,9 +135,13 @@ if [ "$(uname)" = "Linux" ]; then sudo apt-get update sudo apt-get install -y python3.8-venv python3-distutils openssl elif [ "$UBUNTU_2100" = "1" ]; then - echo "Installing on Ubuntu 21.04 or newer." + echo "Installing on Ubuntu 21.04." sudo apt-get update sudo apt-get install -y python3.9-venv python3-distutils openssl + elif [ "$UBUNTU_2200" = "1" ]; then + echo "Installing on Ubuntu 22.04 LTS or newer." 
+ sudo apt-get update + sudo apt-get install -y python3.10-venv python3-distutils openssl elif [ "$DEBIAN" = "true" ]; then echo "Installing on Debian." sudo apt-get update @@ -194,14 +201,14 @@ fi find_python() { set +e unset BEST_VERSION - for V in 39 3.9 38 3.8 37 3.7 3; do + for V in 310 3.10 39 3.9 38 3.8 37 3.7 3; do if command -v python$V >/dev/null; then if [ "$BEST_VERSION" = "" ]; then BEST_VERSION=$V if [ "$BEST_VERSION" = "3" ]; then PY3_VERSION=$(python$BEST_VERSION --version | cut -d ' ' -f2) - if [[ "$PY3_VERSION" =~ 3.10.* ]]; then - echo "Chia requires Python version <= 3.9.10" + if [[ "$PY3_VERSION" =~ 3.11.* ]]; then + echo "Chia requires Python version < 3.11.0" echo "Current Python version = $PY3_VERSION" # If Arch, direct to Arch Wiki if type pacman >/dev/null 2>&1 && [ -f "/etc/arch-release" ]; then diff --git a/pytest.ini b/pytest.ini index 953e5cbf4d8a..628c1af61be8 100644 --- a/pytest.ini +++ b/pytest.ini @@ -21,3 +21,5 @@ filterwarnings = ignore:Exception ignored in:pytest.PytestUnraisableExceptionWarning ignore:cannot collect test class:pytest.PytestCollectionWarning ignore:The loop argument is deprecated since Python 3\.8, and scheduled for removal in Python 3\.10.:DeprecationWarning + ignore:The distutils package is deprecated:DeprecationWarning + ignore:There is no current event loop:DeprecationWarning diff --git a/tests/runner_templates/build-test-macos b/tests/runner_templates/build-test-macos index b5c8cbc00972..68abf4148f32 100644 --- a/tests/runner_templates/build-test-macos +++ b/tests/runner_templates/build-test-macos @@ -27,7 +27,7 @@ jobs: fail-fast: false max-parallel: 4 matrix: - python-version: [3.8, 3.9] + python-version: ['3.9', '3.10'] os: [macOS-latest] env: CHIA_ROOT: ${{ github.workspace }}/.chia/mainnet diff --git a/tests/runner_templates/build-test-ubuntu b/tests/runner_templates/build-test-ubuntu index 52fe6a587e58..3816acf7aa7e 100644 --- a/tests/runner_templates/build-test-ubuntu +++ 
b/tests/runner_templates/build-test-ubuntu @@ -27,7 +27,7 @@ jobs: fail-fast: false max-parallel: 4 matrix: - python-version: [3.7, 3.8, 3.9] + python-version: ['3.7', '3.8', '3.9', '3.10'] os: [ubuntu-latest] env: CHIA_ROOT: ${{ github.workspace }}/.chia/mainnet From 5b254884f6bc4792386bc55bc9a6d66005aa3bcd Mon Sep 17 00:00:00 2001 From: Chris Marslender Date: Tue, 26 Apr 2022 16:18:16 -0500 Subject: [PATCH 29/55] Build installers on push to release branches (#11321) --- .github/workflows/build-linux-arm64-installer.yml | 1 + .github/workflows/build-linux-installer-deb.yml | 1 + .github/workflows/build-linux-installer-rpm.yml | 1 + .github/workflows/build-macos-installer.yml | 1 + .github/workflows/build-macos-m1-installer.yml | 1 + .github/workflows/build-windows-installer.yml | 1 + 6 files changed, 6 insertions(+) diff --git a/.github/workflows/build-linux-arm64-installer.yml b/.github/workflows/build-linux-arm64-installer.yml index 418cebd2827c..cc27b1877632 100644 --- a/.github/workflows/build-linux-arm64-installer.yml +++ b/.github/workflows/build-linux-arm64-installer.yml @@ -4,6 +4,7 @@ on: push: branches: - main + - 'release/**' tags: - '**' pull_request: diff --git a/.github/workflows/build-linux-installer-deb.yml b/.github/workflows/build-linux-installer-deb.yml index 3147ffc05387..6aa183cddd3d 100644 --- a/.github/workflows/build-linux-installer-deb.yml +++ b/.github/workflows/build-linux-installer-deb.yml @@ -5,6 +5,7 @@ on: push: branches: - main + - 'release/**' tags: - '**' pull_request: diff --git a/.github/workflows/build-linux-installer-rpm.yml b/.github/workflows/build-linux-installer-rpm.yml index 38f6d548627d..2f0e37b45ee6 100644 --- a/.github/workflows/build-linux-installer-rpm.yml +++ b/.github/workflows/build-linux-installer-rpm.yml @@ -5,6 +5,7 @@ on: push: branches: - main + - 'release/**' tags: - '**' pull_request: diff --git a/.github/workflows/build-macos-installer.yml b/.github/workflows/build-macos-installer.yml index 
6748cc037967..98855f7f7b2b 100644 --- a/.github/workflows/build-macos-installer.yml +++ b/.github/workflows/build-macos-installer.yml @@ -4,6 +4,7 @@ on: push: branches: - main + - 'release/**' tags: - '**' pull_request: diff --git a/.github/workflows/build-macos-m1-installer.yml b/.github/workflows/build-macos-m1-installer.yml index d234c1514ddc..faa29793daf1 100644 --- a/.github/workflows/build-macos-m1-installer.yml +++ b/.github/workflows/build-macos-m1-installer.yml @@ -4,6 +4,7 @@ on: push: branches: - main + - 'release/**' tags: - '**' pull_request: diff --git a/.github/workflows/build-windows-installer.yml b/.github/workflows/build-windows-installer.yml index 15fa47090edb..5ce9ad16a3f6 100644 --- a/.github/workflows/build-windows-installer.yml +++ b/.github/workflows/build-windows-installer.yml @@ -4,6 +4,7 @@ on: push: branches: - main + - 'release/**' tags: - '**' pull_request: From ec23a8b8ed65bc790a937b1f932c0b9f7b7f640a Mon Sep 17 00:00:00 2001 From: Chris Marslender Date: Wed, 27 Apr 2022 18:10:41 -0500 Subject: [PATCH 30/55] Mark workspace safe for arm installers (#11339) --- .github/workflows/build-linux-arm64-installer.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/.github/workflows/build-linux-arm64-installer.yml b/.github/workflows/build-linux-arm64-installer.yml index cc27b1877632..f2b0f97c6e5b 100644 --- a/.github/workflows/build-linux-arm64-installer.yml +++ b/.github/workflows/build-linux-arm64-installer.yml @@ -31,6 +31,9 @@ jobs: steps: - uses: Chia-Network/actions/clean-workspace@main + - name: Add safe git directory + uses: Chia-Network/actions/git-mark-workspace-safe@main + - name: Checkout Code uses: actions/checkout@v3 with: From aeb5629a60b70d22f9c217fff66084f1e912aa1e Mon Sep 17 00:00:00 2001 From: Mariano Sorgente <3069354+mariano54@users.noreply.github.com> Date: Wed, 27 Apr 2022 17:07:41 -0400 Subject: [PATCH 31/55] Fix issue with missing coins (#11338) * raise error when request fails * Gather when cancelling sync * 
Increase timeout * Increase timeout even more --- chia/wallet/util/wallet_sync_utils.py | 17 +++++++++-------- chia/wallet/wallet_node.py | 3 +++ 2 files changed, 12 insertions(+), 8 deletions(-) diff --git a/chia/wallet/util/wallet_sync_utils.py b/chia/wallet/util/wallet_sync_utils.py index 01e1a66ef6ac..484edcf35b26 100644 --- a/chia/wallet/util/wallet_sync_utils.py +++ b/chia/wallet/util/wallet_sync_utils.py @@ -57,10 +57,10 @@ async def subscribe_to_phs( Tells full nodes that we are interested in puzzle hashes, and returns the response. """ msg = wallet_protocol.RegisterForPhUpdates(puzzle_hashes, uint32(max(min_height, uint32(0)))) - all_coins_state: Optional[RespondToPhUpdates] = await peer.register_interest_in_puzzle_hash(msg) - if all_coins_state is not None: - return all_coins_state.coin_states - return [] + all_coins_state: Optional[RespondToPhUpdates] = await peer.register_interest_in_puzzle_hash(msg, timeout=300) + if all_coins_state is None: + raise ValueError(f"None response from peer {peer.peer_host} for register_interest_in_puzzle_hash") + return all_coins_state.coin_states async def subscribe_to_coin_updates( @@ -72,10 +72,11 @@ async def subscribe_to_coin_updates( Tells full nodes that we are interested in coin ids, and returns the response. 
""" msg = wallet_protocol.RegisterForCoinUpdates(coin_names, uint32(max(0, min_height))) - all_coins_state: Optional[RespondToCoinUpdates] = await peer.register_interest_in_coin(msg) - if all_coins_state is not None: - return all_coins_state.coin_states - return [] + all_coins_state: Optional[RespondToCoinUpdates] = await peer.register_interest_in_coin(msg, timeout=300) + + if all_coins_state is None: + raise ValueError(f"None response from peer {peer.peer_host} for register_interest_in_coin") + return all_coins_state.coin_states def validate_additions( diff --git a/chia/wallet/wallet_node.py b/chia/wallet/wallet_node.py index b64f38cf1a7e..e24d93cac270 100644 --- a/chia/wallet/wallet_node.py +++ b/chia/wallet/wallet_node.py @@ -699,9 +699,11 @@ async def receive_and_validate(inner_states: List[CoinState], inner_idx_start: i for states in chunks(items, chunk_size): if self.server is None: self.log.error("No server") + await asyncio.gather(*all_tasks) return False if peer.peer_node_id not in self.server.all_connections: self.log.error(f"Disconnected from peer {peer.peer_node_id} host {peer.peer_host}") + await asyncio.gather(*all_tasks) return False if trusted: async with self.wallet_state_manager.db_wrapper.lock: @@ -726,6 +728,7 @@ async def receive_and_validate(inner_states: List[CoinState], inner_idx_start: i await asyncio.sleep(0.1) if self._shut_down: self.log.info("Terminating receipt and validation due to shut down request") + await asyncio.gather(*all_tasks) return False concurrent_tasks_cs_heights.append(last_change_height_cs(states[0])) all_tasks.append(asyncio.create_task(receive_and_validate(states, idx, concurrent_tasks_cs_heights))) From 351fd1de56c775071bfd1431c4e8718784483e6a Mon Sep 17 00:00:00 2001 From: dustinface <35775977+xdustinface@users.noreply.github.com> Date: Fri, 29 Apr 2022 04:36:21 +0200 Subject: [PATCH 32/55] farmer|rpc: Introduce `get_harvesters_summary` RPC endpoint (#11245) --- chia/farmer/farmer.py | 4 +-- 
chia/plot_sync/receiver.py | 11 +++--- chia/rpc/farmer_rpc_api.py | 6 +++- chia/rpc/farmer_rpc_client.py | 3 ++ chia/util/misc.py | 7 ++++ tests/core/test_farmer_harvester_rpc.py | 32 +++++++++++++----- tests/plot_sync/test_receiver.py | 45 ++++++++++--------------- 7 files changed, 64 insertions(+), 44 deletions(-) diff --git a/chia/farmer/farmer.py b/chia/farmer/farmer.py index f7e50e9ed89e..6b451f5bbc37 100644 --- a/chia/farmer/farmer.py +++ b/chia/farmer/farmer.py @@ -629,13 +629,13 @@ async def generate_login_link(self, launcher_id: bytes32) -> Optional[str]: return None - async def get_harvesters(self) -> Dict: + async def get_harvesters(self, counts_only: bool = False) -> Dict: harvesters: List = [] for connection in self.server.get_connections(NodeType.HARVESTER): self.log.debug(f"get_harvesters host: {connection.peer_host}, node_id: {connection.peer_node_id}") receiver = self.plot_sync_receivers.get(connection.peer_node_id) if receiver is not None: - harvesters.append(receiver.to_dict()) + harvesters.append(receiver.to_dict(counts_only)) else: self.log.debug( f"get_harvesters invalid peer: {connection.peer_host}, node_id: {connection.peer_node_id}" diff --git a/chia/plot_sync/receiver.py b/chia/plot_sync/receiver.py index 4df791b53112..e56d5e610dc3 100644 --- a/chia/plot_sync/receiver.py +++ b/chia/plot_sync/receiver.py @@ -25,6 +25,7 @@ from chia.server.ws_connection import ProtocolMessageTypes, WSChiaConnection, make_msg from chia.types.blockchain_format.sized_bytes import bytes32 from chia.util.ints import int16, uint64 +from chia.util.misc import get_list_or_len from chia.util.streamable import _T_Streamable log = logging.getLogger(__name__) @@ -287,17 +288,17 @@ async def _sync_done(self, data: PlotSyncDone) -> None: async def sync_done(self, data: PlotSyncDone) -> None: await self._process(self._sync_done, ProtocolMessageTypes.plot_sync_done, data) - def to_dict(self) -> Dict[str, Any]: + def to_dict(self, counts_only: bool = False) -> Dict[str, 
Any]: result: Dict[str, Any] = { "connection": { "node_id": self._connection.peer_node_id, "host": self._connection.peer_host, "port": self._connection.peer_port, }, - "plots": list(self._plots.values()), - "failed_to_open_filenames": self._invalid, - "no_key_filenames": self._keys_missing, - "duplicates": self._duplicates, + "plots": get_list_or_len(list(self._plots.values()), counts_only), + "failed_to_open_filenames": get_list_or_len(self._invalid, counts_only), + "no_key_filenames": get_list_or_len(self._keys_missing, counts_only), + "duplicates": get_list_or_len(self._duplicates, counts_only), } if self._last_sync_time != 0: result["last_sync_time"] = self._last_sync_time diff --git a/chia/rpc/farmer_rpc_api.py b/chia/rpc/farmer_rpc_api.py index 396a3c47629c..ba6453693b66 100644 --- a/chia/rpc/farmer_rpc_api.py +++ b/chia/rpc/farmer_rpc_api.py @@ -20,6 +20,7 @@ def get_routes(self) -> Dict[str, Callable]: "/get_pool_state": self.get_pool_state, "/set_payout_instructions": self.set_payout_instructions, "/get_harvesters": self.get_harvesters, + "/get_harvesters_summary": self.get_harvesters_summary, "/get_pool_login_link": self.get_pool_login_link, } @@ -123,7 +124,10 @@ async def set_payout_instructions(self, request: Dict) -> Dict: return {} async def get_harvesters(self, _: Dict): - return await self.service.get_harvesters() + return await self.service.get_harvesters(False) + + async def get_harvesters_summary(self, _: Dict[str, object]) -> Dict[str, object]: + return await self.service.get_harvesters(True) async def get_pool_login_link(self, request: Dict) -> Dict: launcher_id: bytes32 = bytes32(hexstr_to_bytes(request["launcher_id"])) diff --git a/chia/rpc/farmer_rpc_client.py b/chia/rpc/farmer_rpc_client.py index 612f42e9d093..23d348e9a9ff 100644 --- a/chia/rpc/farmer_rpc_client.py +++ b/chia/rpc/farmer_rpc_client.py @@ -52,6 +52,9 @@ async def set_payout_instructions(self, launcher_id: bytes32, payout_instruction async def get_harvesters(self) -> 
Dict[str, Any]: return await self.fetch("get_harvesters", {}) + async def get_harvesters_summary(self) -> Dict[str, object]: + return await self.fetch("get_harvesters_summary", {}) + async def get_pool_login_link(self, launcher_id: bytes32) -> Optional[str]: try: return (await self.fetch("get_pool_login_link", {"launcher_id": launcher_id.hex()}))["login_link"] diff --git a/chia/util/misc.py b/chia/util/misc.py index 3607017c802a..fbd8414f3fbd 100644 --- a/chia/util/misc.py +++ b/chia/util/misc.py @@ -1,3 +1,6 @@ +from typing import Sequence, Union + + def format_bytes(bytes: int) -> str: if not isinstance(bytes, int) or bytes < 0: @@ -68,3 +71,7 @@ def prompt_yes_no(prompt: str = "(y/n) ") -> bool: return True elif ch == "n": return False + + +def get_list_or_len(list_in: Sequence[object], length: bool) -> Union[int, Sequence[object]]: + return len(list_in) if length else list_in diff --git a/tests/core/test_farmer_harvester_rpc.py b/tests/core/test_farmer_harvester_rpc.py index e312698fe007..711e661a1c21 100644 --- a/tests/core/test_farmer_harvester_rpc.py +++ b/tests/core/test_farmer_harvester_rpc.py @@ -17,6 +17,7 @@ from chia.util.config import load_config, lock_and_load_config, save_config from chia.util.hash import std_hash from chia.util.ints import uint8, uint16, uint32, uint64 +from chia.util.misc import get_list_or_len from chia.wallet.derive_keys import master_sk_to_wallet_sk from tests.setup_nodes import setup_harvester_farmer, test_constants from tests.time_out_assert import time_out_assert, time_out_assert_custom_interval @@ -102,8 +103,9 @@ async def test_get_routes(harvester_farmer_environment): await validate_get_routes(harvester_rpc_client, harvester_rpc_api) +@pytest.mark.parametrize("endpoint", ["get_harvesters", "get_harvesters_summary"]) @pytest.mark.asyncio -async def test_farmer_get_harvesters(harvester_farmer_environment): +async def test_farmer_get_harvesters_and_summary(harvester_farmer_environment, endpoint: str): ( farmer_service, 
farmer_rpc_api, @@ -114,26 +116,38 @@ async def test_farmer_get_harvesters(harvester_farmer_environment): ) = harvester_farmer_environment harvester = harvester_service._node - num_plots = 0 + harvester_plots = [] async def non_zero_plots() -> bool: res = await harvester_rpc_client.get_plots() - nonlocal num_plots - num_plots = len(res["plots"]) - return num_plots > 0 + nonlocal harvester_plots + harvester_plots = res["plots"] + return len(harvester_plots) > 0 await time_out_assert(10, non_zero_plots) async def test_get_harvesters(): + nonlocal harvester_plots harvester.plot_manager.trigger_refresh() await time_out_assert(5, harvester.plot_manager.needs_refresh, value=False) - farmer_res = await farmer_rpc_client.get_harvesters() + farmer_res = await getattr(farmer_rpc_client, endpoint)() + if len(list(farmer_res["harvesters"])) != 1: log.error(f"test_get_harvesters: invalid harvesters {list(farmer_res['harvesters'])}") return False - if len(list(farmer_res["harvesters"][0]["plots"])) != num_plots: - log.error(f"test_get_harvesters: invalid plots {list(farmer_res['harvesters'])}") - return False + + harvester_dict = farmer_res["harvesters"][0] + counts_only: bool = endpoint == "get_harvesters_summary" + + if not counts_only: + harvester_dict["plots"] = sorted(harvester_dict["plots"], key=lambda item: item["filename"]) + harvester_plots = sorted(harvester_plots, key=lambda item: item["filename"]) + + assert harvester_dict["plots"] == get_list_or_len(harvester_plots, counts_only) + assert harvester_dict["failed_to_open_filenames"] == get_list_or_len([], counts_only) + assert harvester_dict["no_key_filenames"] == get_list_or_len([], counts_only) + assert harvester_dict["duplicates"] == get_list_or_len([], counts_only) + return True await time_out_assert_custom_interval(30, 1, test_get_harvesters) diff --git a/tests/plot_sync/test_receiver.py b/tests/plot_sync/test_receiver.py index 5c63ae412640..6c334230562c 100644 --- a/tests/plot_sync/test_receiver.py +++ 
b/tests/plot_sync/test_receiver.py @@ -21,6 +21,7 @@ from chia.server.ws_connection import NodeType from chia.types.blockchain_format.sized_bytes import bytes32 from chia.util.ints import uint8, uint32, uint64 +from chia.util.misc import get_list_or_len from chia.util.streamable import _T_Streamable from tests.plot_sync.util import get_dummy_connection @@ -217,13 +218,15 @@ async def test_reset() -> None: assert receiver.connection() == connection_before +@pytest.mark.parametrize("counts_only", [True, False]) @pytest.mark.asyncio -async def test_to_dict() -> None: +async def test_to_dict(counts_only: bool) -> None: receiver, sync_steps = plot_sync_setup() - plot_sync_dict_1 = receiver.to_dict() - assert "plots" in plot_sync_dict_1 and len(plot_sync_dict_1["plots"]) == 10 - assert "failed_to_open_filenames" in plot_sync_dict_1 and len(plot_sync_dict_1["failed_to_open_filenames"]) == 0 - assert "no_key_filenames" in plot_sync_dict_1 and len(plot_sync_dict_1["no_key_filenames"]) == 0 + plot_sync_dict_1 = receiver.to_dict(counts_only) + + assert get_list_or_len(plot_sync_dict_1["plots"], not counts_only) == 10 + assert get_list_or_len(plot_sync_dict_1["failed_to_open_filenames"], not counts_only) == 0 + assert get_list_or_len(plot_sync_dict_1["no_key_filenames"], not counts_only) == 0 assert "last_sync_time" not in plot_sync_dict_1 assert plot_sync_dict_1["connection"] == { "node_id": receiver.connection().peer_node_id, @@ -232,33 +235,21 @@ async def test_to_dict() -> None: } # We should get equal dicts - plot_sync_dict_2 = receiver.to_dict() - assert plot_sync_dict_1 == plot_sync_dict_2 - - dict_2_paths = [x.filename for x in plot_sync_dict_2["plots"]] - for plot_info in sync_steps[State.loaded].args[0]: - assert plot_info.filename not in dict_2_paths + assert plot_sync_dict_1 == receiver.to_dict(counts_only) + # But unequal dicts wit the opposite counts_only value + assert plot_sync_dict_1 != receiver.to_dict(not counts_only) # Walk through all states from idle to 
done and run them with the test data for state in State: await run_sync_step(receiver, sync_steps[state], state) - plot_sync_dict_3 = receiver.to_dict() - dict_3_paths = [x.filename for x in plot_sync_dict_3["plots"]] - for plot_info in sync_steps[State.loaded].args[0]: - assert plot_info.filename in dict_3_paths - - for path in sync_steps[State.removed].args[0]: - assert path not in plot_sync_dict_3["plots"] - - for path in sync_steps[State.invalid].args[0]: - assert path in plot_sync_dict_3["failed_to_open_filenames"] - - for path in sync_steps[State.keys_missing].args[0]: - assert path in plot_sync_dict_3["no_key_filenames"] - - for path in sync_steps[State.duplicates].args[0]: - assert path in plot_sync_dict_3["duplicates"] + plot_sync_dict_3 = receiver.to_dict(counts_only) + assert get_list_or_len(sync_steps[State.loaded].args[0], counts_only) == plot_sync_dict_3["plots"] + assert ( + get_list_or_len(sync_steps[State.invalid].args[0], counts_only) == plot_sync_dict_3["failed_to_open_filenames"] + ) + assert get_list_or_len(sync_steps[State.keys_missing].args[0], counts_only) == plot_sync_dict_3["no_key_filenames"] + assert get_list_or_len(sync_steps[State.duplicates].args[0], counts_only) == plot_sync_dict_3["duplicates"] assert plot_sync_dict_3["last_sync_time"] > 0 From f4d2e77206a7bc5688f004fcd24257275c9dd74c Mon Sep 17 00:00:00 2001 From: dustinface <35775977+xdustinface@users.noreply.github.com> Date: Fri, 29 Apr 2022 17:16:39 +0200 Subject: [PATCH 33/55] plot_sync: Introduce `receiver.Sync` (#11267) * plot_sync: Introduce `receiver.Sync` * Use `dataclasses.replace` Co-authored-by: Kyle Altendorf Co-authored-by: Kyle Altendorf --- chia/plot_sync/receiver.py | 146 ++++++++++++------------- tests/plot_sync/test_plot_sync.py | 6 +- tests/plot_sync/test_receiver.py | 88 ++++++++------- tests/plot_sync/test_sync_simulated.py | 4 +- 4 files changed, 127 insertions(+), 117 deletions(-) diff --git a/chia/plot_sync/receiver.py b/chia/plot_sync/receiver.py index 
e56d5e610dc3..6bb401ecdf89 100644 --- a/chia/plot_sync/receiver.py +++ b/chia/plot_sync/receiver.py @@ -1,5 +1,6 @@ import logging import time +from dataclasses import dataclass, field from typing import Any, Callable, Collection, Coroutine, Dict, List, Optional from chia.plot_sync.delta import Delta, PathListDelta, PlotListDelta @@ -24,21 +25,34 @@ ) from chia.server.ws_connection import ProtocolMessageTypes, WSChiaConnection, make_msg from chia.types.blockchain_format.sized_bytes import bytes32 -from chia.util.ints import int16, uint64 +from chia.util.ints import int16, uint32, uint64 from chia.util.misc import get_list_or_len from chia.util.streamable import _T_Streamable log = logging.getLogger(__name__) +@dataclass +class Sync: + state: State = State.idle + sync_id: uint64 = uint64(0) + next_message_id: uint64 = uint64(0) + plots_processed: uint32 = uint32(0) + plots_total: uint32 = uint32(0) + delta: Delta = field(default_factory=Delta) + time_done: float = 0 + + def bump_next_message_id(self) -> None: + self.next_message_id = uint64(self.next_message_id + 1) + + def bump_plots_processed(self) -> None: + self.plots_processed = uint32(self.plots_processed + 1) + + class Receiver: _connection: WSChiaConnection - _sync_state: State - _delta: Delta - _expected_sync_id: uint64 - _expected_message_id: uint64 - _last_sync_id: uint64 - _last_sync_time: float + _current_sync: Sync + _last_sync: Sync _plots: Dict[str, Plot] _invalid: List[str] _keys_missing: List[str] @@ -49,12 +63,8 @@ def __init__( self, connection: WSChiaConnection, update_callback: Callable[[bytes32, Delta], Coroutine[Any, Any, None]] ) -> None: self._connection = connection - self._sync_state = State.idle - self._delta = Delta() - self._expected_sync_id = uint64(0) - self._expected_message_id = uint64(0) - self._last_sync_id = uint64(0) - self._last_sync_time = 0 + self._current_sync = Sync() + self._last_sync = Sync() self._plots = {} self._invalid = [] self._keys_missing = [] @@ -62,37 +72,21 @@ 
def __init__( self._update_callback = update_callback # type: ignore[assignment, misc] def reset(self) -> None: - self._sync_state = State.idle - self._expected_sync_id = uint64(0) - self._expected_message_id = uint64(0) - self._last_sync_id = uint64(0) - self._last_sync_time = 0 + self._current_sync = Sync() + self._last_sync = Sync() self._plots.clear() self._invalid.clear() self._keys_missing.clear() self._duplicates.clear() - self._delta.clear() - - def bump_expected_message_id(self) -> None: - self._expected_message_id = uint64(self._expected_message_id + 1) def connection(self) -> WSChiaConnection: return self._connection - def state(self) -> State: - return self._sync_state + def current_sync(self) -> Sync: + return self._current_sync - def expected_sync_id(self) -> uint64: - return self._expected_sync_id - - def expected_message_id(self) -> uint64: - return self._expected_message_id - - def last_sync_id(self) -> uint64: - return self._last_sync_id - - def last_sync_time(self) -> float: - return self._last_sync_time + def last_sync(self) -> Sync: + return self._last_sync def plots(self) -> Dict[str, Plot]: return self._plots @@ -132,12 +126,12 @@ async def send_response(plot_sync_error: Optional[PlotSyncError] = None) -> None await send_response(PlotSyncError(int16(ErrorCodes.unknown), f"{e}", None)) def _validate_identifier(self, identifier: PlotSyncIdentifier, start: bool = False) -> None: - sync_id_match = identifier.sync_id == self._expected_sync_id - message_id_match = identifier.message_id == self._expected_message_id + sync_id_match = identifier.sync_id == self._current_sync.sync_id + message_id_match = identifier.message_id == self._current_sync.next_message_id identifier_match = sync_id_match and message_id_match if (start and not message_id_match) or (not start and not identifier_match): expected: PlotSyncIdentifier = PlotSyncIdentifier( - identifier.timestamp, self._expected_sync_id, self._expected_message_id + identifier.timestamp, 
self._current_sync.sync_id, self._current_sync.next_message_id ) raise InvalidIdentifierError( identifier, @@ -148,14 +142,15 @@ async def _sync_started(self, data: PlotSyncStart) -> None: if data.initial: self.reset() self._validate_identifier(data.identifier, True) - if data.last_sync_id != self.last_sync_id(): - raise InvalidLastSyncIdError(data.last_sync_id, self.last_sync_id()) + if data.last_sync_id != self._last_sync.sync_id: + raise InvalidLastSyncIdError(data.last_sync_id, self._last_sync.sync_id) if data.last_sync_id == data.identifier.sync_id: raise SyncIdsMatchError(State.idle, data.last_sync_id) - self._expected_sync_id = data.identifier.sync_id - self._delta.clear() - self._sync_state = State.loaded - self.bump_expected_message_id() + self._current_sync.sync_id = data.identifier.sync_id + self._current_sync.delta.clear() + self._current_sync.state = State.loaded + self._current_sync.plots_total = data.plot_file_count + self._current_sync.bump_next_message_id() async def sync_started(self, data: PlotSyncStart) -> None: await self._process(self._sync_started, ProtocolMessageTypes.plot_sync_start, data) @@ -164,14 +159,15 @@ async def _process_loaded(self, plot_infos: PlotSyncPlotList) -> None: self._validate_identifier(plot_infos.identifier) for plot_info in plot_infos.data: - if plot_info.filename in self._plots or plot_info.filename in self._delta.valid.additions: + if plot_info.filename in self._plots or plot_info.filename in self._current_sync.delta.valid.additions: raise PlotAlreadyAvailableError(State.loaded, plot_info.filename) - self._delta.valid.additions[plot_info.filename] = plot_info + self._current_sync.delta.valid.additions[plot_info.filename] = plot_info + self._current_sync.bump_plots_processed() if plot_infos.final: - self._sync_state = State.removed + self._current_sync.state = State.removed - self.bump_expected_message_id() + self._current_sync.bump_next_message_id() async def process_loaded(self, plot_infos: PlotSyncPlotList) -> 
None: await self._process(self._process_loaded, ProtocolMessageTypes.plot_sync_loaded, plot_infos) @@ -194,18 +190,20 @@ async def process_path_list( if not is_removal and path in delta: raise PlotAlreadyAvailableError(state, path) delta.append(path) + if not is_removal: + self._current_sync.bump_plots_processed() if paths.final: - self._sync_state = next_state + self._current_sync.state = next_state - self.bump_expected_message_id() + self._current_sync.bump_next_message_id() async def _process_removed(self, paths: PlotSyncPathList) -> None: await self.process_path_list( state=State.removed, next_state=State.invalid, target=self._plots, - delta=self._delta.valid.removals, + delta=self._current_sync.delta.valid.removals, paths=paths, is_removal=True, ) @@ -218,7 +216,7 @@ async def _process_invalid(self, paths: PlotSyncPathList) -> None: state=State.invalid, next_state=State.keys_missing, target=self._invalid, - delta=self._delta.invalid.additions, + delta=self._current_sync.delta.invalid.additions, paths=paths, ) @@ -230,7 +228,7 @@ async def _process_keys_missing(self, paths: PlotSyncPathList) -> None: state=State.keys_missing, next_state=State.duplicates, target=self._keys_missing, - delta=self._delta.keys_missing.additions, + delta=self._current_sync.delta.keys_missing.additions, paths=paths, ) @@ -242,7 +240,7 @@ async def _process_duplicates(self, paths: PlotSyncPathList) -> None: state=State.duplicates, next_state=State.done, target=self._duplicates, - delta=self._delta.duplicates.additions, + delta=self._current_sync.delta.duplicates.additions, paths=paths, ) @@ -251,39 +249,41 @@ async def process_duplicates(self, paths: PlotSyncPathList) -> None: async def _sync_done(self, data: PlotSyncDone) -> None: self._validate_identifier(data.identifier) - # Update ids - self._last_sync_id = self._expected_sync_id - self._expected_sync_id = uint64(0) - self._expected_message_id = uint64(0) + self._current_sync.time_done = time.time() # First create the update delta 
(i.e. transform invalid/keys_missing into additions/removals) which we will # send to the callback receiver below - delta_invalid: PathListDelta = PathListDelta.from_lists(self._invalid, self._delta.invalid.additions) + delta_invalid: PathListDelta = PathListDelta.from_lists( + self._invalid, self._current_sync.delta.invalid.additions + ) delta_keys_missing: PathListDelta = PathListDelta.from_lists( - self._keys_missing, self._delta.keys_missing.additions + self._keys_missing, self._current_sync.delta.keys_missing.additions + ) + delta_duplicates: PathListDelta = PathListDelta.from_lists( + self._duplicates, self._current_sync.delta.duplicates.additions ) - delta_duplicates: PathListDelta = PathListDelta.from_lists(self._duplicates, self._delta.duplicates.additions) update = Delta( - PlotListDelta(self._delta.valid.additions.copy(), self._delta.valid.removals.copy()), + PlotListDelta( + self._current_sync.delta.valid.additions.copy(), self._current_sync.delta.valid.removals.copy() + ), delta_invalid, delta_keys_missing, delta_duplicates, ) # Apply delta - self._plots.update(self._delta.valid.additions) - for removal in self._delta.valid.removals: + self._plots.update(self._current_sync.delta.valid.additions) + for removal in self._current_sync.delta.valid.removals: del self._plots[removal] - self._invalid = self._delta.invalid.additions.copy() - self._keys_missing = self._delta.keys_missing.additions.copy() - self._duplicates = self._delta.duplicates.additions.copy() - # Update state and bump last sync time - self._sync_state = State.idle - self._last_sync_time = time.time() + self._invalid = self._current_sync.delta.invalid.additions.copy() + self._keys_missing = self._current_sync.delta.keys_missing.additions.copy() + self._duplicates = self._current_sync.delta.duplicates.additions.copy() + # Save current sync as last sync and create a new current sync + self._last_sync = self._current_sync + self._current_sync = Sync() # Let the callback receiver know if this 
sync cycle caused any update try: await self._update_callback(self._connection.peer_node_id, update) # type: ignore[misc,call-arg] except Exception as e: log.error(f"_update_callback raised: {e}") - self._delta.clear() async def sync_done(self, data: PlotSyncDone) -> None: await self._process(self._sync_done, ProtocolMessageTypes.plot_sync_done, data) @@ -300,6 +300,6 @@ def to_dict(self, counts_only: bool = False) -> Dict[str, Any]: "no_key_filenames": get_list_or_len(self._keys_missing, counts_only), "duplicates": get_list_or_len(self._duplicates, counts_only), } - if self._last_sync_time != 0: - result["last_sync_time"] = self._last_sync_time + if self._last_sync.time_done != 0: + result["last_sync_time"] = self._last_sync.time_done return result diff --git a/tests/plot_sync/test_plot_sync.py b/tests/plot_sync/test_plot_sync.py index 96e1fcadd832..7c82c89b8891 100644 --- a/tests/plot_sync/test_plot_sync.py +++ b/tests/plot_sync/test_plot_sync.py @@ -30,8 +30,8 @@ def synced(sender: Sender, receiver: Receiver, previous_last_sync_id: int) -> bool: return ( sender._last_sync_id != previous_last_sync_id - and sender._last_sync_id == receiver._last_sync_id != 0 - and receiver.state() == State.idle + and sender._last_sync_id == receiver._last_sync.sync_id != 0 + and receiver.current_sync().state == State.idle and not sender._lock.locked() ) @@ -211,7 +211,7 @@ async def run_sync_test(self) -> None: # Make sure to reset the passed flag always before a new run self.expected[self.harvesters.index(harvester)].callback_passed = False receiver._update_callback = self.plot_sync_callback - assert harvester.plot_sync_sender._last_sync_id == receiver._last_sync_id + assert harvester.plot_sync_sender._last_sync_id == receiver._last_sync.sync_id last_sync_ids.append(harvester.plot_sync_sender._last_sync_id) plot_manager.start_refreshing() plot_manager.trigger_refresh() diff --git a/tests/plot_sync/test_receiver.py b/tests/plot_sync/test_receiver.py index 
6c334230562c..cad02c095740 100644 --- a/tests/plot_sync/test_receiver.py +++ b/tests/plot_sync/test_receiver.py @@ -1,3 +1,4 @@ +import dataclasses import logging import time from secrets import token_bytes @@ -7,7 +8,7 @@ from blspy import G1Element from chia.plot_sync.delta import Delta -from chia.plot_sync.receiver import Receiver +from chia.plot_sync.receiver import Receiver, Sync from chia.plot_sync.util import ErrorCodes, State from chia.protocols.harvester_protocol import ( Plot, @@ -31,11 +32,8 @@ def assert_default_values(receiver: Receiver) -> None: - assert receiver.state() == State.idle - assert receiver.expected_sync_id() == 0 - assert receiver.expected_message_id() == 0 - assert receiver.last_sync_id() == 0 - assert receiver.last_sync_time() == 0 + assert receiver.current_sync() == Sync() + assert receiver.last_sync() == Sync() assert receiver.plots() == {} assert receiver.invalid() == [] assert receiver.keys_missing() == [] @@ -107,25 +105,25 @@ def post_function_validate(receiver: Receiver, data: Union[List[Plot], List[str] if expected_state == State.loaded: for plot_info in data: assert type(plot_info) == Plot - assert plot_info.filename in receiver._delta.valid.additions + assert plot_info.filename in receiver._current_sync.delta.valid.additions elif expected_state == State.removed: for path in data: - assert path in receiver._delta.valid.removals + assert path in receiver._current_sync.delta.valid.removals elif expected_state == State.invalid: for path in data: - assert path in receiver._delta.invalid.additions + assert path in receiver._current_sync.delta.invalid.additions elif expected_state == State.keys_missing: for path in data: - assert path in receiver._delta.keys_missing.additions + assert path in receiver._current_sync.delta.keys_missing.additions elif expected_state == State.duplicates: for path in data: - assert path in receiver._delta.duplicates.additions + assert path in receiver._current_sync.delta.duplicates.additions 
@pytest.mark.asyncio async def run_sync_step(receiver: Receiver, sync_step: SyncStepData, expected_state: State) -> None: - assert receiver.state() == expected_state - last_sync_time_before = receiver._last_sync_time + assert receiver.current_sync().state == expected_state + last_sync_time_before = receiver._last_sync.time_done # For the the list types invoke the trigger function in batches if sync_step.payload_type == PlotSyncPlotList or sync_step.payload_type == PlotSyncPathList: step_data, _ = sync_step.args @@ -133,21 +131,27 @@ async def run_sync_step(receiver: Receiver, sync_step: SyncStepData, expected_st # Invoke batches of: 1, 2, 3, 4 items and validate the data against plot store before and after indexes = [0, 1, 3, 6, 10] for i in range(0, len(indexes) - 1): + plots_processed_before = receiver.current_sync().plots_processed invoke_data = step_data[indexes[i] : indexes[i + 1]] pre_function_validate(receiver, invoke_data, expected_state) await sync_step.function( create_payload(sync_step.payload_type, False, invoke_data, i == (len(indexes) - 2)) ) post_function_validate(receiver, invoke_data, expected_state) + if expected_state == State.removed: + assert receiver.current_sync().plots_processed == plots_processed_before + else: + assert receiver.current_sync().plots_processed == plots_processed_before + len(invoke_data) else: # For Start/Done just invoke it.. 
await sync_step.function(create_payload(sync_step.payload_type, sync_step.state == State.idle, *sync_step.args)) # Make sure we moved to the next state - assert receiver.state() != expected_state + assert receiver.current_sync().state != expected_state if sync_step.payload_type == PlotSyncDone: - assert receiver._last_sync_time != last_sync_time_before + assert receiver._last_sync.time_done != last_sync_time_before + assert receiver.last_sync().plots_processed == receiver.last_sync().plots_total else: - assert receiver._last_sync_time == last_sync_time_before + assert receiver._last_sync.time_done == last_sync_time_before def plot_sync_setup() -> Tuple[Receiver, List[SyncStepData]]: @@ -195,25 +199,29 @@ async def test_reset() -> None: receiver, sync_steps = plot_sync_setup() connection_before = receiver.connection() # Assign some dummy values - receiver._sync_state = State.done - receiver._expected_sync_id = uint64(1) - receiver._expected_message_id = uint64(1) - receiver._last_sync_id = uint64(1) - receiver._last_sync_time = time.time() + receiver._current_sync.state = State.done + receiver._current_sync.sync_id = uint64(1) + receiver._current_sync.next_message_id = uint64(1) + receiver._current_sync.plots_processed = uint32(1) + receiver._current_sync.plots_total = uint32(1) + receiver._current_sync.delta.valid.additions = receiver.plots().copy() + receiver._current_sync.delta.valid.removals = ["1"] + receiver._current_sync.delta.invalid.additions = ["1"] + receiver._current_sync.delta.invalid.removals = ["1"] + receiver._current_sync.delta.keys_missing.additions = ["1"] + receiver._current_sync.delta.keys_missing.removals = ["1"] + receiver._current_sync.delta.duplicates.additions = ["1"] + receiver._current_sync.delta.duplicates.removals = ["1"] + receiver._current_sync.time_done = time.time() + receiver._last_sync = dataclasses.replace(receiver._current_sync) receiver._invalid = ["1"] receiver._keys_missing = ["1"] - receiver._delta.valid.additions = 
receiver.plots().copy() - receiver._delta.valid.removals = ["1"] - receiver._delta.invalid.additions = ["1"] - receiver._delta.invalid.removals = ["1"] - receiver._delta.keys_missing.additions = ["1"] - receiver._delta.keys_missing.removals = ["1"] - receiver._delta.duplicates.additions = ["1"] - receiver._delta.duplicates.removals = ["1"] + + receiver._last_sync.sync_id = uint64(1) # Call `reset` and make sure all expected values are set back to their defaults. receiver.reset() assert_default_values(receiver) - assert receiver._delta == Delta() + assert receiver._current_sync.delta == Delta() # Connection should remain assert receiver.connection() == connection_before @@ -293,18 +301,18 @@ async def test_sync_flow() -> None: assert path in receiver.duplicates() # We should be in idle state again - assert receiver.state() == State.idle + assert receiver.current_sync().state == State.idle @pytest.mark.asyncio async def test_invalid_ids() -> None: receiver, sync_steps = plot_sync_setup() for state in State: - assert receiver.state() == state + assert receiver.current_sync().state == state current_step = sync_steps[state] - if receiver.state() == State.idle: + if receiver.current_sync().state == State.idle: # Set last_sync_id for the tests below - receiver._last_sync_id = uint64(1) + receiver._last_sync.sync_id = uint64(1) # Test "sync_started last doesn't match" invalid_last_sync_id_param = PlotSyncStart( plot_sync_identifier(uint64(0), uint64(0)), False, uint64(2), uint32(0) @@ -318,17 +326,19 @@ async def test_invalid_ids() -> None: await current_step.function(invalid_sync_id_match_param) assert_error_response(receiver, ErrorCodes.sync_ids_match) # Reset the last_sync_id to the default - receiver._last_sync_id = uint64(0) + receiver._last_sync.sync_id = uint64(0) else: # Test invalid sync_id invalid_sync_id_param = current_step.payload_type( - plot_sync_identifier(uint64(10), uint64(receiver.expected_message_id())), *current_step.args + 
plot_sync_identifier(uint64(10), uint64(receiver.current_sync().next_message_id)), *current_step.args ) await current_step.function(invalid_sync_id_param) assert_error_response(receiver, ErrorCodes.invalid_identifier) # Test invalid message_id invalid_message_id_param = current_step.payload_type( - plot_sync_identifier(receiver.expected_sync_id(), uint64(receiver.expected_message_id() + 1)), + plot_sync_identifier( + receiver.current_sync().sync_id, uint64(receiver.current_sync().next_message_id + 1) + ), *current_step.args, ) await current_step.function(invalid_message_id_param) @@ -349,12 +359,12 @@ async def test_invalid_ids() -> None: async def test_plot_errors(state_to_fail: State, expected_error_code: ErrorCodes) -> None: receiver, sync_steps = plot_sync_setup() for state in State: - assert receiver.state() == state + assert receiver.current_sync().state == state current_step = sync_steps[state] if state == state_to_fail: plot_infos, _ = current_step.args await current_step.function(create_payload(current_step.payload_type, False, plot_infos, False)) - identifier = plot_sync_identifier(receiver.expected_sync_id(), receiver.expected_message_id()) + identifier = plot_sync_identifier(receiver.current_sync().sync_id, receiver.current_sync().next_message_id) invalid_payload = current_step.payload_type(identifier, plot_infos, True) await current_step.function(invalid_payload) if state == state_to_fail: diff --git a/tests/plot_sync/test_sync_simulated.py b/tests/plot_sync/test_sync_simulated.py index ae83dc7b64c4..a51296083b61 100644 --- a/tests/plot_sync/test_sync_simulated.py +++ b/tests/plot_sync/test_sync_simulated.py @@ -110,7 +110,7 @@ def run_internal() -> None: async def sync_done() -> bool: assert sync_id is not None - return self.plot_sync_receiver.last_sync_id() == self.plot_sync_sender._last_sync_id == sync_id + return self.plot_sync_receiver.last_sync().sync_id == self.plot_sync_sender._last_sync_id == sync_id await time_out_assert(60, sync_done) @@ 
-415,7 +415,7 @@ async def wait_for_reset() -> bool: async def sync_done() -> bool: assert started_sync_id != 0 - return test_data.plot_sync_receiver.last_sync_id() == sender._last_sync_id == started_sync_id + return test_data.plot_sync_receiver.last_sync().sync_id == sender._last_sync_id == started_sync_id # Send start and capture the sync_id sender.sync_start(len(plots), True) From 5fd88ce4ddff393165255a10c28e1be97796abf9 Mon Sep 17 00:00:00 2001 From: dustinface <35775977+xdustinface@users.noreply.github.com> Date: Sat, 30 Apr 2022 03:34:14 +0200 Subject: [PATCH 34/55] farmer|rpc: Some changes to `get_harvesters{_summary}` RPC endpoints (#11342) * farmer|rpc: Always add `last_sync_time` in `Receiver.to_dict` This changes the resonse of `get_harvesters` and `get_harvesters_summary` to always include that field with value `None`/`null` if the receiver is not yet synced. * plot_sync: Add sync state to dict in `Receiver.to_dict` * Add `total_plot_size` to `Receiver.to_dict` * Refactor sync object assertions * Access `_current_sync` directly * Generate `syncing` data outside --- chia/plot_sync/receiver.py | 30 ++++++++++++++++++---- tests/plot_sync/test_receiver.py | 44 +++++++++++++++++++++++++++++--- 2 files changed, 66 insertions(+), 8 deletions(-) diff --git a/chia/plot_sync/receiver.py b/chia/plot_sync/receiver.py index 6bb401ecdf89..37cae4e86d9c 100644 --- a/chia/plot_sync/receiver.py +++ b/chia/plot_sync/receiver.py @@ -40,7 +40,10 @@ class Sync: plots_processed: uint32 = uint32(0) plots_total: uint32 = uint32(0) delta: Delta = field(default_factory=Delta) - time_done: float = 0 + time_done: Optional[float] = None + + def in_progress(self) -> bool: + return self.sync_id != 0 def bump_next_message_id(self) -> None: self.next_message_id = uint64(self.next_message_id + 1) @@ -57,6 +60,7 @@ class Receiver: _invalid: List[str] _keys_missing: List[str] _duplicates: List[str] + _total_plot_size: int _update_callback: Callable[[bytes32, Delta], Coroutine[Any, Any, 
None]] def __init__( @@ -69,6 +73,7 @@ def __init__( self._invalid = [] self._keys_missing = [] self._duplicates = [] + self._total_plot_size = 0 self._update_callback = update_callback # type: ignore[assignment, misc] def reset(self) -> None: @@ -78,6 +83,7 @@ def reset(self) -> None: self._invalid.clear() self._keys_missing.clear() self._duplicates.clear() + self._total_plot_size = 0 def connection(self) -> WSChiaConnection: return self._connection @@ -88,6 +94,9 @@ def current_sync(self) -> Sync: def last_sync(self) -> Sync: return self._last_sync + def initial_sync(self) -> bool: + return self._last_sync.sync_id == 0 + def plots(self) -> Dict[str, Plot]: return self._plots @@ -100,6 +109,9 @@ def keys_missing(self) -> List[str]: def duplicates(self) -> List[str]: return self._duplicates + def total_plot_size(self) -> int: + return self._total_plot_size + async def _process( self, method: Callable[[_T_Streamable], Any], message_type: ProtocolMessageTypes, message: Any ) -> None: @@ -276,6 +288,7 @@ async def _sync_done(self, data: PlotSyncDone) -> None: self._invalid = self._current_sync.delta.invalid.additions.copy() self._keys_missing = self._current_sync.delta.keys_missing.additions.copy() self._duplicates = self._current_sync.delta.duplicates.additions.copy() + self._total_plot_size = sum(plot.file_size for plot in self._plots.values()) # Save current sync as last sync and create a new current sync self._last_sync = self._current_sync self._current_sync = Sync() @@ -289,7 +302,14 @@ async def sync_done(self, data: PlotSyncDone) -> None: await self._process(self._sync_done, ProtocolMessageTypes.plot_sync_done, data) def to_dict(self, counts_only: bool = False) -> Dict[str, Any]: - result: Dict[str, Any] = { + syncing = None + if self._current_sync.in_progress(): + syncing = { + "initial": self.initial_sync(), + "plot_files_processed": self._current_sync.plots_processed, + "plot_files_total": self._current_sync.plots_total, + } + return { "connection": { 
"node_id": self._connection.peer_node_id, "host": self._connection.peer_host, @@ -299,7 +319,7 @@ def to_dict(self, counts_only: bool = False) -> Dict[str, Any]: "failed_to_open_filenames": get_list_or_len(self._invalid, counts_only), "no_key_filenames": get_list_or_len(self._keys_missing, counts_only), "duplicates": get_list_or_len(self._duplicates, counts_only), + "total_plot_size": self._total_plot_size, + "syncing": syncing, + "last_sync_time": self._last_sync.time_done, } - if self._last_sync.time_done != 0: - result["last_sync_time"] = self._last_sync.time_done - return result diff --git a/tests/plot_sync/test_receiver.py b/tests/plot_sync/test_receiver.py index cad02c095740..57f7d2d3bda6 100644 --- a/tests/plot_sync/test_receiver.py +++ b/tests/plot_sync/test_receiver.py @@ -1,5 +1,6 @@ import dataclasses import logging +import random import time from secrets import token_bytes from typing import Any, Callable, List, Tuple, Type, Union @@ -168,7 +169,7 @@ def plot_sync_setup() -> Tuple[Receiver, List[SyncStepData]]: pool_contract_puzzle_hash=None, pool_public_key=None, plot_public_key=G1Element(), - file_size=uint64(0), + file_size=uint64(random.randint(0, 100)), time_modified=uint64(0), ) for x in path_list @@ -176,6 +177,7 @@ def plot_sync_setup() -> Tuple[Receiver, List[SyncStepData]]: # Manually add the plots we want to remove in tests receiver._plots = {plot_info.filename: plot_info for plot_info in plot_info_list[0:10]} + receiver._total_plot_size = sum(plot.file_size for plot in receiver._plots.values()) sync_steps: List[SyncStepData] = [ SyncStepData(State.idle, receiver.sync_started, PlotSyncStart, False, uint64(0), uint32(len(plot_info_list))), @@ -235,7 +237,9 @@ async def test_to_dict(counts_only: bool) -> None: assert get_list_or_len(plot_sync_dict_1["plots"], not counts_only) == 10 assert get_list_or_len(plot_sync_dict_1["failed_to_open_filenames"], not counts_only) == 0 assert get_list_or_len(plot_sync_dict_1["no_key_filenames"], not 
counts_only) == 0 - assert "last_sync_time" not in plot_sync_dict_1 + assert plot_sync_dict_1["total_plot_size"] == sum(plot.file_size for plot in receiver.plots().values()) + assert plot_sync_dict_1["syncing"] is None + assert plot_sync_dict_1["last_sync_time"] is None assert plot_sync_dict_1["connection"] == { "node_id": receiver.connection().peer_node_id, "host": receiver.connection().peer_host, @@ -247,10 +251,27 @@ async def test_to_dict(counts_only: bool) -> None: # But unequal dicts wit the opposite counts_only value assert plot_sync_dict_1 != receiver.to_dict(not counts_only) - # Walk through all states from idle to done and run them with the test data + expected_plot_files_processed: int = 0 + expected_plot_files_total: int = sync_steps[State.idle].args[2] + + # Walk through all states from idle to done and run them with the test data and validate the sync progress for state in State: await run_sync_step(receiver, sync_steps[state], state) + if state != State.idle and state != State.removed and state != State.done: + expected_plot_files_processed += len(sync_steps[state].args[0]) + + sync_data = receiver.to_dict()["syncing"] + if state == State.done: + expected_sync_data = None + else: + expected_sync_data = { + "initial": True, + "plot_files_processed": expected_plot_files_processed, + "plot_files_total": expected_plot_files_total, + } + assert sync_data == expected_sync_data + plot_sync_dict_3 = receiver.to_dict(counts_only) assert get_list_or_len(sync_steps[State.loaded].args[0], counts_only) == plot_sync_dict_3["plots"] assert ( @@ -259,7 +280,24 @@ async def test_to_dict(counts_only: bool) -> None: assert get_list_or_len(sync_steps[State.keys_missing].args[0], counts_only) == plot_sync_dict_3["no_key_filenames"] assert get_list_or_len(sync_steps[State.duplicates].args[0], counts_only) == plot_sync_dict_3["duplicates"] + assert plot_sync_dict_3["total_plot_size"] == sum(plot.file_size for plot in receiver.plots().values()) assert 
plot_sync_dict_3["last_sync_time"] > 0 + assert plot_sync_dict_3["syncing"] is None + + # Trigger a repeated plot sync + await receiver.sync_started( + PlotSyncStart( + PlotSyncIdentifier(uint64(time.time()), uint64(receiver.last_sync().sync_id + 1), uint64(0)), + False, + receiver.last_sync().sync_id, + uint32(1), + ) + ) + assert receiver.to_dict()["syncing"] == { + "initial": False, + "plot_files_processed": 0, + "plot_files_total": 1, + } @pytest.mark.asyncio From 630b401c011f4495d32ca732cf151e635aafa6dc Mon Sep 17 00:00:00 2001 From: dustinface <35775977+xdustinface@users.noreply.github.com> Date: Sat, 30 Apr 2022 03:35:47 +0200 Subject: [PATCH 35/55] util: Implement `Paginator` class as interface to access a list by pages (#11247) * util: Implement `Paginator` class as interface to access a list by pages * Be less restrictive about page sizes and refactor tests * Make the pages based of 0 instead of 1 and some more test refactoring * More tests * Adjust workflows after rebase * Introduce `Paginator.create` * `<=` instead of `- 1` --- chia/util/paginator.py | 46 ++++++++++++++++++++++++ tests/util/test_paginator.py | 70 ++++++++++++++++++++++++++++++++++++ 2 files changed, 116 insertions(+) create mode 100644 chia/util/paginator.py create mode 100644 tests/util/test_paginator.py diff --git a/chia/util/paginator.py b/chia/util/paginator.py new file mode 100644 index 000000000000..69bcce5377f4 --- /dev/null +++ b/chia/util/paginator.py @@ -0,0 +1,46 @@ +from __future__ import annotations + +import dataclasses +from math import ceil +from typing import Sequence + + +class InvalidPageSizeLimit(Exception): + def __init__(self, page_size_limit: int) -> None: + super().__init__(f"Page size limit must be one or more, not: {page_size_limit}") + + +class InvalidPageSizeError(Exception): + def __init__(self, page_size: int, page_size_limit: int) -> None: + super().__init__(f"Invalid page size {page_size}. 
Must be between: 1 and {page_size_limit}") + + +class PageOutOfBoundsError(Exception): + def __init__(self, page_size: int, max_page_size: int) -> None: + super().__init__(f"Page {page_size} out of bounds. Available pages: 0-{max_page_size}") + + +@dataclasses.dataclass +class Paginator: + _source: Sequence[object] + _page_size: int + + @classmethod + def create(cls, source: Sequence[object], page_size: int, page_size_limit: int = 100) -> Paginator: + if page_size_limit < 1: + raise InvalidPageSizeLimit(page_size_limit) + if page_size > page_size_limit: + raise InvalidPageSizeError(page_size, page_size_limit) + return cls(source, page_size) + + def page_size(self) -> int: + return self._page_size + + def page_count(self) -> int: + return max(1, ceil(len(self._source) / self._page_size)) + + def get_page(self, page: int) -> Sequence[object]: + if page < 0 or page >= self.page_count(): + raise PageOutOfBoundsError(page, self.page_count() - 1) + offset = page * self._page_size + return self._source[offset : offset + self._page_size] diff --git a/tests/util/test_paginator.py b/tests/util/test_paginator.py new file mode 100644 index 000000000000..9ffb4814e6bf --- /dev/null +++ b/tests/util/test_paginator.py @@ -0,0 +1,70 @@ +from math import ceil +from typing import List, Type + +import pytest + +from chia.util.paginator import InvalidPageSizeError, InvalidPageSizeLimit, PageOutOfBoundsError, Paginator + + +@pytest.mark.parametrize( + "source, page_size, page_size_limit", + [([], 1, 1), ([1], 1, 2), ([1, 2], 2, 2), ([], 10, 100), ([1, 2, 10], 1000, 1000)], +) +def test_constructor_valid_inputs(source: List[int], page_size: int, page_size_limit: int) -> None: + paginator: Paginator = Paginator.create(source, page_size, page_size_limit) + assert paginator.page_size() == page_size + assert paginator.page_count() == 1 + assert paginator.get_page(0) == source + + +@pytest.mark.parametrize( + "page_size, page_size_limit, exception", + [ + (5, -1, InvalidPageSizeLimit), + (5, 
0, InvalidPageSizeLimit), + (2, 1, InvalidPageSizeError), + (100, 1, InvalidPageSizeError), + (1001, 1000, InvalidPageSizeError), + ], +) +def test_constructor_invalid_inputs(page_size: int, page_size_limit: int, exception: Type[Exception]) -> None: + with pytest.raises(exception): + Paginator.create([], page_size, page_size_limit) + + +def test_page_count() -> None: + for page_size in range(1, 10): + for i in range(0, 10): + assert Paginator.create(range(0, i), page_size).page_count() == max(1, ceil(i / page_size)) + + +@pytest.mark.parametrize( + "length, page_size, page, expected_data", + [ + (17, 5, 0, [0, 1, 2, 3, 4]), + (17, 5, 1, [5, 6, 7, 8, 9]), + (17, 5, 2, [10, 11, 12, 13, 14]), + (17, 5, 3, [15, 16]), + (3, 4, 0, [0, 1, 2]), + (3, 3, 0, [0, 1, 2]), + (3, 2, 0, [0, 1]), + (3, 2, 1, [2]), + (3, 1, 0, [0]), + (3, 1, 1, [1]), + (3, 1, 2, [2]), + (2, 2, 0, [0, 1]), + (2, 1, 0, [0]), + (2, 1, 1, [1]), + (1, 2, 0, [0]), + (0, 2, 0, []), + (0, 10, 0, []), + ], +) +def test_get_page_valid(length: int, page: int, page_size: int, expected_data: List[int]) -> None: + assert Paginator.create(list(range(0, length)), page_size).get_page(page) == expected_data + + +@pytest.mark.parametrize("page", [-1000, -10, -1, 5, 10, 1000]) +def test_get_page_invalid(page: int) -> None: + with pytest.raises(PageOutOfBoundsError): + Paginator.create(range(0, 17), 5).get_page(page) From 5482086bd9345f461fa21d731c8d04f0c1b4d19d Mon Sep 17 00:00:00 2001 From: dustinface <35775977+xdustinface@users.noreply.github.com> Date: Mon, 2 May 2022 22:02:00 +0200 Subject: [PATCH 36/55] farmer|rpc: Add `plot_count` to `get_pool_state` RPC endpoint (#11364) * farmer|rpc: Add `plot_count` to `get_pool_state` RPC endpoint * Test `plot_count` of `get_pool_state` farmer RPC endpoint --- chia/rpc/farmer_rpc_api.py | 9 ++++ tests/core/test_farmer_harvester_rpc.py | 62 +++++++++++++++++++++++++ 2 files changed, 71 insertions(+) diff --git a/chia/rpc/farmer_rpc_api.py b/chia/rpc/farmer_rpc_api.py index 
ba6453693b66..0d32f3aa3196 100644 --- a/chia/rpc/farmer_rpc_api.py +++ b/chia/rpc/farmer_rpc_api.py @@ -110,11 +110,20 @@ async def set_reward_targets(self, request: Dict) -> Dict: self.service.set_reward_targets(farmer_target, pool_target) return {} + def get_pool_contract_puzzle_hash_plot_count(self, pool_contract_puzzle_hash: bytes32) -> int: + plot_count: int = 0 + for receiver in self.service.plot_sync_receivers.values(): + plot_count += sum( + plot.pool_contract_puzzle_hash == pool_contract_puzzle_hash for plot in receiver.plots().values() + ) + return plot_count + async def get_pool_state(self, _: Dict) -> Dict: pools_list = [] for p2_singleton_puzzle_hash, pool_dict in self.service.pool_state.items(): pool_state = pool_dict.copy() pool_state["p2_singleton_puzzle_hash"] = p2_singleton_puzzle_hash.hex() + pool_state["plot_count"] = self.get_pool_contract_puzzle_hash_plot_count(p2_singleton_puzzle_hash) pools_list.append(pool_state) return {"pool_state": pools_list} diff --git a/tests/core/test_farmer_harvester_rpc.py b/tests/core/test_farmer_harvester_rpc.py index 711e661a1c21..cc641a591743 100644 --- a/tests/core/test_farmer_harvester_rpc.py +++ b/tests/core/test_farmer_harvester_rpc.py @@ -289,3 +289,65 @@ async def test_farmer_get_pool_state(harvester_farmer_environment, self_hostname for pool_dict in client_pool_state["pool_state"]: for key in ["points_found_24h", "points_acknowledged_24h"]: assert pool_dict[key][0] == list(since_24h) + + +@pytest.mark.asyncio +async def test_farmer_get_pool_state_plot_count(harvester_farmer_environment, self_hostname: str) -> None: + ( + farmer_service, + farmer_rpc_api, + farmer_rpc_client, + harvester_service, + harvester_rpc_api, + harvester_rpc_client, + ) = harvester_farmer_environment + farmer_api = farmer_service._api + + async def wait_for_plot_sync() -> bool: + try: + return (await farmer_rpc_client.get_harvesters_summary())["harvesters"][0]["plots"] > 0 + except Exception: + return False + + await 
time_out_assert(15, wait_for_plot_sync, True) + + assert len((await farmer_rpc_client.get_pool_state())["pool_state"]) == 0 + + pool_contract_puzzle_hash: bytes32 = bytes32.from_hexstr( + "1b9d1eaa3c6a9b27cd90ad9070eb012794a74b277446417bc7b904145010c087" + ) + pool_list = [ + { + "launcher_id": "ae4ef3b9bfe68949691281a015a9c16630fc8f66d48c19ca548fb80768791afa", + "owner_public_key": "aa11e92274c0f6a2449fd0c7cfab4a38f943289dbe2214c808b36390c34eacfaa1d4c8f3c6ec582ac502ff32228679a0", # noqa + "payout_instructions": "c2b08e41d766da4116e388357ed957d04ad754623a915f3fd65188a8746cf3e8", + "pool_url": self_hostname, + "p2_singleton_puzzle_hash": pool_contract_puzzle_hash.hex(), + "target_puzzle_hash": "344587cf06a39db471d2cc027504e8688a0a67cce961253500c956c73603fd58", + } + ] + + root_path = farmer_api.farmer._root_path + with lock_and_load_config(root_path, "config.yaml") as config: + config["pool"]["pool_list"] = pool_list + save_config(root_path, "config.yaml", config) + await farmer_api.farmer.update_pool_state() + + pool_plot_count = (await farmer_rpc_client.get_pool_state())["pool_state"][0]["plot_count"] + assert pool_plot_count == 5 + + # TODO: Maybe improve this to not remove from Receiver directly but instead from the harvester and then wait for + # plot sync event. 
+ async def remove_all_and_validate() -> bool: + nonlocal pool_plot_count + receiver = farmer_api.farmer.plot_sync_receivers[harvester_service._server.node_id] + for path, plot in receiver.plots().copy().items(): + if plot.pool_contract_puzzle_hash == pool_contract_puzzle_hash: + del receiver.plots()[path] + pool_plot_count -= 1 + plot_count = (await farmer_rpc_client.get_pool_state())["pool_state"][0]["plot_count"] + assert plot_count == pool_plot_count + return plot_count + + await time_out_assert(15, remove_all_and_validate, False) + assert (await farmer_rpc_client.get_pool_state())["pool_state"][0]["plot_count"] == 0 From 946d216835eba1f1da040da177afcd0c1dab63b1 Mon Sep 17 00:00:00 2001 From: dustinface <35775977+xdustinface@users.noreply.github.com> Date: Tue, 3 May 2022 04:32:37 +0200 Subject: [PATCH 37/55] test: Give `setup_farmer` and `setup_harvester` a separate chia root (#11408) * test: Give `setup_farmer` and `setup_harvester` a separate chia root * test: Wait for `last_sync_time` in `get_harvesters_{summary}` To make sure the first sync from the harvester to the farmer is done before we check plot counts. 
--- tests/core/ssl/test_ssl.py | 4 +-- tests/core/test_farmer_harvester_rpc.py | 8 +++-- .../farmer_harvester/test_farmer_harvester.py | 4 +-- tests/plot_sync/test_plot_sync.py | 6 +++- tests/setup_nodes.py | 31 ++++++++++--------- tests/setup_services.py | 24 +++++++++++--- 6 files changed, 51 insertions(+), 26 deletions(-) diff --git a/tests/core/ssl/test_ssl.py b/tests/core/ssl/test_ssl.py index 24a20efcfce6..8d82a664af33 100644 --- a/tests/core/ssl/test_ssl.py +++ b/tests/core/ssl/test_ssl.py @@ -46,8 +46,8 @@ async def establish_connection(server: ChiaServer, self_hostname: str, ssl_conte @pytest_asyncio.fixture(scope="function") -async def harvester_farmer(bt): - async for _ in setup_harvester_farmer(bt, test_constants, start_services=True): +async def harvester_farmer(bt, tmp_path): + async for _ in setup_harvester_farmer(bt, tmp_path, test_constants, start_services=True): yield _ diff --git a/tests/core/test_farmer_harvester_rpc.py b/tests/core/test_farmer_harvester_rpc.py index cc641a591743..e6f07296405d 100644 --- a/tests/core/test_farmer_harvester_rpc.py +++ b/tests/core/test_farmer_harvester_rpc.py @@ -28,8 +28,8 @@ @pytest_asyncio.fixture(scope="function") -async def harvester_farmer_simulation(bt): - async for _ in setup_harvester_farmer(bt, test_constants, start_services=True): +async def harvester_farmer_simulation(bt, tmp_path): + async for _ in setup_harvester_farmer(bt, tmp_path, test_constants, start_services=True): yield _ @@ -136,6 +136,10 @@ async def test_get_harvesters(): log.error(f"test_get_harvesters: invalid harvesters {list(farmer_res['harvesters'])}") return False + if farmer_res["harvesters"][0]["last_sync_time"] is None: + log.error(f"test_get_harvesters: sync not done {list(farmer_res['harvesters'])}") + return False + harvester_dict = farmer_res["harvesters"][0] counts_only: bool = endpoint == "get_harvesters_summary" diff --git a/tests/farmer_harvester/test_farmer_harvester.py b/tests/farmer_harvester/test_farmer_harvester.py 
index ca494cf54ae4..881ff28e5ab1 100644 --- a/tests/farmer_harvester/test_farmer_harvester.py +++ b/tests/farmer_harvester/test_farmer_harvester.py @@ -14,8 +14,8 @@ def farmer_is_started(farmer): @pytest_asyncio.fixture(scope="function") -async def harvester_farmer_environment_no_start(bt): - async for _ in setup_harvester_farmer(bt, test_constants, start_services=False): +async def harvester_farmer_environment_no_start(bt, tmp_path): + async for _ in setup_harvester_farmer(bt, tmp_path, test_constants, start_services=False): yield _ diff --git a/tests/plot_sync/test_plot_sync.py b/tests/plot_sync/test_plot_sync.py index 7c82c89b8891..3c7942e6dd6b 100644 --- a/tests/plot_sync/test_plot_sync.py +++ b/tests/plot_sync/test_plot_sync.py @@ -18,7 +18,7 @@ from chia.protocols.harvester_protocol import Plot from chia.server.start_service import Service from chia.types.blockchain_format.sized_bytes import bytes32 -from chia.util.config import create_default_chia_config +from chia.util.config import create_default_chia_config, lock_and_load_config, save_config from chia.util.ints import uint8, uint64 from tests.block_tools import BlockTools from tests.plot_sync.util import start_harvester_service @@ -293,6 +293,10 @@ def new_test_dir(name: str, plot_list: List[Path]) -> TestDirectory: farmer: Farmer = farmer_service._node harvesters: List[Harvester] = [await start_harvester_service(service) for service in harvester_services] for harvester in harvesters: + # Remove default plot directory for this tests + with lock_and_load_config(harvester.root_path, "config.yaml") as config: + config["harvester"]["plot_directories"] = [] + save_config(harvester.root_path, "config.yaml", config) harvester.plot_manager.set_public_keys( bt.plot_manager.farmer_public_keys.copy(), bt.plot_manager.pool_public_keys.copy() ) diff --git a/tests/setup_nodes.py b/tests/setup_nodes.py index 03a97c386d67..7645df10179e 100644 --- a/tests/setup_nodes.py +++ b/tests/setup_nodes.py @@ -5,11 +5,9 @@ from 
pathlib import Path from chia.consensus.constants import ConsensusConstants -from chia.cmds.init_funcs import init from chia.full_node.full_node_api import FullNodeAPI from chia.server.start_service import Service from chia.server.start_wallet import service_kwargs_for_wallet -from chia.util.config import load_config, save_config from chia.util.hash import std_hash from chia.util.ints import uint16, uint32 from chia.util.keychain import bytes_to_mnemonic @@ -290,14 +288,17 @@ async def setup_simulators_and_wallets( await _teardown_nodes(node_iters) -async def setup_harvester_farmer(bt: BlockTools, consensus_constants: ConsensusConstants, *, start_services: bool): +async def setup_harvester_farmer( + bt: BlockTools, tmp_path: Path, consensus_constants: ConsensusConstants, *, start_services: bool +): farmer_port = find_available_listen_port("farmer") farmer_rpc_port = find_available_listen_port("farmer rpc") harvester_port = find_available_listen_port("harvester") harvester_rpc_port = find_available_listen_port("harvester rpc") node_iters = [ setup_harvester( - bt.root_path, + bt, + tmp_path / "harvester", bt.config["self_hostname"], harvester_port, harvester_rpc_port, @@ -307,6 +308,7 @@ async def setup_harvester_farmer(bt: BlockTools, consensus_constants: ConsensusC ), setup_farmer( bt, + tmp_path / "farmer", bt.config["self_hostname"], farmer_port, farmer_rpc_port, @@ -334,23 +336,22 @@ async def setup_farmer_multi_harvester( node_iterators = [ setup_farmer( - block_tools, block_tools.config["self_hostname"], farmer_port, farmer_rpc_port, consensus_constants + block_tools, + temp_dir / "farmer", + block_tools.config["self_hostname"], + farmer_port, + farmer_rpc_port, + consensus_constants, ) ] for i in range(0, harvester_count): - root_path: Path = temp_dir / str(i) - init(None, root_path) - init(block_tools.root_path / "config" / "ssl" / "ca", root_path) - config = load_config(root_path, "config.yaml") - config["logging"]["log_stdout"] = True - 
config["selected_network"] = "testnet0" - config["harvester"]["selected_network"] = "testnet0" + root_path: Path = temp_dir / f"harvester_{i}" harvester_port = find_available_listen_port("harvester") harvester_rpc_port = find_available_listen_port("harvester rpc") - save_config(root_path, "config.yaml", config) node_iterators.append( setup_harvester( + block_tools, root_path, block_tools.config["self_hostname"], harvester_port, @@ -412,7 +413,8 @@ async def setup_full_system( node_iters = [ setup_introducer(shared_b_tools, introducer_port), setup_harvester( - shared_b_tools.root_path, + shared_b_tools, + shared_b_tools.root_path / "harvester", shared_b_tools.config["self_hostname"], harvester_port, harvester_rpc_port, @@ -421,6 +423,7 @@ async def setup_full_system( ), setup_farmer( shared_b_tools, + shared_b_tools.root_path / "harvester", shared_b_tools.config["self_hostname"], farmer_port, farmer_rpc_port, diff --git a/tests/setup_services.py b/tests/setup_services.py index 27b6b42410dd..98ec23393f19 100644 --- a/tests/setup_services.py +++ b/tests/setup_services.py @@ -6,6 +6,7 @@ from secrets import token_bytes from typing import AsyncGenerator, Optional +from chia.cmds.init_funcs import init from chia.consensus.constants import ConsensusConstants from chia.daemon.server import WebSocketServer, daemon_launch_lock_path, singleton from chia.server.start_farmer import service_kwargs_for_farmer @@ -185,6 +186,7 @@ async def setup_wallet_node( async def setup_harvester( + b_tools: BlockTools, root_path: Path, self_hostname: str, port, @@ -193,11 +195,17 @@ async def setup_harvester( consensus_constants: ConsensusConstants, start_service: bool = True, ): + init(None, root_path) + init(b_tools.root_path / "config" / "ssl" / "ca", root_path) config = load_config(root_path, "config.yaml") + config["logging"]["log_stdout"] = True + config["selected_network"] = "testnet0" + config["harvester"]["selected_network"] = "testnet0" config["harvester"]["port"] = port 
config["harvester"]["rpc_port"] = rpc_port config["harvester"]["farmer_peer"]["host"] = self_hostname config["harvester"]["farmer_peer"]["port"] = farmer_port + config["harvester"]["plot_directories"] = [str(b_tools.plot_dir.resolve())] save_config(root_path, "config.yaml", config) kwargs = service_kwargs_for_harvester(root_path, config["harvester"], consensus_constants) kwargs.update( @@ -219,6 +227,7 @@ async def setup_harvester( async def setup_farmer( b_tools: BlockTools, + root_path: Path, self_hostname: str, port, rpc_port, @@ -226,8 +235,15 @@ async def setup_farmer( full_node_port: Optional[uint16] = None, start_service: bool = True, ): - config = b_tools.config["farmer"] - config_pool = b_tools.config["pool"] + init(None, root_path) + init(b_tools.root_path / "config" / "ssl" / "ca", root_path) + root_config = load_config(root_path, "config.yaml") + root_config["logging"]["log_stdout"] = True + root_config["selected_network"] = "testnet0" + root_config["farmer"]["selected_network"] = "testnet0" + save_config(root_path, "config.yaml", root_config) + config = root_config["farmer"] + config_pool = root_config["pool"] config["xch_target_address"] = encode_puzzle_hash(b_tools.farmer_ph, "xch") config["pool_public_keys"] = [bytes(pk).hex() for pk in b_tools.pool_pubkeys] @@ -241,9 +257,7 @@ async def setup_farmer( else: del config["full_node_peer"] - kwargs = service_kwargs_for_farmer( - b_tools.root_path, config, config_pool, consensus_constants, b_tools.local_keychain - ) + kwargs = service_kwargs_for_farmer(root_path, config, config_pool, consensus_constants, b_tools.local_keychain) kwargs.update( parse_cli_args=False, connect_to_daemon=False, From 83636175574effac15add6561bcd60030f88bcac Mon Sep 17 00:00:00 2001 From: dustinface <35775977+xdustinface@users.noreply.github.com> Date: Tue, 3 May 2022 18:17:05 +0200 Subject: [PATCH 38/55] farmer|rpc|tests: Implement paginated harvester plot endpoints (#11365) * farmer|rpc|tests: Implement paginated harvester 
plot endpoints * Simplify filtering Co-authored-by: Kyle Altendorf * Let the API handle the exceptions * Simplify the other filtering too Co-authored-by: Kyle Altendorf * Simplify count assertions Co-authored-by: Kyle Altendorf * Refactor `is_filter_match` to `plot_matches_filter` And just convert to `Plot` in tests. * Move `chia.util.misc.KeyValue` to `chia.rpc.farmer_rpc_api.FilterItem` * Rename `peer_id` to `node_id` to be match `get_harvesters_{summary}` Co-authored-by: Kyle Altendorf --- chia/farmer/farmer.py | 6 + chia/rpc/farmer_rpc_api.py | 100 +++++++++++++ chia/rpc/farmer_rpc_client.py | 14 ++ chia/util/misc.py | 9 +- tests/core/test_farmer_harvester_rpc.py | 182 +++++++++++++++++++++++- 5 files changed, 309 insertions(+), 2 deletions(-) diff --git a/chia/farmer/farmer.py b/chia/farmer/farmer.py index 6b451f5bbc37..ce3d62ba35eb 100644 --- a/chia/farmer/farmer.py +++ b/chia/farmer/farmer.py @@ -643,6 +643,12 @@ async def get_harvesters(self, counts_only: bool = False) -> Dict: return {"harvesters": harvesters} + def get_receiver(self, node_id: bytes32) -> Receiver: + receiver: Optional[Receiver] = self.plot_sync_receivers.get(node_id) + if receiver is None: + raise KeyError(f"Receiver missing for {node_id}") + return receiver + async def _periodically_update_pool_state_task(self): time_slept: uint64 = uint64(0) config_path: Path = config_path_for_filename(self._root_path, "config.yaml") diff --git a/chia/rpc/farmer_rpc_api.py b/chia/rpc/farmer_rpc_api.py index 0d32f3aa3196..21cf3af5e145 100644 --- a/chia/rpc/farmer_rpc_api.py +++ b/chia/rpc/farmer_rpc_api.py @@ -1,11 +1,69 @@ +import dataclasses +import operator from typing import Any, Callable, Dict, List, Optional +from typing_extensions import Protocol + from chia.farmer.farmer import Farmer +from chia.plot_sync.receiver import Receiver +from chia.protocols.harvester_protocol import Plot from chia.types.blockchain_format.sized_bytes import bytes32 from chia.util.byte_types import hexstr_to_bytes +from 
chia.util.paginator import Paginator +from chia.util.streamable import dataclass_from_dict from chia.util.ws_message import WsRpcMessage, create_payload_dict +class PaginatedRequestData(Protocol): + node_id: bytes32 + page: int + page_size: int + + +@dataclasses.dataclass +class FilterItem: + key: str + value: Optional[str] + + +@dataclasses.dataclass +class PlotInfoRequestData: + node_id: bytes32 + page: int + page_size: int + filter: List[FilterItem] = dataclasses.field(default_factory=list) + sort_key: str = "filename" + reverse: bool = False + + +@dataclasses.dataclass +class PlotPathRequestData: + node_id: bytes32 + page: int + page_size: int + filter: List[str] = dataclasses.field(default_factory=list) + reverse: bool = False + + +def paginated_plot_request(source: List[Any], request: PaginatedRequestData) -> Dict[str, object]: + paginator: Paginator = Paginator(source, request.page_size) + return { + "node_id": request.node_id.hex(), + "page": request.page, + "page_count": paginator.page_count(), + "total_count": len(source), + "plots": paginator.get_page(request.page), + } + + +def plot_matches_filter(plot: Plot, filter_item: FilterItem) -> bool: + plot_attribute = getattr(plot, filter_item.key) + if filter_item.value is None: + return plot_attribute is None + else: + return filter_item.value in str(plot_attribute) + + class FarmerRpcApi: def __init__(self, farmer: Farmer): self.service = farmer @@ -21,6 +79,10 @@ def get_routes(self) -> Dict[str, Callable]: "/set_payout_instructions": self.set_payout_instructions, "/get_harvesters": self.get_harvesters, "/get_harvesters_summary": self.get_harvesters_summary, + "/get_harvester_plots_valid": self.get_harvester_plots_valid, + "/get_harvester_plots_invalid": self.get_harvester_plots_invalid, + "/get_harvester_plots_keys_missing": self.get_harvester_plots_keys_missing, + "/get_harvester_plots_duplicates": self.get_harvester_plots_duplicates, "/get_pool_login_link": self.get_pool_login_link, } @@ -138,6 +200,44 
@@ async def get_harvesters(self, _: Dict): async def get_harvesters_summary(self, _: Dict[str, object]) -> Dict[str, object]: return await self.service.get_harvesters(True) + async def get_harvester_plots_valid(self, request_dict: Dict[str, object]) -> Dict[str, object]: + # TODO: Consider having a extra List[PlotInfo] in Receiver to avoid rebuilding the list for each call + request = dataclass_from_dict(PlotInfoRequestData, request_dict) + plot_list = list(self.service.get_receiver(request.node_id).plots().values()) + # Apply filter + plot_list = [ + plot for plot in plot_list if all(plot_matches_filter(plot, filter_item) for filter_item in request.filter) + ] + restricted_sort_keys: List[str] = ["pool_contract_puzzle_hash", "pool_public_key", "plot_public_key"] + # Apply sort_key and reverse if sort_key is not restricted + if request.sort_key in restricted_sort_keys: + raise KeyError(f"Can't sort by optional attributes: {restricted_sort_keys}") + # Sort by plot_id also by default since its unique + plot_list = sorted(plot_list, key=operator.attrgetter(request.sort_key, "plot_id"), reverse=request.reverse) + return paginated_plot_request(plot_list, request) + + def paginated_plot_path_request( + self, source_func: Callable[[Receiver], List[str]], request_dict: Dict[str, object] + ) -> Dict[str, object]: + request: PlotPathRequestData = dataclass_from_dict(PlotPathRequestData, request_dict) + receiver = self.service.get_receiver(request.node_id) + source = source_func(receiver) + request = dataclass_from_dict(PlotPathRequestData, request_dict) + # Apply filter + source = [plot for plot in source if all(filter_item in plot for filter_item in request.filter)] + # Apply reverse + source = sorted(source, reverse=request.reverse) + return paginated_plot_request(source, request) + + async def get_harvester_plots_invalid(self, request_dict: Dict[str, object]) -> Dict[str, object]: + return self.paginated_plot_path_request(Receiver.invalid, request_dict) + + async def 
get_harvester_plots_keys_missing(self, request_dict: Dict[str, object]) -> Dict[str, object]: + return self.paginated_plot_path_request(Receiver.keys_missing, request_dict) + + async def get_harvester_plots_duplicates(self, request_dict: Dict[str, object]) -> Dict[str, object]: + return self.paginated_plot_path_request(Receiver.duplicates, request_dict) + async def get_pool_login_link(self, request: Dict) -> Dict: launcher_id: bytes32 = bytes32(hexstr_to_bytes(request["launcher_id"])) login_link: Optional[str] = await self.service.generate_login_link(launcher_id) diff --git a/chia/rpc/farmer_rpc_client.py b/chia/rpc/farmer_rpc_client.py index 23d348e9a9ff..90f2344f0c72 100644 --- a/chia/rpc/farmer_rpc_client.py +++ b/chia/rpc/farmer_rpc_client.py @@ -1,7 +1,9 @@ from typing import Dict, List, Optional, Any +from chia.rpc.farmer_rpc_api import PlotInfoRequestData, PlotPathRequestData from chia.rpc.rpc_client import RpcClient from chia.types.blockchain_format.sized_bytes import bytes32 +from chia.util.misc import dataclass_to_json_dict class FarmerRpcClient(RpcClient): @@ -55,6 +57,18 @@ async def get_harvesters(self) -> Dict[str, Any]: async def get_harvesters_summary(self) -> Dict[str, object]: return await self.fetch("get_harvesters_summary", {}) + async def get_harvester_plots_valid(self, request: PlotInfoRequestData) -> Dict[str, Any]: + return await self.fetch("get_harvester_plots_valid", dataclass_to_json_dict(request)) + + async def get_harvester_plots_invalid(self, request: PlotPathRequestData) -> Dict[str, Any]: + return await self.fetch("get_harvester_plots_invalid", dataclass_to_json_dict(request)) + + async def get_harvester_plots_keys_missing(self, request: PlotPathRequestData) -> Dict[str, Any]: + return await self.fetch("get_harvester_plots_keys_missing", dataclass_to_json_dict(request)) + + async def get_harvester_plots_duplicates(self, request: PlotPathRequestData) -> Dict[str, Any]: + return await self.fetch("get_harvester_plots_duplicates", 
dataclass_to_json_dict(request)) + async def get_pool_login_link(self, launcher_id: bytes32) -> Optional[str]: try: return (await self.fetch("get_pool_login_link", {"launcher_id": launcher_id.hex()}))["login_link"] diff --git a/chia/util/misc.py b/chia/util/misc.py index fbd8414f3fbd..a075c09eaa42 100644 --- a/chia/util/misc.py +++ b/chia/util/misc.py @@ -1,4 +1,7 @@ -from typing import Sequence, Union +import dataclasses +from typing import Any, Dict, Sequence, Union + +from chia.util.streamable import recurse_jsonify def format_bytes(bytes: int) -> str: @@ -75,3 +78,7 @@ def prompt_yes_no(prompt: str = "(y/n) ") -> bool: def get_list_or_len(list_in: Sequence[object], length: bool) -> Union[int, Sequence[object]]: return len(list_in) if length else list_in + + +def dataclass_to_json_dict(instance: Any) -> Dict[str, Any]: + return recurse_jsonify(dataclasses.asdict(instance)) diff --git a/tests/core/test_farmer_harvester_rpc.py b/tests/core/test_farmer_harvester_rpc.py index e6f07296405d..f44609da002f 100644 --- a/tests/core/test_farmer_harvester_rpc.py +++ b/tests/core/test_farmer_harvester_rpc.py @@ -1,12 +1,28 @@ import logging +import operator import time +from math import ceil +from os import mkdir +from pathlib import Path +from shutil import copy +from typing import Any, Awaitable, Callable, Dict, List, Union, cast import pytest import pytest_asyncio from chia.consensus.coinbase import create_puzzlehash_for_pk +from chia.plot_sync.receiver import Receiver +from chia.plotting.util import add_plot_directory from chia.protocols import farmer_protocol -from chia.rpc.farmer_rpc_api import FarmerRpcApi +from chia.protocols.harvester_protocol import Plot +from chia.rpc.farmer_rpc_api import ( + FarmerRpcApi, + FilterItem, + PaginatedRequestData, + PlotInfoRequestData, + PlotPathRequestData, + plot_matches_filter, +) from chia.rpc.farmer_rpc_client import FarmerRpcClient from chia.rpc.harvester_rpc_api import HarvesterRpcApi from chia.rpc.harvester_rpc_client import 
HarvesterRpcClient @@ -18,7 +34,10 @@ from chia.util.hash import std_hash from chia.util.ints import uint8, uint16, uint32, uint64 from chia.util.misc import get_list_or_len +from chia.util.streamable import dataclass_from_dict from chia.wallet.derive_keys import master_sk_to_wallet_sk +from tests.block_tools import get_plot_dir +from tests.plot_sync.test_delta import dummy_plot from tests.setup_nodes import setup_harvester_farmer, test_constants from tests.time_out_assert import time_out_assert, time_out_assert_custom_interval from tests.util.rpc import validate_get_routes @@ -27,6 +46,14 @@ log = logging.getLogger(__name__) +async def wait_for_plot_sync(receiver: Receiver, previous_last_sync_id: uint64) -> None: + def wait(): + current_last_sync_id = receiver.last_sync().sync_id + return current_last_sync_id != 0 and current_last_sync_id != previous_last_sync_id + + await time_out_assert(30, wait) + + @pytest_asyncio.fixture(scope="function") async def harvester_farmer_simulation(bt, tmp_path): async for _ in setup_harvester_farmer(bt, tmp_path, test_constants, start_services=True): @@ -355,3 +382,156 @@ async def remove_all_and_validate() -> bool: await time_out_assert(15, remove_all_and_validate, False) assert (await farmer_rpc_client.get_pool_state())["pool_state"][0]["plot_count"] == 0 + + +@pytest.mark.parametrize( + "filter_item, match", + [ + (FilterItem("filename", "1"), True), + (FilterItem("filename", "12"), True), + (FilterItem("filename", "123"), True), + (FilterItem("filename", "1234"), False), + (FilterItem("filename", "23"), True), + (FilterItem("filename", "3"), True), + (FilterItem("filename", "0123"), False), + (FilterItem("pool_contract_puzzle_hash", None), True), + (FilterItem("pool_contract_puzzle_hash", "1"), False), + ], +) +def test_plot_matches_filter(filter_item: FilterItem, match: bool): + assert plot_matches_filter(dummy_plot("123"), filter_item) == match + + +@pytest.mark.parametrize( + "endpoint, filtering, sort_key, reverse, 
expected_plot_count", + [ + (FarmerRpcClient.get_harvester_plots_valid, [], "filename", False, 20), + (FarmerRpcClient.get_harvester_plots_valid, [], "size", True, 20), + ( + FarmerRpcClient.get_harvester_plots_valid, + [FilterItem("pool_contract_puzzle_hash", None)], + "file_size", + True, + 15, + ), + ( + FarmerRpcClient.get_harvester_plots_valid, + [FilterItem("size", "20"), FilterItem("filename", "81")], + "plot_id", + False, + 4, + ), + (FarmerRpcClient.get_harvester_plots_invalid, [], None, True, 13), + (FarmerRpcClient.get_harvester_plots_invalid, ["invalid_0"], None, False, 6), + (FarmerRpcClient.get_harvester_plots_invalid, ["inval", "lid_1/"], None, False, 2), + (FarmerRpcClient.get_harvester_plots_keys_missing, [], None, True, 3), + (FarmerRpcClient.get_harvester_plots_keys_missing, ["keys_missing_1"], None, False, 2), + (FarmerRpcClient.get_harvester_plots_duplicates, [], None, True, 7), + (FarmerRpcClient.get_harvester_plots_duplicates, ["duplicates_0"], None, False, 3), + ], +) +@pytest.mark.asyncio +async def test_farmer_get_harvester_plots_endpoints( + harvester_farmer_environment: Any, + endpoint: Callable[[FarmerRpcClient, PaginatedRequestData], Awaitable[Dict[str, Any]]], + filtering: Union[List[FilterItem], List[str]], + sort_key: str, + reverse: bool, + expected_plot_count: int, +) -> None: + ( + farmer_service, + farmer_rpc_api, + farmer_rpc_client, + harvester_service, + harvester_rpc_api, + harvester_rpc_client, + ) = harvester_farmer_environment + + harvester = harvester_service._node + harvester_id = harvester_service._server.node_id + receiver = farmer_service._api.farmer.plot_sync_receivers[harvester_id] + + if receiver.initial_sync(): + await wait_for_plot_sync(receiver, receiver.last_sync().sync_id) + + harvester_plots = (await harvester_rpc_client.get_plots())["plots"] + plots = [] + + request: PaginatedRequestData + if endpoint == FarmerRpcClient.get_harvester_plots_valid: + request = PlotInfoRequestData(harvester_id, 0, -1, 
cast(List[FilterItem], filtering), sort_key, reverse) + else: + request = PlotPathRequestData(harvester_id, 0, -1, cast(List[str], filtering), reverse) + + def add_plot_directories(prefix: str, count: int) -> List[Path]: + new_paths = [] + for i in range(count): + new_paths.append(harvester.root_path / f"{prefix}_{i}") + mkdir(new_paths[-1]) + add_plot_directory(harvester.root_path, str(new_paths[-1])) + return new_paths + + # Generate the plot data and + if endpoint == FarmerRpcClient.get_harvester_plots_valid: + plots = harvester_plots + elif endpoint == FarmerRpcClient.get_harvester_plots_invalid: + invalid_paths = add_plot_directories("invalid", 3) + for dir_index, r in [(0, range(0, 6)), (1, range(6, 8)), (2, range(8, 13))]: + plots += [str(invalid_paths[dir_index] / f"{i}.plot") for i in r] + for plot in plots: + with open(plot, "w"): + pass + elif endpoint == FarmerRpcClient.get_harvester_plots_keys_missing: + keys_missing_plots = [path for path in (Path(get_plot_dir()) / "not_in_keychain").iterdir() if path.is_file()] + keys_missing_paths = add_plot_directories("keys_missing", 2) + for dir_index, copy_plots in [(0, keys_missing_plots[:1]), (1, keys_missing_plots[1:3])]: + for plot in copy_plots: + copy(plot, keys_missing_paths[dir_index]) + plots.append(str(keys_missing_paths[dir_index] / plot.name)) + + elif endpoint == FarmerRpcClient.get_harvester_plots_duplicates: + duplicate_paths = add_plot_directories("duplicates", 2) + for dir_index, r in [(0, range(0, 3)), (1, range(3, 7))]: + for i in r: + plot_path = Path(harvester_plots[i]["filename"]) + plots.append(str(duplicate_paths[dir_index] / plot_path.name)) + copy(plot_path, plots[-1]) + + # Sort and filter the data + if endpoint == FarmerRpcClient.get_harvester_plots_valid: + for filter_item in filtering: + assert isinstance(filter_item, FilterItem) + plots = [plot for plot in plots if plot_matches_filter(dataclass_from_dict(Plot, plot), filter_item)] + plots.sort(key=operator.itemgetter(sort_key, 
"plot_id"), reverse=reverse) + else: + for filter_item in filtering: + plots = [plot for plot in plots if filter_item in plot] + plots.sort(reverse=reverse) + + total_count = len(plots) + assert total_count == expected_plot_count + + last_sync_id = receiver.last_sync().sync_id + + harvester.plot_manager.trigger_refresh() + harvester.plot_manager.start_refreshing() + + await wait_for_plot_sync(receiver, last_sync_id) + + for page_size in [1, int(total_count / 2), total_count - 1, total_count, total_count + 1, 100]: + request.page_size = page_size + expected_page_count = ceil(total_count / page_size) + for page in range(expected_page_count): + request.page = page + page_result = await endpoint(farmer_rpc_client, request) + offset = page * page_size + expected_plots = plots[offset : offset + page_size] + assert page_result == { + "success": True, + "node_id": harvester_id.hex(), + "page": page, + "page_count": expected_page_count, + "total_count": total_count, + "plots": expected_plots, + } From e1255c91b2d08987983dc48a2de6f50ca5ca6eb8 Mon Sep 17 00:00:00 2001 From: dustinface <35775977+xdustinface@users.noreply.github.com> Date: Wed, 4 May 2022 00:33:38 +0200 Subject: [PATCH 39/55] farmer|gui: Enable paginated plot loading and improved state reporting (#11367) * farmer: Adjust notifications from the farmer to the UI - Only send the data for the harvester which actually sent an update - Notify for each loaded batch during initial loading * Enable improved farmer/harvester GUI --- chia-blockchain-gui | 2 +- chia/farmer/farmer.py | 13 ++++++++----- chia/plot_sync/receiver.py | 23 +++++++++++++++++------ chia/rpc/farmer_rpc_api.py | 13 +++++++++++-- 4 files changed, 37 insertions(+), 14 deletions(-) diff --git a/chia-blockchain-gui b/chia-blockchain-gui index 81303fb962f4..a6724910a82e 160000 --- a/chia-blockchain-gui +++ b/chia-blockchain-gui @@ -1 +1 @@ -Subproject commit 81303fb962f4a627a2e1c55098e187a9057745da +Subproject commit 
a6724910a82e1e719784a7a0885da13b30533c66 diff --git a/chia/farmer/farmer.py b/chia/farmer/farmer.py index ce3d62ba35eb..74231166a7d4 100644 --- a/chia/farmer/farmer.py +++ b/chia/farmer/farmer.py @@ -256,11 +256,14 @@ def on_disconnect(self, connection: ws.WSChiaConnection): self.state_changed("close_connection", {}) if connection.connection_type is NodeType.HARVESTER: del self.plot_sync_receivers[connection.peer_node_id] - - async def plot_sync_callback(self, peer_id: bytes32, delta: Delta) -> None: - log.info(f"plot_sync_callback: peer_id {peer_id}, delta {delta}") - if not delta.empty(): - self.state_changed("new_plots", await self.get_harvesters()) + self.state_changed("harvester_removed", {"node_id": connection.peer_node_id}) + + async def plot_sync_callback(self, peer_id: bytes32, delta: Optional[Delta]) -> None: + log.debug(f"plot_sync_callback: peer_id {peer_id}, delta {delta}") + receiver: Receiver = self.plot_sync_receivers[peer_id] + harvester_updated: bool = delta is not None and not delta.empty() + if receiver.initial_sync() or harvester_updated: + self.state_changed("harvester_update", receiver.to_dict(True)) async def _pool_get_pool_info(self, pool_config: PoolWalletConfig) -> Optional[Dict]: try: diff --git a/chia/plot_sync/receiver.py b/chia/plot_sync/receiver.py index 37cae4e86d9c..22f4059d8cca 100644 --- a/chia/plot_sync/receiver.py +++ b/chia/plot_sync/receiver.py @@ -61,10 +61,12 @@ class Receiver: _keys_missing: List[str] _duplicates: List[str] _total_plot_size: int - _update_callback: Callable[[bytes32, Delta], Coroutine[Any, Any, None]] + _update_callback: Callable[[bytes32, Optional[Delta]], Coroutine[Any, Any, None]] def __init__( - self, connection: WSChiaConnection, update_callback: Callable[[bytes32, Delta], Coroutine[Any, Any, None]] + self, + connection: WSChiaConnection, + update_callback: Callable[[bytes32, Optional[Delta]], Coroutine[Any, Any, None]], ) -> None: self._connection = connection self._current_sync = Sync() @@ -76,6 
+78,12 @@ def __init__( self._total_plot_size = 0 self._update_callback = update_callback # type: ignore[assignment, misc] + async def trigger_callback(self, update: Optional[Delta] = None) -> None: + try: + await self._update_callback(self._connection.peer_node_id, update) # type: ignore[misc,call-arg] + except Exception as e: + log.error(f"_update_callback raised: {e}") + def reset(self) -> None: self._current_sync = Sync() self._last_sync = Sync() @@ -176,6 +184,9 @@ async def _process_loaded(self, plot_infos: PlotSyncPlotList) -> None: self._current_sync.delta.valid.additions[plot_info.filename] = plot_info self._current_sync.bump_plots_processed() + # Let the callback receiver know about the sync progress updates + await self.trigger_callback() + if plot_infos.final: self._current_sync.state = State.removed @@ -205,6 +216,9 @@ async def process_path_list( if not is_removal: self._current_sync.bump_plots_processed() + # Let the callback receiver know about the sync progress updates + await self.trigger_callback() + if paths.final: self._current_sync.state = next_state @@ -293,10 +307,7 @@ async def _sync_done(self, data: PlotSyncDone) -> None: self._last_sync = self._current_sync self._current_sync = Sync() # Let the callback receiver know if this sync cycle caused any update - try: - await self._update_callback(self._connection.peer_node_id, update) # type: ignore[misc,call-arg] - except Exception as e: - log.error(f"_update_callback raised: {e}") + await self.trigger_callback(update) async def sync_done(self, data: PlotSyncDone) -> None: await self._process(self._sync_done, ProtocolMessageTypes.plot_sync_done, data) diff --git a/chia/rpc/farmer_rpc_api.py b/chia/rpc/farmer_rpc_api.py index 21cf3af5e145..7654aedd272c 100644 --- a/chia/rpc/farmer_rpc_api.py +++ b/chia/rpc/farmer_rpc_api.py @@ -107,10 +107,19 @@ async def _state_changed(self, change: str, change_data: Dict) -> List[WsRpcMess "wallet_ui", ) ] - elif change == "new_plots": + elif change == 
"harvester_update": return [ create_payload_dict( - "get_harvesters", + "harvester_update", + change_data, + self.service_name, + "wallet_ui", + ) + ] + elif change == "harvester_removed": + return [ + create_payload_dict( + "harvester_removed", change_data, self.service_name, "wallet_ui", From 06684f96d54f3dff8bb82e2e27b43ab134875e1c Mon Sep 17 00:00:00 2001 From: dustinface <35775977+xdustinface@users.noreply.github.com> Date: Wed, 20 Apr 2022 20:09:41 +0200 Subject: [PATCH 40/55] streamable: Enable `isort` + more `mypy` (#10539) * isort: Fix `streamable.py` and `test_streamable.py` * mypy: Drop `streamable.py` and `test_streamable.py` form exclusion And fix all the mypy issues. * Fix `pylint` * Introduce `ParseFunctionType` and `StreamFunctionType` * Use `object` instead of `Type[Any]` for `is_type_*` functions * Some `Any` -> `object` * Use `typing.overload` for `recurse_jsonify` * Move some comments * Drop `Union`, use `Literal` properly * Explicitly ignore the return of `f_type.parse` Co-authored-by: Kyle Altendorf * Merge two `recurse_jsonify` overloads * Typing for the base definition of `recurse_jsonify` Co-authored-by: Kyle Altendorf --- .isort.cfg | 2 - chia/util/streamable.py | 117 +++++++++++------ mypy.ini | 2 +- tests/core/util/test_streamable.py | 201 ++++++++++++++++------------- 4 files changed, 192 insertions(+), 130 deletions(-) diff --git a/.isort.cfg b/.isort.cfg index 9ed754a63a73..c96731587add 100644 --- a/.isort.cfg +++ b/.isort.cfg @@ -109,7 +109,6 @@ extend_skip= chia/util/profiler.py chia/util/service_groups.py chia/util/ssl_check.py - chia/util/streamable.py chia/util/ws_message.py chia/wallet/cat_wallet/cat_info.py chia/wallet/cat_wallet/cat_utils.py @@ -191,7 +190,6 @@ extend_skip= tests/core/util/test_files.py tests/core/util/test_keychain.py tests/core/util/test_keyring_wrapper.py - tests/core/util/test_streamable.py tests/generator/test_compression.py tests/generator/test_generator_types.py tests/generator/test_list_to_batches.py 
diff --git a/chia/util/streamable.py b/chia/util/streamable.py index cf545fd4833d..fd5fd468e337 100644 --- a/chia/util/streamable.py +++ b/chia/util/streamable.py @@ -5,7 +5,21 @@ import pprint import sys from enum import Enum -from typing import Any, BinaryIO, Dict, get_type_hints, List, Tuple, Type, TypeVar, Union, Callable, Optional, Iterator +from typing import ( + Any, + BinaryIO, + Callable, + Dict, + Iterator, + List, + Optional, + Tuple, + Type, + TypeVar, + Union, + get_type_hints, + overload, +) from blspy import G1Element, G2Element, PrivateKey from typing_extensions import Literal @@ -58,29 +72,32 @@ class DefinitionError(StreamableError): _T_Streamable = TypeVar("_T_Streamable", bound="Streamable") +ParseFunctionType = Callable[[BinaryIO], object] +StreamFunctionType = Callable[[object, BinaryIO], None] + # Caches to store the fields and (de)serialization methods for all available streamable classes. -FIELDS_FOR_STREAMABLE_CLASS = {} -STREAM_FUNCTIONS_FOR_STREAMABLE_CLASS = {} -PARSE_FUNCTIONS_FOR_STREAMABLE_CLASS = {} +FIELDS_FOR_STREAMABLE_CLASS: Dict[Type[object], Dict[str, Type[object]]] = {} +STREAM_FUNCTIONS_FOR_STREAMABLE_CLASS: Dict[Type[object], List[StreamFunctionType]] = {} +PARSE_FUNCTIONS_FOR_STREAMABLE_CLASS: Dict[Type[object], List[ParseFunctionType]] = {} -def is_type_List(f_type: Type) -> bool: +def is_type_List(f_type: object) -> bool: return get_origin(f_type) == list or f_type == list -def is_type_SpecificOptional(f_type) -> bool: +def is_type_SpecificOptional(f_type: object) -> bool: """ Returns true for types such as Optional[T], but not Optional, or T. 
""" return get_origin(f_type) == Union and get_args(f_type)[1]() is None -def is_type_Tuple(f_type: Type) -> bool: +def is_type_Tuple(f_type: object) -> bool: return get_origin(f_type) == tuple or f_type == tuple -def dataclass_from_dict(klass, d): +def dataclass_from_dict(klass: Type[Any], d: Any) -> Any: """ Converts a dictionary based on a dataclass, into an instance of that dataclass. Recursively goes through lists, optionals, and dictionaries. @@ -100,7 +117,8 @@ def dataclass_from_dict(klass, d): return tuple(klass_properties) elif dataclasses.is_dataclass(klass): # Type is a dataclass, data is a dictionary - fieldtypes = {f.name: f.type for f in dataclasses.fields(klass)} + hints = get_type_hints(klass) + fieldtypes = {f.name: hints.get(f.name, f.type) for f in dataclasses.fields(klass)} return klass(**{f: dataclass_from_dict(fieldtypes[f], d[f]) for f in d}) elif is_type_List(klass): # Type is a list, data is a list @@ -116,7 +134,17 @@ def dataclass_from_dict(klass, d): return klass(d) -def recurse_jsonify(d): +@overload +def recurse_jsonify(d: Union[List[Any], Tuple[Any, ...]]) -> List[Any]: + ... + + +@overload +def recurse_jsonify(d: Dict[str, Any]) -> Dict[str, Any]: + ... + + +def recurse_jsonify(d: Union[List[Any], Tuple[Any, ...], Dict[str, Any]]) -> Union[List[Any], Dict[str, Any]]: """ Makes bytes objects and unhashable types into strings with 0x, and makes large ints into strings. 
@@ -173,11 +201,11 @@ def parse_uint32(f: BinaryIO, byteorder: Literal["little", "big"] = "big") -> ui return uint32(int.from_bytes(size_bytes, byteorder)) -def write_uint32(f: BinaryIO, value: uint32, byteorder: Literal["little", "big"] = "big"): +def write_uint32(f: BinaryIO, value: uint32, byteorder: Literal["little", "big"] = "big") -> None: f.write(value.to_bytes(4, byteorder)) -def parse_optional(f: BinaryIO, parse_inner_type_f: Callable[[BinaryIO], Any]) -> Optional[Any]: +def parse_optional(f: BinaryIO, parse_inner_type_f: ParseFunctionType) -> Optional[object]: is_present_bytes = f.read(1) assert is_present_bytes is not None and len(is_present_bytes) == 1 # Checks for EOF if is_present_bytes == bytes([0]): @@ -195,8 +223,8 @@ def parse_bytes(f: BinaryIO) -> bytes: return bytes_read -def parse_list(f: BinaryIO, parse_inner_type_f: Callable[[BinaryIO], Any]) -> List[Any]: - full_list: List = [] +def parse_list(f: BinaryIO, parse_inner_type_f: ParseFunctionType) -> List[object]: + full_list: List[object] = [] # wjb assert inner_type != get_args(List)[0] list_size = parse_uint32(f) for list_index in range(list_size): @@ -204,14 +232,14 @@ def parse_list(f: BinaryIO, parse_inner_type_f: Callable[[BinaryIO], Any]) -> Li return full_list -def parse_tuple(f: BinaryIO, list_parse_inner_type_f: List[Callable[[BinaryIO], Any]]) -> Tuple[Any, ...]: - full_list = [] +def parse_tuple(f: BinaryIO, list_parse_inner_type_f: List[ParseFunctionType]) -> Tuple[object, ...]: + full_list: List[object] = [] for parse_f in list_parse_inner_type_f: full_list.append(parse_f(f)) return tuple(full_list) -def parse_size_hints(f: BinaryIO, f_type: Type, bytes_to_read: int) -> Any: +def parse_size_hints(f: BinaryIO, f_type: Type[Any], bytes_to_read: int) -> Any: bytes_read = f.read(bytes_to_read) assert bytes_read is not None and len(bytes_read) == bytes_to_read return f_type.from_bytes(bytes_read) @@ -224,7 +252,7 @@ def parse_str(f: BinaryIO) -> str: return 
bytes.decode(str_read_bytes, "utf-8") -def stream_optional(stream_inner_type_func: Callable[[Any, BinaryIO], None], item: Any, f: BinaryIO) -> None: +def stream_optional(stream_inner_type_func: StreamFunctionType, item: Any, f: BinaryIO) -> None: if item is None: f.write(bytes([0])) else: @@ -237,13 +265,13 @@ def stream_bytes(item: Any, f: BinaryIO) -> None: f.write(item) -def stream_list(stream_inner_type_func: Callable[[Any, BinaryIO], None], item: Any, f: BinaryIO) -> None: +def stream_list(stream_inner_type_func: StreamFunctionType, item: Any, f: BinaryIO) -> None: write_uint32(f, uint32(len(item))) for element in item: stream_inner_type_func(element, f) -def stream_tuple(stream_inner_type_funcs: List[Callable[[Any, BinaryIO], None]], item: Any, f: BinaryIO) -> None: +def stream_tuple(stream_inner_type_funcs: List[StreamFunctionType], item: Any, f: BinaryIO) -> None: assert len(stream_inner_type_funcs) == len(item) for i in range(len(item)): stream_inner_type_funcs[i](item[i], f) @@ -255,7 +283,19 @@ def stream_str(item: Any, f: BinaryIO) -> None: f.write(str_bytes) -def streamable(cls: Any): +def stream_bool(item: Any, f: BinaryIO) -> None: + f.write(int(item).to_bytes(1, "big")) + + +def stream_streamable(item: object, f: BinaryIO) -> None: + getattr(item, "stream")(f) + + +def stream_byte_convertible(item: object, f: BinaryIO) -> None: + f.write(getattr(item, "__bytes__")()) + + +def streamable(cls: Type[_T_Streamable]) -> Type[_T_Streamable]: """ This decorator forces correct streamable protocol syntax/usage and populates the caches for types hints and (de)serialization methods for all members of the class. The correct usage is: @@ -279,7 +319,9 @@ class Example(Streamable): raise DefinitionError(f"@dataclass(frozen=True) required first. {correct_usage_string}") try: - object.__new__(cls)._streamable_test_if_dataclass_frozen_ = None + # Ignore mypy here because we especially want to access a not available member to test if + # the dataclass is frozen. 
+ object.__new__(cls)._streamable_test_if_dataclass_frozen_ = None # type: ignore[attr-defined] except dataclasses.FrozenInstanceError: pass else: @@ -352,10 +394,10 @@ class Streamable: Make sure to use the streamable decorator when inheriting from the Streamable class to prepare the streaming caches. """ - def post_init_parse(self, item: Any, f_name: str, f_type: Type) -> Any: + def post_init_parse(self, item: Any, f_name: str, f_type: Type[Any]) -> Any: if is_type_List(f_type): - collected_list: List = [] - inner_type: Type = get_args(f_type)[0] + collected_list: List[Any] = [] + inner_type: Type[Any] = get_args(f_type)[0] # wjb assert inner_type != get_args(List)[0] # type: ignore if not is_type_List(type(item)): raise ValueError(f"Wrong type for {f_name}, need a list.") @@ -391,7 +433,7 @@ def post_init_parse(self, item: Any, f_name: str, f_type: Type) -> Any: raise ValueError(f"Wrong type for {f_name}") return item - def __post_init__(self): + def __post_init__(self) -> None: try: fields = FIELDS_FOR_STREAMABLE_CLASS[type(self)] except Exception: @@ -408,12 +450,12 @@ def __post_init__(self): object.__setattr__(self, f_name, self.post_init_parse(data[f_name], f_name, f_type)) @classmethod - def function_to_parse_one_item(cls, f_type: Type) -> Callable[[BinaryIO], Any]: + def function_to_parse_one_item(cls, f_type: Type[Any]) -> ParseFunctionType: """ This function returns a function taking one argument `f: BinaryIO` that parses and returns a value of the given type. 
""" - inner_type: Type + inner_type: Type[Any] if f_type is bool: return parse_bool if is_type_SpecificOptional(f_type): @@ -421,7 +463,8 @@ def function_to_parse_one_item(cls, f_type: Type) -> Callable[[BinaryIO], Any]: parse_inner_type_f = cls.function_to_parse_one_item(inner_type) return lambda f: parse_optional(f, parse_inner_type_f) if hasattr(f_type, "parse"): - return f_type.parse + # Ignoring for now as the proper solution isn't obvious + return f_type.parse # type: ignore[no-any-return] if f_type == bytes: return parse_bytes if is_type_List(f_type): @@ -444,7 +487,7 @@ def parse(cls: Type[_T_Streamable], f: BinaryIO) -> _T_Streamable: # Create the object without calling __init__() to avoid unnecessary post-init checks in strictdataclass obj: _T_Streamable = object.__new__(cls) fields: Iterator[str] = iter(FIELDS_FOR_STREAMABLE_CLASS.get(cls, {})) - values: Iterator = (parse_f(f) for parse_f in PARSE_FUNCTIONS_FOR_STREAMABLE_CLASS[cls]) + values: Iterator[object] = (parse_f(f) for parse_f in PARSE_FUNCTIONS_FOR_STREAMABLE_CLASS[cls]) for field, value in zip(fields, values): object.__setattr__(obj, field, value) @@ -456,8 +499,8 @@ def parse(cls: Type[_T_Streamable], f: BinaryIO) -> _T_Streamable: return obj @classmethod - def function_to_stream_one_item(cls, f_type: Type) -> Callable[[Any, BinaryIO], Any]: - inner_type: Type + def function_to_stream_one_item(cls, f_type: Type[Any]) -> StreamFunctionType: + inner_type: Type[Any] if is_type_SpecificOptional(f_type): inner_type = get_args(f_type)[0] stream_inner_type_func = cls.function_to_stream_one_item(inner_type) @@ -465,9 +508,9 @@ def function_to_stream_one_item(cls, f_type: Type) -> Callable[[Any, BinaryIO], elif f_type == bytes: return stream_bytes elif hasattr(f_type, "stream"): - return lambda item, f: item.stream(f) + return stream_streamable elif hasattr(f_type, "__bytes__"): - return lambda item, f: f.write(bytes(item)) + return stream_byte_convertible elif is_type_List(f_type): inner_type = 
get_args(f_type)[0] stream_inner_type_func = cls.function_to_stream_one_item(inner_type) @@ -481,7 +524,7 @@ def function_to_stream_one_item(cls, f_type: Type) -> Callable[[Any, BinaryIO], elif f_type is str: return stream_str elif f_type is bool: - return lambda item, f: f.write(int(item).to_bytes(1, "big")) + return stream_bool else: raise NotImplementedError(f"can't stream {f_type}") @@ -518,9 +561,9 @@ def __str__(self: Any) -> str: def __repr__(self: Any) -> str: return pp.pformat(recurse_jsonify(dataclasses.asdict(self))) - def to_json_dict(self) -> Dict: + def to_json_dict(self) -> Dict[str, Any]: return recurse_jsonify(dataclasses.asdict(self)) @classmethod - def from_json_dict(cls: Any, json_dict: Dict) -> Any: + def from_json_dict(cls: Any, json_dict: Dict[str, Any]) -> Any: return dataclass_from_dict(cls, json_dict) diff --git a/mypy.ini b/mypy.ini index 4c3f96cbfd0f..795d940d34fd 100644 --- a/mypy.ini +++ b/mypy.ini @@ -17,7 +17,7 @@ no_implicit_reexport = True strict_equality = True # list created by: venv/bin/mypy | sed -n 's/.py:.*//p' | sort | uniq | tr '/' '.' 
| tr '\n' ',' -[mypy-benchmarks.block_ref,benchmarks.block_store,benchmarks.coin_store,benchmarks.utils,build_scripts.installer-version,chia.clvm.spend_sim,chia.cmds.configure,chia.cmds.db,chia.cmds.db_upgrade_func,chia.cmds.farm_funcs,chia.cmds.init,chia.cmds.init_funcs,chia.cmds.keys,chia.cmds.keys_funcs,chia.cmds.passphrase,chia.cmds.passphrase_funcs,chia.cmds.plotnft,chia.cmds.plotnft_funcs,chia.cmds.plots,chia.cmds.plotters,chia.cmds.show,chia.cmds.start_funcs,chia.cmds.wallet,chia.cmds.wallet_funcs,chia.consensus.block_body_validation,chia.consensus.blockchain,chia.consensus.blockchain_interface,chia.consensus.block_creation,chia.consensus.block_header_validation,chia.consensus.block_record,chia.consensus.block_root_validation,chia.consensus.coinbase,chia.consensus.constants,chia.consensus.difficulty_adjustment,chia.consensus.get_block_challenge,chia.consensus.multiprocess_validation,chia.consensus.pos_quality,chia.consensus.vdf_info_computation,chia.daemon.client,chia.daemon.keychain_proxy,chia.daemon.keychain_server,chia.daemon.server,chia.farmer.farmer,chia.farmer.farmer_api,chia.full_node.block_height_map,chia.full_node.block_store,chia.full_node.bundle_tools,chia.full_node.coin_store,chia.full_node.full_node,chia.full_node.full_node_api,chia.full_node.full_node_store,chia.full_node.generator,chia.full_node.hint_store,chia.full_node.lock_queue,chia.full_node.mempool,chia.full_node.mempool_check_conditions,chia.full_node.mempool_manager,chia.full_node.pending_tx_cache,chia.full_node.sync_store,chia.full_node.weight_proof,chia.harvester.harvester,chia.harvester.harvester_api,chia.introducer.introducer,chia.introducer.introducer_api,chia.plotters.bladebit,chia.plotters.chiapos,chia.plotters.install_plotter,chia.plotters.madmax,chia.plotters.plotters,chia.plotters.plotters_util,chia.plotting.check_plots,chia.plotting.create_plots,chia.plotting.manager,chia.plotting.util,chia.pools.pool_config,chia.pools.pool_puzzles,chia.pools.pool_wallet,chia.pools.pool_walle
t_info,chia.protocols.pool_protocol,chia.rpc.crawler_rpc_api,chia.rpc.farmer_rpc_api,chia.rpc.farmer_rpc_client,chia.rpc.full_node_rpc_api,chia.rpc.full_node_rpc_client,chia.rpc.harvester_rpc_api,chia.rpc.harvester_rpc_client,chia.rpc.rpc_client,chia.rpc.rpc_server,chia.rpc.timelord_rpc_api,chia.rpc.util,chia.rpc.wallet_rpc_api,chia.rpc.wallet_rpc_client,chia.seeder.crawler,chia.seeder.crawler_api,chia.seeder.crawl_store,chia.seeder.dns_server,chia.seeder.peer_record,chia.seeder.start_crawler,chia.server.address_manager,chia.server.address_manager_store,chia.server.connection_utils,chia.server.introducer_peers,chia.server.node_discovery,chia.server.peer_store_resolver,chia.server.rate_limits,chia.server.reconnect_task,chia.server.server,chia.server.ssl_context,chia.server.start_farmer,chia.server.start_full_node,chia.server.start_harvester,chia.server.start_introducer,chia.server.start_service,chia.server.start_timelord,chia.server.start_wallet,chia.server.upnp,chia.server.ws_connection,chia.simulator.full_node_simulator,chia.simulator.start_simulator,chia.ssl.create_ssl,chia.timelord.iters_from_block,chia.timelord.timelord,chia.timelord.timelord_api,chia.timelord.timelord_launcher,chia.timelord.timelord_state,chia.types.announcement,chia.types.blockchain_format.classgroup,chia.types.blockchain_format.coin,chia.types.blockchain_format.program,chia.types.blockchain_format.proof_of_space,chia.types.blockchain_format.tree_hash,chia.types.blockchain_format.vdf,chia.types.full_block,chia.types.header_block,chia.types.mempool_item,chia.types.name_puzzle_condition,chia.types.peer_info,chia.types.spend_bundle,chia.types.transaction_queue_entry,chia.types.unfinished_block,chia.types.unfinished_header_block,chia.util.api_decorators,chia.util.block_cache,chia.util.byte_types,chia.util.cached_bls,chia.util.check_fork_next_block,chia.util.chia_logging,chia.util.config,chia.util.db_wrapper,chia.util.dump_keyring,chia.util.file_keyring,chia.util.files,chia.util.hash,chia.util.ints
,chia.util.json_util,chia.util.keychain,chia.util.keyring_wrapper,chia.util.log_exceptions,chia.util.lru_cache,chia.util.make_test_constants,chia.util.merkle_set,chia.util.network,chia.util.partial_func,chia.util.pip_import,chia.util.profiler,chia.util.safe_cancel_task,chia.util.service_groups,chia.util.ssl_check,chia.util.streamable,chia.util.struct_stream,chia.util.validate_alert,chia.wallet.block_record,chia.wallet.cat_wallet.cat_utils,chia.wallet.cat_wallet.cat_wallet,chia.wallet.cat_wallet.lineage_store,chia.wallet.chialisp,chia.wallet.did_wallet.did_wallet,chia.wallet.did_wallet.did_wallet_puzzles,chia.wallet.key_val_store,chia.wallet.lineage_proof,chia.wallet.payment,chia.wallet.puzzles.load_clvm,chia.wallet.puzzles.p2_conditions,chia.wallet.puzzles.p2_delegated_conditions,chia.wallet.puzzles.p2_delegated_puzzle,chia.wallet.puzzles.p2_delegated_puzzle_or_hidden_puzzle,chia.wallet.puzzles.p2_m_of_n_delegate_direct,chia.wallet.puzzles.p2_puzzle_hash,chia.wallet.puzzles.prefarm.spend_prefarm,chia.wallet.puzzles.puzzle_utils,chia.wallet.puzzles.rom_bootstrap_generator,chia.wallet.puzzles.singleton_top_layer,chia.wallet.puzzles.tails,chia.wallet.rl_wallet.rl_wallet,chia.wallet.rl_wallet.rl_wallet_puzzles,chia.wallet.secret_key_store,chia.wallet.settings.user_settings,chia.wallet.trade_manager,chia.wallet.trade_record,chia.wallet.trading.offer,chia.wallet.trading.trade_store,chia.wallet.transaction_record,chia.wallet.util.debug_spend_bundle,chia.wallet.util.new_peak_queue,chia.wallet.util.peer_request_cache,chia.wallet.util.wallet_sync_utils,chia.wallet.wallet,chia.wallet.wallet_action_store,chia.wallet.wallet_blockchain,chia.wallet.wallet_coin_store,chia.wallet.wallet_interested_store,chia.wallet.wallet_node,chia.wallet.wallet_node_api,chia.wallet.wallet_pool_store,chia.wallet.wallet_puzzle_store,chia.wallet.wallet_state_manager,chia.wallet.wallet_sync_store,chia.wallet.wallet_transaction_store,chia.wallet.wallet_user_store,chia.wallet.wallet_weight_proof_handler,
installhelper,tests.blockchain.blockchain_test_utils,tests.blockchain.test_blockchain,tests.blockchain.test_blockchain_transactions,tests.block_tools,tests.build-init-files,tests.build-workflows,tests.clvm.coin_store,tests.clvm.test_chialisp_deserialization,tests.clvm.test_clvm_compilation,tests.clvm.test_program,tests.clvm.test_puzzle_compression,tests.clvm.test_puzzles,tests.clvm.test_serialized_program,tests.clvm.test_singletons,tests.clvm.test_spend_sim,tests.conftest,tests.connection_utils,tests.core.cmds.test_keys,tests.core.consensus.test_pot_iterations,tests.core.custom_types.test_coin,tests.core.custom_types.test_proof_of_space,tests.core.custom_types.test_spend_bundle,tests.core.daemon.test_daemon,tests.core.full_node.full_sync.test_full_sync,tests.core.full_node.stores.test_block_store,tests.core.full_node.stores.test_coin_store,tests.core.full_node.stores.test_full_node_store,tests.core.full_node.stores.test_hint_store,tests.core.full_node.stores.test_sync_store,tests.core.full_node.test_address_manager,tests.core.full_node.test_block_height_map,tests.core.full_node.test_conditions,tests.core.full_node.test_full_node,tests.core.full_node.test_mempool,tests.core.full_node.test_mempool_performance,tests.core.full_node.test_node_load,tests.core.full_node.test_peer_store_resolver,tests.core.full_node.test_performance,tests.core.full_node.test_transactions,tests.core.make_block_generator,tests.core.node_height,tests.core.server.test_dos,tests.core.server.test_rate_limits,tests.core.ssl.test_ssl,tests.core.test_cost_calculation,tests.core.test_crawler_rpc,tests.core.test_daemon_rpc,tests.core.test_db_conversion,tests.core.test_farmer_harvester_rpc,tests.core.test_filter,tests.core.test_full_node_rpc,tests.core.test_merkle_set,tests.core.test_setproctitle,tests.core.util.test_cached_bls,tests.core.util.test_config,tests.core.util.test_file_keyring_synchronization,tests.core.util.test_files,tests.core.util.test_keychain,tests.core.util.test_keyring_wrapper,tests
.core.util.test_lru_cache,tests.core.util.test_significant_bits,tests.core.util.test_streamable,tests.farmer_harvester.test_farmer_harvester,tests.generator.test_compression,tests.generator.test_generator_types,tests.generator.test_list_to_batches,tests.generator.test_rom,tests.generator.test_scan,tests.plotting.test_plot_manager,tests.pools.test_pool_cmdline,tests.pools.test_pool_config,tests.pools.test_pool_puzzles_lifecycle,tests.pools.test_pool_rpc,tests.pools.test_wallet_pool_store,tests.setup_nodes,tests.setup_services,tests.simulation.test_simulation,tests.time_out_assert,tests.tools.test_full_sync,tests.tools.test_run_block,tests.util.alert_server,tests.util.benchmark_cost,tests.util.blockchain,tests.util.build_network_protocol_files,tests.util.db_connection,tests.util.generator_tools_testing,tests.util.keyring,tests.util.key_tool,tests.util.misc,tests.util.network,tests.util.rpc,tests.util.test_full_block_utils,tests.util.test_lock_queue,tests.util.test_network_protocol_files,tests.util.test_struct_stream,tests.wallet.cat_wallet.test_cat_lifecycle,tests.wallet.cat_wallet.test_cat_wallet,tests.wallet.cat_wallet.test_offer_lifecycle,tests.wallet.cat_wallet.test_trades,tests.wallet.did_wallet.test_did,tests.wallet.did_wallet.test_did_rpc,tests.wallet.rl_wallet.test_rl_rpc,tests.wallet.rl_wallet.test_rl_wallet,tests.wallet.rpc.test_wallet_rpc,tests.wallet.simple_sync.test_simple_sync_protocol,tests.wallet.sync.test_wallet_sync,tests.wallet.test_bech32m,tests.wallet.test_chialisp,tests.wallet.test_puzzle_store,tests.wallet.test_singleton,tests.wallet.test_singleton_lifecycle,tests.wallet.test_singleton_lifecycle_fast,tests.wallet.test_taproot,tests.wallet.test_wallet,tests.wallet.test_wallet_blockchain,tests.wallet.test_wallet_interested_store,tests.wallet.test_wallet_key_val_store,tests.wallet.test_wallet_user_store,tests.wallet_tools,tests.weight_proof.test_weight_proof,tools.analyze-chain,tools.run_block,tools.test_full_sync] 
+[mypy-benchmarks.block_ref,benchmarks.block_store,benchmarks.coin_store,benchmarks.utils,build_scripts.installer-version,chia.clvm.spend_sim,chia.cmds.configure,chia.cmds.db,chia.cmds.db_upgrade_func,chia.cmds.farm_funcs,chia.cmds.init,chia.cmds.init_funcs,chia.cmds.keys,chia.cmds.keys_funcs,chia.cmds.passphrase,chia.cmds.passphrase_funcs,chia.cmds.plotnft,chia.cmds.plotnft_funcs,chia.cmds.plots,chia.cmds.plotters,chia.cmds.show,chia.cmds.start_funcs,chia.cmds.wallet,chia.cmds.wallet_funcs,chia.consensus.block_body_validation,chia.consensus.blockchain,chia.consensus.blockchain_interface,chia.consensus.block_creation,chia.consensus.block_header_validation,chia.consensus.block_record,chia.consensus.block_root_validation,chia.consensus.coinbase,chia.consensus.constants,chia.consensus.difficulty_adjustment,chia.consensus.get_block_challenge,chia.consensus.multiprocess_validation,chia.consensus.pos_quality,chia.consensus.vdf_info_computation,chia.daemon.client,chia.daemon.keychain_proxy,chia.daemon.keychain_server,chia.daemon.server,chia.farmer.farmer,chia.farmer.farmer_api,chia.full_node.block_height_map,chia.full_node.block_store,chia.full_node.bundle_tools,chia.full_node.coin_store,chia.full_node.full_node,chia.full_node.full_node_api,chia.full_node.full_node_store,chia.full_node.generator,chia.full_node.hint_store,chia.full_node.lock_queue,chia.full_node.mempool,chia.full_node.mempool_check_conditions,chia.full_node.mempool_manager,chia.full_node.pending_tx_cache,chia.full_node.sync_store,chia.full_node.weight_proof,chia.harvester.harvester,chia.harvester.harvester_api,chia.introducer.introducer,chia.introducer.introducer_api,chia.plotters.bladebit,chia.plotters.chiapos,chia.plotters.install_plotter,chia.plotters.madmax,chia.plotters.plotters,chia.plotters.plotters_util,chia.plotting.check_plots,chia.plotting.create_plots,chia.plotting.manager,chia.plotting.util,chia.pools.pool_config,chia.pools.pool_puzzles,chia.pools.pool_wallet,chia.pools.pool_wallet_info,chia.pr
otocols.pool_protocol,chia.rpc.crawler_rpc_api,chia.rpc.farmer_rpc_api,chia.rpc.farmer_rpc_client,chia.rpc.full_node_rpc_api,chia.rpc.full_node_rpc_client,chia.rpc.harvester_rpc_api,chia.rpc.harvester_rpc_client,chia.rpc.rpc_client,chia.rpc.rpc_server,chia.rpc.timelord_rpc_api,chia.rpc.util,chia.rpc.wallet_rpc_api,chia.rpc.wallet_rpc_client,chia.seeder.crawler,chia.seeder.crawler_api,chia.seeder.crawl_store,chia.seeder.dns_server,chia.seeder.peer_record,chia.seeder.start_crawler,chia.server.address_manager,chia.server.address_manager_store,chia.server.connection_utils,chia.server.introducer_peers,chia.server.node_discovery,chia.server.peer_store_resolver,chia.server.rate_limits,chia.server.reconnect_task,chia.server.server,chia.server.ssl_context,chia.server.start_farmer,chia.server.start_full_node,chia.server.start_harvester,chia.server.start_introducer,chia.server.start_service,chia.server.start_timelord,chia.server.start_wallet,chia.server.upnp,chia.server.ws_connection,chia.simulator.full_node_simulator,chia.simulator.start_simulator,chia.ssl.create_ssl,chia.timelord.iters_from_block,chia.timelord.timelord,chia.timelord.timelord_api,chia.timelord.timelord_launcher,chia.timelord.timelord_state,chia.types.announcement,chia.types.blockchain_format.classgroup,chia.types.blockchain_format.coin,chia.types.blockchain_format.program,chia.types.blockchain_format.proof_of_space,chia.types.blockchain_format.tree_hash,chia.types.blockchain_format.vdf,chia.types.full_block,chia.types.header_block,chia.types.mempool_item,chia.types.name_puzzle_condition,chia.types.peer_info,chia.types.spend_bundle,chia.types.transaction_queue_entry,chia.types.unfinished_block,chia.types.unfinished_header_block,chia.util.api_decorators,chia.util.block_cache,chia.util.byte_types,chia.util.cached_bls,chia.util.check_fork_next_block,chia.util.chia_logging,chia.util.config,chia.util.db_wrapper,chia.util.dump_keyring,chia.util.file_keyring,chia.util.files,chia.util.hash,chia.util.ints,chia.util.jso
n_util,chia.util.keychain,chia.util.keyring_wrapper,chia.util.log_exceptions,chia.util.lru_cache,chia.util.make_test_constants,chia.util.merkle_set,chia.util.network,chia.util.partial_func,chia.util.pip_import,chia.util.profiler,chia.util.safe_cancel_task,chia.util.service_groups,chia.util.ssl_check,chia.util.struct_stream,chia.util.validate_alert,chia.wallet.block_record,chia.wallet.cat_wallet.cat_utils,chia.wallet.cat_wallet.cat_wallet,chia.wallet.cat_wallet.lineage_store,chia.wallet.chialisp,chia.wallet.did_wallet.did_wallet,chia.wallet.did_wallet.did_wallet_puzzles,chia.wallet.key_val_store,chia.wallet.lineage_proof,chia.wallet.payment,chia.wallet.puzzles.load_clvm,chia.wallet.puzzles.p2_conditions,chia.wallet.puzzles.p2_delegated_conditions,chia.wallet.puzzles.p2_delegated_puzzle,chia.wallet.puzzles.p2_delegated_puzzle_or_hidden_puzzle,chia.wallet.puzzles.p2_m_of_n_delegate_direct,chia.wallet.puzzles.p2_puzzle_hash,chia.wallet.puzzles.prefarm.spend_prefarm,chia.wallet.puzzles.puzzle_utils,chia.wallet.puzzles.rom_bootstrap_generator,chia.wallet.puzzles.singleton_top_layer,chia.wallet.puzzles.tails,chia.wallet.rl_wallet.rl_wallet,chia.wallet.rl_wallet.rl_wallet_puzzles,chia.wallet.secret_key_store,chia.wallet.settings.user_settings,chia.wallet.trade_manager,chia.wallet.trade_record,chia.wallet.trading.offer,chia.wallet.trading.trade_store,chia.wallet.transaction_record,chia.wallet.util.debug_spend_bundle,chia.wallet.util.new_peak_queue,chia.wallet.util.peer_request_cache,chia.wallet.util.wallet_sync_utils,chia.wallet.wallet,chia.wallet.wallet_action_store,chia.wallet.wallet_blockchain,chia.wallet.wallet_coin_store,chia.wallet.wallet_interested_store,chia.wallet.wallet_node,chia.wallet.wallet_node_api,chia.wallet.wallet_pool_store,chia.wallet.wallet_puzzle_store,chia.wallet.wallet_state_manager,chia.wallet.wallet_sync_store,chia.wallet.wallet_transaction_store,chia.wallet.wallet_user_store,chia.wallet.wallet_weight_proof_handler,installhelper,tests.blockchain.bloc
kchain_test_utils,tests.blockchain.test_blockchain,tests.blockchain.test_blockchain_transactions,tests.block_tools,tests.build-init-files,tests.build-workflows,tests.clvm.coin_store,tests.clvm.test_chialisp_deserialization,tests.clvm.test_clvm_compilation,tests.clvm.test_program,tests.clvm.test_puzzle_compression,tests.clvm.test_puzzles,tests.clvm.test_serialized_program,tests.clvm.test_singletons,tests.clvm.test_spend_sim,tests.conftest,tests.connection_utils,tests.core.cmds.test_keys,tests.core.consensus.test_pot_iterations,tests.core.custom_types.test_coin,tests.core.custom_types.test_proof_of_space,tests.core.custom_types.test_spend_bundle,tests.core.daemon.test_daemon,tests.core.full_node.full_sync.test_full_sync,tests.core.full_node.stores.test_block_store,tests.core.full_node.stores.test_coin_store,tests.core.full_node.stores.test_full_node_store,tests.core.full_node.stores.test_hint_store,tests.core.full_node.stores.test_sync_store,tests.core.full_node.test_address_manager,tests.core.full_node.test_block_height_map,tests.core.full_node.test_conditions,tests.core.full_node.test_full_node,tests.core.full_node.test_mempool,tests.core.full_node.test_mempool_performance,tests.core.full_node.test_node_load,tests.core.full_node.test_peer_store_resolver,tests.core.full_node.test_performance,tests.core.full_node.test_transactions,tests.core.make_block_generator,tests.core.node_height,tests.core.server.test_dos,tests.core.server.test_rate_limits,tests.core.ssl.test_ssl,tests.core.test_cost_calculation,tests.core.test_crawler_rpc,tests.core.test_daemon_rpc,tests.core.test_db_conversion,tests.core.test_farmer_harvester_rpc,tests.core.test_filter,tests.core.test_full_node_rpc,tests.core.test_merkle_set,tests.core.test_setproctitle,tests.core.util.test_cached_bls,tests.core.util.test_config,tests.core.util.test_file_keyring_synchronization,tests.core.util.test_files,tests.core.util.test_keychain,tests.core.util.test_keyring_wrapper,tests.core.util.test_lru_cache,tests.cor
e.util.test_significant_bits,tests.farmer_harvester.test_farmer_harvester,tests.generator.test_compression,tests.generator.test_generator_types,tests.generator.test_list_to_batches,tests.generator.test_rom,tests.generator.test_scan,tests.plotting.test_plot_manager,tests.pools.test_pool_cmdline,tests.pools.test_pool_config,tests.pools.test_pool_puzzles_lifecycle,tests.pools.test_pool_rpc,tests.pools.test_wallet_pool_store,tests.setup_nodes,tests.setup_services,tests.simulation.test_simulation,tests.time_out_assert,tests.tools.test_full_sync,tests.tools.test_run_block,tests.util.alert_server,tests.util.benchmark_cost,tests.util.blockchain,tests.util.build_network_protocol_files,tests.util.db_connection,tests.util.generator_tools_testing,tests.util.keyring,tests.util.key_tool,tests.util.misc,tests.util.network,tests.util.rpc,tests.util.test_full_block_utils,tests.util.test_lock_queue,tests.util.test_network_protocol_files,tests.util.test_struct_stream,tests.wallet.cat_wallet.test_cat_lifecycle,tests.wallet.cat_wallet.test_cat_wallet,tests.wallet.cat_wallet.test_offer_lifecycle,tests.wallet.cat_wallet.test_trades,tests.wallet.did_wallet.test_did,tests.wallet.did_wallet.test_did_rpc,tests.wallet.rl_wallet.test_rl_rpc,tests.wallet.rl_wallet.test_rl_wallet,tests.wallet.rpc.test_wallet_rpc,tests.wallet.simple_sync.test_simple_sync_protocol,tests.wallet.sync.test_wallet_sync,tests.wallet.test_bech32m,tests.wallet.test_chialisp,tests.wallet.test_puzzle_store,tests.wallet.test_singleton,tests.wallet.test_singleton_lifecycle,tests.wallet.test_singleton_lifecycle_fast,tests.wallet.test_taproot,tests.wallet.test_wallet,tests.wallet.test_wallet_blockchain,tests.wallet.test_wallet_interested_store,tests.wallet.test_wallet_key_val_store,tests.wallet.test_wallet_user_store,tests.wallet_tools,tests.weight_proof.test_weight_proof,tools.analyze-chain,tools.run_block,tools.test_full_sync] disallow_any_generics = False disallow_subclassing_any = False disallow_untyped_calls = False diff 
--git a/tests/core/util/test_streamable.py b/tests/core/util/test_streamable.py index 65b3255212ee..28b0a3ba6644 100644 --- a/tests/core/util/test_streamable.py +++ b/tests/core/util/test_streamable.py @@ -1,10 +1,13 @@ +from __future__ import annotations + +import io from dataclasses import dataclass from typing import Dict, List, Optional, Tuple -import io -import pytest +import pytest from clvm_tools import binutils from pytest import raises +from typing_extensions import Literal from chia.protocols.wallet_protocol import RespondRemovals from chia.types.blockchain_format.coin import Coin @@ -16,19 +19,20 @@ from chia.util.streamable import ( DefinitionError, Streamable, - streamable, + is_type_List, + is_type_SpecificOptional, parse_bool, - parse_uint32, - write_uint32, - parse_optional, parse_bytes, parse_list, - parse_tuple, + parse_optional, parse_size_hints, parse_str, - is_type_List, - is_type_SpecificOptional, + parse_tuple, + parse_uint32, + streamable, + write_uint32, ) +from tests.block_tools import BlockTools from tests.setup_nodes import test_constants @@ -59,22 +63,26 @@ class TestClassDict(Streamable): a: Dict[str, str] +@dataclass(frozen=True) +class DataclassOnly: + a: uint8 + + def test_pure_dataclass_not_supported() -> None: - @dataclass(frozen=True) - class DataClassOnly: - a: uint8 with raises(NotImplementedError): @streamable @dataclass(frozen=True) class TestClassDataclass(Streamable): - a: DataClassOnly + a: DataclassOnly + + +class PlainClass: + a: uint8 def test_plain_class_not_supported() -> None: - class PlainClass: - a: uint8 with raises(NotImplementedError): @@ -84,74 +92,81 @@ class TestClassPlain(Streamable): a: PlainClass -def test_basic_list(): +def test_basic_list() -> None: a = [1, 2, 3] assert is_type_List(type(a)) assert is_type_List(List) assert is_type_List(List[int]) assert is_type_List(List[uint8]) assert is_type_List(list) - assert not is_type_List(Tuple) + assert not is_type_List(type(Tuple)) assert not 
is_type_List(tuple) assert not is_type_List(dict) -def test_not_lists(): +def test_not_lists() -> None: assert not is_type_List(Dict) -def test_basic_optional(): +def test_basic_optional() -> None: assert is_type_SpecificOptional(Optional[int]) assert is_type_SpecificOptional(Optional[Optional[int]]) assert not is_type_SpecificOptional(List[int]) -def test_StrictDataClass(): +def test_StrictDataClass() -> None: @streamable @dataclass(frozen=True) class TestClass1(Streamable): a: uint8 b: str - good: TestClass1 = TestClass1(24, "!@12") + # we want to test invalid here, hence the ignore. + good: TestClass1 = TestClass1(24, "!@12") # type: ignore[arg-type] assert TestClass1.__name__ == "TestClass1" assert good assert good.a == 24 assert good.b == "!@12" - good2 = TestClass1(52, bytes([1, 2, 3])) + # we want to test invalid here, hence the ignore. + good2 = TestClass1(52, bytes([1, 2, 3])) # type: ignore[arg-type] assert good2.b == str(bytes([1, 2, 3])) -def test_StrictDataClassBad(): +def test_StrictDataClassBad() -> None: @streamable @dataclass(frozen=True) class TestClass2(Streamable): a: uint8 b = 0 - assert TestClass2(25) + # we want to test invalid here, hence the ignore. + assert TestClass2(25) # type: ignore[arg-type] + # we want to test invalid here, hence the ignore. with raises(TypeError): - TestClass2(1, 2) # pylint: disable=too-many-function-args + TestClass2(1, 2) # type: ignore[call-arg,arg-type] # pylint: disable=too-many-function-args -def test_StrictDataClassLists(): +def test_StrictDataClassLists() -> None: @streamable @dataclass(frozen=True) class TestClass(Streamable): a: List[uint8] b: List[List[uint8]] - assert TestClass([1, 2, 3], [[uint8(200), uint8(25)], [uint8(25)]]) + # we want to test invalid here, hence the ignore. + assert TestClass([1, 2, 3], [[uint8(200), uint8(25)], [uint8(25)]]) # type: ignore[list-item] + # we want to test invalid here, hence the ignore. 
with raises(ValueError): - TestClass({"1": 1}, [[uint8(200), uint8(25)], [uint8(25)]]) + TestClass({"1": 1}, [[uint8(200), uint8(25)], [uint8(25)]]) # type: ignore[arg-type] + # we want to test invalid here, hence the ignore. with raises(ValueError): - TestClass([1, 2, 3], [uint8(200), uint8(25)]) + TestClass([1, 2, 3], [uint8(200), uint8(25)]) # type: ignore[list-item] -def test_StrictDataClassOptional(): +def test_StrictDataClassOptional() -> None: @streamable @dataclass(frozen=True) class TestClass(Streamable): @@ -160,11 +175,12 @@ class TestClass(Streamable): c: Optional[Optional[uint8]] d: Optional[Optional[uint8]] - good = TestClass(12, None, 13, None) + # we want to test invalid here, hence the ignore. + good = TestClass(12, None, 13, None) # type: ignore[arg-type] assert good -def test_basic(): +def test_basic() -> None: @streamable @dataclass(frozen=True) class TestClass(Streamable): @@ -176,13 +192,14 @@ class TestClass(Streamable): f: Optional[uint32] g: Tuple[uint32, str, bytes] - a = TestClass(24, 352, [1, 2, 4], [[1, 2, 3], [3, 4]], 728, None, (383, "hello", b"goodbye")) + # we want to test invalid here, hence the ignore. 
+ a = TestClass(24, 352, [1, 2, 4], [[1, 2, 3], [3, 4]], 728, None, (383, "hello", b"goodbye")) # type: ignore[arg-type,list-item] # noqa: E501 b: bytes = bytes(a) assert a == TestClass.from_bytes(b) -def test_variable_size(): +def test_variable_size() -> None: @streamable @dataclass(frozen=True) class TestClass2(Streamable): @@ -201,7 +218,7 @@ class TestClass3(Streamable): a: int -def test_json(bt): +def test_json(bt: BlockTools) -> None: block = bt.create_genesis_block(test_constants, bytes32([0] * 32), uint64(0)) dict_block = block.to_json_dict() assert FullBlock.from_json_dict(dict_block) == block @@ -226,42 +243,44 @@ class OptionalTestClass(Streamable): (None, None, None), ], ) -def test_optional_json(a: Optional[str], b: Optional[bool], c: Optional[List[Optional[str]]]): +def test_optional_json(a: Optional[str], b: Optional[bool], c: Optional[List[Optional[str]]]) -> None: obj: OptionalTestClass = OptionalTestClass.from_json_dict({"a": a, "b": b, "c": c}) assert obj.a == a assert obj.b == b assert obj.c == c -def test_recursive_json(): - @streamable - @dataclass(frozen=True) - class TestClass1(Streamable): - a: List[uint32] +@streamable +@dataclass(frozen=True) +class TestClassRecursive1(Streamable): + a: List[uint32] + + +@streamable +@dataclass(frozen=True) +class TestClassRecursive2(Streamable): + a: uint32 + b: List[Optional[List[TestClassRecursive1]]] + c: bytes32 - @streamable - @dataclass(frozen=True) - class TestClass2(Streamable): - a: uint32 - b: List[Optional[List[TestClass1]]] - c: bytes32 - tc1_a = TestClass1([uint32(1), uint32(2)]) - tc1_b = TestClass1([uint32(4), uint32(5)]) - tc1_c = TestClass1([uint32(7), uint32(8)]) +def test_recursive_json() -> None: + tc1_a = TestClassRecursive1([uint32(1), uint32(2)]) + tc1_b = TestClassRecursive1([uint32(4), uint32(5)]) + tc1_c = TestClassRecursive1([uint32(7), uint32(8)]) - tc2 = TestClass2(uint32(5), [[tc1_a], [tc1_b, tc1_c], None], bytes32(bytes([1] * 32))) - assert 
TestClass2.from_json_dict(tc2.to_json_dict()) == tc2 + tc2 = TestClassRecursive2(uint32(5), [[tc1_a], [tc1_b, tc1_c], None], bytes32(bytes([1] * 32))) + assert TestClassRecursive2.from_json_dict(tc2.to_json_dict()) == tc2 -def test_recursive_types(): +def test_recursive_types() -> None: coin: Optional[Coin] = None l1 = [(bytes32([2] * 32), coin)] rr = RespondRemovals(uint32(1), bytes32([1] * 32), l1, None) RespondRemovals(rr.height, rr.header_hash, rr.coins, rr.proofs) -def test_ambiguous_deserialization_optionals(): +def test_ambiguous_deserialization_optionals() -> None: with raises(AssertionError): SubEpochChallengeSegment.from_bytes(b"\x00\x00\x00\x03\xff\xff\xff\xff") @@ -278,7 +297,7 @@ class TestClassOptional(Streamable): TestClassOptional.from_bytes(bytes([1, 2])) -def test_ambiguous_deserialization_int(): +def test_ambiguous_deserialization_int() -> None: @streamable @dataclass(frozen=True) class TestClassUint(Streamable): @@ -289,7 +308,7 @@ class TestClassUint(Streamable): TestClassUint.from_bytes(b"\x00\x00") -def test_ambiguous_deserialization_list(): +def test_ambiguous_deserialization_list() -> None: @streamable @dataclass(frozen=True) class TestClassList(Streamable): @@ -300,7 +319,7 @@ class TestClassList(Streamable): TestClassList.from_bytes(bytes([0, 0, 100, 24])) -def test_ambiguous_deserialization_tuple(): +def test_ambiguous_deserialization_tuple() -> None: @streamable @dataclass(frozen=True) class TestClassTuple(Streamable): @@ -311,7 +330,7 @@ class TestClassTuple(Streamable): TestClassTuple.from_bytes(bytes([0, 0, 100, 24])) -def test_ambiguous_deserialization_str(): +def test_ambiguous_deserialization_str() -> None: @streamable @dataclass(frozen=True) class TestClassStr(Streamable): @@ -322,7 +341,7 @@ class TestClassStr(Streamable): TestClassStr.from_bytes(bytes([0, 0, 100, 24, 52])) -def test_ambiguous_deserialization_bytes(): +def test_ambiguous_deserialization_bytes() -> None: @streamable @dataclass(frozen=True) class 
TestClassBytes(Streamable): @@ -339,7 +358,7 @@ class TestClassBytes(Streamable): TestClassBytes.from_bytes(bytes([0, 0, 0, 2, 52, 21])) -def test_ambiguous_deserialization_bool(): +def test_ambiguous_deserialization_bool() -> None: @streamable @dataclass(frozen=True) class TestClassBool(Streamable): @@ -353,13 +372,13 @@ class TestClassBool(Streamable): TestClassBool.from_bytes(bytes([1])) -def test_ambiguous_deserialization_program(): +def test_ambiguous_deserialization_program() -> None: @streamable @dataclass(frozen=True) class TestClassProgram(Streamable): a: Program - program = Program.to(binutils.assemble("()")) + program = Program.to(binutils.assemble("()")) # type: ignore[no-untyped-call] # TODO, add typing in clvm_tools TestClassProgram.from_bytes(bytes(program)) @@ -367,7 +386,7 @@ class TestClassProgram(Streamable): TestClassProgram.from_bytes(bytes(program) + b"9") -def test_streamable_empty(): +def test_streamable_empty() -> None: @streamable @dataclass(frozen=True) class A(Streamable): @@ -376,7 +395,7 @@ class A(Streamable): assert A.from_bytes(bytes(A())) == A() -def test_parse_bool(): +def test_parse_bool() -> None: assert not parse_bool(io.BytesIO(b"\x00")) assert parse_bool(io.BytesIO(b"\x01")) @@ -391,7 +410,7 @@ def test_parse_bool(): parse_bool(io.BytesIO(b"\x02")) -def test_uint32(): +def test_uint32() -> None: assert parse_uint32(io.BytesIO(b"\x00\x00\x00\x00")) == 0 assert parse_uint32(io.BytesIO(b"\x00\x00\x00\x01")) == 1 assert parse_uint32(io.BytesIO(b"\x00\x00\x00\x01"), "little") == 16777216 @@ -399,7 +418,7 @@ def test_uint32(): assert parse_uint32(io.BytesIO(b"\x01\x00\x00\x00"), "little") == 1 assert parse_uint32(io.BytesIO(b"\xff\xff\xff\xff"), "little") == 4294967295 - def test_write(value, byteorder): + def test_write(value: int, byteorder: Literal["little", "big"]) -> None: f = io.BytesIO() write_uint32(f, uint32(value), byteorder) f.seek(0) @@ -420,7 +439,7 @@ def test_write(value, byteorder): 
parse_uint32(io.BytesIO(b"\x00\x00\x00")) -def test_parse_optional(): +def test_parse_optional() -> None: assert parse_optional(io.BytesIO(b"\x00"), parse_bool) is None assert parse_optional(io.BytesIO(b"\x01\x01"), parse_bool) assert not parse_optional(io.BytesIO(b"\x01\x00"), parse_bool) @@ -437,7 +456,7 @@ def test_parse_optional(): parse_optional(io.BytesIO(b"\xff\x00"), parse_bool) -def test_parse_bytes(): +def test_parse_bytes() -> None: assert parse_bytes(io.BytesIO(b"\x00\x00\x00\x00")) == b"" assert parse_bytes(io.BytesIO(b"\x00\x00\x00\x01\xff")) == b"\xff" @@ -463,7 +482,7 @@ def test_parse_bytes(): parse_bytes(io.BytesIO(b"\x00\x00\x02\x01" + b"a" * 512)) -def test_parse_list(): +def test_parse_list() -> None: assert parse_list(io.BytesIO(b"\x00\x00\x00\x00"), parse_bool) == [] assert parse_list(io.BytesIO(b"\x00\x00\x00\x01\x01"), parse_bool) == [True] @@ -484,7 +503,7 @@ def test_parse_list(): parse_list(io.BytesIO(b"\x00\x00\x00\x01\x02"), parse_bool) -def test_parse_tuple(): +def test_parse_tuple() -> None: assert parse_tuple(io.BytesIO(b""), []) == () assert parse_tuple(io.BytesIO(b"\x00\x00"), [parse_bool, parse_bool]) == (False, False) @@ -499,33 +518,35 @@ def test_parse_tuple(): parse_tuple(io.BytesIO(b"\x00"), [parse_bool, parse_bool]) -def test_parse_size_hints(): - class TestFromBytes: - b: bytes +class TestFromBytes: + b: bytes - @classmethod - def from_bytes(cls, b): - ret = TestFromBytes() - ret.b = b - return ret + @classmethod + def from_bytes(cls, b: bytes) -> TestFromBytes: + ret = TestFromBytes() + ret.b = b + return ret + +class FailFromBytes: + @classmethod + def from_bytes(cls, b: bytes) -> FailFromBytes: + raise ValueError() + + +def test_parse_size_hints() -> None: assert parse_size_hints(io.BytesIO(b"1337"), TestFromBytes, 4).b == b"1337" # EOF with raises(AssertionError): parse_size_hints(io.BytesIO(b"133"), TestFromBytes, 4) - class FailFromBytes: - @classmethod - def from_bytes(cls, b): - raise ValueError() - # error in 
underlying type with raises(ValueError): parse_size_hints(io.BytesIO(b"1337"), FailFromBytes, 4) -def test_parse_str(): +def test_parse_str() -> None: assert parse_str(io.BytesIO(b"\x00\x00\x00\x00")) == "" assert parse_str(io.BytesIO(b"\x00\x00\x00\x01a")) == "a" @@ -551,7 +572,7 @@ def test_parse_str(): parse_str(io.BytesIO(b"\x00\x00\x02\x01" + b"a" * 512)) -def test_wrong_decorator_order(): +def test_wrong_decorator_order() -> None: with raises(DefinitionError): @@ -561,7 +582,7 @@ class WrongDecoratorOrder(Streamable): pass -def test_dataclass_not_frozen(): +def test_dataclass_not_frozen() -> None: with raises(DefinitionError): @@ -571,7 +592,7 @@ class DataclassNotFrozen(Streamable): pass -def test_dataclass_missing(): +def test_dataclass_missing() -> None: with raises(DefinitionError): @@ -580,11 +601,11 @@ class DataclassMissing(Streamable): pass -def test_streamable_inheritance_missing(): +def test_streamable_inheritance_missing() -> None: with raises(DefinitionError): - + # we want to test invalid here, hence the ignore. 
@streamable @dataclass(frozen=True) - class StreamableInheritanceMissing: + class StreamableInheritanceMissing: # type: ignore[type-var] pass From 8f7f4392023061b085195fe41895f1115d1b7dfd Mon Sep 17 00:00:00 2001 From: dustinface <35775977+xdustinface@users.noreply.github.com> Date: Fri, 22 Apr 2022 01:58:35 +0200 Subject: [PATCH 41/55] harvester: Tweak `get_plots` RPC (#11246) --- chia/harvester/harvester.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/chia/harvester/harvester.py b/chia/harvester/harvester.py index fab6a77da9ca..5c7b2ecbc785 100644 --- a/chia/harvester/harvester.py +++ b/chia/harvester/harvester.py @@ -118,13 +118,12 @@ def get_plots(self) -> Tuple[List[Dict], List[str], List[str]]: { "filename": str(path), "size": prover.get_size(), - "plot-seed": prover.get_id(), # Deprecated "plot_id": prover.get_id(), "pool_public_key": plot_info.pool_public_key, "pool_contract_puzzle_hash": plot_info.pool_contract_puzzle_hash, "plot_public_key": plot_info.plot_public_key, "file_size": plot_info.file_size, - "time_modified": plot_info.time_modified, + "time_modified": int(plot_info.time_modified), } ) self.log.debug( From 526eef4b5c1b59de7e6ee11efaae8d7adb2d9609 Mon Sep 17 00:00:00 2001 From: wjblanke Date: Fri, 6 May 2022 14:13:02 -0700 Subject: [PATCH 42/55] updated gui to 748f99f285cbfd0ccbdadd97adab34b2a4046889 (#11461) --- chia-blockchain-gui | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/chia-blockchain-gui b/chia-blockchain-gui index a6724910a82e..748f99f285cb 160000 --- a/chia-blockchain-gui +++ b/chia-blockchain-gui @@ -1 +1 @@ -Subproject commit a6724910a82e1e719784a7a0885da13b30533c66 +Subproject commit 748f99f285cbfd0ccbdadd97adab34b2a4046889 From 4d29bbcecf66fa7e54711c7d436cca2a46df95d8 Mon Sep 17 00:00:00 2001 From: wallentx Date: Fri, 6 May 2022 16:15:26 -0500 Subject: [PATCH 43/55] updating gui for 1.3.5 dogfood build --- chia-blockchain-gui | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff 
--git a/chia-blockchain-gui b/chia-blockchain-gui index 748f99f285cb..a6724910a82e 160000 --- a/chia-blockchain-gui +++ b/chia-blockchain-gui @@ -1 +1 @@ -Subproject commit 748f99f285cbfd0ccbdadd97adab34b2a4046889 +Subproject commit a6724910a82e1e719784a7a0885da13b30533c66 From b396c3e76db044466d4bc4f9e1fb22f2d139fb36 Mon Sep 17 00:00:00 2001 From: wjblanke Date: Mon, 9 May 2022 12:26:45 -0700 Subject: [PATCH 44/55] updated gui to 32245d869c70eedbb2a5fe77618b107dda70a647 (#11471) --- chia-blockchain-gui | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/chia-blockchain-gui b/chia-blockchain-gui index a6724910a82e..32245d869c70 160000 --- a/chia-blockchain-gui +++ b/chia-blockchain-gui @@ -1 +1 @@ -Subproject commit a6724910a82e1e719784a7a0885da13b30533c66 +Subproject commit 32245d869c70eedbb2a5fe77618b107dda70a647 From d77d2d017b57cd624425f19dd2be4057ecbcdee5 Mon Sep 17 00:00:00 2001 From: wallentx Date: Wed, 11 May 2022 10:16:53 -0500 Subject: [PATCH 45/55] Updating CHANGELOG.md with 1.3.5 --- CHANGELOG.md | 23 +++++++++++++++++++++-- 1 file changed, 21 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index e8a9a7844f94..7f8636bf2563 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -8,9 +8,28 @@ for setuptools_scm/PEP 440 reasons. ## [Unreleased] -## 1.3.4 Chia blockchain 2022-4-19 +## 1.3.5 Chia blockchain 2022-5-11 + +### Added + +Added Support for Python 3.10 +Added mempool rules making the op_div mempool rule stricter to match the forthcoming soft-fork +Fixed cat lineage store when creating new cat wallets +Added a retry interval for pool info updates +Added the ability to sort transactions via the CLI +Performance improvements in harvesters during plot refresh. 
Large farmers likely no longer need to specify a very high plot refresh interval in config.yaml +Added CLI only rpm and deb packages to official release channels +Fix for issue 11036 - Farming rewards dialog incorrectly claims there is no private key for address +Fixed an issue where some coins would be missing after a full sync +Updated the warning users receive when CHIA_ROOT is set during chia init +Enabled paginated plot loading and improved plot state reporting +Mozilla CA cert updates from Tue Apr 26 03:12:05 2022 GMT +Updated the farming gui tab to fix several bugs. -## What's Changed +### Known Issues +There is a known issue where harvesters will not reconnect to the farmer automatically unless you restart the harvester. This bug was introduced in 1.3.4 and we plan to patch it in a coming release. + +## 1.3.4 Chia blockchain 2022-4-19 ### Added From 01cea1b4b3bafd9dec920183b0169815e7fee523 Mon Sep 17 00:00:00 2001 From: wallentx Date: Wed, 11 May 2022 10:29:04 -0500 Subject: [PATCH 46/55] Formatting CHANGELOG.md for 1.3.5 --- CHANGELOG.md | 28 +++++++++++++++------------- 1 file changed, 15 insertions(+), 13 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 7f8636bf2563..dce0306182aa 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -8,23 +8,25 @@ for setuptools_scm/PEP 440 reasons. ## [Unreleased] +### What's Changed + ## 1.3.5 Chia blockchain 2022-5-11 ### Added -Added Support for Python 3.10 -Added mempool rules making the op_div mempool rule stricter to match the forthcoming soft-fork -Fixed cat lineage store when creating new cat wallets -Added a retry interval for pool info updates -Added the ability to sort transactions via the CLI -Performance improvements in harvesters during plot refresh. 
Large farmers likely no longer need to specify a very high plot refresh interval in config.yaml -Added CLI only rpm and deb packages to official release channels -Fix for issue 11036 - Farming rewards dialog incorrectly claims there is no private key for address -Fixed an issue where some coins would be missing after a full sync -Updated the warning users receive when CHIA_ROOT is set during chia init -Enabled paginated plot loading and improved plot state reporting -Mozilla CA cert updates from Tue Apr 26 03:12:05 2022 GMT -Updated the farming gui tab to fix several bugs. +- Added Support for Python 3.10 +- Added mempool rules making the op_div mempool rule stricter to match the forthcoming soft-fork +- Fixed CAT lineage store when creating new CAT wallets +- Added a retry interval for pool info updates +- Added the ability to sort transactions via the CLI +- Performance improvements in harvesters during plot refresh. Large farmers likely no longer need to specify a very high plot refresh interval in config.yaml +- Added CLI only `.rpm` and `.deb` packages to official release channels +- Fix for issue 11036 - Farming rewards dialog incorrectly claims there is no private key for address +- Fixed an issue where some coins would be missing after a full sync +- Updated the warning users receive when `CHIA_ROOT` is set during `chia init` +- Enabled paginated plot loading and improved plot state reporting +- Mozilla CA cert updates from Tue Apr 26 03:12:05 2022 GMT +- Updated the farming GUI tab to fix several bugs ### Known Issues There is a known issue where harvesters will not reconnect to the farmer automatically unless you restart the harvester. This bug was introduced in 1.3.4 and we plan to patch it in a coming release. 
From 201603b203d1984732421117d83c8064bf8555d1 Mon Sep 17 00:00:00 2001 From: wallentx Date: Wed, 11 May 2022 10:51:00 -0500 Subject: [PATCH 47/55] Updating CHANGELOG.md with corrections --- CHANGELOG.md | 2 -- 1 file changed, 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index dce0306182aa..871cf2b49a8f 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -16,8 +16,6 @@ for setuptools_scm/PEP 440 reasons. - Added Support for Python 3.10 - Added mempool rules making the op_div mempool rule stricter to match the forthcoming soft-fork -- Fixed CAT lineage store when creating new CAT wallets -- Added a retry interval for pool info updates - Added the ability to sort transactions via the CLI - Performance improvements in harvesters during plot refresh. Large farmers likely no longer need to specify a very high plot refresh interval in config.yaml - Added CLI only `.rpm` and `.deb` packages to official release channels From 0f5a6df4ffcd7b1d5b950b9f40c15b4e6045ee1b Mon Sep 17 00:00:00 2001 From: wallentx Date: Wed, 11 May 2022 11:43:58 -0500 Subject: [PATCH 48/55] Adding correct items to changelog --- CHANGELOG.md | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 871cf2b49a8f..aaf9ee5f9e01 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -15,18 +15,20 @@ for setuptools_scm/PEP 440 reasons. ### Added - Added Support for Python 3.10 -- Added mempool rules making the op_div mempool rule stricter to match the forthcoming soft-fork -- Added the ability to sort transactions via the CLI - Performance improvements in harvesters during plot refresh. 
Large farmers likely no longer need to specify a very high plot refresh interval in config.yaml - Added CLI only `.rpm` and `.deb` packages to official release channels -- Fix for issue 11036 - Farming rewards dialog incorrectly claims there is no private key for address - Fixed an issue where some coins would be missing after a full sync -- Updated the warning users receive when `CHIA_ROOT` is set during `chia init` - Enabled paginated plot loading and improved plot state reporting -- Mozilla CA cert updates from Tue Apr 26 03:12:05 2022 GMT - Updated the farming GUI tab to fix several bugs +- Fix infinite loop with timelord closing +- Simplified install.sh ubuntu version tracking +- Fixed memory leak on the farm page +- Fixed list of plot files "in progress" +- Various farmer rpc improvements +- Improvements to the harvester `get_plots` RPC ### Known Issues + There is a known issue where harvesters will not reconnect to the farmer automatically unless you restart the harvester. This bug was introduced in 1.3.4 and we plan to patch it in a coming release. 
## 1.3.4 Chia blockchain 2022-4-19 From e055a5f11d8b8d4b524d6bddd39a0fdb276088c7 Mon Sep 17 00:00:00 2001 From: Edward Teach Date: Wed, 11 May 2022 17:36:12 -0700 Subject: [PATCH 49/55] added dev branch to workflow dispatch --- .github/workflows/build-linux-arm64-installer.yml | 1 + .github/workflows/build-linux-installer-deb.yml | 1 + .github/workflows/build-linux-installer-rpm.yml | 1 + .github/workflows/build-macos-installer.yml | 1 + .github/workflows/build-windows-installer.yml | 1 + 5 files changed, 5 insertions(+) diff --git a/.github/workflows/build-linux-arm64-installer.yml b/.github/workflows/build-linux-arm64-installer.yml index 8f42a9d879f4..7366a18a14cd 100644 --- a/.github/workflows/build-linux-arm64-installer.yml +++ b/.github/workflows/build-linux-arm64-installer.yml @@ -6,6 +6,7 @@ on: branches: - main - 'release/**' + - dev tags: - '**' pull_request: diff --git a/.github/workflows/build-linux-installer-deb.yml b/.github/workflows/build-linux-installer-deb.yml index bfd57bfdb1bc..40f6c55afcb5 100644 --- a/.github/workflows/build-linux-installer-deb.yml +++ b/.github/workflows/build-linux-installer-deb.yml @@ -5,6 +5,7 @@ on: push: branches: - main + - dev tags: - '**' pull_request: diff --git a/.github/workflows/build-linux-installer-rpm.yml b/.github/workflows/build-linux-installer-rpm.yml index a2e8b84f4c1a..60e3f4124f8e 100644 --- a/.github/workflows/build-linux-installer-rpm.yml +++ b/.github/workflows/build-linux-installer-rpm.yml @@ -5,6 +5,7 @@ on: push: branches: - main + - dev tags: - '**' pull_request: diff --git a/.github/workflows/build-macos-installer.yml b/.github/workflows/build-macos-installer.yml index 695e3bccf549..8003da11c053 100644 --- a/.github/workflows/build-macos-installer.yml +++ b/.github/workflows/build-macos-installer.yml @@ -5,6 +5,7 @@ on: push: branches: - main + - dev tags: - '**' pull_request: diff --git a/.github/workflows/build-windows-installer.yml b/.github/workflows/build-windows-installer.yml index 
abc7735e71f8..b03f901a6b18 100644 --- a/.github/workflows/build-windows-installer.yml +++ b/.github/workflows/build-windows-installer.yml @@ -5,6 +5,7 @@ on: push: branches: - main + - dev tags: - '**' pull_request: From 39d9bb76c232f76156eb7071d49297cb0f6a8b91 Mon Sep 17 00:00:00 2001 From: Edward Teach Date: Wed, 11 May 2022 17:36:18 -0700 Subject: [PATCH 50/55] updated to most recent gui --- chinilla-blockchain-gui | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/chinilla-blockchain-gui b/chinilla-blockchain-gui index 7a85008e8db2..04400b1cb381 160000 --- a/chinilla-blockchain-gui +++ b/chinilla-blockchain-gui @@ -1 +1 @@ -Subproject commit 7a85008e8db2e1faf79bbabc5c2849eb3abdf05b +Subproject commit 04400b1cb381c1e928742a1305f9c6e1446f9293 From da83fa11e75322d039220940db88bba0c083b0c3 Mon Sep 17 00:00:00 2001 From: Edward Teach Date: Wed, 11 May 2022 17:45:44 -0700 Subject: [PATCH 51/55] added to dev --- .github/workflows/pre-commit.yml | 4 +++- .github/workflows/super-linter.yml | 1 + .github/workflows/test-install-scripts.yml | 1 + .github/workflows/upload-pypi-source.yml | 1 + 4 files changed, 6 insertions(+), 1 deletion(-) diff --git a/.github/workflows/pre-commit.yml b/.github/workflows/pre-commit.yml index 7ccc0f79b55d..4d267c39e84f 100644 --- a/.github/workflows/pre-commit.yml +++ b/.github/workflows/pre-commit.yml @@ -3,7 +3,9 @@ name: pre-commit on: pull_request: push: - branches: [main] + branches: + - main + - latest concurrency: # SHA is added to the end if on `main` to let all main workflows run diff --git a/.github/workflows/super-linter.yml b/.github/workflows/super-linter.yml index 2fa6bf3863a8..3366cfec01f2 100644 --- a/.github/workflows/super-linter.yml +++ b/.github/workflows/super-linter.yml @@ -19,6 +19,7 @@ on: push: branches: - main + - dev tags: - '**' pull_request: diff --git a/.github/workflows/test-install-scripts.yml b/.github/workflows/test-install-scripts.yml index 5ad4f1fdc965..69cbdb75f7e6 100644 --- 
a/.github/workflows/test-install-scripts.yml +++ b/.github/workflows/test-install-scripts.yml @@ -4,6 +4,7 @@ on: push: branches: - main + - dev tags: - '**' pull_request: diff --git a/.github/workflows/upload-pypi-source.yml b/.github/workflows/upload-pypi-source.yml index bc764ab2d3bf..eac2ccb3a100 100644 --- a/.github/workflows/upload-pypi-source.yml +++ b/.github/workflows/upload-pypi-source.yml @@ -5,6 +5,7 @@ on: push: branches: - main + - dev tags: - '**' pull_request: From eb89266b79d32b76d9ece7c8a2aefae626c7efda Mon Sep 17 00:00:00 2001 From: Edward Teach Date: Wed, 11 May 2022 19:20:03 -0700 Subject: [PATCH 52/55] bugfix --- .github/workflows/pre-commit.yml | 1 + build_scripts/assets/deb/postinst.sh | 4 ++-- build_scripts/deb-options.json | 4 ++-- tests/block_tools.py | 7 ++++++- 4 files changed, 11 insertions(+), 5 deletions(-) diff --git a/.github/workflows/pre-commit.yml b/.github/workflows/pre-commit.yml index 4d267c39e84f..579167f9cbc4 100644 --- a/.github/workflows/pre-commit.yml +++ b/.github/workflows/pre-commit.yml @@ -6,6 +6,7 @@ on: branches: - main - latest + - dev concurrency: # SHA is added to the end if on `main` to let all main workflows run diff --git a/build_scripts/assets/deb/postinst.sh b/build_scripts/assets/deb/postinst.sh index 01be3411582a..c39e00e6308c 100644 --- a/build_scripts/assets/deb/postinst.sh +++ b/build_scripts/assets/deb/postinst.sh @@ -3,5 +3,5 @@ set -e -ln -s /usr/lib/chia-blockchain/resources/app.asar.unpacked/daemon/chia /usr/bin/chia || true -ln -s /usr/lib/chia-blockchain/resources/app.asar.unpacked/daemon /opt/chia || true +ln -s /usr/lib/chinilla-blockchain/resources/app.asar.unpacked/daemon/chinilla /usr/bin/chinilla || true +ln -s /usr/lib/chinilla-blockchain/resources/app.asar.unpacked/daemon /opt/chinilla || true diff --git a/build_scripts/deb-options.json b/build_scripts/deb-options.json index da5ef86f06e6..102a2237bcb2 100644 --- a/build_scripts/deb-options.json +++ b/build_scripts/deb-options.json @@ 
-1,7 +1,7 @@ { "dest": "final_installer/", - "bin": "chia-blockchain", - "name": "chia-blockchain", + "bin": "chinilla-blockchain", + "name": "chinilla-blockchain", "scripts": { "postinst": "assets/deb/postinst.sh", "prerm": "assets/deb/prerm.sh" diff --git a/tests/block_tools.py b/tests/block_tools.py index 24520604c958..5b936da97e35 100644 --- a/tests/block_tools.py +++ b/tests/block_tools.py @@ -53,7 +53,12 @@ from chinilla.server.server import ssl_context_for_client from chinilla.types.blockchain_format.classgroup import ClassgroupElement from chinilla.types.blockchain_format.coin import Coin, hash_coin_list -from chinilla.types.blockchain_format.foliage import Foliage, FoliageBlockData, FoliageTransactionBlock, TransactionsInfo +from chinilla.types.blockchain_format.foliage import ( + Foliage, + FoliageBlockData, + FoliageTransactionBlock, + TransactionsInfo, +) from chinilla.types.blockchain_format.pool_target import PoolTarget from chinilla.types.blockchain_format.program import INFINITE_COST from chinilla.types.blockchain_format.proof_of_space import ProofOfSpace From 713f3d20eb7d73bc5c1b72835fe3c29aa2b94b31 Mon Sep 17 00:00:00 2001 From: Edward Teach Date: Wed, 11 May 2022 19:21:21 -0700 Subject: [PATCH 53/55] fixed rpm installer --- .github/workflows/build-linux-installer-rpm.yml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/build-linux-installer-rpm.yml b/.github/workflows/build-linux-installer-rpm.yml index 0692548c70a8..c56cd05ade6b 100644 --- a/.github/workflows/build-linux-installer-rpm.yml +++ b/.github/workflows/build-linux-installer-rpm.yml @@ -122,10 +122,10 @@ jobs: . 
./activate cd ./build_scripts bash build_linux_rpm.sh amd64 - cp final_installer/chinilla-blockchain-${{ CHINILLA_INSTALLER_VERSION }}-1.x86_64.rpm final_installer/chinilla-blockchain-${{ CHINILLA_INSTALLER_VERSION }}-x86-64.rpm - cp final_installer/chinilla-blockchain-${{ CHINILLA_INSTALLER_VERSION }}-x86-64.rpm final_installer/chinilla-blockchain-beta-latest-x86-64.rpm - cp final_installer/chia-blockchain-cli-${CHINILLA_INSTALLER_VERSION}-1.x86_64.rpm final_installer/chinilla-blockchain-cli-${{ CHINILLA_INSTALLER_VERSION }}-x86-64.rpm - cp final_installer/chia-blockchain-cli-${CHINILLA_INSTALLER_VERSION}-1.x86_64.rpm final_installer/chinilla-blockchain-cli-beta-latest-x86-64.rpm + cp final_installer/chinilla-blockchain-${CHINILLA_INSTALLER_VERSION}-1.x86_64.rpm final_installer/chinilla-blockchain-${CHINILLA_INSTALLER_VERSION}-x86-64.rpm + cp final_installer/chinilla-blockchain-${CHINILLA_INSTALLER_VERSION}-x86-64.rpm final_installer/chinilla-blockchain-beta-latest-x86-64.rpm + cp final_installer/chinilla-blockchain-cli-${CHINILLA_INSTALLER_VERSION}-1.x86_64.rpm final_installer/chinilla-blockchain-cli-${CHINILLA_INSTALLER_VERSION}-x86-64.rpm + cp final_installer/chinilla-blockchain-cli-${CHINILLA_INSTALLER_VERSION}-1.x86_64.rpm final_installer/chinilla-blockchain-cli-beta-latest-x86-64.rpm - name: Upload Linux artifacts uses: actions/upload-artifact@v3 From 27e2207491061d505cc7bb5875e70182e9aa49d8 Mon Sep 17 00:00:00 2001 From: Edward Teach Date: Wed, 11 May 2022 19:33:23 -0700 Subject: [PATCH 54/55] lint fixes --- .../workflows/build-linux-arm64-installer.yml | 64 +++++++++++++++++++ .../workflows/build-linux-installer-deb.yml | 64 +++++++++++++++++++ chinilla/util/misc.py | 2 +- tests/util/test_paginator.py | 2 +- 4 files changed, 130 insertions(+), 2 deletions(-) diff --git a/.github/workflows/build-linux-arm64-installer.yml b/.github/workflows/build-linux-arm64-installer.yml index 61b79cd3f286..8ca40bc7da9d 100644 --- 
a/.github/workflows/build-linux-arm64-installer.yml +++ b/.github/workflows/build-linux-arm64-installer.yml @@ -128,6 +128,8 @@ jobs: sh build_linux_deb.sh arm64 mv final_installer/chinilla-blockchain_${{ steps.version_number.outputs.CHINILLA_INSTALLER_VERSION }}_arm64.deb final_installer/chinilla-blockchain-${{ steps.version_number.outputs.CHINILLA_INSTALLER_VERSION }}-arm64.deb cp final_installer/chinilla-blockchain-${{ steps.version_number.outputs.CHINILLA_INSTALLER_VERSION }}-arm64.deb final_installer/chinilla-blockchain-beta-latest-arm64.deb + mv final_installer/chinilla-blockchain-cli_${{ steps.version_number.outputs.CHINILLA_INSTALLER_VERSION }}-1_arm64.deb final_installer/chinilla-blockchain-cli-${{ steps.version_number.outputs.CHINILLA_INSTALLER_VERSION }}-arm64.deb + cp final_installer/chinilla-blockchain-cli-${{ steps.version_number.outputs.CHINILLA_INSTALLER_VERSION }}-arm64.deb final_installer/chinilla-blockchain-cli-beta-latest-arm64.deb - name: Upload Linux artifacts uses: actions/upload-artifact@v3 @@ -147,6 +149,18 @@ jobs: out_dir: dev cdn_domain: download.chinilla.com + - name: Upload CLI to DO Spaces + if: steps.check_secrets.outputs.HAS_SECRET + uses: BetaHuhn/do-spaces-action@v2 + with: + access_key: ${{ secrets.INSTALLER_UPLOAD_KEY}} + secret_key: ${{ secrets.INSTALLER_UPLOAD_SECRET }} + space_name: ${{ secrets.INSTALLER_UPLOAD_SPACE_NAME }} + space_region: ${{ secrets.INSTALLER_UPLOAD_SPACE }} + source: /build_scripts/final_installer/chinilla-blockchain-cli-${{ steps.version_number.outputs.CHINILLA_INSTALLER_VERSION }}-arm64.deb + out_dir: dev + cdn_domain: download.chinilla.com + - name: Create Beta Checksum if: github.ref == 'refs/heads/main' env: @@ -154,6 +168,7 @@ jobs: run: | ls $GITHUB_WORKSPACE/build_scripts/final_installer/ sha256sum $GITHUB_WORKSPACE/build_scripts/final_installer/chinilla-blockchain-beta-latest-arm64.deb > $GITHUB_WORKSPACE/build_scripts/final_installer/chinilla-blockchain-beta-latest-arm64.deb.sha256 + sha256sum 
$GITHUB_WORKSPACE/build_scripts/final_installer/chinilla-blockchain-cli-beta-latest-arm64.deb > $GITHUB_WORKSPACE/build_scripts/final_installer/chinilla-blockchain-cli-beta-latest-arm64.deb.sha256 ls $GITHUB_WORKSPACE/build_scripts/final_installer/ - name: Create Release Checksum @@ -163,6 +178,7 @@ jobs: run: | ls $GITHUB_WORKSPACE/build_scripts/final_installer/ sha256sum $GITHUB_WORKSPACE/build_scripts/final_installer/chinilla-blockchain-${CHINILLA_INSTALLER_VERSION}-arm64.deb > $GITHUB_WORKSPACE/build_scripts/final_installer/chinilla-blockchain-${CHINILLA_INSTALLER_VERSION}-arm64.deb.sha256 + sha256sum $GITHUB_WORKSPACE/build_scripts/final_installer/chinilla-blockchain-cli-${CHINILLA_INSTALLER_VERSION}-arm64.deb > $GITHUB_WORKSPACE/build_scripts/final_installer/chinilla-blockchain-cli-${CHINILLA_INSTALLER_VERSION}-arm64.deb.sha256 ls $GITHUB_WORKSPACE/build_scripts/final_installer/ - name: Upload Beta Installer @@ -189,6 +205,30 @@ jobs: out_dir: beta cdn_domain: download.chinilla.com + - name: Upload Beta CLI + if: steps.check_secrets.outputs.HAS_SECRET && github.ref == 'refs/heads/main' + uses: BetaHuhn/do-spaces-action@v2 + with: + access_key: ${{ secrets.INSTALLER_UPLOAD_KEY}} + secret_key: ${{ secrets.INSTALLER_UPLOAD_SECRET }} + space_name: ${{ secrets.INSTALLER_UPLOAD_SPACE_NAME }} + space_region: ${{ secrets.INSTALLER_UPLOAD_SPACE }} + source: /build_scripts/final_installer/chinilla-blockchain-cli-beta-latest-arm64.deb + out_dir: beta + cdn_domain: download.chinilla.com + + - name: Upload Beta CLI Sha256 + if: steps.check_secrets.outputs.HAS_SECRET && github.ref == 'refs/heads/main' + uses: BetaHuhn/do-spaces-action@v2 + with: + access_key: ${{ secrets.INSTALLER_UPLOAD_KEY}} + secret_key: ${{ secrets.INSTALLER_UPLOAD_SECRET }} + space_name: ${{ secrets.INSTALLER_UPLOAD_SPACE_NAME }} + space_region: ${{ secrets.INSTALLER_UPLOAD_SPACE }} + source: /build_scripts/final_installer/chinilla-blockchain-cli-beta-latest-arm64.deb.sha256 + out_dir: beta + 
cdn_domain: download.chinilla.com + - name: Upload Release Installer if: steps.check_secrets.outputs.HAS_SECRET && startsWith(github.ref, 'refs/tags/') uses: BetaHuhn/do-spaces-action@v2 @@ -213,6 +253,30 @@ jobs: out_dir: install cdn_domain: download.chinilla.com + - name: Upload Release CLI + if: steps.check_secrets.outputs.HAS_SECRET && startsWith(github.ref, 'refs/tags/') + uses: BetaHuhn/do-spaces-action@v2 + with: + access_key: ${{ secrets.INSTALLER_UPLOAD_KEY}} + secret_key: ${{ secrets.INSTALLER_UPLOAD_SECRET }} + space_name: ${{ secrets.INSTALLER_UPLOAD_SPACE_NAME }} + space_region: ${{ secrets.INSTALLER_UPLOAD_SPACE }} + source: /build_scripts/final_installer/chinilla-blockchain-cli-${{ steps.version_number.outputs.CHINILLA_INSTALLER_VERSION }}-arm64.deb + out_dir: install + cdn_domain: download.chinilla.com + + - name: Upload Release CLI SHA256 + if: steps.check_secrets.outputs.HAS_SECRET && startsWith(github.ref, 'refs/tags/') + uses: BetaHuhn/do-spaces-action@v2 + with: + access_key: ${{ secrets.INSTALLER_UPLOAD_KEY}} + secret_key: ${{ secrets.INSTALLER_UPLOAD_SECRET }} + space_name: ${{ secrets.INSTALLER_UPLOAD_SPACE_NAME }} + space_region: ${{ secrets.INSTALLER_UPLOAD_SPACE }} + source: /build_scripts/final_installer/chinilla-blockchain-cli-${{ steps.version_number.outputs.CHINILLA_INSTALLER_VERSION }}-arm64.deb.sha256 + out_dir: install + cdn_domain: download.chinilla.com + - name: Get tag name if: startsWith(github.ref, 'refs/tags/') id: tag-name diff --git a/.github/workflows/build-linux-installer-deb.yml b/.github/workflows/build-linux-installer-deb.yml index a93c3087e9f5..66c85a253c0e 100644 --- a/.github/workflows/build-linux-installer-deb.yml +++ b/.github/workflows/build-linux-installer-deb.yml @@ -163,7 +163,9 @@ jobs: cd ./build_scripts sh build_linux_deb.sh amd64 mv final_installer/chinilla-blockchain_${{ steps.version_number.outputs.CHINILLA_INSTALLER_VERSION }}_amd64.deb final_installer/chinilla-blockchain-${{ 
steps.version_number.outputs.CHINILLA_INSTALLER_VERSION }}-amd64.deb + mv final_installer/chinilla-blockchain-cli_${{ steps.version_number.outputs.CHINILLA_INSTALLER_VERSION }}-1_amd64.deb final_installer/chinilla-blockchain-cli-${{ steps.version_number.outputs.CHINILLA_INSTALLER_VERSION }}-amd64.deb cp final_installer/chinilla-blockchain-${{ steps.version_number.outputs.CHINILLA_INSTALLER_VERSION }}-amd64.deb final_installer/chinilla-blockchain-beta-latest-amd64.deb + cp final_installer/chinilla-blockchain-cli-${{ steps.version_number.outputs.CHINILLA_INSTALLER_VERSION }}-amd64.deb final_installer/chinilla-blockchain-cli-beta-latest-amd64.deb - name: Upload Linux artifacts uses: actions/upload-artifact@v3 @@ -183,6 +185,18 @@ jobs: out_dir: dev cdn_domain: download.chinilla.com + - name: Upload CLI to DO Spaces + if: steps.check_secrets.outputs.HAS_SECRET + uses: BetaHuhn/do-spaces-action@v2 + with: + access_key: ${{ secrets.INSTALLER_UPLOAD_KEY}} + secret_key: ${{ secrets.INSTALLER_UPLOAD_SECRET }} + space_name: ${{ secrets.INSTALLER_UPLOAD_SPACE_NAME }} + space_region: ${{ secrets.INSTALLER_UPLOAD_SPACE }} + source: /build_scripts/final_installer/chinilla-blockchain-cli-${{ steps.version_number.outputs.CHINILLA_INSTALLER_VERSION }}-amd64.deb + out_dir: dev + cdn_domain: download.chinilla.com + - name: Create Beta Checksum if: github.ref == 'refs/heads/main' env: @@ -190,6 +204,7 @@ jobs: run: | ls ${{ github.workspace }}/build_scripts/final_installer/ sha256sum ${{ github.workspace }}/build_scripts/final_installer/chinilla-blockchain-beta-latest-amd64.deb > ${{ github.workspace }}/build_scripts/final_installer/chinilla-blockchain-beta-latest-amd64.deb.sha256 + sha256sum ${{ github.workspace }}/build_scripts/final_installer/chinilla-blockchain-cli-beta-latest-amd64.deb > ${{ github.workspace }}/build_scripts/final_installer/chinilla-blockchain-cli-beta-latest-amd64.deb.sha256 ls ${{ github.workspace }}/build_scripts/final_installer/ - name: Create Release 
Checksum @@ -199,6 +214,7 @@ jobs: run: | ls ${{ github.workspace }}/build_scripts/final_installer/ sha256sum ${{ github.workspace }}/build_scripts/final_installer/chinilla-blockchain-${{ steps.version_number.outputs.CHINILLA_INSTALLER_VERSION }}-amd64.deb > ${{ github.workspace }}/build_scripts/final_installer/chinilla-blockchain-${{ steps.version_number.outputs.CHINILLA_INSTALLER_VERSION }}-amd64.deb.sha256 + sha256sum ${{ github.workspace }}/build_scripts/final_installer/chinilla-blockchain-cli-${{ steps.version_number.outputs.CHINILLA_INSTALLER_VERSION }}-amd64.deb > ${{ github.workspace }}/build_scripts/final_installer/chinilla-blockchain-cli-${{ steps.version_number.outputs.CHINILLA_INSTALLER_VERSION }}-amd64.deb.sha256 ls ${{ github.workspace }}/build_scripts/final_installer/ - name: Upload Beta Installer @@ -225,6 +241,30 @@ jobs: out_dir: beta cdn_domain: download.chinilla.com + - name: Upload Beta CLI + if: steps.check_secrets.outputs.HAS_SECRET && github.ref == 'refs/heads/main' + uses: BetaHuhn/do-spaces-action@v2 + with: + access_key: ${{ secrets.INSTALLER_UPLOAD_KEY}} + secret_key: ${{ secrets.INSTALLER_UPLOAD_SECRET }} + space_name: ${{ secrets.INSTALLER_UPLOAD_SPACE_NAME }} + space_region: ${{ secrets.INSTALLER_UPLOAD_SPACE }} + source: /build_scripts/final_installer/chinilla-blockchain-cli-beta-latest-amd64.deb + out_dir: beta + cdn_domain: download.chinilla.com + + - name: Upload Beta CLI Sha256 + if: steps.check_secrets.outputs.HAS_SECRET && github.ref == 'refs/heads/main' + uses: BetaHuhn/do-spaces-action@v2 + with: + access_key: ${{ secrets.INSTALLER_UPLOAD_KEY}} + secret_key: ${{ secrets.INSTALLER_UPLOAD_SECRET }} + space_name: ${{ secrets.INSTALLER_UPLOAD_SPACE_NAME }} + space_region: ${{ secrets.INSTALLER_UPLOAD_SPACE }} + source: /build_scripts/final_installer/chinilla-blockchain-cli-beta-latest-amd64.deb.sha256 + out_dir: beta + cdn_domain: download.chinilla.com + - name: Upload Release Installer if: steps.check_secrets.outputs.HAS_SECRET 
&& startsWith(github.ref, 'refs/tags/') uses: BetaHuhn/do-spaces-action@v2 @@ -249,6 +289,30 @@ jobs: out_dir: install cdn_domain: download.chinilla.com + - name: Upload Release CLI + if: steps.check_secrets.outputs.HAS_SECRET && startsWith(github.ref, 'refs/tags/') + uses: BetaHuhn/do-spaces-action@v2 + with: + access_key: ${{ secrets.INSTALLER_UPLOAD_KEY}} + secret_key: ${{ secrets.INSTALLER_UPLOAD_SECRET }} + space_name: ${{ secrets.INSTALLER_UPLOAD_SPACE_NAME }} + space_region: ${{ secrets.INSTALLER_UPLOAD_SPACE }} + source: /build_scripts/final_installer/chinilla-blockchain-cli-${{ steps.version_number.outputs.CHINILLA_INSTALLER_VERSION }}-amd64.deb + out_dir: install + cdn_domain: download.chinilla.com + + - name: Upload Release CLI SHA256 + if: steps.check_secrets.outputs.HAS_SECRET && startsWith(github.ref, 'refs/tags/') + uses: BetaHuhn/do-spaces-action@v2 + with: + access_key: ${{ secrets.INSTALLER_UPLOAD_KEY}} + secret_key: ${{ secrets.INSTALLER_UPLOAD_SECRET }} + space_name: ${{ secrets.INSTALLER_UPLOAD_SPACE_NAME }} + space_region: ${{ secrets.INSTALLER_UPLOAD_SPACE }} + source: /build_scripts/final_installer/chinilla-blockchain-cli-${{ steps.version_number.outputs.CHINILLA_INSTALLER_VERSION }}-amd64.deb.sha256 + out_dir: install + cdn_domain: download.chinilla.com + - name: Get tag name if: startsWith(github.ref, 'refs/tags/') id: tag-name diff --git a/chinilla/util/misc.py b/chinilla/util/misc.py index a075c09eaa42..5760d3be061f 100644 --- a/chinilla/util/misc.py +++ b/chinilla/util/misc.py @@ -1,7 +1,7 @@ import dataclasses from typing import Any, Dict, Sequence, Union -from chia.util.streamable import recurse_jsonify +from chinilla.util.streamable import recurse_jsonify def format_bytes(bytes: int) -> str: diff --git a/tests/util/test_paginator.py b/tests/util/test_paginator.py index 9ffb4814e6bf..69085181cee1 100644 --- a/tests/util/test_paginator.py +++ b/tests/util/test_paginator.py @@ -3,7 +3,7 @@ import pytest -from chia.util.paginator import 
InvalidPageSizeError, InvalidPageSizeLimit, PageOutOfBoundsError, Paginator +from chinilla.util.paginator import InvalidPageSizeError, InvalidPageSizeLimit, PageOutOfBoundsError, Paginator @pytest.mark.parametrize( From 3ba61a99c53fdca6b5fc27116aaf3853f2837faa Mon Sep 17 00:00:00 2001 From: Edward Teach Date: Wed, 11 May 2022 20:32:48 -0700 Subject: [PATCH 55/55] fix uploads --- .github/workflows/build-linux-installer-rpm.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/build-linux-installer-rpm.yml b/.github/workflows/build-linux-installer-rpm.yml index c56cd05ade6b..b11baccf59a3 100644 --- a/.github/workflows/build-linux-installer-rpm.yml +++ b/.github/workflows/build-linux-installer-rpm.yml @@ -122,9 +122,9 @@ jobs: . ./activate cd ./build_scripts bash build_linux_rpm.sh amd64 - cp final_installer/chinilla-blockchain-${CHINILLA_INSTALLER_VERSION }-1.x86_64.rpm final_installer/chinilla-blockchain-${CHINILLA_INSTALLER_VERSION }-x86-64.rpm - cp final_installer/chinilla-blockchain-${CHINILLA_INSTALLER_VERSION }-x86-64.rpm final_installer/chinilla-blockchain-beta-latest-x86-64.rpm - cp final_installer/chinilla-blockchain-cli-${CHINILLA_INSTALLER_VERSION}-1.x86_64.rpm final_installer/chinilla-blockchain-cli-${CHINILLA_INSTALLER_VERSION }-x86-64.rpm + cp final_installer/chinilla-blockchain-${CHINILLA_INSTALLER_VERSION}-1.x86_64.rpm final_installer/chinilla-blockchain-${CHINILLA_INSTALLER_VERSION}-x86-64.rpm + cp final_installer/chinilla-blockchain-${CHINILLA_INSTALLER_VERSION}-x86-64.rpm final_installer/chinilla-blockchain-beta-latest-x86-64.rpm + cp final_installer/chinilla-blockchain-cli-${CHINILLA_INSTALLER_VERSION}-1.x86_64.rpm final_installer/chinilla-blockchain-cli-${CHINILLA_INSTALLER_VERSION}-x86-64.rpm cp final_installer/chinilla-blockchain-cli-${CHINILLA_INSTALLER_VERSION}-1.x86_64.rpm final_installer/chinilla-blockchain-cli-beta-latest-x86-64.rpm - name: Upload Linux artifacts